X-Git-Url: https://git.libre-soc.org/?a=blobdiff_plain;f=gcc%2Fpostreload.c;h=88d5234eef367da8eeb79050b1484965baa83fcd;hb=1e44e857e05c165f6f01aeb56a7a43ee765bfc99;hp=478a552608f73db6f1cc3e07dda2aa527fb521fd;hpb=e15eb172b0dd7451f121c908a97ab86a45759993;p=gcc.git

diff --git a/gcc/postreload.c b/gcc/postreload.c
index 478a552608f..88d5234eef3 100644
--- a/gcc/postreload.c
+++ b/gcc/postreload.c
@@ -1,5 +1,5 @@
 /* Perform simple optimizations to clean up the result of reload.
-   Copyright (C) 1987-2013 Free Software Foundation, Inc.
+   Copyright (C) 1987-2015 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -20,49 +20,62 @@ along with GCC; see the file COPYING3.  If not see
 #include "config.h"
 #include "system.h"
 #include "coretypes.h"
-#include "tm.h"
-
-#include "machmode.h"
-#include "hard-reg-set.h"
+#include "backend.h"
+#include "predict.h"
+#include "tree.h"
 #include "rtl.h"
+#include "df.h"
+
 #include "tm_p.h"
-#include "obstack.h"
 #include "insn-config.h"
 #include "flags.h"
-#include "function.h"
+#include "alias.h"
+#include "expmed.h"
+#include "dojump.h"
+#include "explow.h"
+#include "calls.h"
+#include "emit-rtl.h"
+#include "varasm.h"
+#include "stmt.h"
 #include "expr.h"
+#include "insn-codes.h"
 #include "optabs.h"
 #include "regs.h"
-#include "basic-block.h"
+#include "cfgrtl.h"
+#include "cfgbuild.h"
+#include "cfgcleanup.h"
 #include "reload.h"
 #include "recog.h"
+#include "alloc-pool.h"
 #include "cselib.h"
 #include "diagnostic-core.h"
 #include "except.h"
-#include "tree.h"
 #include "target.h"
 #include "tree-pass.h"
-#include "df.h"
 #include "dbgcnt.h"
 
+#ifndef LOAD_EXTEND_OP
+#define LOAD_EXTEND_OP(M) UNKNOWN
+#endif
+
 static int reload_cse_noop_set_p (rtx);
-static bool reload_cse_simplify (rtx, rtx);
+static bool reload_cse_simplify (rtx_insn *, rtx);
 static void reload_cse_regs_1 (void);
-static int reload_cse_simplify_set (rtx, rtx);
-static int reload_cse_simplify_operands (rtx, rtx);
+static int reload_cse_simplify_set (rtx, rtx_insn *);
+static int reload_cse_simplify_operands (rtx_insn *, rtx);
 
 static void reload_combine (void);
-static void reload_combine_note_use (rtx *, rtx, int, rtx);
+static void reload_combine_note_use (rtx *, rtx_insn *, int, rtx);
 static void reload_combine_note_store (rtx, const_rtx, void *);
 
-static bool reload_cse_move2add (rtx);
+static bool reload_cse_move2add (rtx_insn *);
 static void move2add_note_store (rtx, const_rtx, void *);
 
 /* Call cse / combine like post-reload optimization phases.
    FIRST is the first instruction.  */
 
 static void
-reload_cse_regs (rtx first ATTRIBUTE_UNUSED)
+reload_cse_regs (rtx_insn *first ATTRIBUTE_UNUSED)
 {
   bool moves_converted;
   reload_cse_regs_1 ();
@@ -88,7 +101,7 @@ reload_cse_noop_set_p (rtx set)
 
 /* Try to simplify INSN.  Return true if the CFG may have changed.  */
 static bool
-reload_cse_simplify (rtx insn, rtx testreg)
+reload_cse_simplify (rtx_insn *insn, rtx testreg)
 {
   rtx body = PATTERN (insn);
   basic_block insn_bb = BLOCK_FOR_INSN (insn);
@@ -207,8 +220,8 @@ reload_cse_regs_1 (void)
 {
   bool cfg_changed = false;
   basic_block bb;
-  rtx insn;
-  rtx testreg = gen_rtx_REG (VOIDmode, -1);
+  rtx_insn *insn;
+  rtx testreg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
 
   cselib_init (CSELIB_RECORD_MEMORY);
   init_alias_analysis ();
@@ -236,7 +249,7 @@ reload_cse_regs_1 (void)
    and change the set into a register copy.  */
 
 static int
-reload_cse_simplify_set (rtx set, rtx insn)
+reload_cse_simplify_set (rtx set, rtx_insn *insn)
 {
   int did_change = 0;
   int dreg;
@@ -245,9 +258,7 @@ reload_cse_simplify_set (rtx set, rtx insn)
   int old_cost;
   cselib_val *val;
   struct elt_loc_list *l;
-#ifdef LOAD_EXTEND_OP
   enum rtx_code extend_op = UNKNOWN;
-#endif
   bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
 
   dreg = true_regnum (SET_DEST (set));
@@ -260,7 +271,6 @@ reload_cse_simplify_set (rtx set, rtx insn)
 
   dclass = REGNO_REG_CLASS (dreg);
 
-#ifdef LOAD_EXTEND_OP
   /* When replacing a memory with a register, we need to honor assumptions
      that combine made wrt the contents of sign bits.  We'll do this by
      generating an extend instruction instead of a reg->reg copy.  Thus
@@ -270,7 +280,6 @@ reload_cse_simplify_set (rtx set, rtx insn)
       && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != UNKNOWN
       && !REG_P (SET_DEST (set)))
     return 0;
-#endif
 
   val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
   if (! val)
@@ -283,7 +292,7 @@ reload_cse_simplify_set (rtx set, rtx insn)
     old_cost = register_move_cost (GET_MODE (src),
				   REGNO_REG_CLASS (REGNO (src)), dclass);
   else
-    old_cost = set_src_cost (src, speed);
+    old_cost = set_src_cost (src, GET_MODE (SET_DEST (set)), speed);
 
   for (l = val->locs; l; l = l->next)
     {
@@ -292,44 +301,41 @@ reload_cse_simplify_set (rtx set, rtx insn)
       if (CONSTANT_P (this_rtx)
	  && ! references_value_p (this_rtx, 0))
	{
-#ifdef LOAD_EXTEND_OP
	  if (extend_op != UNKNOWN)
	    {
-	      HOST_WIDE_INT this_val;
+	      wide_int result;
 
-	      /* ??? I'm lazy and don't wish to handle CONST_DOUBLE.  Other
-		 constants, such as SYMBOL_REF, cannot be extended.  */
-	      if (!CONST_INT_P (this_rtx))
+	      if (!CONST_SCALAR_INT_P (this_rtx))
		continue;
 
-	      this_val = INTVAL (this_rtx);
	      switch (extend_op)
		{
		case ZERO_EXTEND:
-		  this_val &= GET_MODE_MASK (GET_MODE (src));
+		  result = wide_int::from (std::make_pair (this_rtx,
+							   GET_MODE (src)),
+					   BITS_PER_WORD, UNSIGNED);
		  break;
		case SIGN_EXTEND:
-		  /* ??? In theory we're already extended.  */
-		  if (this_val == trunc_int_for_mode (this_val, GET_MODE (src)))
-		    break;
+		  result = wide_int::from (std::make_pair (this_rtx,
+							   GET_MODE (src)),
+					   BITS_PER_WORD, SIGNED);
+		  break;
		default:
		  gcc_unreachable ();
		}
-	      this_rtx = GEN_INT (this_val);
+	      this_rtx = immed_wide_int_const (result, word_mode);
	    }
-#endif
-	  this_cost = set_src_cost (this_rtx, speed);
+
+	  this_cost = set_src_cost (this_rtx, GET_MODE (SET_DEST (set)), speed);
	}
       else if (REG_P (this_rtx))
	{
-#ifdef LOAD_EXTEND_OP
	  if (extend_op != UNKNOWN)
	    {
	      this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
-	      this_cost = set_src_cost (this_rtx, speed);
+	      this_cost = set_src_cost (this_rtx, word_mode, speed);
	    }
	  else
-#endif
	    this_cost = register_move_cost (GET_MODE (this_rtx),
					    REGNO_REG_CLASS (REGNO (this_rtx)),
					    dclass);
@@ -344,7 +350,6 @@ reload_cse_simplify_set (rtx set, rtx insn)
	      && REG_P (this_rtx)
	      && !REG_P (SET_SRC (set))))
	{
-#ifdef LOAD_EXTEND_OP
	  if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
	      && extend_op != UNKNOWN
#ifdef CANNOT_CHANGE_MODE_CLASS
@@ -358,7 +363,6 @@ reload_cse_simplify_set (rtx set, rtx insn)
	      ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
	      validate_change (insn, &SET_DEST (set), wide_dest, 1);
	    }
-#endif
 
	  validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
	  old_cost = this_cost, did_change = 1;
@@ -380,7 +384,7 @@ reload_cse_simplify_set (rtx set, rtx insn)
    hard registers.  */
 
 static int
-reload_cse_simplify_operands (rtx insn, rtx testreg)
+reload_cse_simplify_operands (rtx_insn *insn, rtx testreg)
 {
   int i, j;
 
@@ -401,15 +405,11 @@ reload_cse_simplify_operands (rtx insn, rtx testreg)
   /* Array of alternatives, sorted in order of decreasing desirability.  */
   int *alternative_order;
 
-  extract_insn (insn);
+  extract_constrain_insn (insn);
   if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
     return 0;
 
-  /* Figure out which alternative currently matches.  */
-  if (! constrain_operands (1))
-    fatal_insn_not_found (insn);
-
   alternative_reject = XALLOCAVEC (int, recog_data.n_alternatives);
   alternative_nregs = XALLOCAVEC (int, recog_data.n_alternatives);
   alternative_order = XALLOCAVEC (int, recog_data.n_alternatives);
@@ -434,7 +434,6 @@ reload_cse_simplify_operands (rtx insn, rtx testreg)
	continue;
 
       op = recog_data.operand[i];
-#ifdef LOAD_EXTEND_OP
       if (MEM_P (op)
	  && GET_MODE_BITSIZE (GET_MODE (op)) < BITS_PER_WORD
	  && LOAD_EXTEND_OP (GET_MODE (op)) != UNKNOWN)
@@ -485,7 +484,7 @@ reload_cse_simplify_operands (rtx insn, rtx testreg)
	     safe to optimize, but is it worth the trouble?  */
	  continue;
	}
-#endif /* LOAD_EXTEND_OP */
+
       if (side_effects_p (op))
	continue;
       v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);
@@ -497,9 +496,10 @@ reload_cse_simplify_operands (rtx insn, rtx testreg)
	  SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
     }
 
+  alternative_mask preferred = get_preferred_alternatives (insn);
   for (i = 0; i < recog_data.n_operands; i++)
     {
-      enum machine_mode mode;
+      machine_mode mode;
       int regno;
       const char *p;
 
@@ -539,8 +539,7 @@ reload_cse_simplify_operands (rtx insn, rtx testreg)
       if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
	continue;
 
-      SET_REGNO_RAW (testreg, regno);
-      PUT_MODE (testreg, mode);
+      set_mode_and_regno (testreg, mode, regno);
 
       /* We found a register equal to this operand.  Now look for all
	 alternatives that can accept this register and have not been
@@ -553,29 +552,15 @@ reload_cse_simplify_operands (rtx insn, rtx testreg)
	    switch (c)
	      {
-	      case '=':  case '+':  case '?':
-	      case '#':  case '&':  case '!':
-	      case '*':  case '%':
-	      case '0':  case '1':  case '2':  case '3':  case '4':
-	      case '5':  case '6':  case '7':  case '8':  case '9':
-	      case '<':  case '>':  case 'V':  case 'o':
-	      case 'E':  case 'F':  case 'G':  case 'H':
-	      case 's':  case 'i':  case 'n':
-	      case 'I':  case 'J':  case 'K':  case 'L':
-	      case 'M':  case 'N':  case 'O':  case 'P':
-	      case 'p':  case 'X':  case TARGET_MEM_CONSTRAINT:
-		/* These don't say anything we care about.  */
-		break;
-
-	      case 'g': case 'r':
-		rclass = reg_class_subunion[(int) rclass][(int) GENERAL_REGS];
+	      case 'g':
+		rclass = reg_class_subunion[rclass][GENERAL_REGS];
		break;
 
	      default:
		rclass
		  = (reg_class_subunion
-		     [(int) rclass]
-		     [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
+		     [rclass]
+		     [reg_class_for_constraint (lookup_constraint (p))]);
		break;
 
	      case ',': case '\0':
@@ -584,13 +569,13 @@ reload_cse_simplify_operands (rtx insn, rtx testreg)
		   alternative yet and the operand being replaced is not
		   a cheap CONST_INT.  */
		if (op_alt_regno[i][j] == -1
-		    && recog_data.alternative_enabled_p[j]
+		    && TEST_BIT (preferred, j)
		    && reg_fits_class_p (testreg, rclass, 0, mode)
		    && (!CONST_INT_P (recog_data.operand[i])
-			|| (set_src_cost (recog_data.operand[i],
+			|| (set_src_cost (recog_data.operand[i], mode,
					  optimize_bb_for_speed_p
					   (BLOCK_FOR_INSN (insn)))
-			    > set_src_cost (testreg,
+			    > set_src_cost (testreg, mode,
					    optimize_bb_for_speed_p
					     (BLOCK_FOR_INSN (insn))))))
		  {
@@ -623,7 +608,6 @@ reload_cse_simplify_operands (rtx insn, rtx testreg)
       int best = i;
       int best_reject = alternative_reject[alternative_order[i]];
       int best_nregs = alternative_nregs[alternative_order[i]];
-      int tmp;
 
       for (j = i + 1; j < recog_data.n_alternatives; j++)
	{
@@ -639,9 +623,7 @@ reload_cse_simplify_operands (rtx insn, rtx testreg)
	    }
	}
 
-      tmp = alternative_order[best];
-      alternative_order[best] = alternative_order[i];
-      alternative_order[i] = tmp;
+      std::swap (alternative_order[best], alternative_order[i]);
     }
 
   /* Substitute the operands as determined by op_alt_regno for the best
@@ -650,7 +632,7 @@ reload_cse_simplify_operands (rtx insn, rtx testreg)
 
   for (i = 0; i < recog_data.n_operands; i++)
     {
-      enum machine_mode mode = recog_data.operand_mode[i];
+      machine_mode mode = recog_data.operand_mode[i];
       if (op_alt_regno[i][j] == -1)
	continue;
 
@@ -661,7 +643,7 @@ reload_cse_simplify_operands (rtx insn, rtx testreg)
   for (i = recog_data.n_dups - 1; i >= 0; i--)
     {
       int op = recog_data.dup_num[i];
-      enum machine_mode mode = recog_data.operand_mode[op];
+      machine_mode mode = recog_data.operand_mode[op];
       if (op_alt_regno[op][j] == -1)
	continue;
 
@@ -686,7 +668,7 @@ reload_cse_simplify_operands (rtx insn, rtx testreg)
 struct reg_use
 {
   /* The insn where a register has been used.  */
-  rtx insn;
+  rtx_insn *insn;
   /* Points to the memory reference enclosing the use, if any, NULL_RTX
      otherwise.  */
   rtx containing_mem;
@@ -784,7 +766,7 @@ reload_combine_split_ruids (int split_ruid)
    information about uses in that particular insn.  */
 
 static void
-reload_combine_purge_insn_uses (rtx insn)
+reload_combine_purge_insn_uses (rtx_insn *insn)
 {
   unsigned i;
 
@@ -869,9 +851,9 @@ reload_combine_closest_single_use (unsigned regno, int ruid_limit)
    should make this change on debug insns.  */
 
 static void
-fixup_debug_insns (rtx reg, rtx replacement, rtx from, rtx to)
+fixup_debug_insns (rtx reg, rtx replacement, rtx_insn *from, rtx_insn *to)
 {
-  rtx insn;
+  rtx_insn *insn;
   for (insn = from; insn != to; insn = NEXT_INSN (insn))
     {
       rtx t;
@@ -892,7 +874,7 @@ fixup_debug_insns (rtx reg, rtx replacement, rtx from, rtx to)
 static bool
 try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
 {
-  rtx use_insn = use->insn;
+  rtx_insn *use_insn = use->insn;
   rtx mem = use->containing_mem;
   bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn));
 
@@ -926,12 +908,13 @@ try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
	  && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
	{
	  rtx new_src;
-	  int old_cost = set_src_cost (SET_SRC (new_set), speed);
+	  machine_mode mode = GET_MODE (SET_DEST (new_set));
+	  int old_cost = set_src_cost (SET_SRC (new_set), mode, speed);
 
	  gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
	  new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src);
 
-	  if (set_src_cost (new_src, speed) <= old_cost
+	  if (set_src_cost (new_src, mode, speed) <= old_cost
	      && validate_change (use_insn, &SET_SRC (new_set),
				  new_src, 0))
	    return true;
@@ -947,14 +930,14 @@ try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
    recognized and should be handled normally.  */
 
 static bool
-reload_combine_recognize_const_pattern (rtx insn)
+reload_combine_recognize_const_pattern (rtx_insn *insn)
 {
   int from_ruid = reload_combine_ruid;
   rtx set, pat, reg, src, addreg;
   unsigned int regno;
   struct reg_use *use;
   bool must_move_add;
-  rtx add_moved_after_insn = NULL_RTX;
+  rtx_insn *add_moved_after_insn = NULL;
   int add_moved_after_ruid = 0;
   int clobbered_regno = -1;
 
@@ -965,7 +948,7 @@ reload_combine_recognize_const_pattern (rtx insn)
   reg = SET_DEST (set);
   src = SET_SRC (set);
   if (!REG_P (reg)
-      || hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] != 1
+      || REG_NREGS (reg) != 1
      || GET_MODE (reg) != Pmode
      || reg == stack_pointer_rtx)
    return false;
@@ -1010,7 +993,7 @@ reload_combine_recognize_const_pattern (rtx insn)
      if (use && GET_MODE (*use->usep) == Pmode)
	{
	  bool delete_add = false;
-	  rtx use_insn = use->insn;
+	  rtx_insn *use_insn = use->insn;
	  int use_ruid = use->ruid;
 
	  /* Avoid moving the add insn past a jump.  */
@@ -1023,11 +1006,9 @@ reload_combine_recognize_const_pattern (rtx insn)
	      && reg_state[clobbered_regno].real_store_ruid >= use_ruid)
	    break;
 
-#ifdef HAVE_cc0
	  /* Do not separate cc0 setter and cc0 user on HAVE_cc0 targets.  */
-	  if (must_move_add && sets_cc0_p (PATTERN (use_insn)))
+	  if (HAVE_cc0 && must_move_add && sets_cc0_p (PATTERN (use_insn)))
	    break;
-#endif
 
	  gcc_assert (reg_state[regno].store_ruid <= use_ruid);
	  /* Avoid moving a use of ADDREG past a point where it is stored.  */
@@ -1091,7 +1072,7 @@ reload_combine_recognize_const_pattern (rtx insn)
    INSN; false if it wasn't recognized and should be handled normally.  */
 
 static bool
-reload_combine_recognize_pattern (rtx insn)
+reload_combine_recognize_pattern (rtx_insn *insn)
 {
   rtx set, reg, src;
   unsigned int regno;
@@ -1102,8 +1083,7 @@ reload_combine_recognize_pattern (rtx insn)
   reg = SET_DEST (set);
   src = SET_SRC (set);
 
-  if (!REG_P (reg)
-      || hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] != 1)
+  if (!REG_P (reg) || REG_NREGS (reg) != 1)
     return false;
 
   regno = REGNO (reg);
@@ -1133,7 +1113,7 @@ reload_combine_recognize_pattern (rtx insn)
      && last_label_ruid < reg_state[regno].use_ruid)
    {
      rtx base = XEXP (src, 1);
-      rtx prev = prev_nonnote_nondebug_insn (insn);
+      rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
      rtx prev_set = prev ? single_set (prev) : NULL_RTX;
      rtx index_reg = NULL_RTX;
      rtx reg_sum = NULL_RTX;
@@ -1242,7 +1222,7 @@ reload_combine_recognize_pattern (rtx insn)
 static void
 reload_combine (void)
 {
-  rtx insn, prev;
+  rtx_insn *insn, *prev;
   basic_block bb;
   unsigned int r;
   int min_labelno, n_labels;
@@ -1354,9 +1334,12 @@ reload_combine (void)
       if (CALL_P (insn))
	{
	  rtx link;
+	  HARD_REG_SET used_regs;
+
+	  get_call_reg_set_usage (insn, &used_regs, call_used_reg_set);
 
	  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
-	    if (call_used_regs[r])
+	    if (TEST_HARD_REG_BIT (used_regs, r))
	      {
		reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
		reg_state[r].store_ruid = reload_combine_ruid;
@@ -1370,12 +1353,8 @@ reload_combine (void)
	      if ((GET_CODE (setuse) == USE || GET_CODE (setuse) == CLOBBER)
		  && REG_P (usage_rtx))
	        {
-		  unsigned int i;
-		  unsigned int start_reg = REGNO (usage_rtx);
-		  unsigned int num_regs
-		    = hard_regno_nregs[start_reg][GET_MODE (usage_rtx)];
-		  unsigned int end_reg = start_reg + num_regs - 1;
-		  for (i = start_reg; i <= end_reg; i++)
+		  unsigned int end_regno = END_REGNO (usage_rtx);
+		  for (unsigned int i = REGNO (usage_rtx); i < end_regno; ++i)
		    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
		      {
		        reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
@@ -1437,7 +1416,7 @@ reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
 {
   int regno = 0;
   int i;
-  enum machine_mode mode = GET_MODE (dst);
+  machine_mode mode = GET_MODE (dst);
 
   if (GET_CODE (dst) == SUBREG)
     {
@@ -1457,9 +1436,8 @@ reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
      || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC
      || GET_CODE (dst) == PRE_MODIFY || GET_CODE (dst) == POST_MODIFY)
    {
-      regno = REGNO (XEXP (dst, 0));
-      mode = GET_MODE (XEXP (dst, 0));
-      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
+      unsigned int end_regno = END_REGNO (XEXP (dst, 0));
+      for (unsigned int i = REGNO (XEXP (dst, 0)); i < end_regno; ++i)
	{
	  /* We could probably do better, but for now mark the register
	     as used in an unknown fashion and set/clobbered at this
@@ -1507,7 +1485,7 @@ reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
    *XP is the pattern of INSN, or a part of it.
    Called from reload_combine, and recursively by itself.  */
 static void
-reload_combine_note_use (rtx *xp, rtx insn, int ruid, rtx containing_mem)
+reload_combine_note_use (rtx *xp, rtx_insn *insn, int ruid, rtx containing_mem)
 {
   rtx x = *xp;
   enum rtx_code code = x->code;
@@ -1529,13 +1507,11 @@ reload_combine_note_use (rtx *xp, rtx insn, int ruid, rtx containing_mem)
      /* If this is the USE of a return value, we can't change it.  */
      if (REG_P (XEXP (x, 0)) && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
	{
-	  /* Mark the return register as used in an unknown fashion.  */
+	  /* Mark the return register as used in an unknown fashion.  */
	  rtx reg = XEXP (x, 0);
-	  int regno = REGNO (reg);
-	  int nregs = hard_regno_nregs[regno][GET_MODE (reg)];
-
-	  while (--nregs >= 0)
-	    reg_state[regno + nregs].use_index = -1;
+	  unsigned int end_regno = END_REGNO (reg);
+	  for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
+	    reg_state[regno].use_index = -1;
	  return;
	}
      break;
@@ -1566,7 +1542,7 @@ reload_combine_note_use (rtx *xp, rtx insn, int ruid, rtx containing_mem)
	/* No spurious USEs of pseudo registers may remain.  */
	gcc_assert (regno < FIRST_PSEUDO_REGISTER);
 
-	nregs = hard_regno_nregs[regno][GET_MODE (x)];
+	nregs = REG_NREGS (x);
 
	/* We can't substitute into multi-hard-reg uses.  */
	if (nregs > 1)
@@ -1664,7 +1640,7 @@ static int reg_set_luid[FIRST_PSEUDO_REGISTER];
 static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
 static int reg_base_reg[FIRST_PSEUDO_REGISTER];
 static rtx reg_symbol_ref[FIRST_PSEUDO_REGISTER];
-static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
+static machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
 
 /* move2add_luid is linearly increased while scanning the instructions
    from first to last.  It is used to set reg_set_luid in
@@ -1688,7 +1664,7 @@ static void
 move2add_record_mode (rtx reg)
 {
   int regno, nregs;
-  enum machine_mode mode = GET_MODE (reg);
+  machine_mode mode = GET_MODE (reg);
 
   if (GET_CODE (reg) == SUBREG)
     {
@@ -1698,7 +1674,7 @@ move2add_record_mode (rtx reg)
   else if (REG_P (reg))
     {
       regno = REGNO (reg);
-      nregs = hard_regno_nregs[regno][mode];
+      nregs = REG_NREGS (reg);
     }
   else
     gcc_unreachable ();
@@ -1724,7 +1700,7 @@ move2add_record_sym_value (rtx reg, rtx sym, rtx off)
 
 /* Check if REGNO contains a valid value in MODE.  */
 static bool
-move2add_valid_value_p (int regno, enum machine_mode mode)
+move2add_valid_value_p (int regno, machine_mode mode)
 {
   if (reg_set_luid[regno] <= move2add_last_label_luid)
     return false;
@@ -1761,7 +1737,7 @@ move2add_valid_value_p (int regno, enum machine_mode mode)
    Return true if we made a change.  */
 
 static bool
-move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx insn)
+move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx_insn *insn)
 {
   rtx pat = PATTERN (insn);
   rtx src = SET_SRC (pat);
@@ -1801,7 +1777,7 @@ move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx insn)
    changed = validate_change (insn, &SET_SRC (pat), tem, 0);
   else if (sym == NULL_RTX && GET_MODE (reg) != BImode)
     {
-      enum machine_mode narrow_mode;
+      machine_mode narrow_mode;
 
      for (narrow_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   narrow_mode != VOIDmode
	     && narrow_mode != GET_MODE (reg);
@@ -1815,14 +1791,17 @@ move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx insn)
	      rtx narrow_src = gen_int_mode (INTVAL (off),
					     narrow_mode);
	      rtx new_set
-		= gen_rtx_SET (VOIDmode,
-			       gen_rtx_STRICT_LOW_PART (VOIDmode,
+		= gen_rtx_SET (gen_rtx_STRICT_LOW_PART (VOIDmode,
							narrow_reg),
			       narrow_src);
-	      changed = validate_change (insn, &PATTERN (insn),
-					 new_set, 0);
-	      if (changed)
-		break;
+	      get_full_set_rtx_cost (new_set, &newcst);
+	      if (costs_lt_p (&newcst, &oldcst, speed))
+		{
+		  changed = validate_change (insn, &PATTERN (insn),
+					     new_set, 0);
+		  if (changed)
+		    break;
+		}
	    }
	}
    }
@@ -1842,7 +1821,7 @@ move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx insn)
    Return true iff we made a change.  */
 
 static bool
-move2add_use_add3_insn (rtx reg, rtx sym, rtx off, rtx insn)
+move2add_use_add3_insn (rtx reg, rtx sym, rtx off, rtx_insn *insn)
 {
   rtx pat = PATTERN (insn);
   rtx src = SET_SRC (pat);
@@ -1916,10 +1895,10 @@ move2add_use_add3_insn (rtx reg, rtx sym, rtx off, rtx insn)
 /* Convert move insns with constant inputs to additions if they are cheaper.
    Return true if any changes were made.  */
 static bool
-reload_cse_move2add (rtx first)
+reload_cse_move2add (rtx_insn *first)
 {
   int i;
-  rtx insn;
+  rtx_insn *insn;
   bool changed = false;
 
   for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
@@ -1999,7 +1978,7 @@ reload_cse_move2add (rtx first)
	      && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
	      && move2add_valid_value_p (REGNO (src), GET_MODE (reg)))
	    {
-	      rtx next = next_nonnote_nondebug_insn (insn);
+	      rtx_insn *next = next_nonnote_nondebug_insn (insn);
	      rtx set = NULL_RTX;
	      if (next)
		set = single_set (next);
@@ -2033,14 +2012,14 @@ reload_cse_move2add (rtx first)
		  get_full_set_rtx_cost (set, &oldcst);
		  SET_SRC (set) = tem;
-		  get_full_set_src_cost (tem, &newcst);
+		  get_full_set_src_cost (tem, GET_MODE (reg), &newcst);
		  SET_SRC (set) = old_src;
		  costs_add_n_insns (&oldcst, 1);
 
		  if (costs_lt_p (&newcst, &oldcst, speed)
		      && have_add2_insn (reg, new_src))
		    {
-		      rtx newpat = gen_rtx_SET (VOIDmode, reg, tem);
+		      rtx newpat = gen_rtx_SET (reg, tem);
		      success
			= validate_change (next, &PATTERN (next),
					   newpat, 0);
@@ -2135,11 +2114,11 @@ reload_cse_move2add (rtx first)
		 number of calls to gen_rtx_SET to avoid memory allocation
		 if possible.  */
	      && SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd, 0)))
-	      && hard_regno_nregs[REGNO (XEXP (cnd, 0))][GET_MODE (XEXP (cnd, 0))] == 1
+	      && REG_NREGS (XEXP (cnd, 0)) == 1
	      && CONST_INT_P (XEXP (cnd, 1)))
	    {
	      rtx implicit_set =
-		gen_rtx_SET (VOIDmode, XEXP (cnd, 0), XEXP (cnd, 1));
+		gen_rtx_SET (XEXP (cnd, 0), XEXP (cnd, 1));
	      move2add_note_store (SET_DEST (implicit_set), implicit_set, insn);
	    }
	}
@@ -2148,12 +2127,29 @@ reload_cse_move2add (rtx first)
	 unknown values.  */
      if (CALL_P (insn))
	{
+	  rtx link;
+
	  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
	    {
	      if (call_used_regs[i])
		/* Reset the information about this register.  */
		reg_mode[i] = VOIDmode;
	    }
+
+	  for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
+	       link = XEXP (link, 1))
+	    {
+	      rtx setuse = XEXP (link, 0);
+	      rtx usage_rtx = XEXP (setuse, 0);
+	      if (GET_CODE (setuse) == CLOBBER
+		  && REG_P (usage_rtx))
+		{
+		  unsigned int end_regno = END_REGNO (usage_rtx);
+		  for (unsigned int r = REGNO (usage_rtx); r < end_regno; ++r)
+		    /* Reset the information about this register.  */
+		    reg_mode[r] = VOIDmode;
+		}
+	    }
	}
    }
  return changed;
@@ -2167,9 +2163,9 @@ reload_cse_move2add (rtx first)
 static void
 move2add_note_store (rtx dst, const_rtx set, void *data)
 {
-  rtx insn = (rtx) data;
+  rtx_insn *insn = (rtx_insn *) data;
   unsigned int regno = 0;
-  enum machine_mode mode = GET_MODE (dst);
+  machine_mode mode = GET_MODE (dst);
 
   /* Some targets do argument pushes without adding REG_INC
     notes.  */
@@ -2315,30 +2311,6 @@ move2add_note_store (rtx dst, const_rtx set, void *data)
    }
 }
 
-static bool
-gate_handle_postreload (void)
-{
-  return (optimize > 0 && reload_completed);
-}
-
-
-static unsigned int
-rest_of_handle_postreload (void)
-{
-  if (!dbg_cnt (postreload_cse))
-    return 0;
-
-  /* Do a very simple CSE pass over just the hard registers.  */
-  reload_cse_regs (get_insns ());
-  /* Reload_cse_regs can eliminate potentially-trapping MEMs.
-     Remove any EH edges associated with them.  */
-  if (cfun->can_throw_non_call_exceptions
-      && purge_all_dead_edges ())
-    cleanup_cfg (0);
-
-  return 0;
-}
-
 namespace {
 
 const pass_data pass_data_postreload_cse =
@@ -2346,14 +2318,12 @@ const pass_data pass_data_postreload_cse =
   RTL_PASS, /* type */
   "postreload", /* name */
   OPTGROUP_NONE, /* optinfo_flags */
-  true, /* has_gate */
-  true, /* has_execute */
   TV_RELOAD_CSE_REGS, /* tv_id */
   0, /* properties_required */
   0, /* properties_provided */
   0, /* properties_destroyed */
   0, /* todo_flags_start */
-  ( TODO_df_finish | TODO_verify_rtl_sharing | 0 ), /* todo_flags_finish */
+  TODO_df_finish, /* todo_flags_finish */
 };
 
 class pass_postreload_cse : public rtl_opt_pass
@@ -2364,11 +2334,29 @@ public:
   {}
 
   /* opt_pass methods: */
-  bool gate () { return gate_handle_postreload (); }
-  unsigned int execute () { return rest_of_handle_postreload (); }
+  virtual bool gate (function *) { return (optimize > 0 && reload_completed); }
+
+  virtual unsigned int execute (function *);
 
 }; // class pass_postreload_cse
 
+unsigned int
+pass_postreload_cse::execute (function *fun)
+{
+  if (!dbg_cnt (postreload_cse))
+    return 0;
+
+  /* Do a very simple CSE pass over just the hard registers.  */
+  reload_cse_regs (get_insns ());
+  /* Reload_cse_regs can eliminate potentially-trapping MEMs.
+     Remove any EH edges associated with them.  */
+  if (fun->can_throw_non_call_exceptions
+      && purge_all_dead_edges ())
+    cleanup_cfg (0);
+
+  return 0;
+}
+
 } // anon namespace
 
 rtl_opt_pass *