/* Perform simple optimizations to clean up the result of reload.
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
- 2010 Free Software Foundation, Inc.
+ 2010, 2011 Free Software Foundation, Inc.
This file is part of GCC.
#include "output.h"
#include "cselib.h"
#include "diagnostic-core.h"
-#include "toplev.h"
#include "except.h"
#include "tree.h"
#include "target.h"
if (REG_P (value)
&& ! REG_FUNCTION_VALUE_P (value))
value = 0;
- delete_insn_and_edges (insn);
+ if (check_for_inc_dec (insn))
+ delete_insn_and_edges (insn);
return;
}
if (i < 0)
{
- delete_insn_and_edges (insn);
+ if (check_for_inc_dec (insn))
+ delete_insn_and_edges (insn);
/* We're done with this insn. */
return;
}
int did_change = 0;
int dreg;
rtx src;
- enum reg_class dclass;
+ reg_class_t dclass;
int old_cost;
cselib_val *val;
struct elt_loc_list *l;
return 0;
#endif
- val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0);
+ val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
if (! val)
return 0;
old_cost = register_move_cost (GET_MODE (src),
REGNO_REG_CLASS (REGNO (src)), dclass);
else
- old_cost = rtx_cost (src, SET, speed);
+ old_cost = set_src_cost (src, speed);
for (l = val->locs; l; l = l->next)
{
this_rtx = GEN_INT (this_val);
}
#endif
- this_cost = rtx_cost (this_rtx, SET, speed);
+ this_cost = set_src_cost (this_rtx, speed);
}
else if (REG_P (this_rtx))
{
if (extend_op != UNKNOWN)
{
this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
- this_cost = rtx_cost (this_rtx, SET, speed);
+ this_cost = set_src_cost (this_rtx, speed);
}
else
#endif
continue;
}
#endif /* LOAD_EXTEND_OP */
- v = cselib_lookup (op, recog_data.operand_mode[i], 0);
+ if (side_effects_p (op))
+ continue;
+ v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);
if (! v)
continue;
if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
continue;
- SET_REGNO (testreg, regno);
+ SET_REGNO_RAW (testreg, regno);
PUT_MODE (testreg, mode);
/* We found a register equal to this operand. Now look for all
alternative yet and the operand being replaced is not
a cheap CONST_INT. */
if (op_alt_regno[i][j] == -1
+ && recog_data.alternative_enabled_p[j]
&& reg_fits_class_p (testreg, rclass, 0, mode)
&& (!CONST_INT_P (recog_data.operand[i])
- || (rtx_cost (recog_data.operand[i], SET,
- optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
- > rtx_cost (testreg, SET,
- optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn))))))
+ || (set_src_cost (recog_data.operand[i],
+ optimize_bb_for_speed_p
+ (BLOCK_FOR_INSN (insn)))
+ > set_src_cost (testreg,
+ optimize_bb_for_speed_p
+ (BLOCK_FOR_INSN (insn))))))
{
alternative_nregs[j]++;
op_alt_regno[i][j] = regno;
&& CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
{
rtx new_src;
- int old_cost = rtx_cost (SET_SRC (new_set), SET, speed);
+ int old_cost = set_src_cost (SET_SRC (new_set), speed);
gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src);
- if (rtx_cost (new_src, SET, speed) <= old_cost
+ if (set_src_cost (new_src, speed) <= old_cost
&& validate_change (use_insn, &SET_SRC (new_set),
new_src, 0))
return true;
&& reg_state[clobbered_regno].real_store_ruid >= use_ruid)
break;
+#ifdef HAVE_cc0
+ /* Do not separate cc0 setter and cc0 user on HAVE_cc0 targets. */
+ if (must_move_add && sets_cc0_p (PATTERN (use_insn)))
+ break;
+#endif
+
gcc_assert (reg_state[regno].store_ruid <= use_ruid);
/* Avoid moving a use of ADDREG past a point where it is stored. */
if (reg_state[REGNO (addreg)].store_ruid > use_ruid)
reload_combine (void)
{
rtx insn, prev;
- int i;
basic_block bb;
unsigned int r;
int min_labelno, n_labels;
for (insn = get_last_insn (); insn; insn = prev)
{
+ bool control_flow_insn;
rtx note;
prev = PREV_INSN (insn);
if (LABEL_P (insn))
last_label_ruid = reload_combine_ruid;
else if (BARRIER_P (insn))
- for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
- if (! fixed_regs[r])
+ {
+ /* Crossing a barrier resets all the use information. */
+ for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
+ if (! fixed_regs[r])
reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
+ }
+ else if (INSN_P (insn) && volatile_insn_p (PATTERN (insn)))
+ /* Optimizations must not be carried out across an insn marked as
+ volatile; invalidate all the usage information here. */
+ for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
+ if (! fixed_regs[r]
+ && reg_state[r].use_index != RELOAD_COMBINE_MAX_USES)
+ reg_state[r].use_index = -1;
if (! NONDEBUG_INSN_P (insn))
continue;
reload_combine_ruid++;
- if (control_flow_insn_p (insn))
+ control_flow_insn = control_flow_insn_p (insn);
+ if (control_flow_insn)
last_jump_ruid = reload_combine_ruid;
if (reload_combine_recognize_const_pattern (insn)
{
unsigned int i;
unsigned int start_reg = REGNO (usage_rtx);
- unsigned int num_regs =
- hard_regno_nregs[start_reg][GET_MODE (usage_rtx)];
- unsigned int end_reg = start_reg + num_regs - 1;
+ unsigned int num_regs
+ = hard_regno_nregs[start_reg][GET_MODE (usage_rtx)];
+ unsigned int end_reg = start_reg + num_regs - 1;
for (i = start_reg; i <= end_reg; i++)
if (GET_CODE (XEXP (link, 0)) == CLOBBER)
{
reg_state[i].use_index = -1;
}
}
-
}
- else if (JUMP_P (insn)
- && GET_CODE (PATTERN (insn)) != RETURN)
+
+ if (control_flow_insn && GET_CODE (PATTERN (insn)) != RETURN)
{
/* Non-spill registers might be used at the call destination in
some unknown fashion, so we have to mark the unknown use. */
else
live = &ever_live_at_start;
- for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
- if (TEST_HARD_REG_BIT (*live, i))
- reg_state[i].use_index = -1;
+ for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
+ if (TEST_HARD_REG_BIT (*live, r))
+ reg_state[r].use_index = -1;
}
- reload_combine_note_use (&PATTERN (insn), insn,
- reload_combine_ruid, NULL_RTX);
+ reload_combine_note_use (&PATTERN (insn), insn, reload_combine_ruid,
+ NULL_RTX);
+
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
{
- if (REG_NOTE_KIND (note) == REG_INC
- && REG_P (XEXP (note, 0)))
+ if (REG_NOTE_KIND (note) == REG_INC && REG_P (XEXP (note, 0)))
{
int regno = REGNO (XEXP (note, 0));
-
reg_state[regno].store_ruid = reload_combine_ruid;
reg_state[regno].real_store_ruid = reload_combine_ruid;
reg_state[regno].use_index = -1;
GET_MODE (dst));
dst = SUBREG_REG (dst);
}
+
+ /* Some targets do argument pushes without adding REG_INC notes. */
+
+ if (MEM_P (dst))
+ {
+ dst = XEXP (dst, 0);
+ if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
+ || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC
+ || GET_CODE (dst) == PRE_MODIFY || GET_CODE (dst) == POST_MODIFY)
+ {
+ regno = REGNO (XEXP (dst, 0));
+ mode = GET_MODE (XEXP (dst, 0));
+ for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
+ {
+ /* We could probably do better, but for now mark the register
+ as used in an unknown fashion and set/clobbered at this
+ insn. */
+ reg_state[i].use_index = -1;
+ reg_state[i].store_ruid = reload_combine_ruid;
+ reg_state[i].real_store_ruid = reload_combine_ruid;
+ }
+ }
+ else
+ return;
+ }
+
if (!REG_P (dst))
return;
regno += REGNO (dst);
#define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
(GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
|| (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
- && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (OUTMODE), \
- GET_MODE_BITSIZE (INMODE))))
+ && TRULY_NOOP_TRUNCATION_MODES_P (OUTMODE, INMODE)))
/* This function is called with INSN that sets REG to (SYM + OFF),
while REG is known to already have value (SYM + offset).
if (INTVAL (off) == reg_offset [regno])
changed = validate_change (insn, &SET_SRC (pat), reg, 0);
}
- else if (rtx_cost (new_src, PLUS, speed) < rtx_cost (src, SET, speed)
- && have_add2_insn (reg, new_src))
+ else
{
+ struct full_rtx_costs oldcst, newcst;
rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);
- changed = validate_change (insn, &SET_SRC (pat), tem, 0);
- }
- else if (sym == NULL_RTX && GET_MODE (reg) != BImode)
- {
- enum machine_mode narrow_mode;
- for (narrow_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- narrow_mode != VOIDmode
- && narrow_mode != GET_MODE (reg);
- narrow_mode = GET_MODE_WIDER_MODE (narrow_mode))
+
+ get_full_set_rtx_cost (pat, &oldcst);
+ SET_SRC (pat) = tem;
+ get_full_set_rtx_cost (pat, &newcst);
+ SET_SRC (pat) = src;
+
+ if (costs_lt_p (&newcst, &oldcst, speed)
+ && have_add2_insn (reg, new_src))
+ changed = validate_change (insn, &SET_SRC (pat), tem, 0);
+ else if (sym == NULL_RTX && GET_MODE (reg) != BImode)
{
- if (have_insn_for (STRICT_LOW_PART, narrow_mode)
- && ((reg_offset[regno]
- & ~GET_MODE_MASK (narrow_mode))
- == (INTVAL (off)
- & ~GET_MODE_MASK (narrow_mode))))
+ enum machine_mode narrow_mode;
+ for (narrow_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ narrow_mode != VOIDmode
+ && narrow_mode != GET_MODE (reg);
+ narrow_mode = GET_MODE_WIDER_MODE (narrow_mode))
{
- rtx narrow_reg = gen_rtx_REG (narrow_mode,
- REGNO (reg));
- rtx narrow_src = gen_int_mode (INTVAL (off),
- narrow_mode);
- rtx new_set =
- gen_rtx_SET (VOIDmode,
- gen_rtx_STRICT_LOW_PART (VOIDmode,
- narrow_reg),
- narrow_src);
- changed = validate_change (insn, &PATTERN (insn),
- new_set, 0);
- if (changed)
- break;
+ if (have_insn_for (STRICT_LOW_PART, narrow_mode)
+ && ((reg_offset[regno] & ~GET_MODE_MASK (narrow_mode))
+ == (INTVAL (off) & ~GET_MODE_MASK (narrow_mode))))
+ {
+ rtx narrow_reg = gen_rtx_REG (narrow_mode,
+ REGNO (reg));
+ rtx narrow_src = gen_int_mode (INTVAL (off),
+ narrow_mode);
+ rtx new_set
+ = gen_rtx_SET (VOIDmode,
+ gen_rtx_STRICT_LOW_PART (VOIDmode,
+ narrow_reg),
+ narrow_src);
+ changed = validate_change (insn, &PATTERN (insn),
+ new_set, 0);
+ if (changed)
+ break;
+ }
}
}
}
rtx pat = PATTERN (insn);
rtx src = SET_SRC (pat);
int regno = REGNO (reg);
- int min_cost = INT_MAX;
int min_regno = 0;
bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
int i;
bool changed = false;
+ struct full_rtx_costs oldcst, newcst, mincst;
+ rtx plus_expr;
+
+ init_costs_to_max (&mincst);
+ get_full_set_rtx_cost (pat, &oldcst);
+
+ plus_expr = gen_rtx_PLUS (GET_MODE (reg), reg, const0_rtx);
+ SET_SRC (pat) = plus_expr;
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (reg_set_luid[i] > move2add_last_label_luid
no-op moves. */
if (new_src == const0_rtx)
{
- min_cost = 0;
+ init_costs_to_zero (&mincst);
min_regno = i;
break;
}
else
{
- int cost = rtx_cost (new_src, PLUS, speed);
- if (cost < min_cost)
+ XEXP (plus_expr, 1) = new_src;
+ get_full_set_rtx_cost (pat, &newcst);
+
+ if (costs_lt_p (&newcst, &mincst, speed))
{
- min_cost = cost;
+ mincst = newcst;
min_regno = i;
}
}
}
+ SET_SRC (pat) = src;
- if (min_cost < rtx_cost (src, SET, speed))
+ if (costs_lt_p (&mincst, &oldcst, speed))
{
rtx tem;
/* See above why we create (set (reg) (reg)) here. */
success
= validate_change (next, &SET_SRC (set), reg, 0);
- else if ((rtx_cost (new_src, PLUS, speed)
- < COSTS_N_INSNS (1) + rtx_cost (src3, SET, speed))
- && have_add2_insn (reg, new_src))
+ else
{
- rtx newpat = gen_rtx_SET (VOIDmode,
- reg,
- gen_rtx_PLUS (GET_MODE (reg),
- reg,
- new_src));
- success
- = validate_change (next, &PATTERN (next),
- newpat, 0);
+ rtx old_src = SET_SRC (set);
+ struct full_rtx_costs oldcst, newcst;
+ rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);
+
+ get_full_set_rtx_cost (set, &oldcst);
+ SET_SRC (set) = tem;
+ get_full_set_src_cost (tem, &newcst);
+ SET_SRC (set) = old_src;
+ costs_add_n_insns (&oldcst, 1);
+
+ if (costs_lt_p (&newcst, &oldcst, speed)
+ && have_add2_insn (reg, new_src))
+ {
+ rtx newpat = gen_rtx_SET (VOIDmode, reg, tem);
+ success
+ = validate_change (next, &PATTERN (next),
+ newpat, 0);
+ }
}
if (success)
delete_insn (insn);
&& (MODES_OK_FOR_MOVE2ADD
(dst_mode, reg_mode[REGNO (XEXP (src, 1))])))
{
- if (reg_base_reg[REGNO (XEXP (src, 1))] < 0)
+ if (reg_base_reg[REGNO (XEXP (src, 1))] < 0
+ && reg_symbol_ref[REGNO (XEXP (src, 1))] == NULL_RTX)
offset = reg_offset[REGNO (XEXP (src, 1))];
/* Maybe the first register is known to be a
constant. */
else if (reg_set_luid[REGNO (base_reg)]
> move2add_last_label_luid
&& (MODES_OK_FOR_MOVE2ADD
- (dst_mode, reg_mode[REGNO (XEXP (src, 1))]))
- && reg_base_reg[REGNO (base_reg)] < 0)
+ (dst_mode, reg_mode[REGNO (base_reg)]))
+ && reg_base_reg[REGNO (base_reg)] < 0
+ && reg_symbol_ref[REGNO (base_reg)] == NULL_RTX)
{
offset = reg_offset[REGNO (base_reg)];
base_reg = XEXP (src, 1);
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_df_finish | TODO_verify_rtl_sharing |
- TODO_dump_func /* todo_flags_finish */
+ 0 /* todo_flags_finish */
}
};