+2016-11-03 Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
+
+ * ccmp.c (expand_ccmp_expr_1): Adjust.
+ (expand_ccmp_expr): Likewise.
+ (expand_ccmp_next): Likewise.
+ * config/aarch64/aarch64.c (aarch64_gen_ccmp_next): Likewise.
+ (aarch64_gen_ccmp_first): Likewise.
+ * doc/tm.texi: Regenerate.
+ * target.def (gen_ccmp_first): Change argument types to rtx_insn *.
+ (gen_ccmp_next): Likewise.
+
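The signature change here is a type-safety cleanup: rtx_insn is a
subclass of rtx_def, so passing the prep/gen sequences as rtx_insn **
lets consumers such as seq_cost take them directly, and the
safe_as_a <rtx_insn *> downcasts below disappear.  A standalone toy
of the same idea (the types are illustrative stand-ins, not GCC's
real rtx_def/rtx_insn hierarchy):

/* Toy model: a typed out-parameter carries the "this is an insn
   list" fact in the type system, so no checked downcast is needed
   at the use site.  */
#include <cassert>

struct rtx_def { int code; };
struct rtx_insn : rtx_def { rtx_insn *next = nullptr; };

/* Old style: untyped out-parameter; every consumer must downcast.  */
static void gen_old (rtx_def **seq) { *seq = new rtx_insn (); }

/* New style: the sequence is typed at the source.  */
static void gen_new (rtx_insn **seq) { *seq = new rtx_insn (); }

int
main ()
{
  rtx_def *s1 = nullptr;
  gen_old (&s1);
  rtx_insn *i1 = static_cast<rtx_insn *> (s1);  /* cast needed */

  rtx_insn *s2 = nullptr;
  gen_new (&s2);                                /* no cast at all */

  assert (i1 != nullptr && s2 != nullptr);
  delete i1;
  delete s2;
  return 0;
}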
2016-11-03 Bin Cheng <bin.cheng@arm.com>

 * tree-vect-loop.c (destroy_loop_vec_info): Handle cond_expr.
GEN_SEQ returns all compare insns. */
static rtx
expand_ccmp_next (gimple *g, tree_code code, rtx prev,
- rtx *prep_seq, rtx *gen_seq)
+ rtx_insn **prep_seq, rtx_insn **gen_seq)
{
rtx_code rcode;
int unsignedp = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (g)));
PREP_SEQ returns all insns to prepare operands.
GEN_SEQ returns all compare insns. */
static rtx
-expand_ccmp_expr_1 (gimple *g, rtx *prep_seq, rtx *gen_seq)
+expand_ccmp_expr_1 (gimple *g, rtx_insn **prep_seq, rtx_insn **gen_seq)
{
- rtx prep_seq_1, gen_seq_1;
- rtx prep_seq_2, gen_seq_2;
tree exp = gimple_assign_rhs_to_tree (g);
tree_code code = TREE_CODE (exp);
gimple *gs0 = get_gimple_for_ssa_name (TREE_OPERAND (exp, 0));
rcode0 = get_rtx_code (code0, unsignedp0);
rcode1 = get_rtx_code (code1, unsignedp1);
+ rtx_insn *prep_seq_1, *gen_seq_1;
tmp = targetm.gen_ccmp_first (&prep_seq_1, &gen_seq_1, rcode0,
gimple_assign_rhs1 (gs0),
gimple_assign_rhs2 (gs0));
if (tmp != NULL)
{
ret = expand_ccmp_next (gs1, code, tmp, &prep_seq_1, &gen_seq_1);
- cost1 = seq_cost (safe_as_a <rtx_insn *> (prep_seq_1), speed_p);
- cost1 += seq_cost (safe_as_a <rtx_insn *> (gen_seq_1), speed_p);
+ cost1 = seq_cost (prep_seq_1, speed_p);
+ cost1 += seq_cost (gen_seq_1, speed_p);
}
/* FIXME: Temporary workaround for PR69619.
Avoid exponential compile time due to expanding gs0 and gs1 twice.
If gs0 and gs1 are complex, the cost will be high, so avoid
reevaluation if above an arbitrary threshold. */
+ rtx_insn *prep_seq_2, *gen_seq_2;
if (tmp == NULL || cost1 < COSTS_N_INSNS (25))
tmp2 = targetm.gen_ccmp_first (&prep_seq_2, &gen_seq_2, rcode1,
gimple_assign_rhs1 (gs1),
{
ret2 = expand_ccmp_next (gs0, code, tmp2, &prep_seq_2,
&gen_seq_2);
- cost2 = seq_cost (safe_as_a <rtx_insn *> (prep_seq_2), speed_p);
- cost2 += seq_cost (safe_as_a <rtx_insn *> (gen_seq_2), speed_p);
+ cost2 = seq_cost (prep_seq_2, speed_p);
+ cost2 += seq_cost (gen_seq_2, speed_p);
}
if (cost2 < cost1)
{
rtx_insn *last;
rtx tmp;
- rtx prep_seq, gen_seq;
-
- prep_seq = gen_seq = NULL_RTX;
if (!ccmp_candidate_p (g))
return NULL_RTX;
last = get_last_insn ();
+
+ rtx_insn *prep_seq = NULL, *gen_seq = NULL;
tmp = expand_ccmp_expr_1 (g, &prep_seq, &gen_seq);
if (tmp)
}
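With the sequences typed, the seq_cost calls above consume them
directly.  For intuition, seq_cost is essentially a walk over the
insn list summing per-insn costs; a self-contained toy version
(stand-in types and a fixed cost field, not GCC's insn_cost logic):

/* Toy sequence-cost walk: sum a per-insn cost over a singly linked
   list, mirroring the shape of seq_cost (const rtx_insn *, bool).  */
#include <cstdio>

struct toy_insn
{
  int cost;
  toy_insn *next;
};

static int
toy_seq_cost (const toy_insn *seq)
{
  int cost = 0;
  for (; seq != nullptr; seq = seq->next)
    cost += seq->cost;
  return cost;
}

int
main ()
{
  toy_insn i2 = { 4, nullptr };
  toy_insn i1 = { 1, &i2 };
  std::printf ("%d\n", toy_seq_cost (&i1));  /* prints 5 */
  return 0;
}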
static rtx
-aarch64_gen_ccmp_first (rtx *prep_seq, rtx *gen_seq,
+aarch64_gen_ccmp_first (rtx_insn **prep_seq, rtx_insn **gen_seq,
int code, tree treeop0, tree treeop1)
{
machine_mode op_mode, cmp_mode, cc_mode = CCmode;
}
static rtx
-aarch64_gen_ccmp_next (rtx *prep_seq, rtx *gen_seq, rtx prev, int cmp_code,
- tree treeop0, tree treeop1, int bit_code)
+aarch64_gen_ccmp_next (rtx_insn **prep_seq, rtx_insn **gen_seq, rtx prev,
+ int cmp_code, tree treeop0, tree treeop1, int bit_code)
{
rtx op0, op1, target;
machine_mode op_mode, cmp_mode, cc_mode = CCmode;
struct expand_operand ops[6];
int aarch64_cond;
- push_to_sequence ((rtx_insn*) *prep_seq);
+ push_to_sequence (*prep_seq);
expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
op_mode = GET_MODE (op0);
create_fixed_operand (&ops[4], prev);
create_fixed_operand (&ops[5], GEN_INT (aarch64_cond));
- push_to_sequence ((rtx_insn*) *gen_seq);
+ push_to_sequence (*gen_seq);
if (!maybe_expand_insn (icode, 6, ops))
{
end_sequence ();
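The casts on push_to_sequence above go away because the hook now
receives rtx_insn ** directly.  The underlying pattern is GCC's
sequence API: reopen the caller's partial sequence, emit into it,
then capture the updated head.  A hedged sketch of just that
skeleton (real GCC calls, but the operand expansion in between is
elided, so this is illustrative rather than complete):

  /* Inside a gen_ccmp_next implementation: resume the prep
     sequence handed in by the middle end, append to it, and
     return the new list head through the out-parameter.  */
  push_to_sequence (*prep_seq);
  /* ... expand operands here, emitting into the open sequence ... */
  *prep_seq = get_insns ();
  end_sequence ();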
modes and they have different conditional execution capability, such as ARM.
@end deftypefn
-@deftypefn {Target Hook} rtx TARGET_GEN_CCMP_FIRST (rtx *@var{prep_seq}, rtx *@var{gen_seq}, int @var{code}, tree @var{op0}, tree @var{op1})
+@deftypefn {Target Hook} rtx TARGET_GEN_CCMP_FIRST (rtx_insn **@var{prep_seq}, rtx_insn **@var{gen_seq}, int @var{code}, tree @var{op0}, tree @var{op1})
This function prepares to emit a comparison insn for the first compare in a
sequence of conditional comparisons.  It returns an appropriate comparison
with @code{CC} for passing to @code{gen_ccmp_next} or @code{cbranch_optab}.
@var{code} is the @code{rtx_code} of the compare for @var{op0} and @var{op1}.
@end deftypefn
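As a concrete shape for the returned value, a port's implementation
typically emits the compare into @var{gen_seq} and returns
@var{code} applied to its CC register, which the middle end can then
feed to @code{gen_ccmp_next} or @code{cbranch_optab}.  A hedged
sketch of such a tail (CC_REGNUM stands in for the port's flags
register; sequence setup, the mode choice, and error paths are
port-specific and omitted):

  /* Sketch: capture the compare insns, then hand back a
     comparison rtx against the flags register.  */
  rtx cc_reg = gen_rtx_REG (cc_mode, CC_REGNUM);
  *gen_seq = get_insns ();
  end_sequence ();
  return gen_rtx_fmt_ee ((rtx_code) code, cc_mode, cc_reg, const0_rtx);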
-@deftypefn {Target Hook} rtx TARGET_GEN_CCMP_NEXT (rtx *@var{prep_seq}, rtx *@var{gen_seq}, rtx @var{prev}, int @var{cmp_code}, tree @var{op0}, tree @var{op1}, int @var{bit_code})
+@deftypefn {Target Hook} rtx TARGET_GEN_CCMP_NEXT (rtx_insn **@var{prep_seq}, rtx_insn **@var{gen_seq}, rtx @var{prev}, int @var{cmp_code}, tree @var{op0}, tree @var{op1}, int @var{bit_code})
This function prepares to emit a conditional comparison within a sequence
of conditional comparisons. It returns an appropriate comparison with
@code{CC} for passing to @code{gen_ccmp_next} or @code{cbranch_optab}.
insns are saved in @var{gen_seq}. They will be emitted when all the\n\
compares in the conditional comparison are generated without error.\n\
@var{code} is the @code{rtx_code} of the compare for @var{op0} and @var{op1}.",
- rtx, (rtx *prep_seq, rtx *gen_seq, int code, tree op0, tree op1),
+ rtx, (rtx_insn **prep_seq, rtx_insn **gen_seq, int code, tree op0, tree op1),
NULL)
DEFHOOK
be appropriate for passing to @code{gen_ccmp_next} or @code{cbranch_optab}.\n\
@var{code} is the @code{rtx_code} of the compare for @var{op0} and @var{op1}.\n\
@var{bit_code} is @code{AND} or @code{IOR}, which is the op on the compares.",
- rtx, (rtx *prep_seq, rtx *gen_seq, rtx prev, int cmp_code, tree op0, tree op1, int bit_code),
+ rtx, (rtx_insn **prep_seq, rtx_insn **gen_seq, rtx prev, int cmp_code, tree op0, tree op1, int bit_code),
NULL)
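For a port wiring these hooks up, registration follows the usual
target-macro pattern near the end of the backend file (this is how
the aarch64 functions above are installed):

#undef TARGET_GEN_CCMP_FIRST
#define TARGET_GEN_CCMP_FIRST aarch64_gen_ccmp_first

#undef TARGET_GEN_CCMP_NEXT
#define TARGET_GEN_CCMP_NEXT aarch64_gen_ccmp_next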
/* Return a new value for loop unroll size. */