+2017-01-26 Vladimir Makarov <vmakarov@redhat.com>
+
+ PR target/79131
+ * lra-assigns.c (setup_live_pseudos_and_spill_after_risky): Take
+ endianness for subregs into account.
+ * lra-constraints.c (lra_constraints): Do risky transformations
+ always on the first iteration.
+ * lra-lives.c (check_pseudos_live_through_calls): Add arg
+ last_call_used_reg_set.
+ (process_bb_lives): Define and use last_call_used_reg_set.
+ * lra.c (lra): Always continue after lra_constraints on the first
+ iteration.
+
2017-01-26 Kirill Yukhin <kirill.yukhin@gmail.com>
* gcc.target/i386/avx512bw-kshiftlq-2.c: Use unsigned long long
/* If it is multi-register pseudos they should start on
the same hard register. */
|| hard_regno != reg_renumber[conflict_regno])
- add_to_hard_reg_set (&conflict_set,
- lra_reg_info[conflict_regno].biggest_mode,
- reg_renumber[conflict_regno]);
+ {
+ int conflict_hard_regno = reg_renumber[conflict_regno];
+ machine_mode biggest_mode = lra_reg_info[conflict_regno].biggest_mode;
+ int biggest_nregs = hard_regno_nregs[conflict_hard_regno][biggest_mode];
+ int nregs_diff = (biggest_nregs
+ - (hard_regno_nregs
+ [conflict_hard_regno]
+ [PSEUDO_REGNO_MODE (conflict_regno)]));
+ add_to_hard_reg_set (&conflict_set,
+ biggest_mode,
+ conflict_hard_regno
+ - (WORDS_BIG_ENDIAN ? nregs_diff : 0));
+ }
if (! overlaps_hard_reg_set_p (conflict_set, mode, hard_regno))
{
update_lives (regno, false);
&& REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
lra_risky_transformations_p = true;
else
- lra_risky_transformations_p = false;
+ /* On the first iteration we should check IRA assignment
+ correctness. In rare cases, the assignments can be wrong as
+ early clobber operands are ignored in IRA. */
+ lra_risky_transformations_p = first_p;
new_insn_uid_start = get_max_uid ();
new_regno_start = first_p ? lra_constraint_new_regno_start : max_reg_num ();
/* Mark used hard regs for target stack size calulations. */
}
/* Check that REGNO living through calls and setjumps, set up conflict
- regs, and clear corresponding bits in PSEUDOS_LIVE_THROUGH_CALLS and
- PSEUDOS_LIVE_THROUGH_SETJUMPS. */
+ regs using LAST_CALL_USED_REG_SET, and clear corresponding bits in
+ PSEUDOS_LIVE_THROUGH_CALLS and PSEUDOS_LIVE_THROUGH_SETJUMPS. */
static inline void
-check_pseudos_live_through_calls (int regno)
+check_pseudos_live_through_calls (int regno,
+ HARD_REG_SET last_call_used_reg_set)
{
int hr;
return;
sparseset_clear_bit (pseudos_live_through_calls, regno);
IOR_HARD_REG_SET (lra_reg_info[regno].conflict_hard_regs,
- call_used_reg_set);
+ last_call_used_reg_set);
for (hr = 0; hr < FIRST_PSEUDO_REGISTER; hr++)
if (HARD_REGNO_CALL_PART_CLOBBERED (hr, PSEUDO_REGNO_MODE (regno)))
rtx_insn *next;
rtx link, *link_loc;
bool need_curr_point_incr;
-
+ HARD_REG_SET last_call_used_reg_set;
+
reg_live_out = df_get_live_out (bb);
sparseset_clear (pseudos_live);
sparseset_clear (pseudos_live_through_calls);
sparseset_clear (pseudos_live_through_setjumps);
+ CLEAR_HARD_REG_SET (last_call_used_reg_set);
REG_SET_TO_HARD_REG_SET (hard_regs_live, reg_live_out);
AND_COMPL_HARD_REG_SET (hard_regs_live, eliminable_regset);
EXECUTE_IF_SET_IN_BITMAP (reg_live_out, FIRST_PSEUDO_REGISTER, j, bi)
need_curr_point_incr
|= mark_regno_live (reg->regno, reg->biggest_mode,
curr_point);
- check_pseudos_live_through_calls (reg->regno);
+ check_pseudos_live_through_calls (reg->regno,
+ last_call_used_reg_set);
}
for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
if (call_p)
{
- if (flag_ipa_ra)
+ if (! flag_ipa_ra)
+ COPY_HARD_REG_SET(last_call_used_reg_set, call_used_reg_set);
+ else
{
HARD_REG_SET this_call_used_reg_set;
get_call_reg_set_usage (curr_insn, &this_call_used_reg_set,
call_used_reg_set);
+ bool flush = (! hard_reg_set_empty_p (last_call_used_reg_set)
+ && ! hard_reg_set_equal_p (last_call_used_reg_set,
+ this_call_used_reg_set));
+
EXECUTE_IF_SET_IN_SPARSESET (pseudos_live, j)
- IOR_HARD_REG_SET (lra_reg_info[j].actual_call_used_reg_set,
- this_call_used_reg_set);
+ {
+ IOR_HARD_REG_SET (lra_reg_info[j].actual_call_used_reg_set,
+ this_call_used_reg_set);
+ if (flush)
+ check_pseudos_live_through_calls
+ (j, last_call_used_reg_set);
+ }
+ COPY_HARD_REG_SET(last_call_used_reg_set, this_call_used_reg_set);
}
sparseset_ior (pseudos_live_through_calls,
need_curr_point_incr
|= mark_regno_live (reg->regno, reg->biggest_mode,
curr_point);
- check_pseudos_live_through_calls (reg->regno);
+ check_pseudos_live_through_calls (reg->regno,
+ last_call_used_reg_set);
}
for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
if (sparseset_cardinality (pseudos_live_through_calls) == 0)
break;
if (sparseset_bit_p (pseudos_live_through_calls, j))
- check_pseudos_live_through_calls (j);
+ check_pseudos_live_through_calls (j, last_call_used_reg_set);
}
if (need_curr_point_incr)
lra (FILE *f)
{
int i;
- bool live_p, scratch_p, inserted_p;
+ bool live_p, inserted_p;
lra_dump_file = f;
lra_constraint_new_regno_start = lra_new_regno_start = max_reg_num ();
lra_bad_spill_regno_start = INT_MAX;
remove_scratches ();
- scratch_p = lra_constraint_new_regno_start != max_reg_num ();
/* A function that has a non-local label that can reach the exit
block via non-exceptional paths must save all call-saved
for (;;)
{
/* We should try to assign hard registers to scratches even
- if there were no RTL transformations in
- lra_constraints. */
+ if there were no RTL transformations in lra_constraints.
+ Also we should check IRA assignments on the first
+ iteration as they can be wrong because of early clobber
+ operands which are ignored in IRA. */
if (! lra_constraints (lra_constraint_iter == 0)
- && (lra_constraint_iter > 1
- || (! scratch_p && ! caller_save_needed)))
+ && lra_constraint_iter > 1)
break;
/* Constraint transformations may result in that eliminable
hard regs become uneliminable and pseudos which use them
+2017-01-26 Vladimir Makarov <vmakarov@redhat.com>
+
+ PR target/79131
+ * gcc.target/arm/pr79131.c: New.
+
2017-01-26 Bin Cheng <bin.cheng@arm.com>
* gcc.target/aarch64/ldp_vec_64_1.c: Xfail.
--- /dev/null
+/* { dg-do compile } */
+/* { dg-options "-O2 -mbig-endian" } */
+
+long long a;
+enum { NILFS_SEGMENT_USAGE_ACTIVE, NILFS_SEGMENT_USAGE_DIRTY } b;
+void nilfs_sufile_mod_counter(long long p1) {
+ long c = p1;
+ unsigned d = __builtin_bswap64(a);
+ a = __builtin_bswap64(d + c);
+}
+void nilfs_sufile_do_free() {
+ int e, f;
+ e = __builtin_bswap32(b) & 1 << NILFS_SEGMENT_USAGE_DIRTY;
+ f = e;
+ nilfs_sufile_mod_counter(f ? -1 : 0);
+}