+2011-06-20 Changpeng Fang <changpeng.fang@amd.com>
+
+ PR target/49089
+ * config/i386/i386.c (avx256_split_unaligned_load): New definition.
+ (avx256_split_unaligned_store): New definition.
+ (ix86_option_override_internal): Enable avx256 unaligned load/store
+ splitting only when avx256_split_unaligned_load/store is set.
+
2011-06-20 Bernd Schmidt <bernds@codesourcery.com>

 * regrename.c (scan_rtx_reg): Handle the case where we write to an
= m_PENT | m_ATOM | m_PPRO | m_AMD_MULTIPLE | m_PENT4
| m_NOCONA | m_CORE2I7 | m_GENERIC;
+static const unsigned int x86_avx256_split_unaligned_load
+ = m_COREI7 | m_GENERIC;
+
+static const unsigned int x86_avx256_split_unaligned_store
+ = m_COREI7 | m_BDVER1 | m_GENERIC;
+
/* In case the average insn count for single function invocation is
lower than this constant, emit fast (but longer) prologue and
epilogue code. */
if (flag_expensive_optimizations
&& !(target_flags_explicit & MASK_VZEROUPPER))
target_flags |= MASK_VZEROUPPER;
- if (!(target_flags_explicit & MASK_AVX256_SPLIT_UNALIGNED_LOAD))
+ if ((x86_avx256_split_unaligned_load & ix86_tune_mask)
+ && !(target_flags_explicit & MASK_AVX256_SPLIT_UNALIGNED_LOAD))
target_flags |= MASK_AVX256_SPLIT_UNALIGNED_LOAD;
- if (!(target_flags_explicit & MASK_AVX256_SPLIT_UNALIGNED_STORE))
+ if ((x86_avx256_split_unaligned_store & ix86_tune_mask)
+ && !(target_flags_explicit & MASK_AVX256_SPLIT_UNALIGNED_STORE))
target_flags |= MASK_AVX256_SPLIT_UNALIGNED_STORE;
}
}
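
For readers unfamiliar with the i386 tuning machinery, the gating pattern the patch installs can be sketched as a small standalone C program (this is not GCC code: the mask and MASK_* flag names mirror the patch, while the bit values, the pretend -mtune selection, and the printf driver are purely illustrative assumptions):

/* Minimal sketch of the logic added to ix86_option_override_internal:
   a per-tuning bitmask supplies the default, and an option the user set
   explicitly on the command line is never overridden.  */
#include <stdio.h>

#define m_COREI7   (1u << 0)
#define m_BDVER1   (1u << 1)
#define m_GENERIC  (1u << 2)

#define MASK_AVX256_SPLIT_UNALIGNED_LOAD   (1u << 0)
#define MASK_AVX256_SPLIT_UNALIGNED_STORE  (1u << 1)

static const unsigned int x86_avx256_split_unaligned_load
  = m_COREI7 | m_GENERIC;
static const unsigned int x86_avx256_split_unaligned_store
  = m_COREI7 | m_BDVER1 | m_GENERIC;

int main (void)
{
  unsigned int ix86_tune_mask = m_BDVER1;  /* pretend -mtune=bdver1 */
  unsigned int target_flags = 0;
  unsigned int target_flags_explicit = 0;  /* no explicit -m[no-]... option */

  /* Split unaligned 256-bit loads/stores only if the selected tuning
     asks for it and the user did not decide explicitly.  */
  if ((x86_avx256_split_unaligned_load & ix86_tune_mask)
      && !(target_flags_explicit & MASK_AVX256_SPLIT_UNALIGNED_LOAD))
    target_flags |= MASK_AVX256_SPLIT_UNALIGNED_LOAD;

  if ((x86_avx256_split_unaligned_store & ix86_tune_mask)
      && !(target_flags_explicit & MASK_AVX256_SPLIT_UNALIGNED_STORE))
    target_flags |= MASK_AVX256_SPLIT_UNALIGNED_STORE;

  printf ("split loads: %d, split stores: %d\n",
          !!(target_flags & MASK_AVX256_SPLIT_UNALIGNED_LOAD),
          !!(target_flags & MASK_AVX256_SPLIT_UNALIGNED_STORE));
  return 0;
}

With the bdver1 tuning assumed above, only the store mask matches, so the sketch prints "split loads: 0, split stores: 1", which matches the intent of the patch: store splitting is enabled for m_BDVER1 while load splitting is not.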