+2017-11-15 Sebastian Peryt <sebastian.peryt@intel.com>
+
+ PR target/82941
+ PR target/82942
+ * config/i386/i386.c (pass_insert_vzeroupper): Modify gate condition
+ to check TARGET_AVX512ER instead of TARGET_AVX512F, so that the pass
+ returns true on Xeon and not on Xeon Phi.
+ (ix86_check_avx256_register): Changed to ...
+ (ix86_check_avx_upper_register): ... this. Add extra check for
+ VALID_AVX512F_REG_OR_XI_MODE.
+ (ix86_avx_u128_mode_needed): Changed
+ ix86_check_avx256_register to ix86_check_avx_upper_register.
+ (ix86_check_avx256_stores): Changed to ...
+ (ix86_check_avx_upper_stores): ... this. Changed
+ ix86_check_avx256_register to ix86_check_avx_upper_register.
+ (ix86_avx_u128_mode_after): Changed
+ avx_reg256_found to avx_upper_reg_found. Changed
+ ix86_check_avx256_stores to ix86_check_avx_upper_stores.
+ (ix86_avx_u128_mode_entry): Changed
+ ix86_check_avx256_register to ix86_check_avx_upper_register.
+ (ix86_avx_u128_mode_exit): Ditto.
+ * config/i386/i386.h (host_detect_local_cpu): New define.
+
2017-11-15 Dominik Infuehr <dominik.infuehr@theobroma-systems.com>
* config/arm/xgene1.md (xgene1): Split into automatons
int i;
/* vzeroupper instructions are inserted immediately after reload to
- account for possible spills from 256bit registers. The pass
+ account for possible spills from 256bit or 512bit registers. The pass
reuses mode switching infrastructure by re-running mode insertion
pass, so disable entities that have already been processed. */
for (i = 0; i < MAX_386_ENTITIES; i++)
/* opt_pass methods: */
virtual bool gate (function *)
{
- return TARGET_AVX && !TARGET_AVX512F
+ return TARGET_AVX && !TARGET_AVX512ER
&& TARGET_VZEROUPPER && flag_expensive_optimizations
&& !optimize_size;
}
return X86_DIRFLAG_ANY;
}
-/* Check if a 256bit AVX register is referenced inside of EXP. */
+/* Check if a 256bit or 512bit AVX register is referenced inside of EXP. */
static bool
-ix86_check_avx256_register (const_rtx exp)
+ix86_check_avx_upper_register (const_rtx exp)
{
if (SUBREG_P (exp))
exp = SUBREG_REG (exp);
return (REG_P (exp)
- && VALID_AVX256_REG_OR_OI_MODE (GET_MODE (exp)));
+ && (VALID_AVX256_REG_OR_OI_MODE (GET_MODE (exp))
+ || VALID_AVX512F_REG_OR_XI_MODE (GET_MODE (exp))));
}
/* Return needed mode for entity in optimize_mode_switching pass. */
rtx link;
/* Needed mode is set to AVX_U128_CLEAN if there are
- no 256bit modes used in function arguments. */
+ no 256bit or 512bit modes used in function arguments. */
for (link = CALL_INSN_FUNCTION_USAGE (insn);
link;
link = XEXP (link, 1))
{
rtx arg = XEXP (XEXP (link, 0), 0);
- if (ix86_check_avx256_register (arg))
+ if (ix86_check_avx_upper_register (arg))
return AVX_U128_DIRTY;
}
}
return AVX_U128_CLEAN;
}
- /* Require DIRTY mode if a 256bit AVX register is referenced. Hardware
- changes state only when a 256bit register is written to, but we need
- to prevent the compiler from moving optimal insertion point above
- eventual read from 256bit register. */
+ /* Require DIRTY mode if a 256bit or 512bit AVX register is referenced.
+ Hardware changes state only when a 256bit or 512bit register is written
+ to, but we need to prevent the compiler from moving optimal insertion
+ point above eventual read from 256bit or 512bit register. */
subrtx_iterator::array_type array;
FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
- if (ix86_check_avx256_register (*iter))
+ if (ix86_check_avx_upper_register (*iter))
return AVX_U128_DIRTY;
return AVX_U128_ANY;
return 0;
}
-/* Check if a 256bit AVX register is referenced in stores. */
+/* Check if a 256bit or 512bit AVX register is referenced in stores. */
static void
-ix86_check_avx256_stores (rtx dest, const_rtx, void *data)
+ix86_check_avx_upper_stores (rtx dest, const_rtx, void *data)
{
- if (ix86_check_avx256_register (dest))
+ if (ix86_check_avx_upper_register (dest))
{
bool *used = (bool *) data;
*used = true;
return AVX_U128_CLEAN;
/* We know that state is clean after CALL insn if there are no
- 256bit registers used in the function return register. */
+ 256bit or 512bit registers used in the function return register. */
if (CALL_P (insn))
{
- bool avx_reg256_found = false;
- note_stores (pat, ix86_check_avx256_stores, &avx_reg256_found);
+ bool avx_upper_reg_found = false;
+ note_stores (pat, ix86_check_avx_upper_stores, &avx_upper_reg_found);
- return avx_reg256_found ? AVX_U128_DIRTY : AVX_U128_CLEAN;
+ return avx_upper_reg_found ? AVX_U128_DIRTY : AVX_U128_CLEAN;
}
/* Otherwise, return current mode. Remember that if insn
- references AVX 256bit registers, the mode was already changed
- to DIRTY from MODE_NEEDED. */
+ references AVX 256bit or 512bit registers, the mode was already
+ changed to DIRTY from MODE_NEEDED. */
return mode;
}
tree arg;
/* Entry mode is set to AVX_U128_DIRTY if there are
- 256bit modes used in function arguments. */
+ 256bit or 512bit modes used in function arguments. */
for (arg = DECL_ARGUMENTS (current_function_decl); arg;
arg = TREE_CHAIN (arg))
{
rtx incoming = DECL_INCOMING_RTL (arg);
- if (incoming && ix86_check_avx256_register (incoming))
+ if (incoming && ix86_check_avx_upper_register (incoming))
return AVX_U128_DIRTY;
}
{
rtx reg = crtl->return_rtx;
- /* Exit mode is set to AVX_U128_DIRTY if there are
- 256bit modes used in the function return register. */
- if (reg && ix86_check_avx256_register (reg))
+ /* Exit mode is set to AVX_U128_DIRTY if there are 256bit
+ or 512bit modes used in the function return register. */
+ if (reg && ix86_check_avx_upper_register (reg))
return AVX_U128_DIRTY;
return AVX_U128_CLEAN;
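
For reference, the kind of compile test that would exercise this change scans for
vzeroupper on a skylake-avx512 target, where the pass is re-enabled by the gate
change above. The sketch below is illustrative only; the file contents, options
and scan pattern are assumptions and are not taken from this patch or its
testsuite.

/* Illustrative sketch, not part of this patch.  Assumes an x86 target
   compiled with -march=skylake-avx512, where the vzeroupper pass is now
   enabled again.  */
/* { dg-do compile } */
/* { dg-options "-O2 -march=skylake-avx512" } */

#include <immintrin.h>

extern __m256i a, b;

void
foo (void)
{
  /* Writing a 256bit register leaves the AVX upper state DIRTY, so the
     pass should insert vzeroupper before returning to the caller.  */
  a = _mm256_add_epi32 (a, b);
}

/* { dg-final { scan-assembler "vzeroupper" } } */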