+2019-11-05  Andreas Krebbel  <krebbel@linux.ibm.com>
+
+	* gcc.target/s390/s390.exp
+	(check_effective_target_s390_useable_hw): Add inline asm for z14
+	and z15. Replace instruction for z13 with lochiz. Add register
+	clobbers. Check also for __zarch__ when doing the __VX__ test.
+
2019-11-05  Christophe Lyon  <christophe.lyon@linaro.org>

	* gcc.target/arm/attr-crypto.c: Skip if -mpure-code is used.
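
For the arm entry, the skip is presumably expressed with the standard
dg-skip-if directive; a minimal sketch (the comment string is
illustrative, the option-matching syntax is the usual DejaGnu form):

	/* Skip this test when -mpure-code is among the options in use.  */
	/* { dg-skip-if "not compatible with -mpure-code" { *-*-* } { "-mpure-code" } } */

The s390.exp hunk below updates the run-time hardware probe in
check_effective_target_s390_useable_hw: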
int main (void)
{
asm (".machinemode zarch" : : );
- #if __ARCH__ >= 11
- asm ("lcbb %%r2,0(%%r15),0" : : );
+ #if __ARCH__ >= 13
+ asm ("ncrk %%r2,%%r2,%%r2" : : : "r2");
+ #elif __ARCH__ >= 12
+ asm ("agh %%r2,0(%%r15)" : : : "r2");
+ #elif __ARCH__ >= 11
+ asm ("lochiz %%r2,42" : : : "r2");
#elif __ARCH__ >= 10
- asm ("risbgn %%r2,%%r2,0,0,0" : : );
+ asm ("risbgn %%r2,%%r2,0,0,0" : : : "r2");
#elif __ARCH__ >= 9
- asm ("sgrk %%r2,%%r2,%%r2" : : );
+ asm ("sgrk %%r2,%%r2,%%r2" : : : "r2");
#elif __ARCH__ >= 8
- asm ("rosbg %%r2,%%r2,0,0,0" : : );
+ asm ("rosbg %%r2,%%r2,0,0,0" : : : "r2");
#elif __ARCH__ >= 7
- asm ("nilf %%r2,0" : : );
+ asm ("nilf %%r2,0" : : : "r2");
#elif __ARCH__ >= 6
- asm ("lay %%r2,0(%%r15)" : : );
+ asm ("lay %%r2,0(%%r15)" : : : "r2");
#elif __ARCH__ >= 5
asm ("tam" : : );
#endif
asm ("etnd %0" : "=d" (nd));
}
#endif
- #ifdef __VX__
- asm ("vzero %%v0" : : );
+ #if defined (__VX__) && defined (__zarch__)
+ asm ("vzero %%v0" : : : "v0");
#endif
return 0;
}
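
The probe executes one instruction introduced at each architecture
level; if the binary was built for a newer level than the machine
running the testsuite, that instruction traps with SIGILL, the runtime
check fails, and tests gated on s390_useable_hw are skipped instead of
crashing. The new clobber lists tell the compiler that the asm
overwrites %r2 (or %v0), and the added __zarch__ test presumably guards
against the vector asm being used in ESA mode, where it is not valid.
A standalone sketch of the same probing idea, reduced to the z13 case
(the file name and compile command are illustrative; the instruction is
the one from the patch):

	/* probe-z13.c: exits 0 on z13-or-newer hardware, dies with SIGILL
	   otherwise.  Build with something like: gcc -march=z13 probe-z13.c  */
	int main (void)
	{
	  /* lochiz (load halfword immediate on condition) is a z13
	     (arch11) instruction; tell the compiler the asm clobbers r2.  */
	  asm ("lochiz %%r2,42" : : : "r2");
	  return 0;
	}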