|
From: <sv...@va...> - 2015-08-15 12:21:51
|
Author: rhyskidd
Date: Sat Aug 15 13:21:42 2015
New Revision: 15553
Log:
Increase test coverage on OS X, by re-enabling the none/tests/amd64/avx2-1 regression test.
n-i-bz
(Unfortunately I don't have the hardware support here, but the build environment works)
$ perl tests/vg_regtest none/tests/amd64/avx2-1
avx2-1: (skipping, prereq failed: test -x avx2-1 && ../../../tests/x86_amd64_features amd64-avx)
== 0 tests, 0 stderr failures, 0 stdout failures, 0 stderrB failures, 0 stdoutB failures, 0 post failures ==
On OS X 10.10
Before:
== 594 tests, 215 stderr failures, 9 stdout failures, 0 stderrB failures, 0 stdoutB failures, 30 post failures ==
After:
== 594 tests, 215 stderr failures, 9 stdout failures, 0 stderrB failures, 0 stdoutB failures, 30 post failures ==
Modified:
trunk/none/tests/amd64/Makefile.am
trunk/none/tests/amd64/avx2-1.c
Modified: trunk/none/tests/amd64/Makefile.am
==============================================================================
--- trunk/none/tests/amd64/Makefile.am (original)
+++ trunk/none/tests/amd64/Makefile.am Sat Aug 15 13:21:42 2015
@@ -110,6 +110,11 @@
check_PROGRAMS += avx-1
endif
endif
+if BUILD_AVX2_TESTS
+if !COMPILER_IS_ICC
+ check_PROGRAMS += avx2-1
+endif
+endif
if BUILD_SSSE3_TESTS
check_PROGRAMS += ssse3_misaligned
endif
@@ -152,11 +157,6 @@
if BUILD_LOOPNEL_TESTS
check_PROGRAMS += loopnel
endif
-if BUILD_AVX2_TESTS
-if !COMPILER_IS_ICC
- check_PROGRAMS += avx2-1
-endif
-endif
endif
AM_CFLAGS += @FLAG_M64@
Modified: trunk/none/tests/amd64/avx2-1.c
==============================================================================
--- trunk/none/tests/amd64/avx2-1.c (original)
+++ trunk/none/tests/amd64/avx2-1.c Sat Aug 15 13:21:42 2015
@@ -9,7 +9,11 @@
typedef unsigned long int UWord;
typedef unsigned long long int ULong;
+#if defined(VGO_darwin)
UChar randArray[1027] __attribute__((used));
+#else
+UChar _randArray[1027] __attribute__((used));
+#endif
#define IS_32_ALIGNED(_ptr) (0 == (0x1F & (UWord)(_ptr)))
@@ -975,7 +979,7 @@
"vpslld $25, %%xmm7, %%xmm8;"
"vpsrld $25, %%xmm8, %%xmm8;"
"vblendvps %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vgatherdps %%xmm6, 3(%%r14,%%xmm8,4), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -983,7 +987,7 @@
"vpslld $25, %%ymm7, %%ymm8;"
"vpsrld $25, %%ymm8, %%ymm8;"
"vblendvps %%ymm6, %%ymm8, %%ymm7, %%ymm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vgatherdps %%ymm6, 3(%%r14,%%ymm8,4), %%ymm9;"
"xorl %%r14d, %%r14d")
@@ -993,7 +997,7 @@
"vpmovsxdq %%xmm6, %%xmm9;"
"vblendvpd %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vgatherqps %%xmm6, 3(%%r14,%%xmm8,4), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1003,7 +1007,7 @@
"vpmovsxdq %%xmm6, %%ymm9;"
"vblendvpd %%ymm9, %%ymm8, %%ymm7, %%ymm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vgatherqps %%xmm6, 3(%%r14,%%ymm8,4), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1013,7 +1017,7 @@
"vpmovsxdq %%xmm6, %%xmm9;"
"vblendvpd %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vmovq %%r14, %%xmm7;"
"vpsllq $2, %%xmm8, %%xmm8;"
"vpbroadcastq %%xmm7, %%xmm7;"
@@ -1029,7 +1033,7 @@
"vpmovsxdq %%xmm6, %%ymm9;"
"vblendvpd %%ymm9, %%ymm8, %%ymm7, %%ymm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vmovq %%r14, %%xmm7;"
"vpsllq $2, %%ymm8, %%ymm8;"
"vpbroadcastq %%xmm7, %%ymm7;"
@@ -1045,7 +1049,7 @@
"vshufps $13, %%xmm6, %%xmm6, %%xmm9;"
"vblendvps %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vgatherdpd %%xmm6, 3(%%r14,%%xmm8,8), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1056,7 +1060,7 @@
"vshufps $221, %%ymm9, %%ymm6, %%ymm9;"
"vblendvps %%ymm9, %%ymm8, %%ymm7, %%ymm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vgatherdpd %%ymm6, 3(%%r14,%%xmm8,8), %%ymm9;"
"xorl %%r14d, %%r14d")
@@ -1064,7 +1068,7 @@
"vpsllq $58, %%xmm7, %%xmm8;"
"vpsrlq $58, %%xmm8, %%xmm8;"
"vblendvpd %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vgatherqpd %%xmm6, 3(%%r14,%%xmm8,8), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1072,7 +1076,7 @@
"vpsllq $58, %%ymm7, %%ymm8;"
"vpsrlq $58, %%ymm8, %%ymm8;"
"vblendvpd %%ymm6, %%ymm8, %%ymm7, %%ymm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vgatherqpd %%ymm6, 3(%%r14,%%ymm8,8), %%ymm9;"
"xorl %%r14d, %%r14d")
@@ -1080,7 +1084,7 @@
"vpsllq $58, %%xmm7, %%xmm8;"
"vpsrlq $58, %%xmm8, %%xmm8;"
"vblendvpd %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vmovq %%r14, %%xmm7;"
"vpsllq $2, %%xmm8, %%xmm8;"
"vpbroadcastq %%xmm7, %%xmm7;"
@@ -1094,7 +1098,7 @@
"vpsllq $58, %%ymm7, %%ymm8;"
"vpsrlq $58, %%ymm8, %%ymm8;"
"vblendvpd %%ymm6, %%ymm8, %%ymm7, %%ymm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vmovq %%r14, %%xmm7;"
"vpsllq $2, %%ymm8, %%ymm8;"
"vpbroadcastq %%xmm7, %%ymm7;"
@@ -1108,7 +1112,7 @@
"vpslld $25, %%xmm7, %%xmm8;"
"vpsrld $25, %%xmm8, %%xmm8;"
"vblendvps %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vpgatherdd %%xmm6, 3(%%r14,%%xmm8,4), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1116,7 +1120,7 @@
"vpslld $25, %%ymm7, %%ymm8;"
"vpsrld $25, %%ymm8, %%ymm8;"
"vblendvps %%ymm6, %%ymm8, %%ymm7, %%ymm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vpgatherdd %%ymm6, 3(%%r14,%%ymm8,4), %%ymm9;"
"xorl %%r14d, %%r14d")
@@ -1126,7 +1130,7 @@
"vpmovsxdq %%xmm6, %%xmm9;"
"vblendvpd %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vpgatherqd %%xmm6, 3(%%r14,%%xmm8,4), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1136,7 +1140,7 @@
"vpmovsxdq %%xmm6, %%ymm9;"
"vblendvpd %%ymm9, %%ymm8, %%ymm7, %%ymm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vpgatherqd %%xmm6, 3(%%r14,%%ymm8,4), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1146,7 +1150,7 @@
"vpmovsxdq %%xmm6, %%xmm9;"
"vblendvpd %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vmovq %%r14, %%xmm7;"
"vpsllq $2, %%xmm8, %%xmm8;"
"vpbroadcastq %%xmm7, %%xmm7;"
@@ -1162,7 +1166,7 @@
"vpmovsxdq %%xmm6, %%ymm9;"
"vblendvpd %%ymm9, %%ymm8, %%ymm7, %%ymm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vmovq %%r14, %%xmm7;"
"vpsllq $2, %%ymm8, %%ymm8;"
"vpbroadcastq %%xmm7, %%ymm7;"
@@ -1178,7 +1182,7 @@
"vshufps $13, %%xmm6, %%xmm6, %%xmm9;"
"vblendvps %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vpgatherdq %%xmm6, 3(%%r14,%%xmm8,8), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1189,7 +1193,7 @@
"vshufps $221, %%ymm9, %%ymm6, %%ymm9;"
"vblendvps %%ymm9, %%ymm8, %%ymm7, %%ymm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vpgatherdq %%ymm6, 3(%%r14,%%xmm8,8), %%ymm9;"
"xorl %%r14d, %%r14d")
@@ -1197,7 +1201,7 @@
"vpsllq $58, %%xmm7, %%xmm8;"
"vpsrlq $58, %%xmm8, %%xmm8;"
"vblendvpd %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vpgatherqq %%xmm6, 3(%%r14,%%xmm8,8), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1205,7 +1209,7 @@
"vpsllq $58, %%ymm7, %%ymm8;"
"vpsrlq $58, %%ymm8, %%ymm8;"
"vblendvpd %%ymm6, %%ymm8, %%ymm7, %%ymm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vpgatherqq %%ymm6, 3(%%r14,%%ymm8,8), %%ymm9;"
"xorl %%r14d, %%r14d")
@@ -1213,7 +1217,7 @@
"vpsllq $58, %%xmm7, %%xmm8;"
"vpsrlq $58, %%xmm8, %%xmm8;"
"vblendvpd %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vmovq %%r14, %%xmm7;"
"vpsllq $2, %%xmm8, %%xmm8;"
"vpbroadcastq %%xmm7, %%xmm7;"
@@ -1227,7 +1231,7 @@
"vpsllq $58, %%ymm7, %%ymm8;"
"vpsrlq $58, %%ymm8, %%ymm8;"
"vblendvpd %%ymm6, %%ymm8, %%ymm7, %%ymm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vmovq %%r14, %%xmm7;"
"vpsllq $2, %%ymm8, %%ymm8;"
"vpbroadcastq %%xmm7, %%ymm7;"
@@ -1466,7 +1470,11 @@
DO_D( VPMASKMOVD_256_StoreForm );
DO_D( VPMASKMOVQ_128_StoreForm );
DO_D( VPMASKMOVQ_256_StoreForm );
+#if defined(VGO_darwin)
{ int i; for (i = 0; i < sizeof(randArray); i++) randArray[i] = randUChar(); }
+#else
+ { int i; for (i = 0; i < sizeof(_randArray); i++) _randArray[i] = randUChar(); }
+#endif
DO_D( VGATHERDPS_128 );
DO_D( VGATHERDPS_256 );
DO_D( VGATHERQPS_128_1 );
|