From 1e65cc3c2278019406125f13d48494ece9f47e95 Mon Sep 17 00:00:00 2001
From: Noah Goldstein <goldstein.w.n@gmail.com>
Date: Mon, 19 Apr 2021 17:48:10 -0400
Subject: [PATCH] x86: Optimize less_vec evex and avx512
 memset-vec-unaligned-erms.S

No bug. This commit adds an optimized case for the less_vec memset path
that uses the avx512vl/avx512bw mask store, avoiding the excessive
branches. test-memset and test-wmemset are passing.

Signed-off-by: Noah Goldstein <goldstein.w.n@gmail.com>
(cherry picked from commit f53790272ce7bdc5ecd14b45f65d0464d2a61a3a)
---
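Note: the core trick is to replace the branchy sub-VEC_SIZE tail with a
single masked store.  A rough C intrinsics sketch of the same idea for
VEC_SIZE == 32 -- not the committed asm, the function name is
illustrative, and it assumes AVX512VL, AVX512BW and BMI2, which is
exactly why the ifunc checks below gain a BMI2 test:

    #include <immintrin.h>
    #include <stddef.h>

    /* Set the first LEN (LEN <= 32) bytes at DST to C with a single
       masked store instead of branching on LEN.  */
    static void
    memset_less_vec_32 (unsigned char *dst, int c, size_t len)
    {
      __m256i v = _mm256_set1_epi8 ((char) c);
      /* BMI2 bzhi keeps only the low LEN bits: one mask bit per byte
         that should be written.  */
      __mmask32 k = (__mmask32) _bzhi_u32 (~0U, (unsigned int) len);
      /* AVX512VL+AVX512BW byte-masked store; lanes whose mask bit is
         clear are left untouched.  */
      _mm256_mask_storeu_epi8 (dst, k, v);
    }
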
 sysdeps/x86_64/multiarch/ifunc-impl-list.c    | 40 ++++++++++-----
 sysdeps/x86_64/multiarch/ifunc-memset.h       |  6 ++-
 .../multiarch/memset-avx512-unaligned-erms.S  |  2 +-
 .../multiarch/memset-evex-unaligned-erms.S    |  2 +-
 .../multiarch/memset-vec-unaligned-erms.S     | 51 +++++++++++++++----
 5 files changed, 74 insertions(+), 27 deletions(-)

diff --git a/sysdeps/x86_64/multiarch/ifunc-impl-list.c b/sysdeps/x86_64/multiarch/ifunc-impl-list.c
index 85b8863a..d59d65f8 100644
--- a/sysdeps/x86_64/multiarch/ifunc-impl-list.c
+++ b/sysdeps/x86_64/multiarch/ifunc-impl-list.c
@@ -204,19 +204,23 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
                               __memset_chk_avx2_unaligned_erms_rtm)
               IFUNC_IMPL_ADD (array, i, __memset_chk,
                               (CPU_FEATURE_USABLE (AVX512VL)
-                               && CPU_FEATURE_USABLE (AVX512BW)),
+                               && CPU_FEATURE_USABLE (AVX512BW)
+                               && CPU_FEATURE_USABLE (BMI2)),
                               __memset_chk_evex_unaligned)
               IFUNC_IMPL_ADD (array, i, __memset_chk,
                               (CPU_FEATURE_USABLE (AVX512VL)
-                               && CPU_FEATURE_USABLE (AVX512BW)),
+                               && CPU_FEATURE_USABLE (AVX512BW)
+                               && CPU_FEATURE_USABLE (BMI2)),
                               __memset_chk_evex_unaligned_erms)
               IFUNC_IMPL_ADD (array, i, __memset_chk,
                               (CPU_FEATURE_USABLE (AVX512VL)
-                               && CPU_FEATURE_USABLE (AVX512BW)),
+                               && CPU_FEATURE_USABLE (AVX512BW)
+                               && CPU_FEATURE_USABLE (BMI2)),
                               __memset_chk_avx512_unaligned_erms)
               IFUNC_IMPL_ADD (array, i, __memset_chk,
                               (CPU_FEATURE_USABLE (AVX512VL)
-                               && CPU_FEATURE_USABLE (AVX512BW)),
+                               && CPU_FEATURE_USABLE (AVX512BW)
+                               && CPU_FEATURE_USABLE (BMI2)),
                               __memset_chk_avx512_unaligned)
               IFUNC_IMPL_ADD (array, i, __memset_chk,
                               CPU_FEATURE_USABLE (AVX512F),
@@ -247,19 +251,23 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
                               __memset_avx2_unaligned_erms_rtm)
               IFUNC_IMPL_ADD (array, i, memset,
                               (CPU_FEATURE_USABLE (AVX512VL)
-                               && CPU_FEATURE_USABLE (AVX512BW)),
+                               && CPU_FEATURE_USABLE (AVX512BW)
+                               && CPU_FEATURE_USABLE (BMI2)),
                               __memset_evex_unaligned)
               IFUNC_IMPL_ADD (array, i, memset,
                               (CPU_FEATURE_USABLE (AVX512VL)
-                               && CPU_FEATURE_USABLE (AVX512BW)),
+                               && CPU_FEATURE_USABLE (AVX512BW)
+                               && CPU_FEATURE_USABLE (BMI2)),
                               __memset_evex_unaligned_erms)
               IFUNC_IMPL_ADD (array, i, memset,
                               (CPU_FEATURE_USABLE (AVX512VL)
-                               && CPU_FEATURE_USABLE (AVX512BW)),
+                               && CPU_FEATURE_USABLE (AVX512BW)
+                               && CPU_FEATURE_USABLE (BMI2)),
                               __memset_avx512_unaligned_erms)
               IFUNC_IMPL_ADD (array, i, memset,
                               (CPU_FEATURE_USABLE (AVX512VL)
-                               && CPU_FEATURE_USABLE (AVX512BW)),
+                               && CPU_FEATURE_USABLE (AVX512BW)
+                               && CPU_FEATURE_USABLE (BMI2)),
                               __memset_avx512_unaligned)
               IFUNC_IMPL_ADD (array, i, memset,
                               CPU_FEATURE_USABLE (AVX512F),
@@ -739,10 +747,14 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
                                && CPU_FEATURE_USABLE (RTM)),
                               __wmemset_avx2_unaligned_rtm)
               IFUNC_IMPL_ADD (array, i, wmemset,
-                              CPU_FEATURE_USABLE (AVX512VL),
+                              (CPU_FEATURE_USABLE (AVX512VL)
+                               && CPU_FEATURE_USABLE (AVX512BW)
+                               && CPU_FEATURE_USABLE (BMI2)),
                               __wmemset_evex_unaligned)
               IFUNC_IMPL_ADD (array, i, wmemset,
-                              CPU_FEATURE_USABLE (AVX512VL),
+                              (CPU_FEATURE_USABLE (AVX512VL)
+                               && CPU_FEATURE_USABLE (AVX512BW)
+                               && CPU_FEATURE_USABLE (BMI2)),
                               __wmemset_avx512_unaligned))

 #ifdef SHARED
@@ -946,10 +958,14 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
                               CPU_FEATURE_USABLE (AVX2),
                               __wmemset_chk_avx2_unaligned)
               IFUNC_IMPL_ADD (array, i, __wmemset_chk,
-                              CPU_FEATURE_USABLE (AVX512VL),
+                              (CPU_FEATURE_USABLE (AVX512VL)
+                               && CPU_FEATURE_USABLE (AVX512BW)
+                               && CPU_FEATURE_USABLE (BMI2)),
                               __wmemset_chk_evex_unaligned)
               IFUNC_IMPL_ADD (array, i, __wmemset_chk,
-                              CPU_FEATURE_USABLE (AVX512F),
+                              (CPU_FEATURE_USABLE (AVX512VL)
+                               && CPU_FEATURE_USABLE (AVX512BW)
+                               && CPU_FEATURE_USABLE (BMI2)),
                               __wmemset_chk_avx512_unaligned))
 #endif

diff --git a/sysdeps/x86_64/multiarch/ifunc-memset.h b/sysdeps/x86_64/multiarch/ifunc-memset.h
index 19795938..100e3707 100644
--- a/sysdeps/x86_64/multiarch/ifunc-memset.h
+++ b/sysdeps/x86_64/multiarch/ifunc-memset.h
@@ -54,7 +54,8 @@ IFUNC_SELECTOR (void)
       && !CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_AVX512))
     {
       if (CPU_FEATURE_USABLE_P (cpu_features, AVX512VL)
-          && CPU_FEATURE_USABLE_P (cpu_features, AVX512BW))
+          && CPU_FEATURE_USABLE_P (cpu_features, AVX512BW)
+          && CPU_FEATURE_USABLE_P (cpu_features, BMI2))
         {
           if (CPU_FEATURE_USABLE_P (cpu_features, ERMS))
             return OPTIMIZE (avx512_unaligned_erms);
@@ -68,7 +69,8 @@ IFUNC_SELECTOR (void)
   if (CPU_FEATURE_USABLE_P (cpu_features, AVX2))
     {
       if (CPU_FEATURE_USABLE_P (cpu_features, AVX512VL)
-          && CPU_FEATURE_USABLE_P (cpu_features, AVX512BW))
+          && CPU_FEATURE_USABLE_P (cpu_features, AVX512BW)
+          && CPU_FEATURE_USABLE_P (cpu_features, BMI2))
         {
           if (CPU_FEATURE_USABLE_P (cpu_features, ERMS))
             return OPTIMIZE (evex_unaligned_erms);
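Note: the selector change above is the reason for the new BMI2 tests --
the evex/avx512 less_vec path now executes bzhi, so BMI2 must be proven
usable before those variants can be picked.  A hedged sketch of the
same gate outside glibc's ifunc machinery, using the GCC/Clang builtin
(the function name is illustrative, not glibc API):

    /* Mirror the selector's new condition: every feature used on the
       mask-store path must be usable, or the variant is ineligible.  */
    static int
    can_use_evex_mask_store (void)
    {
      return __builtin_cpu_supports ("avx512vl")
             && __builtin_cpu_supports ("avx512bw")
             && __builtin_cpu_supports ("bmi2");
    }
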
diff --git a/sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S b/sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S
index 22e7b187..8ad842fc 100644
--- a/sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S
@@ -19,6 +19,6 @@
 # define SECTION(p)         p##.evex512
 # define MEMSET_SYMBOL(p,s) p##_avx512_##s
 # define WMEMSET_SYMBOL(p,s) p##_avx512_##s
-
+# define USE_LESS_VEC_MASK_STORE 1
 # include "memset-vec-unaligned-erms.S"
 #endif
diff --git a/sysdeps/x86_64/multiarch/memset-evex-unaligned-erms.S b/sysdeps/x86_64/multiarch/memset-evex-unaligned-erms.S
index ae0a4d6e..640f0929 100644
--- a/sysdeps/x86_64/multiarch/memset-evex-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memset-evex-unaligned-erms.S
@@ -19,6 +19,6 @@
 # define SECTION(p)         p##.evex
 # define MEMSET_SYMBOL(p,s) p##_evex_##s
 # define WMEMSET_SYMBOL(p,s) p##_evex_##s
-
+# define USE_LESS_VEC_MASK_STORE 1
 # include "memset-vec-unaligned-erms.S"
 #endif
diff --git a/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S b/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
index bae5cba4..f877ac9d 100644
--- a/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
@@ -63,6 +63,8 @@
 # endif
 #endif

+#define PAGE_SIZE 4096
+
 #ifndef SECTION
 # error SECTION is not defined!
 #endif
@@ -213,11 +215,38 @@ L(loop):
         cmpq    %rcx, %rdx
         jne     L(loop)
         VZEROUPPER_SHORT_RETURN
+
+        .p2align 4
 L(less_vec):
         /* Less than 1 VEC.  */
 # if VEC_SIZE != 16 && VEC_SIZE != 32 && VEC_SIZE != 64
 #  error Unsupported VEC_SIZE!
 # endif
+# ifdef USE_LESS_VEC_MASK_STORE
+        /* Clear high bits from edi.  Only keeping the bits relevant to
+           the page cross check.  Note that we are using rax, which is
+           set in MEMSET_VDUP_TO_VEC0_AND_SET_RETURN, as ptr from here
+           on out.  */
+        andl    $(PAGE_SIZE - 1), %edi
+        /* Check if the VEC_SIZE store crosses a page.  Mask stores
+           suffer serious performance degradation when they have to
+           fault suppress.  */
+        cmpl    $(PAGE_SIZE - VEC_SIZE), %edi
+        ja      L(cross_page)
+# if VEC_SIZE > 32
+        movq    $-1, %rcx
+        bzhiq   %rdx, %rcx, %rcx
+        kmovq   %rcx, %k1
+# else
+        movl    $-1, %ecx
+        bzhil   %edx, %ecx, %ecx
+        kmovd   %ecx, %k1
+# endif
+        vmovdqu8 %VEC(0), (%rax) {%k1}
+        VZEROUPPER_RETURN
+
+        .p2align 4
+L(cross_page):
+# endif
 # if VEC_SIZE > 32
         cmpb    $32, %dl
         jae     L(between_32_63)
@@ -234,36 +263,36 @@ L(less_vec):
         cmpb    $1, %dl
         ja      L(between_2_3)
         jb      1f
-        movb    %cl, (%rdi)
+        movb    %cl, (%rax)
 1:
         VZEROUPPER_RETURN
 # if VEC_SIZE > 32
         /* From 32 to 63.  No branch when size == 32.  */
 L(between_32_63):
-        VMOVU   %YMM0, -32(%rdi,%rdx)
-        VMOVU   %YMM0, (%rdi)
+        VMOVU   %YMM0, -32(%rax,%rdx)
+        VMOVU   %YMM0, (%rax)
         VZEROUPPER_RETURN
 # endif
 # if VEC_SIZE > 16
         /* From 16 to 31.  No branch when size == 16.  */
 L(between_16_31):
-        VMOVU   %XMM0, -16(%rdi,%rdx)
-        VMOVU   %XMM0, (%rdi)
+        VMOVU   %XMM0, -16(%rax,%rdx)
+        VMOVU   %XMM0, (%rax)
         VZEROUPPER_RETURN
 # endif
         /* From 8 to 15.  No branch when size == 8.  */
 L(between_8_15):
-        movq    %rcx, -8(%rdi,%rdx)
-        movq    %rcx, (%rdi)
+        movq    %rcx, -8(%rax,%rdx)
+        movq    %rcx, (%rax)
         VZEROUPPER_RETURN
 L(between_4_7):
         /* From 4 to 7.  No branch when size == 4.  */
-        movl    %ecx, -4(%rdi,%rdx)
-        movl    %ecx, (%rdi)
+        movl    %ecx, -4(%rax,%rdx)
+        movl    %ecx, (%rax)
         VZEROUPPER_RETURN
 L(between_2_3):
         /* From 2 to 3.  No branch when size == 2.  */
-        movw    %cx, -2(%rdi,%rdx)
-        movw    %cx, (%rdi)
+        movw    %cx, -2(%rax,%rdx)
+        movw    %cx, (%rax)
         VZEROUPPER_RETURN
 END (MEMSET_SYMBOL (__memset, unaligned_erms))
--
GitLab
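
Note on the page-cross guard in the last hunk: masked stores are
architecturally safe when masked-off bytes fall on an unmapped page,
but suppressing that fault is very slow, so the code branches to the
old branchy tail whenever a full VEC_SIZE window might cross a page.
A hedged C sketch of the same check, with PAGE_SIZE and VEC_SIZE as in
the patch (the function name is illustrative):

    #include <stdint.h>

    #define PAGE_SIZE 4096
    #define VEC_SIZE  32   /* evex path; the avx512 path uses 64.  */

    /* One VEC_SIZE masked store at PTR stays within a single page iff
       the offset of PTR inside its page is at most
       PAGE_SIZE - VEC_SIZE; this mirrors the andl/cmpl/ja sequence.  */
    static int
    mask_store_is_page_safe (const void *ptr)
    {
      return ((uintptr_t) ptr & (PAGE_SIZE - 1)) <= PAGE_SIZE - VEC_SIZE;
    }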