commit 7079931c51547854323fe2ed6fdccf2a1b8b04d7
Author: Noah Goldstein <goldstein.w.n@gmail.com>
Date:   Wed Jun 29 16:07:05 2022 -0700

    x86: Move and slightly improve memset_erms

    Implementation wise:
        1. Remove the VZEROUPPER as memset_{impl}_unaligned_erms does not
           use the L(stosb) label that was previously defined.

        2. Don't give the hotpath (fallthrough) to zero size.

    Code positioning wise:

    Move memset_{chk}_erms to its own file.  Leaving it in between the
    memset_{impl}_unaligned both adds unnecessary complexity to the
    file and wastes space in a relatively hot cache section.

    (cherry picked from commit 4a3f29e7e475dd4e7cce2a24c187e6fb7b5b0a05)

diff --git a/sysdeps/x86_64/multiarch/Makefile b/sysdeps/x86_64/multiarch/Makefile
index 0e39e63ef6be6a86..da9f16286a763556 100644
--- a/sysdeps/x86_64/multiarch/Makefile
+++ b/sysdeps/x86_64/multiarch/Makefile
@@ -29,6 +29,7 @@ sysdep_routines += \
   memset-avx2-unaligned-erms-rtm \
   memset-avx512-no-vzeroupper \
   memset-avx512-unaligned-erms \
+  memset-erms \
   memset-evex-unaligned-erms \
   memset-sse2-unaligned-erms \
   rawmemchr-avx2 \
diff --git a/sysdeps/x86_64/multiarch/memset-erms.S b/sysdeps/x86_64/multiarch/memset-erms.S
new file mode 100644
index 0000000000000000..e83cccc731f0a7ea
--- /dev/null
+++ b/sysdeps/x86_64/multiarch/memset-erms.S
@@ -0,0 +1,44 @@
+/* memset implement with rep stosb
+   Copyright (C) 2022 Free Software Foundation, Inc.
+   This file is part of the GNU C Library.
+
+   The GNU C Library is free software; you can redistribute it and/or
+   modify it under the terms of the GNU Lesser General Public
+   License as published by the Free Software Foundation; either
+   version 2.1 of the License, or (at your option) any later version.
+
+   The GNU C Library is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   Lesser General Public License for more details.
+
+   You should have received a copy of the GNU Lesser General Public
+   License along with the GNU C Library; if not, see
+   <https://www.gnu.org/licenses/>.  */
+
+
+#include <sysdep.h>
+
+#if defined USE_MULTIARCH && IS_IN (libc)
+	.text
+ENTRY (__memset_chk_erms)
+	cmp	%RDX_LP, %RCX_LP
+	jb	HIDDEN_JUMPTARGET (__chk_fail)
+END (__memset_chk_erms)
+
+/* Only used to measure performance of REP STOSB.  */
+ENTRY (__memset_erms)
+	/* Skip zero length.  */
+	test	%RDX_LP, %RDX_LP
+	jz	 L(stosb_return_zero)
+	mov	%RDX_LP, %RCX_LP
+	movzbl	%sil, %eax
+	mov	%RDI_LP, %RDX_LP
+	rep stosb
+	mov	%RDX_LP, %RAX_LP
+	ret
+L(stosb_return_zero):
+	movq	%rdi, %rax
+	ret
+END (__memset_erms)
+#endif
diff --git a/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S b/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
index abc12d9cda1b3843..905d0fa4643d5768 100644
--- a/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
@@ -156,37 +156,6 @@ L(entry_from_wmemset):
 #if defined USE_MULTIARCH && IS_IN (libc)
 END (MEMSET_SYMBOL (__memset, unaligned))
 
-# if VEC_SIZE == 16
-ENTRY (__memset_chk_erms)
-	cmp	%RDX_LP, %RCX_LP
-	jb	HIDDEN_JUMPTARGET (__chk_fail)
-END (__memset_chk_erms)
-
-/* Only used to measure performance of REP STOSB.  */
-ENTRY (__memset_erms)
-	/* Skip zero length.  */
-	test	%RDX_LP, %RDX_LP
-	jnz	 L(stosb)
-	movq	%rdi, %rax
-	ret
-# else
-/* Provide a hidden symbol to debugger.  */
-	.hidden	MEMSET_SYMBOL (__memset, erms)
-ENTRY (MEMSET_SYMBOL (__memset, erms))
-# endif
-L(stosb):
-	mov	%RDX_LP, %RCX_LP
-	movzbl	%sil, %eax
-	mov	%RDI_LP, %RDX_LP
-	rep stosb
-	mov	%RDX_LP, %RAX_LP
-	VZEROUPPER_RETURN
-# if VEC_SIZE == 16
-END (__memset_erms)
-# else
-END (MEMSET_SYMBOL (__memset, erms))
-# endif
-
 # if defined SHARED && IS_IN (libc)
 ENTRY_CHK (MEMSET_CHK_SYMBOL (__memset_chk, unaligned_erms))
 	cmp	%RDX_LP, %RCX_LP