2011-03-02  Harsha Jagasia  <harsha.jagasia@amd.com>
	    Ulrich Drepper  <drepper@gmail.com>

	* sysdeps/x86_64/memset.S: After aligning destination, code
	branches to different locations depending on the value of
	misalignment, when multiarch is enabled.  Fix this.

2011-03-02  Harsha Jagasia  <harsha.jagasia@amd.com>

	* sysdeps/x86_64/cacheinfo.c (init_cacheinfo):
	Set __x86_64_preferred_memory_instruction for AMD processors.
	* sysdeps/x86_64/multiarch/init-arch.c (__init_cpu_features):
	Set bit_Prefer_SSE_for_memop for AMD processors.

2010-11-07  H.J. Lu  <hongjiu.lu@intel.com>

	* sysdeps/x86_64/memset.S: Check USE_MULTIARCH and USE_SSE2 for
	IFUNC support.
	* sysdeps/x86_64/multiarch/Makefile (sysdep_routines): Add
	memset-x86-64.
	* sysdeps/x86_64/multiarch/bzero.S: New file.
	* sysdeps/x86_64/multiarch/cacheinfo.c: New file.
	* sysdeps/x86_64/multiarch/memset-x86-64.S: New file.
	* sysdeps/x86_64/multiarch/memset.S: New file.
	* sysdeps/x86_64/multiarch/memset_chk.S: New file.
	* sysdeps/x86_64/multiarch/init-arch.c (__init_cpu_features):
	Set bit_Prefer_SSE_for_memop for Intel processors.
	* sysdeps/x86_64/multiarch/init-arch.h (bit_Prefer_SSE_for_memop):
	Define.
	(index_Prefer_SSE_for_memop): Define.
	(HAS_PREFER_SSE_FOR_MEMOP): Define.

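For reference (reviewer commentary, not part of the patch): the selection
below keys off CPUID leaf 1, ECX bit 9 -- the SSSE3 feature flag, which is
the 0x200 mask tested in the hunks that follow.  A minimal standalone C
sketch of that decision, assuming GCC's <cpuid.h>:

  #include <cpuid.h>
  #include <stdio.h>

  int
  main (void)
  {
    unsigned int eax, ebx, ecx, edx;

    /* CPUID leaf 1 feature flags, as read by init_cacheinfo and
       __init_cpu_features in the hunks below.  */
    if (!__get_cpuid (1, &eax, &ebx, &ecx, &edx))
      return 1;

    /* Mirrors "if ((ecx & 0x200))": with SSSE3 present the SSE memset/bzero
       variants are preferred, otherwise the integer (GPR/rep stos) path.  */
    if (ecx & 0x200)
      puts ("SSSE3 available: Prefer_SSE_for_memop would be set");
    else
      puts ("no SSSE3: integer instructions preferred");
    return 0;
  }
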
Index: glibc-2.12-2-gc4ccff1/sysdeps/x86_64/cacheinfo.c
===================================================================
--- glibc-2.12-2-gc4ccff1.orig/sysdeps/x86_64/cacheinfo.c
+++ glibc-2.12-2-gc4ccff1/sysdeps/x86_64/cacheinfo.c
@@ -613,6 +613,25 @@ init_cacheinfo (void)
       long int core = handle_amd (_SC_LEVEL2_CACHE_SIZE);
       shared = handle_amd (_SC_LEVEL3_CACHE_SIZE);
 
+#ifndef DISABLE_PREFERRED_MEMORY_INSTRUCTION
+# ifdef USE_MULTIARCH
+      eax = __cpu_features.cpuid[COMMON_CPUID_INDEX_1].eax;
+      ebx = __cpu_features.cpuid[COMMON_CPUID_INDEX_1].ebx;
+      ecx = __cpu_features.cpuid[COMMON_CPUID_INDEX_1].ecx;
+      edx = __cpu_features.cpuid[COMMON_CPUID_INDEX_1].edx;
+# else
+      __cpuid (1, eax, ebx, ecx, edx);
+# endif
+
+      /* AMD prefers SSSE3 instructions for memory/string routines
+	 if they are available, otherwise it prefers integer
+	 instructions.  */
+      if ((ecx & 0x200))
+	__x86_64_preferred_memory_instruction = 3;
+      else
+	__x86_64_preferred_memory_instruction = 0;
+#endif
+
       /* Get maximum extended function. */
       __cpuid (0x80000000, max_cpuid_ex, ebx, ecx, edx);
 
Index: glibc-2.12-2-gc4ccff1/sysdeps/x86_64/memset.S
===================================================================
--- glibc-2.12-2-gc4ccff1.orig/sysdeps/x86_64/memset.S
+++ glibc-2.12-2-gc4ccff1/sysdeps/x86_64/memset.S
@@ -24,7 +24,7 @@
 #define __STOS_UPPER_BOUNDARY	$65536
 
 	.text
-#ifndef NOT_IN_libc
+#if !defined NOT_IN_libc && !defined USE_MULTIARCH
 ENTRY(__bzero)
 	mov	%rsi,%rdx	/* Adjust parameter.  */
 	xorl	%esi,%esi	/* Fill with 0s.  */
@@ -34,10 +34,10 @@ weak_alias (__bzero, bzero)
 #endif
 
 #if defined PIC && !defined NOT_IN_libc
-ENTRY (__memset_chk)
+ENTRY_CHK (__memset_chk)
 	cmpq	%rdx, %rcx
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
-END (__memset_chk)
+END_CHK (__memset_chk)
 #endif
 ENTRY (memset)
 L(memset_entry):
@@ -591,157 +591,15 @@ L(A6Q1): mov    %dx,-0xe(%rdi)
 L(A7Q0): mov    %dl,-0x7(%rdi)
 L(A6Q0): mov    %dx,-0x6(%rdi)
 	mov    %edx,-0x4(%rdi)
-	jmp	L(aligned_now)
-
-	.balign     16
-L(aligned_now):
-
-	cmpl   $0x1,__x86_64_preferred_memory_instruction(%rip)
-	jg     L(SSE_pre)
-
-L(8byte_move_try):
-	cmpq	__STOS_LOWER_BOUNDARY,%r8
-	jae	L(8byte_stos_try)
-
-	.balign     16
-L(8byte_move):
-	movq	%r8,%rcx
-	shrq	$7,%rcx
-	jz	L(8byte_move_skip)
-
-	.p2align 4
-
-L(8byte_move_loop):
-	decq	%rcx
-
-	movq	%rdx, (%rdi)
-	movq	%rdx, 8 (%rdi)
-	movq	%rdx, 16 (%rdi)
-	movq	%rdx, 24 (%rdi)
-	movq	%rdx, 32 (%rdi)
-	movq	%rdx, 40 (%rdi)
-	movq	%rdx, 48 (%rdi)
-	movq	%rdx, 56 (%rdi)
-	movq	%rdx, 64 (%rdi)
-	movq	%rdx, 72 (%rdi)
-	movq	%rdx, 80 (%rdi)
-	movq	%rdx, 88 (%rdi)
-	movq	%rdx, 96 (%rdi)
-	movq	%rdx, 104 (%rdi)
-	movq	%rdx, 112 (%rdi)
-	movq	%rdx, 120 (%rdi)
-
-	leaq	128 (%rdi),%rdi
-
-	jnz	L(8byte_move_loop)
-
-L(8byte_move_skip):
-	andl	$127,%r8d
-	lea	(%rdi,%r8,1),%rdi
-
-#ifndef PIC
-	lea	L(setPxQx)(%rip),%r11
-	jmpq	*(%r11,%r8,8)	# old scheme remained for nonPIC
-#else
-	lea	L(Got0)(%rip),%r11
-	lea	L(setPxQx)(%rip),%rcx
-	movswq	(%rcx,%r8,2),%rcx
-	lea	(%rcx,%r11,1),%r11
-	jmpq	*%r11
-#endif
-
-	.balign     16
-L(8byte_stos_try):
-	mov    __x86_64_shared_cache_size(%rip),%r9d // ck largest cache size
-	cmpq	%r8,%r9		// calculate the lesser of remaining
-	cmovaq	%r8,%r9		// bytes and largest cache size
-	jbe	L(8byte_stos)
-
-L(8byte_move_reuse_try):
-	cmp	__STOS_UPPER_BOUNDARY,%r8
-	jae	L(8byte_move)
-
-	.balign     16
-L(8byte_stos):
-	movq	%r9,%rcx
-	andq	$-8,%r9
-
-	shrq	$3,%rcx
-	jz	L(8byte_stos_skip)
-
-	xchgq	%rax,%rdx
-
-	rep
-	stosq
-
-	xchgq	%rax,%rdx
-
-L(8byte_stos_skip):
-	subq	%r9,%r8
-	ja	L(8byte_nt_move)
-
-	andl	$7,%r8d
-	lea	(%rdi,%r8,1),%rdi
-#ifndef PIC
-	lea	L(setPxQx)(%rip),%r11
-	jmpq	*(%r11,%r8,8)	# old scheme remained for nonPIC
-#else
-	lea	L(Got0)(%rip),%r11
-	lea	L(setPxQx)(%rip),%rcx
-	movswq	(%rcx,%r8,2),%rcx
-	lea	(%rcx,%r11,1),%r11
-	jmpq	*%r11
-#endif
 
-	.balign     16
-L(8byte_nt_move):
-	movq	%r8,%rcx
-	shrq	$7,%rcx
-	jz	L(8byte_nt_move_skip)
-
-	.balign     16
-L(8byte_nt_move_loop):
-	decq	%rcx
-
-	movntiq	%rdx, (%rdi)
-	movntiq	%rdx, 8 (%rdi)
-	movntiq	%rdx, 16 (%rdi)
-	movntiq	%rdx, 24 (%rdi)
-	movntiq	%rdx, 32 (%rdi)
-	movntiq	%rdx, 40 (%rdi)
-	movntiq	%rdx, 48 (%rdi)
-	movntiq	%rdx, 56 (%rdi)
-	movntiq	%rdx, 64 (%rdi)
-	movntiq	%rdx, 72 (%rdi)
-	movntiq	%rdx, 80 (%rdi)
-	movntiq	%rdx, 88 (%rdi)
-	movntiq	%rdx, 96 (%rdi)
-	movntiq	%rdx, 104 (%rdi)
-	movntiq	%rdx, 112 (%rdi)
-	movntiq	%rdx, 120 (%rdi)
-
-	leaq	128 (%rdi),%rdi
-
-	jnz	L(8byte_nt_move_loop)
-
-	sfence
-
-L(8byte_nt_move_skip):
-	andl	$127,%r8d
+#ifndef USE_MULTIARCH
+	jmp	L(aligned_now)
 
-	lea	(%rdi,%r8,1),%rdi
-#ifndef PIC
-	lea	L(setPxQx)(%rip),%r11
-	jmpq	*(%r11,%r8,8)	# old scheme remained for nonPIC
+L(SSE_pre):
 #else
-	lea	L(Got0)(%rip),%r11
-	lea	L(setPxQx)(%rip),%rcx
-	movswq	(%rcx,%r8,2),%rcx
-	lea	(%rcx,%r11,1),%r11
-	jmpq	*%r11
+L(aligned_now):
 #endif
-
-L(SSE_pre):
+#if !defined USE_MULTIARCH || defined USE_SSE2
 	# fill RegXMM0 with the pattern
 	movd	%rdx,%xmm0
 	punpcklqdq	%xmm0,%xmm0
@@ -1342,11 +1200,162 @@ L(SSExDx):
 	.short	L(SSE15QB)-L(SSE0Q0)
 #endif
 	.popsection
+#endif /* !defined USE_MULTIARCH || defined USE_SSE2 */
+
+	.balign     16
+#ifndef USE_MULTIARCH
+L(aligned_now):
+
+	cmpl   $0x1,__x86_64_preferred_memory_instruction(%rip)
+	jg     L(SSE_pre)
+#endif /* USE_MULTIARCH */
+
+L(8byte_move_try):
+	cmpq	__STOS_LOWER_BOUNDARY,%r8
+	jae	L(8byte_stos_try)
+
+	.balign     16
+L(8byte_move):
+	movq	%r8,%rcx
+	shrq	$7,%rcx
+	jz	L(8byte_move_skip)
+
+	.p2align 4
+
+L(8byte_move_loop):
+	decq	%rcx
+
+	movq	%rdx, (%rdi)
+	movq	%rdx, 8 (%rdi)
+	movq	%rdx, 16 (%rdi)
+	movq	%rdx, 24 (%rdi)
+	movq	%rdx, 32 (%rdi)
+	movq	%rdx, 40 (%rdi)
+	movq	%rdx, 48 (%rdi)
+	movq	%rdx, 56 (%rdi)
+	movq	%rdx, 64 (%rdi)
+	movq	%rdx, 72 (%rdi)
+	movq	%rdx, 80 (%rdi)
+	movq	%rdx, 88 (%rdi)
+	movq	%rdx, 96 (%rdi)
+	movq	%rdx, 104 (%rdi)
+	movq	%rdx, 112 (%rdi)
+	movq	%rdx, 120 (%rdi)
+
+	leaq	128 (%rdi),%rdi
+
+	jnz	L(8byte_move_loop)
+
+L(8byte_move_skip):
+	andl	$127,%r8d
+	lea	(%rdi,%r8,1),%rdi
+
+#ifndef PIC
+	lea	L(setPxQx)(%rip),%r11
+	jmpq	*(%r11,%r8,8)	# old scheme remained for nonPIC
+#else
+	lea	L(Got0)(%rip),%r11
+	lea	L(setPxQx)(%rip),%rcx
+	movswq	(%rcx,%r8,2),%rcx
+	lea	(%rcx,%r11,1),%r11
+	jmpq	*%r11
+#endif
+
+	.balign     16
+L(8byte_stos_try):
+	mov    __x86_64_shared_cache_size(%rip),%r9d // ck largest cache size
+	cmpq	%r8,%r9		// calculate the lesser of remaining
+	cmovaq	%r8,%r9		// bytes and largest cache size
+	jbe	L(8byte_stos)
+
+L(8byte_move_reuse_try):
+	cmp	__STOS_UPPER_BOUNDARY,%r8
+	jae	L(8byte_move)
+
+	.balign     16
+L(8byte_stos):
+	movq	%r9,%rcx
+	andq	$-8,%r9
+
+	shrq	$3,%rcx
+	jz	L(8byte_stos_skip)
+
+	xchgq	%rax,%rdx
+
+	rep
+	stosq
+
+	xchgq	%rax,%rdx
+
+L(8byte_stos_skip):
+	subq	%r9,%r8
+	ja	L(8byte_nt_move)
+
+	andl	$7,%r8d
+	lea	(%rdi,%r8,1),%rdi
+#ifndef PIC
+	lea	L(setPxQx)(%rip),%r11
+	jmpq	*(%r11,%r8,8)	# old scheme remained for nonPIC
+#else
+	lea	L(Got0)(%rip),%r11
+	lea	L(setPxQx)(%rip),%rcx
+	movswq	(%rcx,%r8,2),%rcx
+	lea	(%rcx,%r11,1),%r11
+	jmpq	*%r11
+#endif
+
+	.balign     16
+L(8byte_nt_move):
+	movq	%r8,%rcx
+	shrq	$7,%rcx
+	jz	L(8byte_nt_move_skip)
+
+	.balign     16
+L(8byte_nt_move_loop):
+	decq	%rcx
+
+	movntiq	%rdx, (%rdi)
+	movntiq	%rdx, 8 (%rdi)
+	movntiq	%rdx, 16 (%rdi)
+	movntiq	%rdx, 24 (%rdi)
+	movntiq	%rdx, 32 (%rdi)
+	movntiq	%rdx, 40 (%rdi)
+	movntiq	%rdx, 48 (%rdi)
+	movntiq	%rdx, 56 (%rdi)
+	movntiq	%rdx, 64 (%rdi)
+	movntiq	%rdx, 72 (%rdi)
+	movntiq	%rdx, 80 (%rdi)
+	movntiq	%rdx, 88 (%rdi)
+	movntiq	%rdx, 96 (%rdi)
+	movntiq	%rdx, 104 (%rdi)
+	movntiq	%rdx, 112 (%rdi)
+	movntiq	%rdx, 120 (%rdi)
+
+	leaq	128 (%rdi),%rdi
+
+	jnz	L(8byte_nt_move_loop)
+
+	sfence
+
+L(8byte_nt_move_skip):
+	andl	$127,%r8d
+
+	lea	(%rdi,%r8,1),%rdi
+#ifndef PIC
+	lea	L(setPxQx)(%rip),%r11
+	jmpq	*(%r11,%r8,8)	# old scheme remained for nonPIC
+#else
+	lea	L(Got0)(%rip),%r11
+	lea	L(setPxQx)(%rip),%rcx
+	movswq	(%rcx,%r8,2),%rcx
+	lea	(%rcx,%r11,1),%r11
+	jmpq	*%r11
+#endif
 
 END (memset)
 libc_hidden_builtin_def (memset)
 
-#if defined PIC && !defined NOT_IN_libc
+#if defined PIC && !defined NOT_IN_libc && !defined USE_MULTIARCH
 strong_alias (__memset_chk, __memset_zero_constant_len_parameter)
 	.section .gnu.warning.__memset_zero_constant_len_parameter
 	.string "memset used with constant zero length parameter; this could be due to transposed parameters"
Index: glibc-2.12-2-gc4ccff1/sysdeps/x86_64/multiarch/Makefile
===================================================================
--- glibc-2.12-2-gc4ccff1.orig/sysdeps/x86_64/multiarch/Makefile
+++ glibc-2.12-2-gc4ccff1/sysdeps/x86_64/multiarch/Makefile
@@ -7,7 +7,8 @@ ifeq ($(subdir),string)
 sysdep_routines += stpncpy-c strncpy-c strcmp-ssse3 strncmp-ssse3 \
 		   strend-sse4 memcmp-sse4 \
 		   strcasestr-nonascii strcasecmp_l-ssse3 \
-		   strncase_l-ssse3
+		   strncase_l-ssse3 \
+		   memset-x86-64
 ifeq (yes,$(config-cflags-sse4))
 sysdep_routines += strcspn-c strpbrk-c strspn-c strstr-c strcasestr-c
 CFLAGS-strcspn-c.c += -msse4
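
Reviewer commentary (not part of any hunk): the new files below implement
their dispatchers directly in assembly as @gnu_indirect_function (IFUNC)
symbols.  A rough C analogue of that selection, using the implementation
names this patch introduces; "my_memset" and "has_prefer_sse_for_memop"
are hypothetical stand-ins for the real entry point and for the
HAS_PREFER_SSE_FOR_MEMOP test from init-arch.h:

  #include <stddef.h>

  /* The two implementations the resolver chooses between.  */
  extern void *__memset_sse2 (void *, int, size_t);
  extern void *__memset_x86_64 (void *, int, size_t);
  /* Assumed wrapper around the Prefer_SSE_for_memop feature bit.  */
  extern int has_prefer_sse_for_memop (void);

  typedef void *(*memset_fn) (void *, int, size_t);

  /* Runs once at relocation time; whatever it returns is what callers
     of my_memset reach from then on.  */
  static memset_fn
  my_memset_resolver (void)
  {
    return has_prefer_sse_for_memop () ? __memset_sse2 : __memset_x86_64;
  }

  void *my_memset (void *, int, size_t)
       __attribute__ ((ifunc ("my_memset_resolver")));
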
Index: glibc-2.12-2-gc4ccff1/sysdeps/x86_64/multiarch/bzero.S
===================================================================
--- /dev/null
+++ glibc-2.12-2-gc4ccff1/sysdeps/x86_64/multiarch/bzero.S
@@ -0,0 +1,56 @@
+/* Multiple versions of bzero
+   Copyright (C) 2010 Free Software Foundation, Inc.
+   This file is part of the GNU C Library.
+
+   The GNU C Library is free software; you can redistribute it and/or
+   modify it under the terms of the GNU Lesser General Public
+   License as published by the Free Software Foundation; either
+   version 2.1 of the License, or (at your option) any later version.
+
+   The GNU C Library is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   Lesser General Public License for more details.
+
+   You should have received a copy of the GNU Lesser General Public
+   License along with the GNU C Library; if not, write to the Free
+   Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307 USA.  */
+
+#include <sysdep.h>
+#include <init-arch.h>
+
+	.text
+ENTRY(__bzero)
+	.type	__bzero, @gnu_indirect_function
+	cmpl	$0, __cpu_features+KIND_OFFSET(%rip)
+	jne	1f
+	call	__init_cpu_features
+1:	leaq	__bzero_x86_64(%rip), %rax
+	testl	$bit_Prefer_SSE_for_memop, __cpu_features+FEATURE_OFFSET+index_Prefer_SSE_for_memop(%rip)
+	jz	2f
+	leaq	__bzero_sse2(%rip), %rax
+2:	ret
+END(__bzero)
+
+	.type	__bzero_sse2, @function
+__bzero_sse2:
+	cfi_startproc
+	CALL_MCOUNT
+	mov	%rsi,%rdx	/* Adjust parameter.  */
+	xorl	%esi,%esi	/* Fill with 0s.  */
+	jmp	__memset_sse2
+	cfi_endproc
+	.size	__bzero_sse2, .-__bzero_sse2
+
+	.type	__bzero_x86_64, @function
+__bzero_x86_64:
+	cfi_startproc
+	CALL_MCOUNT
+	mov	%rsi,%rdx	/* Adjust parameter.  */
+	xorl	%esi,%esi	/* Fill with 0s.  */
+	jmp	__memset_x86_64
+	cfi_endproc
+	.size	__bzero_x86_64, .-__bzero_x86_64
+
+weak_alias (__bzero, bzero)
Index: glibc-2.12-2-gc4ccff1/sysdeps/x86_64/multiarch/cacheinfo.c
===================================================================
--- /dev/null
+++ glibc-2.12-2-gc4ccff1/sysdeps/x86_64/multiarch/cacheinfo.c
@@ -0,0 +1,2 @@
+#define DISABLE_PREFERRED_MEMORY_INSTRUCTION
+#include "../cacheinfo.c"
Index: glibc-2.12-2-gc4ccff1/sysdeps/x86_64/multiarch/init-arch.c
===================================================================
--- glibc-2.12-2-gc4ccff1.orig/sysdeps/x86_64/multiarch/init-arch.c
+++ glibc-2.12-2-gc4ccff1/sysdeps/x86_64/multiarch/init-arch.c
@@ -59,6 +59,11 @@ __init_cpu_features (void)
 
       get_common_indeces (&family, &model);
 
+      /* Intel processors prefer SSE instructions for memory/string
+	 routines if they are available.  */
+      __cpu_features.feature[index_Prefer_SSE_for_memop]
+	|= bit_Prefer_SSE_for_memop;
+
       unsigned int eax = __cpu_features.cpuid[COMMON_CPUID_INDEX_1].eax;
       unsigned int extended_family = (eax >> 20) & 0xff;
       unsigned int extended_model = (eax >> 12) & 0xf0;
@@ -92,6 +97,14 @@ __init_cpu_features (void)
       kind = arch_kind_amd;
 
       get_common_indeces (&family, &model);
+
+      unsigned int ecx = __cpu_features.cpuid[COMMON_CPUID_INDEX_1].ecx;
+
+      /* AMD processors prefer SSE instructions for memory/string routines
+	 if they are available, otherwise they prefer integer instructions.  */
+      if ((ecx & 0x200))
+	__cpu_features.feature[index_Prefer_SSE_for_memop]
+	  |= bit_Prefer_SSE_for_memop;
     }
   else
     kind = arch_kind_other;
Index: glibc-2.12-2-gc4ccff1/sysdeps/x86_64/multiarch/init-arch.h
===================================================================
--- glibc-2.12-2-gc4ccff1.orig/sysdeps/x86_64/multiarch/init-arch.h
+++ glibc-2.12-2-gc4ccff1/sysdeps/x86_64/multiarch/init-arch.h
@@ -16,7 +16,8 @@
    Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
    02111-1307 USA.  */
 
-#define bit_Fast_Rep_String	(1 << 0)
+#define bit_Fast_Rep_String		(1 << 0)
+#define bit_Prefer_SSE_for_memop	(1 << 3)
 
 #ifdef __ASSEMBLER__
 
@@ -33,6 +34,7 @@
 # define index_SSE4_2	COMMON_CPUID_INDEX_1*CPUID_SIZE+CPUID_ECX_OFFSET
 
 #define index_Fast_Rep_String	FEATURE_INDEX_1*FEATURE_SIZE
+# define index_Prefer_SSE_for_memop	FEATURE_INDEX_1*FEATURE_SIZE
 
 #else	/* __ASSEMBLER__ */
 
@@ -103,5 +105,12 @@ extern const struct cpu_features *__get_
 # define HAS_FMA	HAS_CPU_FEATURE (COMMON_CPUID_INDEX_1, ecx, 12)
 
 # define index_Fast_Rep_String		FEATURE_INDEX_1
+# define index_Prefer_SSE_for_memop	FEATURE_INDEX_1
+
+#define HAS_ARCH_FEATURE(idx, bit) \
+  ((__get_cpu_features ()->feature[idx] & (bit)) != 0)
+
+#define HAS_PREFER_SSE_FOR_MEMOP \
+  HAS_ARCH_FEATURE (index_Prefer_SSE_for_memop, bit_Prefer_SSE_for_memop)
 
 #endif	/* __ASSEMBLER__ */
Index: glibc-2.12-2-gc4ccff1/sysdeps/x86_64/multiarch/memset-x86-64.S
===================================================================
--- /dev/null
+++ glibc-2.12-2-gc4ccff1/sysdeps/x86_64/multiarch/memset-x86-64.S
@@ -0,0 +1,18 @@
+#include <sysdep.h>
+
+#ifndef NOT_IN_libc
+# undef ENTRY_CHK
+# define ENTRY_CHK(name) \
+	.type __memset_chk_x86_64, @function; \
+	.globl __memset_chk_x86_64; \
+	.p2align 4; \
+	__memset_chk_x86_64: cfi_startproc; \
+	CALL_MCOUNT
+# undef END_CHK
+# define END_CHK(name) \
+	cfi_endproc; .size __memset_chk_x86_64, .-__memset_chk_x86_64
+
+# define libc_hidden_builtin_def(name)
+# define memset __memset_x86_64
+# include "../memset.S"
+#endif
Index: glibc-2.12-2-gc4ccff1/sysdeps/x86_64/multiarch/memset.S
===================================================================
--- /dev/null
+++ glibc-2.12-2-gc4ccff1/sysdeps/x86_64/multiarch/memset.S
@@ -0,0 +1,74 @@
+/* Multiple versions of memset
+   Copyright (C) 2010 Free Software Foundation, Inc.
+   This file is part of the GNU C Library.
+
+   The GNU C Library is free software; you can redistribute it and/or
+   modify it under the terms of the GNU Lesser General Public
+   License as published by the Free Software Foundation; either
+   version 2.1 of the License, or (at your option) any later version.
+
+   The GNU C Library is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   Lesser General Public License for more details.
+
+   You should have received a copy of the GNU Lesser General Public
+   License along with the GNU C Library; if not, write to the Free
+   Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307 USA.  */
+
+#include <sysdep.h>
+#include <init-arch.h>
+
+/* Define multiple versions only for the definition in lib.  */
+#ifndef NOT_IN_libc
+ENTRY(memset)
+	.type	memset, @gnu_indirect_function
+	cmpl	$0, __cpu_features+KIND_OFFSET(%rip)
+	jne	1f
+	call	__init_cpu_features
+1:	leaq	__memset_x86_64(%rip), %rax
+	testl	$bit_Prefer_SSE_for_memop, __cpu_features+FEATURE_OFFSET+index_Prefer_SSE_for_memop(%rip)
+	jz	2f
+	leaq	__memset_sse2(%rip), %rax
+2:	ret
+END(memset)
+
+# define USE_SSE2 1
+
+# undef ENTRY
+# define ENTRY(name) \
+	.type __memset_sse2, @function; \
+	.globl __memset_sse2; \
+	.p2align 4; \
+	__memset_sse2: cfi_startproc; \
+	CALL_MCOUNT
+# undef END
+# define END(name) \
+	cfi_endproc; .size __memset_sse2, .-__memset_sse2
+
+# undef ENTRY_CHK
+# define ENTRY_CHK(name) \
+	.type __memset_chk_sse2, @function; \
+	.globl __memset_chk_sse2; \
+	.p2align 4; \
+	__memset_chk_sse2: cfi_startproc; \
+	CALL_MCOUNT
+# undef END_CHK
+# define END_CHK(name) \
+	cfi_endproc; .size __memset_chk_sse2, .-__memset_chk_sse2
+
+# ifdef SHARED
+#  undef libc_hidden_builtin_def
+/* It doesn't make sense to send libc-internal memset calls through a PLT.
+   The speedup we get from using GPR instruction is likely eaten away
+   by the indirect call in the PLT.  */
+#  define libc_hidden_builtin_def(name) \
+	.globl __GI_memset; __GI_memset = __memset_sse2
+# endif
+
+# undef strong_alias
+# define strong_alias(original, alias)
+#endif
+
+#include "../memset.S"
Index: glibc-2.12-2-gc4ccff1/sysdeps/x86_64/multiarch/memset_chk.S
===================================================================
--- /dev/null
+++ glibc-2.12-2-gc4ccff1/sysdeps/x86_64/multiarch/memset_chk.S
@@ -0,0 +1,44 @@
+/* Multiple versions of __memset_chk
+   Copyright (C) 2010 Free Software Foundation, Inc.
+   This file is part of the GNU C Library.
+
+   The GNU C Library is free software; you can redistribute it and/or
+   modify it under the terms of the GNU Lesser General Public
+   License as published by the Free Software Foundation; either
+   version 2.1 of the License, or (at your option) any later version.
+
+   The GNU C Library is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   Lesser General Public License for more details.
+
+   You should have received a copy of the GNU Lesser General Public
+   License along with the GNU C Library; if not, write to the Free
+   Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307 USA.  */
+
+#include <sysdep.h>
+#include <init-arch.h>
+
+/* Define multiple versions only for the definition in lib.  */
+#ifndef NOT_IN_libc
+# ifdef SHARED
+ENTRY(__memset_chk)
+	.type	__memset_chk, @gnu_indirect_function
+	cmpl	$0, __cpu_features+KIND_OFFSET(%rip)
+	jne	1f
+	call	__init_cpu_features
+1:	leaq	__memset_chk_x86_64(%rip), %rax
+	testl	$bit_Prefer_SSE_for_memop, __cpu_features+FEATURE_OFFSET+index_Prefer_SSE_for_memop(%rip)
+	jz	2f
+	leaq	__memset_chk_sse2(%rip), %rax
+2:	ret
+END(__memset_chk)
+
+strong_alias (__memset_chk, __memset_zero_constant_len_parameter)
+	.section .gnu.warning.__memset_zero_constant_len_parameter
+	.string "memset used with constant zero length parameter; this could be due to transposed parameters"
+# else
+#  include "../memset_chk.S"
+# endif
+#endif