190885
From e6e209eb05545a36283335b07a1db9471cbccd1a Mon Sep 17 00:00:00 2001
190885
From: "H.J. Lu" <hjl.tools@gmail.com>
190885
Date: Wed, 2 Mar 2022 13:56:00 -0800
190885
Subject: [PATCH] x86-64 memcpy: Properly handle the length parameter [BZ
190885
 #24097]
190885
190885
On x32, the size_t parameter may be passed in the lower 32 bits of a
190885
64-bit register with the non-zero upper 32 bits.  The string/memory
190885
functions written in assembly can only use the lower 32 bits of a
190885
64-bit register as length or must clear the upper 32 bits before using
190885
the full 64-bit register for length.
190885
190885
This patch fixes memcpy for x32.  Tested on x86-64 and x32.  On x86-64,
190885
libc.so is the same with and without the fix.
190885
190885
	[BZ #24097]
190885
	CVE-2019-6488
190885
	* sysdeps/x86_64/multiarch/memcpy-ssse3-back.S: Use RDX_LP for
190885
	length.  Clear the upper 32 bits of RDX register.
190885
	* sysdeps/x86_64/multiarch/memcpy-ssse3.S: Likewise.
190885
	* sysdeps/x86_64/multiarch/memmove-avx512-no-vzeroupper.S:
190885
	Likewise.
190885
	* sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S:
190885
	Likewise.
190885
	* sysdeps/x86_64/x32/Makefile (tests): Add tst-size_t-memcpy.
190885
	* sysdeps/x86_64/x32/tst-size_t-memcpy.c: New file.
190885
190885
(cherry picked from commit 231c56760c1e2ded21ad96bbb860b1f08c556c7a)
190885
---
190885
 sysdeps/x86_64/multiarch/memcpy-ssse3-back.S  | 17 ++++--
190885
 sysdeps/x86_64/multiarch/memcpy-ssse3.S       | 17 ++++--
190885
 .../multiarch/memmove-avx512-no-vzeroupper.S  | 16 +++--
190885
 .../multiarch/memmove-vec-unaligned-erms.S    | 54 +++++++++--------
190885
 sysdeps/x86_64/x32/Makefile                   |  2 +-
190885
 sysdeps/x86_64/x32/tst-size_t-memcpy.c        | 58 +++++++++++++++++++
190885
 6 files changed, 122 insertions(+), 42 deletions(-)
190885
 create mode 100644 sysdeps/x86_64/x32/tst-size_t-memcpy.c
190885
190885
diff --git a/sysdeps/x86_64/multiarch/memcpy-ssse3-back.S b/sysdeps/x86_64/multiarch/memcpy-ssse3-back.S
190885
index 3cd11233..568eebd3 100644
190885
--- a/sysdeps/x86_64/multiarch/memcpy-ssse3-back.S
190885
+++ b/sysdeps/x86_64/multiarch/memcpy-ssse3-back.S
190885
@@ -45,28 +45,33 @@
190885
 	.section .text.ssse3,"ax",@progbits
190885
 #if !defined USE_AS_MEMPCPY && !defined USE_AS_MEMMOVE
190885
 ENTRY (MEMPCPY_CHK)
190885
-	cmpq	%rdx, %rcx
190885
+	cmp	%RDX_LP, %RCX_LP
190885
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
190885
 END (MEMPCPY_CHK)
190885
 
190885
 ENTRY (MEMPCPY)
190885
-	movq	%rdi, %rax
190885
-	addq	%rdx, %rax
190885
+	mov	%RDI_LP, %RAX_LP
190885
+	add	%RDX_LP, %RAX_LP
190885
 	jmp	L(start)
190885
 END (MEMPCPY)
190885
 #endif
190885
 
190885
 #if !defined USE_AS_BCOPY
190885
 ENTRY (MEMCPY_CHK)
190885
-	cmpq	%rdx, %rcx
190885
+	cmp	%RDX_LP, %RCX_LP
190885
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
190885
 END (MEMCPY_CHK)
190885
 #endif
190885
 
190885
 ENTRY (MEMCPY)
190885
-	mov	%rdi, %rax
190885
+	mov	%RDI_LP, %RAX_LP
190885
 #ifdef USE_AS_MEMPCPY
190885
-	add	%rdx, %rax
190885
+	add	%RDX_LP, %RAX_LP
190885
+#endif
190885
+
190885
+#ifdef __ILP32__
190885
+	/* Clear the upper 32 bits.  */
190885
+	mov	%edx, %edx
190885
 #endif
190885
 
190885
 #ifdef USE_AS_MEMMOVE
190885
diff --git a/sysdeps/x86_64/multiarch/memcpy-ssse3.S b/sysdeps/x86_64/multiarch/memcpy-ssse3.S
190885
index 0240bfa3..0bd5ee99 100644
190885
--- a/sysdeps/x86_64/multiarch/memcpy-ssse3.S
190885
+++ b/sysdeps/x86_64/multiarch/memcpy-ssse3.S
190885
@@ -45,28 +45,33 @@
190885
 	.section .text.ssse3,"ax",@progbits
190885
 #if !defined USE_AS_MEMPCPY && !defined USE_AS_MEMMOVE
190885
 ENTRY (MEMPCPY_CHK)
190885
-	cmpq	%rdx, %rcx
190885
+	cmp	%RDX_LP, %RCX_LP
190885
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
190885
 END (MEMPCPY_CHK)
190885
 
190885
 ENTRY (MEMPCPY)
190885
-	movq	%rdi, %rax
190885
-	addq	%rdx, %rax
190885
+	mov	%RDI_LP, %RAX_LP
190885
+	add	%RDX_LP, %RAX_LP
190885
 	jmp	L(start)
190885
 END (MEMPCPY)
190885
 #endif
190885
 
190885
 #if !defined USE_AS_BCOPY
190885
 ENTRY (MEMCPY_CHK)
190885
-	cmpq	%rdx, %rcx
190885
+	cmp	%RDX_LP, %RCX_LP
190885
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
190885
 END (MEMCPY_CHK)
190885
 #endif
190885
 
190885
 ENTRY (MEMCPY)
190885
-	mov	%rdi, %rax
190885
+	mov	%RDI_LP, %RAX_LP
190885
 #ifdef USE_AS_MEMPCPY
190885
-	add	%rdx, %rax
190885
+	add	%RDX_LP, %RAX_LP
190885
+#endif
190885
+
190885
+#ifdef __ILP32__
190885
+	/* Clear the upper 32 bits.  */
190885
+	mov	%edx, %edx
190885
 #endif
190885
 
190885
 #ifdef USE_AS_MEMMOVE
190885
diff --git a/sysdeps/x86_64/multiarch/memmove-avx512-no-vzeroupper.S b/sysdeps/x86_64/multiarch/memmove-avx512-no-vzeroupper.S
190885
index effc3ac2..6ca2bbc9 100644
190885
--- a/sysdeps/x86_64/multiarch/memmove-avx512-no-vzeroupper.S
190885
+++ b/sysdeps/x86_64/multiarch/memmove-avx512-no-vzeroupper.S
190885
@@ -24,27 +24,31 @@
190885
 
190885
 	.section .text.avx512,"ax",@progbits
190885
 ENTRY (__mempcpy_chk_avx512_no_vzeroupper)
190885
-	cmpq	%rdx, %rcx
190885
+	cmp	%RDX_LP, %RCX_LP
190885
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
190885
 END (__mempcpy_chk_avx512_no_vzeroupper)
190885
 
190885
 ENTRY (__mempcpy_avx512_no_vzeroupper)
190885
-	movq	%rdi, %rax
190885
-	addq	%rdx, %rax
190885
+	mov	%RDI_LP, %RAX_LP
190885
+	add	%RDX_LP, %RAX_LP
190885
 	jmp	L(start)
190885
 END (__mempcpy_avx512_no_vzeroupper)
190885
 
190885
 ENTRY (__memmove_chk_avx512_no_vzeroupper)
190885
-	cmpq	%rdx, %rcx
190885
+	cmp	%RDX_LP, %RCX_LP
190885
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
190885
 END (__memmove_chk_avx512_no_vzeroupper)
190885
 
190885
 ENTRY (__memmove_avx512_no_vzeroupper)
190885
-	mov	%rdi, %rax
190885
+	mov	%RDI_LP, %RAX_LP
190885
 # ifdef USE_AS_MEMPCPY
190885
-	add	%rdx, %rax
190885
+	add	%RDX_LP, %RAX_LP
190885
 # endif
190885
 L(start):
190885
+# ifdef __ILP32__
190885
+	/* Clear the upper 32 bits.  */
190885
+	mov	%edx, %edx
190885
+# endif
190885
 	lea	(%rsi, %rdx), %rcx
190885
 	lea	(%rdi, %rdx), %r9
190885
 	cmp	$512, %rdx
190885
diff --git a/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S b/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
190885
index c952576c..274aa1c7 100644
190885
--- a/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
190885
+++ b/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
190885
@@ -95,20 +95,20 @@
190885
 	.section SECTION(.text),"ax",@progbits
190885
 #if defined SHARED && IS_IN (libc)
190885
 ENTRY (MEMMOVE_CHK_SYMBOL (__mempcpy_chk, unaligned))
190885
-	cmpq	%rdx, %rcx
190885
+	cmp	%RDX_LP, %RCX_LP
190885
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
190885
 END (MEMMOVE_CHK_SYMBOL (__mempcpy_chk, unaligned))
190885
 #endif
190885
 
190885
 ENTRY (MEMPCPY_SYMBOL (__mempcpy, unaligned))
190885
-	movq	%rdi, %rax
190885
-	addq	%rdx, %rax
190885
+	mov	%RDI_LP, %RAX_LP
190885
+	add	%RDX_LP, %RAX_LP
190885
 	jmp	L(start)
190885
 END (MEMPCPY_SYMBOL (__mempcpy, unaligned))
190885
 
190885
 #if defined SHARED && IS_IN (libc)
190885
 ENTRY (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned))
190885
-	cmpq	%rdx, %rcx
190885
+	cmp	%RDX_LP, %RCX_LP
190885
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
190885
 END (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned))
190885
 #endif
190885
@@ -116,9 +116,13 @@ END (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned))
190885
 ENTRY (MEMMOVE_SYMBOL (__memmove, unaligned))
190885
 	movq	%rdi, %rax
190885
 L(start):
190885
-	cmpq	$VEC_SIZE, %rdx
190885
+# ifdef __ILP32__
190885
+	/* Clear the upper 32 bits.  */
190885
+	movl	%edx, %edx
190885
+# endif
190885
+	cmp	$VEC_SIZE, %RDX_LP
190885
 	jb	L(less_vec)
190885
-	cmpq	$(VEC_SIZE * 2), %rdx
190885
+	cmp	$(VEC_SIZE * 2), %RDX_LP
190885
 	ja	L(more_2x_vec)
190885
 #if !defined USE_MULTIARCH || !IS_IN (libc)
190885
 L(last_2x_vec):
190885
@@ -138,38 +142,38 @@ END (MEMMOVE_SYMBOL (__memmove, unaligned))
190885
 
190885
 # if VEC_SIZE == 16
190885
 ENTRY (__mempcpy_chk_erms)
190885
-	cmpq	%rdx, %rcx
190885
+	cmp	%RDX_LP, %RCX_LP
190885
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
190885
 END (__mempcpy_chk_erms)
190885
 
190885
 /* Only used to measure performance of REP MOVSB.  */
190885
 ENTRY (__mempcpy_erms)
190885
-	movq	%rdi, %rax
190885
+	mov	%RDI_LP, %RAX_LP
190885
 	/* Skip zero length.  */
190885
-	testq	%rdx, %rdx
190885
+	test	%RDX_LP, %RDX_LP
190885
 	jz	2f
190885
-	addq	%rdx, %rax
190885
+	add	%RDX_LP, %RAX_LP
190885
 	jmp	L(start_movsb)
190885
 END (__mempcpy_erms)
190885
 
190885
 ENTRY (__memmove_chk_erms)
190885
-	cmpq	%rdx, %rcx
190885
+	cmp	%RDX_LP, %RCX_LP
190885
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
190885
 END (__memmove_chk_erms)
190885
 
190885
 ENTRY (__memmove_erms)
190885
 	movq	%rdi, %rax
190885
 	/* Skip zero length.  */
190885
-	testq	%rdx, %rdx
190885
+	test	%RDX_LP, %RDX_LP
190885
 	jz	2f
190885
 L(start_movsb):
190885
-	movq	%rdx, %rcx
190885
-	cmpq	%rsi, %rdi
190885
+	mov	%RDX_LP, %RCX_LP
190885
+	cmp	%RSI_LP, %RDI_LP
190885
 	jb	1f
190885
 	/* Source == destination is less common.  */
190885
 	je	2f
190885
-	leaq	(%rsi,%rcx), %rdx
190885
-	cmpq	%rdx, %rdi
190885
+	lea	(%rsi,%rcx), %RDX_LP
190885
+	cmp	%RDX_LP, %RDI_LP
190885
 	jb	L(movsb_backward)
190885
 1:
190885
 	rep movsb
190885
@@ -189,20 +193,20 @@ strong_alias (__memmove_chk_erms, __memcpy_chk_erms)
190885
 
190885
 # ifdef SHARED
190885
 ENTRY (MEMMOVE_CHK_SYMBOL (__mempcpy_chk, unaligned_erms))
190885
-	cmpq	%rdx, %rcx
190885
+	cmp	%RDX_LP, %RCX_LP
190885
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
190885
 END (MEMMOVE_CHK_SYMBOL (__mempcpy_chk, unaligned_erms))
190885
 # endif
190885
 
190885
 ENTRY (MEMMOVE_SYMBOL (__mempcpy, unaligned_erms))
190885
-	movq	%rdi, %rax
190885
-	addq	%rdx, %rax
190885
+	mov	%RDI_LP, %RAX_LP
190885
+	add	%RDX_LP, %RAX_LP
190885
 	jmp	L(start_erms)
190885
 END (MEMMOVE_SYMBOL (__mempcpy, unaligned_erms))
190885
 
190885
 # ifdef SHARED
190885
 ENTRY (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned_erms))
190885
-	cmpq	%rdx, %rcx
190885
+	cmp	%RDX_LP, %RCX_LP
190885
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
190885
 END (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned_erms))
190885
 # endif
190885
@@ -210,9 +214,13 @@ END (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned_erms))
190885
 ENTRY (MEMMOVE_SYMBOL (__memmove, unaligned_erms))
190885
 	movq	%rdi, %rax
190885
 L(start_erms):
190885
-	cmpq	$VEC_SIZE, %rdx
190885
+# ifdef __ILP32__
190885
+	/* Clear the upper 32 bits.  */
190885
+	movl	%edx, %edx
190885
+# endif
190885
+	cmp	$VEC_SIZE, %RDX_LP
190885
 	jb	L(less_vec)
190885
-	cmpq	$(VEC_SIZE * 2), %rdx
190885
+	cmp	$(VEC_SIZE * 2), %RDX_LP
190885
 	ja	L(movsb_more_2x_vec)
190885
 L(last_2x_vec):
190885
 	/* From VEC and to 2 * VEC.  No branch when size == VEC_SIZE. */
190885
@@ -236,7 +244,7 @@ L(movsb):
190885
 	/* Avoid slow backward REP MOVSB.  */
190885
 	jb	L(more_8x_vec_backward)
190885
 1:
190885
-	movq	%rdx, %rcx
190885
+	mov	%RDX_LP, %RCX_LP
190885
 	rep movsb
190885
 L(nop):
190885
 	ret
190885
diff --git a/sysdeps/x86_64/x32/Makefile b/sysdeps/x86_64/x32/Makefile
190885
index ddec7f04..2fe1e5ac 100644
190885
--- a/sysdeps/x86_64/x32/Makefile
190885
+++ b/sysdeps/x86_64/x32/Makefile
190885
@@ -6,7 +6,7 @@ CFLAGS-s_llround.c += -fno-builtin-lround
190885
 endif
190885
 
190885
 ifeq ($(subdir),string)
190885
-tests += tst-size_t-memchr tst-size_t-memcmp
190885
+tests += tst-size_t-memchr tst-size_t-memcmp tst-size_t-memcpy
190885
 endif
190885
 
190885
 ifeq ($(subdir),wcsmbs)
190885
diff --git a/sysdeps/x86_64/x32/tst-size_t-memcpy.c b/sysdeps/x86_64/x32/tst-size_t-memcpy.c
190885
new file mode 100644
190885
index 00000000..66b71e17
190885
--- /dev/null
190885
+++ b/sysdeps/x86_64/x32/tst-size_t-memcpy.c
190885
@@ -0,0 +1,58 @@
190885
+/* Test memcpy with size_t in the lower 32 bits of 64-bit register.
190885
+   Copyright (C) 2019 Free Software Foundation, Inc.
190885
+   This file is part of the GNU C Library.
190885
+
190885
+   The GNU C Library is free software; you can redistribute it and/or
190885
+   modify it under the terms of the GNU Lesser General Public
190885
+   License as published by the Free Software Foundation; either
190885
+   version 2.1 of the License, or (at your option) any later version.
190885
+
190885
+   The GNU C Library is distributed in the hope that it will be useful,
190885
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
190885
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
190885
+   Lesser General Public License for more details.
190885
+
190885
+   You should have received a copy of the GNU Lesser General Public
190885
+   License along with the GNU C Library; if not, see
190885
+   <http://www.gnu.org/licenses/>.  */
190885
+
190885
+#define TEST_NAME "memcpy"
190885
+#include "test-size_t.h"
190885
+
190885
+IMPL (memcpy, 1)
190885
+
190885
+typedef void *(*proto_t) (void *, const void *, size_t);
190885
+
190885
+static void *
190885
+__attribute__ ((noinline, noclone))
190885
+do_memcpy (parameter_t a, parameter_t b)
190885
+{
190885
+  return CALL (&b, a.p, b.p, a.len);
190885
+}
190885
+
190885
+static int
190885
+test_main (void)
190885
+{
190885
+  test_init ();
190885
+
190885
+  parameter_t dest = { { page_size }, buf1 };
190885
+  parameter_t src = { { 0 }, buf2 };
190885
+
190885
+  int ret = 0;
190885
+  FOR_EACH_IMPL (impl, 0)
190885
+    {
190885
+      src.fn = impl->fn;
190885
+      do_memcpy (dest, src);
190885
+      int res = memcmp (dest.p, src.p, dest.len);
190885
+      if (res)
190885
+	{
190885
+	  error (0, 0, "Wrong result in function %s: %i != 0",
190885
+		 impl->name, res);
190885
+	  ret = 1;
190885
+	}
190885
+    }
190885
+
190885
+  return ret ? EXIT_FAILURE : EXIT_SUCCESS;
190885
+}
190885
+
190885
+#include <support/test-driver.c>
190885
-- 
190885
GitLab
190885