SOURCES/libgcrypt-1.8.5-intel-cet.patch

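Add Intel CET (Control-flow Enforcement Technology) support to the
hand-written x86 assembler code in libgcrypt 1.8.5.  Three mechanisms are
involved:

  * configure.ac (and mpi/config.links further down) make the generated
    headers pull in <cet.h> whenever the compiler defines __CET__
    (gcc -fcf-protection), so that every assembled object carries the
    .note.gnu.property section marking it as IBT/SHSTK compatible.  The
    amd64 cipher sources now include <config.h> before their __x86_64
    guard, so the note is emitted even on builds where the rest of the
    file is preprocessed away; an object without the note would make the
    linker treat the whole library as not CET-enabled.

  * The ChaCha20 and Poly1305 amd64 implementations get an ENDBR landing
    pad (_CET_ENDBR, emitted only when <cet.h> defines it) at each global
    function entry point, as indirect branch tracking requires.

  * mpi/i386/mpih-add1.S and mpih-sub1.S enter their unrolled loops
    through a computed indirect jump, so every reachable jump target gets
    a landing pad and the computed address is corrected for the inserted
    endbr32 instructions (see the note before those hunks).

As an illustration of the entry-point pattern (not applied anywhere by
this patch; example_fn is a made-up name, and <cet.h> is assumed to
define _CET_ENDBR as endbr32/endbr64 when indirect branch tracking is
enabled and as nothing otherwise), a marked function in an assembler
source built inside this tree would look like:

#include <config.h>		/* pulls in <cet.h> when __CET__ is set */
	.text
	.globl	example_fn
example_fn:
#ifdef _CET_ENDBR
	_CET_ENDBR		/* landing pad for indirect calls and jumps */
#endif
	ret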
diff -up libgcrypt-1.8.5/cipher/camellia-aesni-avx2-amd64.S.intel-cet libgcrypt-1.8.5/cipher/camellia-aesni-avx2-amd64.S
--- libgcrypt-1.8.5/cipher/camellia-aesni-avx2-amd64.S.intel-cet	2017-11-23 19:16:58.000000000 +0100
+++ libgcrypt-1.8.5/cipher/camellia-aesni-avx2-amd64.S	2020-01-23 15:36:44.148972045 +0100
@@ -18,8 +18,9 @@
  * License along with this program; if not, see <http://www.gnu.org/licenses/>.
  */
 
-#ifdef __x86_64
 #include <config.h>
+
+#ifdef __x86_64
 #if (defined(HAVE_COMPATIBLE_GCC_AMD64_PLATFORM_AS) || \
      defined(HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS)) && \
     defined(ENABLE_AESNI_SUPPORT) && defined(ENABLE_AVX2_SUPPORT)
diff -up libgcrypt-1.8.5/cipher/camellia-aesni-avx-amd64.S.intel-cet libgcrypt-1.8.5/cipher/camellia-aesni-avx-amd64.S
--- libgcrypt-1.8.5/cipher/camellia-aesni-avx-amd64.S.intel-cet	2017-11-23 19:16:58.000000000 +0100
+++ libgcrypt-1.8.5/cipher/camellia-aesni-avx-amd64.S	2020-01-23 15:36:44.145972088 +0100
@@ -18,8 +18,9 @@
  * License along with this program; if not, see <http://www.gnu.org/licenses/>.
  */
 
-#ifdef __x86_64
 #include <config.h>
+
+#ifdef __x86_64
 #if (defined(HAVE_COMPATIBLE_GCC_AMD64_PLATFORM_AS) || \
      defined(HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS)) && \
     defined(ENABLE_AESNI_SUPPORT) && defined(ENABLE_AVX_SUPPORT)
diff -up libgcrypt-1.8.5/cipher/chacha20-avx2-amd64.S.intel-cet libgcrypt-1.8.5/cipher/chacha20-avx2-amd64.S
--- libgcrypt-1.8.5/cipher/chacha20-avx2-amd64.S.intel-cet	2017-11-23 19:16:58.000000000 +0100
+++ libgcrypt-1.8.5/cipher/chacha20-avx2-amd64.S	2020-01-23 15:36:16.780250066 +0100
@@ -48,6 +48,9 @@
 .globl _gcry_chacha20_amd64_avx2_blocks
 ELF(.type  _gcry_chacha20_amd64_avx2_blocks,@function;)
 _gcry_chacha20_amd64_avx2_blocks:
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 .Lchacha_blocks_avx2_local:
 	vzeroupper
 	pushq %rbx
diff -up libgcrypt-1.8.5/cipher/chacha20-sse2-amd64.S.intel-cet libgcrypt-1.8.5/cipher/chacha20-sse2-amd64.S
--- libgcrypt-1.8.5/cipher/chacha20-sse2-amd64.S.intel-cet	2017-11-23 19:16:58.000000000 +0100
+++ libgcrypt-1.8.5/cipher/chacha20-sse2-amd64.S	2020-01-23 15:36:16.783250095 +0100
@@ -41,6 +41,9 @@
 .globl _gcry_chacha20_amd64_sse2_blocks
 ELF(.type  _gcry_chacha20_amd64_sse2_blocks,@function;)
 _gcry_chacha20_amd64_sse2_blocks:
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 .Lchacha_blocks_sse2_local:
 	pushq %rbx
 	pushq %rbp
diff -up libgcrypt-1.8.5/cipher/poly1305-avx2-amd64.S.intel-cet libgcrypt-1.8.5/cipher/poly1305-avx2-amd64.S
--- libgcrypt-1.8.5/cipher/poly1305-avx2-amd64.S.intel-cet	2017-11-23 19:16:58.000000000 +0100
+++ libgcrypt-1.8.5/cipher/poly1305-avx2-amd64.S	2020-01-23 15:36:16.784250105 +0100
@@ -43,6 +43,9 @@
 .globl _gcry_poly1305_amd64_avx2_init_ext
 ELF(.type  _gcry_poly1305_amd64_avx2_init_ext,@function;)
 _gcry_poly1305_amd64_avx2_init_ext:
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 .Lpoly1305_init_ext_avx2_local:
 	xor %edx, %edx
 	vzeroupper
@@ -406,6 +409,9 @@ ELF(.size _gcry_poly1305_amd64_avx2_init
 .globl _gcry_poly1305_amd64_avx2_blocks
 ELF(.type  _gcry_poly1305_amd64_avx2_blocks,@function;)
 _gcry_poly1305_amd64_avx2_blocks:
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 .Lpoly1305_blocks_avx2_local:
 	vzeroupper
 	pushq %rbp
@@ -732,6 +738,9 @@ ELF(.size _gcry_poly1305_amd64_avx2_bloc
 .globl _gcry_poly1305_amd64_avx2_finish_ext
 ELF(.type  _gcry_poly1305_amd64_avx2_finish_ext,@function;)
 _gcry_poly1305_amd64_avx2_finish_ext:
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 .Lpoly1305_finish_ext_avx2_local:
 	vzeroupper
 	pushq %rbp
diff -up libgcrypt-1.8.5/cipher/poly1305-sse2-amd64.S.intel-cet libgcrypt-1.8.5/cipher/poly1305-sse2-amd64.S
--- libgcrypt-1.8.5/cipher/poly1305-sse2-amd64.S.intel-cet	2017-11-23 19:16:58.000000000 +0100
+++ libgcrypt-1.8.5/cipher/poly1305-sse2-amd64.S	2020-01-23 15:36:16.787250134 +0100
@@ -42,6 +42,9 @@
 .globl _gcry_poly1305_amd64_sse2_init_ext
 ELF(.type  _gcry_poly1305_amd64_sse2_init_ext,@function;)
 _gcry_poly1305_amd64_sse2_init_ext:
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 .Lpoly1305_init_ext_x86_local:
 	xor %edx, %edx
 	pushq %r12
@@ -288,6 +291,9 @@ ELF(.size _gcry_poly1305_amd64_sse2_init
 .globl _gcry_poly1305_amd64_sse2_finish_ext
 ELF(.type  _gcry_poly1305_amd64_sse2_finish_ext,@function;)
 _gcry_poly1305_amd64_sse2_finish_ext:
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 .Lpoly1305_finish_ext_x86_local:
 	pushq %rbp
 	movq %rsp, %rbp
@@ -439,6 +445,9 @@ ELF(.size _gcry_poly1305_amd64_sse2_fini
 .globl _gcry_poly1305_amd64_sse2_blocks
 ELF(.type  _gcry_poly1305_amd64_sse2_blocks,@function;)
 _gcry_poly1305_amd64_sse2_blocks:
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 .Lpoly1305_blocks_x86_local:
 	pushq %rbp
 	movq %rsp, %rbp
diff -up libgcrypt-1.8.5/cipher/serpent-avx2-amd64.S.intel-cet libgcrypt-1.8.5/cipher/serpent-avx2-amd64.S
--- libgcrypt-1.8.5/cipher/serpent-avx2-amd64.S.intel-cet	2017-11-23 19:16:58.000000000 +0100
+++ libgcrypt-1.8.5/cipher/serpent-avx2-amd64.S	2020-01-23 15:36:44.151972003 +0100
@@ -18,8 +18,9 @@
  * License along with this program; if not, see <http://www.gnu.org/licenses/>.
  */
 
-#ifdef __x86_64
 #include <config.h>
+
+#ifdef __x86_64
 #if (defined(HAVE_COMPATIBLE_GCC_AMD64_PLATFORM_AS) || \
     defined(HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS)) && defined(USE_SERPENT) && \
     defined(ENABLE_AVX2_SUPPORT)
diff -up libgcrypt-1.8.5/configure.ac.intel-cet libgcrypt-1.8.5/configure.ac
--- libgcrypt-1.8.5/configure.ac.intel-cet	2019-08-29 15:00:08.000000000 +0200
+++ libgcrypt-1.8.5/configure.ac	2020-01-23 15:35:28.147774463 +0100
@@ -95,6 +95,12 @@ AH_TOP([
 AH_BOTTOM([
 #define _GCRYPT_IN_LIBGCRYPT 1
 
+/* Add .note.gnu.property section for Intel CET in assembler sources
+   when CET is enabled.  */
+#if defined(__ASSEMBLER__) && defined(__CET__)
+# include <cet.h>
+#endif
+
 /* If the configure check for endianness has been disabled, get it from
    OS macros.  This is intended for making fat binary builds on OS X.  */
 #ifdef DISABLED_ENDIAN_CHECK
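The same <cet.h> hook is appended to the generated mpi/asm-syntax.h in the
next hunk, presumably because the MPI assembler sources include
asm-syntax.h rather than config.h; without it the i386 mpih-*.S objects
would lack the GNU property note and the linker would clear the CET
marking for the whole library.  Whether a built object actually carries
the note can be checked with "readelf -n" on it, which should list the
IBT and SHSTK properties when CET is enabled.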
diff -up libgcrypt-1.8.5/mpi/config.links.intel-cet libgcrypt-1.8.5/mpi/config.links
--- libgcrypt-1.8.5/mpi/config.links.intel-cet	2017-11-23 19:16:58.000000000 +0100
+++ libgcrypt-1.8.5/mpi/config.links	2020-01-23 15:35:46.398952954 +0100
@@ -382,6 +382,16 @@ if test x"$mpi_cpu_arch" = x ; then
     mpi_cpu_arch="unknown"
 fi
 
+# Add .note.gnu.property section for Intel CET in assembler sources
+# when CET is enabled.  */
+if test x"$mpi_cpu_arch" = xx86 ; then
+    cat <<EOF >> ./mpi/asm-syntax.h
+
+#if defined(__ASSEMBLER__) && defined(__CET__)
+# include <cet.h>
+#endif
+EOF
+fi
 
 # Make sysdep.h
 echo '/* created by config.links - do not edit */' >./mpi/sysdep.h
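The two i386 files below are the delicate part of the patch:
_gcry_mpih_add_n and _gcry_mpih_sub_n enter an unrolled loop through a
computed jump (jmp *%eax), with the original code stepping through
9-byte groups (the eax*9 in the leal).  Under indirect branch tracking
every reachable jump target needs its own endbr32 landing pad, and since
endbr32 is 4 bytes, the target for entry index k now sits 4*(k-1) bytes
further in (k-1 pads precede it).  That is what the added
"leal -4(,%eax,4),%ebx" / "addl %ebx,%eax" pair accounts for: for k = 3,
for example, it adds 4*3 - 4 = 8 bytes.  %ebx is callee-saved, so it is
pushed and popped around the loop, and all of this bookkeeping is
compiled in only when (__CET__ & 1) is set, i.e. only for the IBT half
of CET.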
diff -up libgcrypt-1.8.5/mpi/i386/mpih-add1.S.intel-cet libgcrypt-1.8.5/mpi/i386/mpih-add1.S
--- libgcrypt-1.8.5/mpi/i386/mpih-add1.S.intel-cet	2017-11-23 19:16:58.000000000 +0100
+++ libgcrypt-1.8.5/mpi/i386/mpih-add1.S	2020-01-23 15:37:40.470175379 +0100
@@ -52,6 +52,10 @@ C_SYMBOL_NAME(_gcry_mpih_add_n:)
 	movl 20(%esp),%edx		/* s2_ptr */
 	movl 24(%esp),%ecx		/* size */
 
+#if defined __CET__ && (__CET__ & 1) != 0
+	pushl	%ebx
+#endif
+
 	movl	%ecx,%eax
 	shrl	$3,%ecx 		/* compute count for unrolled loop */
 	negl	%eax
@@ -63,6 +67,9 @@ C_SYMBOL_NAME(_gcry_mpih_add_n:)
 	subl	%eax,%esi		/* ... by a constant when we ... */
 	subl	%eax,%edx		/* ... enter the loop */
 	shrl	$2,%eax 		/* restore previous value */
+#if defined __CET__ && (__CET__ & 1) != 0
+	leal	-4(,%eax,4),%ebx	/* Count for 4-byte endbr32 */
+#endif
 #ifdef PIC
 /* Calculate start address in loop for PIC.  Due to limitations in some
    assemblers, Loop-L0-3 cannot be put into the leal */
@@ -75,29 +82,53 @@ L0:	leal	(%eax,%eax,8),%eax
 /* Calculate start address in loop for non-PIC.  */
 	leal	(Loop - 3)(%eax,%eax,8),%eax
 #endif
+#if defined __CET__ && (__CET__ & 1) != 0
+	addl	%ebx,%eax		/* Adjust for endbr32 */
+#endif
 	jmp	*%eax			/* jump into loop */
 	ALIGN (3)
 Loop:	movl	(%esi),%eax
 	adcl	(%edx),%eax
 	movl	%eax,(%edi)
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 	movl	4(%esi),%eax
 	adcl	4(%edx),%eax
 	movl	%eax,4(%edi)
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 	movl	8(%esi),%eax
 	adcl	8(%edx),%eax
 	movl	%eax,8(%edi)
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 	movl	12(%esi),%eax
 	adcl	12(%edx),%eax
 	movl	%eax,12(%edi)
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 	movl	16(%esi),%eax
 	adcl	16(%edx),%eax
 	movl	%eax,16(%edi)
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 	movl	20(%esi),%eax
 	adcl	20(%edx),%eax
 	movl	%eax,20(%edi)
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 	movl	24(%esi),%eax
 	adcl	24(%edx),%eax
 	movl	%eax,24(%edi)
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 	movl	28(%esi),%eax
 	adcl	28(%edx),%eax
 	movl	%eax,28(%edi)
@@ -110,6 +141,10 @@ Loop:	movl	(%esi),%eax
 	sbbl	%eax,%eax
 	negl	%eax
 
+#if defined __CET__ && (__CET__ & 1) != 0
+	popl	%ebx
+#endif
+
 	popl %esi
 	popl %edi
 	ret
diff -up libgcrypt-1.8.5/mpi/i386/mpih-sub1.S.intel-cet libgcrypt-1.8.5/mpi/i386/mpih-sub1.S
--- libgcrypt-1.8.5/mpi/i386/mpih-sub1.S.intel-cet	2017-11-23 19:16:58.000000000 +0100
+++ libgcrypt-1.8.5/mpi/i386/mpih-sub1.S	2020-01-23 15:37:40.472175351 +0100
@@ -53,6 +53,10 @@ C_SYMBOL_NAME(_gcry_mpih_sub_n:)
 	movl 20(%esp),%edx		/* s2_ptr */
 	movl 24(%esp),%ecx		/* size */
 
+#if defined __CET__ && (__CET__ & 1) != 0
+	pushl	%ebx
+#endif
+
 	movl	%ecx,%eax
 	shrl	$3,%ecx 		/* compute count for unrolled loop */
 	negl	%eax
@@ -64,6 +68,9 @@ C_SYMBOL_NAME(_gcry_mpih_sub_n:)
 	subl	%eax,%esi		/* ... by a constant when we ... */
 	subl	%eax,%edx		/* ... enter the loop */
 	shrl	$2,%eax 		/* restore previous value */
+#if defined __CET__ && (__CET__ & 1) != 0
+	leal	-4(,%eax,4),%ebx	/* Count for 4-byte endbr32 */
+#endif
 #ifdef PIC
 /* Calculate start address in loop for PIC.  Due to limitations in some
    assemblers, Loop-L0-3 cannot be put into the leal */
@@ -76,29 +83,53 @@ L0:	leal	(%eax,%eax,8),%eax
 /* Calculate start address in loop for non-PIC.  */
 	leal	(Loop - 3)(%eax,%eax,8),%eax
 #endif
+#if defined __CET__ && (__CET__ & 1) != 0
+	addl	%ebx,%eax		/* Adjust for endbr32 */
+#endif
 	jmp	*%eax			/* jump into loop */
 	ALIGN (3)
 Loop:	movl	(%esi),%eax
 	sbbl	(%edx),%eax
 	movl	%eax,(%edi)
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 	movl	4(%esi),%eax
 	sbbl	4(%edx),%eax
 	movl	%eax,4(%edi)
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 	movl	8(%esi),%eax
 	sbbl	8(%edx),%eax
 	movl	%eax,8(%edi)
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 	movl	12(%esi),%eax
 	sbbl	12(%edx),%eax
 	movl	%eax,12(%edi)
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 	movl	16(%esi),%eax
 	sbbl	16(%edx),%eax
 	movl	%eax,16(%edi)
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 	movl	20(%esi),%eax
 	sbbl	20(%edx),%eax
 	movl	%eax,20(%edi)
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 	movl	24(%esi),%eax
 	sbbl	24(%edx),%eax
 	movl	%eax,24(%edi)
+#ifdef _CET_ENDBR
+	_CET_ENDBR
+#endif
 	movl	28(%esi),%eax
 	sbbl	28(%edx),%eax
 	movl	%eax,28(%edi)
@@ -111,6 +142,10 @@ Loop:	movl	(%esi),%eax
 	sbbl	%eax,%eax
 	negl	%eax
 
+#if defined __CET__ && (__CET__ & 1) != 0
+	popl	%ebx
+#endif
+
 	popl %esi
 	popl %edi
 	ret