Blame SOURCES/glibc-rh1505492-bounded-14.patch

c6d234
commit 92945b5261c412eb590b2b34c7ec9a035f0693a1
c6d234
Author: Joseph Myers <joseph@codesourcery.com>
c6d234
Date:   Tue Feb 19 21:58:08 2013 +0000
c6d234
c6d234
    Remove some bounded-pointers support from i386 .S files.
c6d234
c6d234
diff --git a/sysdeps/i386/add_n.S b/sysdeps/i386/add_n.S
c6d234
index 5223d685952d6093..df3ea2362c76247a 100644
c6d234
--- a/sysdeps/i386/add_n.S
c6d234
+++ b/sysdeps/i386/add_n.S
c6d234
@@ -30,7 +30,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__mpn_add_n))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl %edi
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -43,13 +42,6 @@ ENTRY (BP_SYM (__mpn_add_n))
c6d234
 	cfi_rel_offset (esi, 0)
c6d234
 	movl S2(%esp),%edx
c6d234
 	movl SIZE(%esp),%ecx
c6d234
-#if __BOUNDED_POINTERS__
c6d234
-	shll	$2, %ecx	/* convert limbs to bytes */
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ecx)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%esi, S1(%esp), %ecx)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%edx, S2(%esp), %ecx)
c6d234
-	shrl	$2, %ecx
c6d234
-#endif
c6d234
 	movl	%ecx,%eax
c6d234
 	shrl	$3,%ecx			/* compute count for unrolled loop */
c6d234
 	negl	%eax
c6d234
@@ -117,6 +109,5 @@ L(oop):	movl	(%esi),%eax
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (__mpn_add_n))
c6d234
diff --git a/sysdeps/i386/addmul_1.S b/sysdeps/i386/addmul_1.S
c6d234
index 02acb436d9833033..006c08a92de4e303 100644
c6d234
--- a/sysdeps/i386/addmul_1.S
c6d234
+++ b/sysdeps/i386/addmul_1.S
c6d234
@@ -35,7 +35,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__mpn_addmul_1))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl	%res_ptr
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -53,12 +52,6 @@ ENTRY (BP_SYM (__mpn_addmul_1))
c6d234
 	movl	SIZE(%esp), %sizeP
c6d234
 	movl	S2LIMB(%esp), %s2_limb
c6d234
 	cfi_rel_offset (s2_limb, 0)
c6d234
-#if __BOUNDED_POINTERS__
c6d234
-	shll	$2, %sizeP	/* convert limbs to bytes */
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %sizeP)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%s1_ptr, S1(%esp), %sizeP)
c6d234
-	shrl	$2, %sizeP
c6d234
-#endif
c6d234
 	leal	(%res_ptr,%sizeP,4), %res_ptr
c6d234
 	leal	(%s1_ptr,%sizeP,4), %s1_ptr
c6d234
 	negl	%sizeP
c6d234
@@ -91,6 +84,5 @@ L(oop):
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (res_ptr)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (__mpn_addmul_1))
c6d234
diff --git a/sysdeps/i386/bp-asm.h b/sysdeps/i386/bp-asm.h
c6d234
index 5e66e90b450846ed..75e1a007cb3eca71 100644
c6d234
--- a/sysdeps/i386/bp-asm.h
c6d234
+++ b/sysdeps/i386/bp-asm.h
c6d234
@@ -34,81 +34,12 @@
c6d234
 /* Although the caller pushes the hidden arg, the callee is
c6d234
    responsible for popping it.  */
c6d234
 #   define RET_PTR ret $RTN_SIZE
c6d234
-/* Maintain frame pointer chain in leaf assembler functions for the benefit
c6d234
-   of debugging stack traces when bounds violations occur.  */
c6d234
-#   define ENTER pushl %ebp; movl %esp, %ebp
c6d234
-#   define LEAVE movl %ebp, %esp; popl %ebp
c6d234
 /* Stack space overhead of procedure-call linkage: return address and
c6d234
    frame pointer.  */
c6d234
 #   define LINKAGE 8
c6d234
 /* Stack offset of return address after calling ENTER.  */
c6d234
 #   define PCOFF 4
c6d234
 
c6d234
-/* Int 5 is the "bound range" exception also raised by the "bound"
c6d234
-   instruction.  */
c6d234
-#   define BOUNDS_VIOLATED int $5
c6d234
-
c6d234
-#   define CHECK_BOUNDS_LOW(VAL_REG, BP_MEM)	\
c6d234
-	cmpl 4+BP_MEM, VAL_REG;			\
c6d234
-	jae 0f; /* continue if value >= low */	\
c6d234
-	BOUNDS_VIOLATED;			\
c6d234
-    0:
c6d234
-
c6d234
-#   define CHECK_BOUNDS_HIGH(VAL_REG, BP_MEM, Jcc)	\
c6d234
-	cmpl 8+BP_MEM, VAL_REG;				\
c6d234
-	Jcc 0f; /* continue if value < high */		\
c6d234
-	BOUNDS_VIOLATED;				\
c6d234
-    0:
c6d234
-
c6d234
-#   define CHECK_BOUNDS_BOTH(VAL_REG, BP_MEM)	\
c6d234
-	cmpl 4+BP_MEM, VAL_REG;			\
c6d234
-	jb 1f; /* die if value < low */		\
c6d234
-    	cmpl 8+BP_MEM, VAL_REG;			\
c6d234
-	jb 0f; /* continue if value < high */	\
c6d234
-    1:	BOUNDS_VIOLATED;			\
c6d234
-    0:
c6d234
-
c6d234
-#   define CHECK_BOUNDS_BOTH_WIDE(VAL_REG, BP_MEM, LENGTH)	\
c6d234
-	CHECK_BOUNDS_LOW(VAL_REG, BP_MEM);			\
c6d234
-	addl LENGTH, VAL_REG;					\
c6d234
-    	cmpl 8+BP_MEM, VAL_REG;					\
c6d234
-	jbe 0f; /* continue if value <= high */			\
c6d234
-	BOUNDS_VIOLATED;					\
c6d234
-    0:	subl LENGTH, VAL_REG /* restore value */
c6d234
-
c6d234
-/* Take bounds from BP_MEM and affix them to the pointer
c6d234
-   value in %eax, stuffing all into memory at RTN(%esp).
c6d234
-   Use %edx as a scratch register.  */
c6d234
-
c6d234
-#   define RETURN_BOUNDED_POINTER(BP_MEM)	\
c6d234
-	movl RTN(%esp), %edx;			\
c6d234
-	movl %eax, 0(%edx);			\
c6d234
-	movl 4+BP_MEM, %eax;			\
c6d234
-	movl %eax, 4(%edx);			\
c6d234
-	movl 8+BP_MEM, %eax;			\
c6d234
-	movl %eax, 8(%edx)
c6d234
-
c6d234
-#   define RETURN_NULL_BOUNDED_POINTER		\
c6d234
-	movl RTN(%esp), %edx;			\
c6d234
-	movl %eax, 0(%edx);			\
c6d234
-	movl %eax, 4(%edx);			\
c6d234
-	movl %eax, 8(%edx)
c6d234
-
c6d234
-/* The caller of __errno_location is responsible for allocating space
c6d234
-   for the three-word BP return-value and passing pushing its address
c6d234
-   as an implicit first argument.  */
c6d234
-#   define PUSH_ERRNO_LOCATION_RETURN		\
c6d234
-	subl $8, %esp;				\
c6d234
-	subl $4, %esp;				\
c6d234
-	pushl %esp
c6d234
-
c6d234
-/* __errno_location is responsible for popping the implicit first
c6d234
-   argument, but we must pop the space for the BP itself.  We also
c6d234
-   dereference the return value in order to dig out the pointer value.  */
c6d234
-#   define POP_ERRNO_LOCATION_RETURN		\
c6d234
-	popl %eax;				\
c6d234
-	addl $8, %esp
c6d234
-
c6d234
 #  else /* !__BOUNDED_POINTERS__ */
c6d234
 
c6d234
 /* Unbounded pointers occupy one word.  */
c6d234
@@ -117,25 +48,11 @@
c6d234
 #   define RTN_SIZE 0
c6d234
 /* Use simple return instruction for unbounded pointer values.  */
c6d234
 #   define RET_PTR ret
c6d234
-/* Don't maintain frame pointer chain for leaf assembler functions.  */
c6d234
-#   define ENTER
c6d234
-#   define LEAVE
c6d234
 /* Stack space overhead of procedure-call linkage: return address only.  */
c6d234
 #   define LINKAGE 4
c6d234
 /* Stack offset of return address after calling ENTER.  */
c6d234
 #   define PCOFF 0
c6d234
 
c6d234
-#   define CHECK_BOUNDS_LOW(VAL_REG, BP_MEM)
c6d234
-#   define CHECK_BOUNDS_HIGH(VAL_REG, BP_MEM, Jcc)
c6d234
-#   define CHECK_BOUNDS_BOTH(VAL_REG, BP_MEM)
c6d234
-#   define CHECK_BOUNDS_BOTH_WIDE(VAL_REG, BP_MEM, LENGTH)
c6d234
-#   define RETURN_BOUNDED_POINTER(BP_MEM)
c6d234
-
c6d234
-#   define RETURN_NULL_BOUNDED_POINTER
c6d234
-
c6d234
-#   define PUSH_ERRNO_LOCATION_RETURN
c6d234
-#   define POP_ERRNO_LOCATION_RETURN
c6d234
-
c6d234
 #  endif /* !__BOUNDED_POINTERS__ */
c6d234
 
c6d234
 # endif /* __ASSEMBLER__ */
c6d234
diff --git a/sysdeps/i386/bsd-_setjmp.S b/sysdeps/i386/bsd-_setjmp.S
c6d234
index 8a5d0d8624c0e17d..fe0224145643ea0b 100644
c6d234
--- a/sysdeps/i386/bsd-_setjmp.S
c6d234
+++ b/sysdeps/i386/bsd-_setjmp.S
c6d234
@@ -31,11 +31,9 @@
c6d234
 #define SIGMSK	JMPBUF+PTR_SIZE
c6d234
 
c6d234
 ENTRY (BP_SYM (_setjmp))
c6d234
-	ENTER
c6d234
 
c6d234
 	xorl %eax, %eax
c6d234
 	movl JMPBUF(%esp), %edx
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%edx, JMPBUF(%esp), $(JB_SIZE+4))
c6d234
 
c6d234
      	/* Save registers.  */
c6d234
 	movl %ebx, (JB_BX*4)(%edx)
c6d234
@@ -52,7 +50,6 @@ ENTRY (BP_SYM (_setjmp))
c6d234
 	PTR_MANGLE (%ecx)
c6d234
 #endif
c6d234
      	movl %ecx, (JB_PC*4)(%edx)
c6d234
-	LEAVE
c6d234
 	movl %ebp, (JB_BP*4)(%edx) /* Save caller's frame pointer.  */
c6d234
 
c6d234
 	movl %eax, JB_SIZE(%edx) /* No signal mask set.  */
c6d234
diff --git a/sysdeps/i386/bsd-setjmp.S b/sysdeps/i386/bsd-setjmp.S
c6d234
index d3a284ef28196c29..25470f3904a8ca73 100644
c6d234
--- a/sysdeps/i386/bsd-setjmp.S
c6d234
+++ b/sysdeps/i386/bsd-setjmp.S
c6d234
@@ -34,10 +34,8 @@ ENTRY (BP_SYM (setjmp))
c6d234
 	/* Note that we have to use a non-exported symbol in the next
c6d234
 	   jump since otherwise gas will emit it as a jump through the
c6d234
 	   PLT which is what we cannot use here.  */
c6d234
-	ENTER
c6d234
 
c6d234
 	movl JMPBUF(%esp), %eax
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%eax, JMPBUF(%esp), $JB_SIZE)
c6d234
 
c6d234
      	/* Save registers.  */
c6d234
 	movl %ebx, (JB_BX*4)(%eax)
c6d234
@@ -54,7 +52,6 @@ ENTRY (BP_SYM (setjmp))
c6d234
 	PTR_MANGLE (%ecx)
c6d234
 #endif
c6d234
      	movl %ecx, (JB_PC*4)(%eax)
c6d234
-	LEAVE /* pop frame pointer to prepare for tail-call.  */
c6d234
 	movl %ebp, (JB_BP*4)(%eax) /* Save caller's frame pointer.  */
c6d234
 
c6d234
 	/* Call __sigjmp_save.  */
c6d234
diff --git a/sysdeps/i386/fpu/s_frexp.S b/sysdeps/i386/fpu/s_frexp.S
c6d234
index e76732dc8bc2a091..6c5cd8fdfd9f825b 100644
c6d234
--- a/sysdeps/i386/fpu/s_frexp.S
c6d234
+++ b/sysdeps/i386/fpu/s_frexp.S
c6d234
@@ -41,7 +41,6 @@ two54:	.byte 0, 0, 0, 0, 0, 0, 0x50, 0x43
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__frexp))
c6d234
-	ENTER
c6d234
 
c6d234
 	movl	VAL0(%esp), %ecx
c6d234
 	movl	VAL1(%esp), %eax
c6d234
@@ -78,11 +77,9 @@ ENTRY (BP_SYM (__frexp))
c6d234
 	/* Store %ecx in the variable pointed to by the second argument,
c6d234
 	   get the factor from the stack and return.  */
c6d234
 1:	movl	EXPP(%esp), %eax
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%eax, EXPP(%esp), $4)
c6d234
 	fldl	VAL0(%esp)
c6d234
 	movl	%ecx, (%eax)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (__frexp))
c6d234
 weak_alias (BP_SYM (__frexp), BP_SYM (frexp))
c6d234
diff --git a/sysdeps/i386/fpu/s_frexpf.S b/sysdeps/i386/fpu/s_frexpf.S
c6d234
index af0dc8ee3d58c1a6..4fe2181873de7302 100644
c6d234
--- a/sysdeps/i386/fpu/s_frexpf.S
c6d234
+++ b/sysdeps/i386/fpu/s_frexpf.S
c6d234
@@ -40,7 +40,6 @@ two25:	.byte 0, 0, 0, 0x4c
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__frexpf))
c6d234
-	ENTER
c6d234
 
c6d234
 	movl	VAL(%esp), %eax
c6d234
 	xorl	%ecx, %ecx
c6d234
@@ -75,11 +74,9 @@ ENTRY (BP_SYM (__frexpf))
c6d234
 	/* Store %ecx in the variable pointed to by the second argument,
c6d234
 	   get the factor from the stack and return.  */
c6d234
 1:	movl	EXPP(%esp), %eax
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%eax, EXPP(%esp), $4)
c6d234
 	flds	VAL(%esp)
c6d234
 	movl	%ecx, (%eax)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (__frexpf))
c6d234
 weak_alias (BP_SYM (__frexpf), BP_SYM (frexpf))
c6d234
diff --git a/sysdeps/i386/fpu/s_frexpl.S b/sysdeps/i386/fpu/s_frexpl.S
c6d234
index 6f464a89d6a2e75d..54d5010185792bd9 100644
c6d234
--- a/sysdeps/i386/fpu/s_frexpl.S
c6d234
+++ b/sysdeps/i386/fpu/s_frexpl.S
c6d234
@@ -42,7 +42,6 @@ two64:	.byte 0, 0, 0, 0, 0, 0, 0xf0, 0x43
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__frexpl))
c6d234
-	ENTER
c6d234
 
c6d234
 	movl	VAL0(%esp), %ecx
c6d234
 	movl	VAL2(%esp), %eax
c6d234
@@ -80,11 +79,9 @@ ENTRY (BP_SYM (__frexpl))
c6d234
 	/* Store %ecx in the variable pointed to by the second argument,
c6d234
 	   get the factor from the stack and return.  */
c6d234
 1:	movl	EXPP(%esp), %eax
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%eax, EXPP(%esp), $4)
c6d234
 	fldt	VAL0(%esp)
c6d234
 	movl	%ecx, (%eax)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (__frexpl))
c6d234
 weak_alias (BP_SYM (__frexpl), BP_SYM (frexpl))
c6d234
diff --git a/sysdeps/i386/fpu/s_remquo.S b/sysdeps/i386/fpu/s_remquo.S
c6d234
index 5056593214e66f3c..e61ff5be7bc826df 100644
c6d234
--- a/sysdeps/i386/fpu/s_remquo.S
c6d234
+++ b/sysdeps/i386/fpu/s_remquo.S
c6d234
@@ -15,7 +15,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__remquo))
c6d234
-	ENTER
c6d234
 
c6d234
 	fldl	DVSOR(%esp)
c6d234
 	fldl	DVDND(%esp)
c6d234
@@ -36,7 +35,6 @@ ENTRY (BP_SYM (__remquo))
c6d234
 	shrl	%cl, %eax
c6d234
 	andl	$7, %eax
c6d234
 	movl	QUOP(%esp), %ecx
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%ecx, QUOP(%esp), $4)
c6d234
 	movl	DVDND+4(%esp), %edx
c6d234
 	xorl	DVSOR+4(%esp), %edx
c6d234
 	testl	$0x80000000, %edx
c6d234
@@ -44,7 +42,6 @@ ENTRY (BP_SYM (__remquo))
c6d234
 	negl	%eax
c6d234
 1:	movl	%eax, (%ecx)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (__remquo))
c6d234
 weak_alias (BP_SYM (__remquo), BP_SYM (remquo))
c6d234
diff --git a/sysdeps/i386/fpu/s_remquof.S b/sysdeps/i386/fpu/s_remquof.S
c6d234
index d3c5965be455e42d..c2b351b859c28e7c 100644
c6d234
--- a/sysdeps/i386/fpu/s_remquof.S
c6d234
+++ b/sysdeps/i386/fpu/s_remquof.S
c6d234
@@ -15,7 +15,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__remquof))
c6d234
-	ENTER
c6d234
 
c6d234
 	flds	DVSOR(%esp)
c6d234
 	flds	DVDND(%esp)
c6d234
@@ -36,7 +35,6 @@ ENTRY (BP_SYM (__remquof))
c6d234
 	shrl	%cl, %eax
c6d234
 	andl	$7, %eax
c6d234
 	movl	QUOP(%esp), %ecx
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%ecx, QUOP(%esp), $4)
c6d234
 	movl	DVDND(%esp), %edx
c6d234
 	xorl	DVSOR(%esp), %edx
c6d234
 	testl	$0x80000000, %edx
c6d234
@@ -44,7 +42,6 @@ ENTRY (BP_SYM (__remquof))
c6d234
 	negl	%eax
c6d234
 1:	movl	%eax, (%ecx)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (__remquof))
c6d234
 weak_alias (BP_SYM (__remquof), BP_SYM (remquof))
c6d234
diff --git a/sysdeps/i386/fpu/s_remquol.S b/sysdeps/i386/fpu/s_remquol.S
c6d234
index 65240adbe47f6014..2cbe43589609a2b8 100644
c6d234
--- a/sysdeps/i386/fpu/s_remquol.S
c6d234
+++ b/sysdeps/i386/fpu/s_remquol.S
c6d234
@@ -15,7 +15,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__remquol))
c6d234
-	ENTER
c6d234
 
c6d234
 	fldt	DVSOR(%esp)
c6d234
 	fldt	DVDND(%esp)
c6d234
@@ -36,7 +35,6 @@ ENTRY (BP_SYM (__remquol))
c6d234
 	shrl	%cl, %eax
c6d234
 	andl	$7, %eax
c6d234
 	movl	QUOP(%esp), %ecx
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%ecx, QUOP(%esp), $4)
c6d234
 	movl	DVDND+8(%esp), %edx
c6d234
 	xorl	DVSOR+8(%esp), %edx
c6d234
 	testl	$0x8000, %edx
c6d234
@@ -44,7 +42,6 @@ ENTRY (BP_SYM (__remquol))
c6d234
 	negl	%eax
c6d234
 1:	movl	%eax, (%ecx)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (__remquol))
c6d234
 weak_alias (BP_SYM (__remquol), BP_SYM (remquol))
c6d234
diff --git a/sysdeps/i386/i486/strcat.S b/sysdeps/i386/i486/strcat.S
c6d234
index 7596a0dcdbff6fcc..b3212c8beb948411 100644
c6d234
--- a/sysdeps/i386/i486/strcat.S
c6d234
+++ b/sysdeps/i386/i486/strcat.S
c6d234
@@ -31,15 +31,12 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (strcat))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl %edi		/* Save callee-safe register.  */
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
 
c6d234
 	movl DEST(%esp), %edx
c6d234
 	movl SRC(%esp), %ecx
c6d234
-	CHECK_BOUNDS_LOW (%edx, DEST(%esp))
c6d234
-	CHECK_BOUNDS_LOW (%ecx, SRC(%esp))
c6d234
 
c6d234
 	testb $0xff, (%ecx)	/* Is source string empty? */
c6d234
 	jz L(8)			/* yes => return */
c6d234
@@ -262,12 +259,10 @@ L(9):	movb %al, (%ecx,%edx)	/* store first byte of last word */
c6d234
 
c6d234
 L(8):	/* GKM FIXME: check high bounds */
c6d234
 	movl DEST(%esp), %eax	/* start address of destination is result */
c6d234
-	RETURN_BOUNDED_POINTER (DEST(%esp))
c6d234
 	popl %edi		/* restore saved register */
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	RET_PTR
c6d234
 END (BP_SYM (strcat))
c6d234
 libc_hidden_builtin_def (strcat)
c6d234
diff --git a/sysdeps/i386/i486/strlen.S b/sysdeps/i386/i486/strlen.S
c6d234
index 3ba4df92d9a40d91..76c31098e84a1c93 100644
c6d234
--- a/sysdeps/i386/i486/strlen.S
c6d234
+++ b/sysdeps/i386/i486/strlen.S
c6d234
@@ -28,10 +28,8 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (strlen))
c6d234
-	ENTER
c6d234
 
c6d234
 	movl STR(%esp), %ecx
c6d234
-	CHECK_BOUNDS_LOW (%ecx, STR(%esp))
c6d234
 	movl %ecx, %eax		/* duplicate it */
c6d234
 
c6d234
 	andl $3, %ecx		/* mask alignment bits */
c6d234
@@ -129,10 +127,8 @@ L(3):	testb %cl, %cl		/* is first byte NUL? */
c6d234
 	jz L(2)			/* yes => return pointer */
c6d234
 	incl %eax		/* increment pointer */
c6d234
 
c6d234
-L(2):	CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
c6d234
-	subl STR(%esp), %eax	/* compute difference to string start */
c6d234
+L(2):	subl STR(%esp), %eax	/* compute difference to string start */
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (strlen))
c6d234
 libc_hidden_builtin_def (strlen)
c6d234
diff --git a/sysdeps/i386/i586/add_n.S b/sysdeps/i386/i586/add_n.S
c6d234
index d6ed224035868645..a2b502d0d21bf9f5 100644
c6d234
--- a/sysdeps/i386/i586/add_n.S
c6d234
+++ b/sysdeps/i386/i586/add_n.S
c6d234
@@ -30,7 +30,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__mpn_add_n))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl	%edi
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -48,13 +47,6 @@ ENTRY (BP_SYM (__mpn_add_n))
c6d234
 	movl	S2(%esp),%ebx
c6d234
 	cfi_rel_offset (ebx, 0)
c6d234
 	movl	SIZE(%esp),%ecx
c6d234
-#if __BOUNDED_POINTERS__
c6d234
-	shll	$2, %ecx		/* convert limbs to bytes */
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ecx)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%esi, S1(%esp), %ecx)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%ebx, S2(%esp), %ecx)
c6d234
-	shrl	$2, %ecx
c6d234
-#endif
c6d234
 	movl	(%ebx),%ebp
c6d234
 	cfi_rel_offset (ebp, 4)
c6d234
 
c6d234
@@ -149,6 +141,5 @@ L(end2):
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (__mpn_add_n))
c6d234
diff --git a/sysdeps/i386/i586/addmul_1.S b/sysdeps/i386/i586/addmul_1.S
c6d234
index 89bf87c4ea2a87af..a9d0b08b5e8072ab 100644
c6d234
--- a/sysdeps/i386/i586/addmul_1.S
c6d234
+++ b/sysdeps/i386/i586/addmul_1.S
c6d234
@@ -35,7 +35,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__mpn_addmul_1))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl	%res_ptr
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -53,12 +52,6 @@ ENTRY (BP_SYM (__mpn_addmul_1))
c6d234
 	movl	SIZE(%esp), %size
c6d234
 	movl	S2LIMB(%esp), %s2_limb
c6d234
 	cfi_rel_offset (s2_limb, 0)
c6d234
-#if __BOUNDED_POINTERS__
c6d234
-	shll	$2, %size	/* convert limbs to bytes */
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %size)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%s1_ptr, S1(%esp), %size)
c6d234
-	shrl	$2, %size
c6d234
-#endif
c6d234
 	leal	(%res_ptr,%size,4), %res_ptr
c6d234
 	leal	(%s1_ptr,%size,4), %s1_ptr
c6d234
 	negl	%size
c6d234
@@ -98,7 +91,6 @@ L(oop):	adcl	$0, %ebp
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (res_ptr)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 #undef size
c6d234
 END (BP_SYM (__mpn_addmul_1))
c6d234
diff --git a/sysdeps/i386/i586/lshift.S b/sysdeps/i386/i586/lshift.S
c6d234
index d619c58d2dda212d..23c5002d69f50f8f 100644
c6d234
--- a/sysdeps/i386/i586/lshift.S
c6d234
+++ b/sysdeps/i386/i586/lshift.S
c6d234
@@ -29,7 +29,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__mpn_lshift))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl	%edi
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -48,12 +47,6 @@ ENTRY (BP_SYM (__mpn_lshift))
c6d234
 	movl	SIZE(%esp),%ebx
c6d234
 	cfi_rel_offset (ebx, 0)
c6d234
 	movl	CNT(%esp),%ecx
c6d234
-#if __BOUNDED_POINTERS__
c6d234
-	shll	$2, %ebx		/* convert limbs to bytes */
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ebx)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%esi, S(%esp), %ebx)
c6d234
-	shrl	$2, %ebx
c6d234
-#endif
c6d234
 
c6d234
 /* We can use faster code for shift-by-1 under certain conditions.  */
c6d234
 	cmp	$1,%ecx
c6d234
@@ -155,7 +148,6 @@ L(end2):
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 
c6d234
 /* We loop from least significant end of the arrays, which is only
c6d234
@@ -261,6 +253,5 @@ L(L1):	movl	%edx,(%edi)		/* store last limb */
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (__mpn_lshift))
c6d234
diff --git a/sysdeps/i386/i586/memcpy.S b/sysdeps/i386/i586/memcpy.S
c6d234
index a3f3524ad03509c1..07f5432d761c453d 100644
c6d234
--- a/sysdeps/i386/i586/memcpy.S
c6d234
+++ b/sysdeps/i386/i586/memcpy.S
c6d234
@@ -42,7 +42,6 @@ ENTRY (__memcpy_chk)
c6d234
 END (__memcpy_chk)
c6d234
 #endif
c6d234
 ENTRY (BP_SYM (memcpy))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl	%edi
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -54,8 +53,6 @@ ENTRY (BP_SYM (memcpy))
c6d234
 	movl	SRC(%esp), %esi
c6d234
 	cfi_rel_offset (esi, 0)
c6d234
 	movl	LEN(%esp), %ecx
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%edi, DEST(%esp), %ecx)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%esi, SRC(%esp), %ecx)
c6d234
 	movl	%edi, %eax
c6d234
 
c6d234
 	/* We need this in any case.  */
c6d234
@@ -127,7 +124,6 @@ L(1):	rep; movsb
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	RET_PTR
c6d234
 END (BP_SYM (memcpy))
c6d234
 #if !MEMPCPY_P
c6d234
diff --git a/sysdeps/i386/i586/memset.S b/sysdeps/i386/i586/memset.S
c6d234
index d7a4b73a72b1abce..8234b99d734b24b0 100644
c6d234
--- a/sysdeps/i386/i586/memset.S
c6d234
+++ b/sysdeps/i386/i586/memset.S
c6d234
@@ -45,7 +45,6 @@ ENTRY (__memset_chk)
c6d234
 END (__memset_chk)
c6d234
 #endif
c6d234
 ENTRY (BP_SYM (memset))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl	%edi
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -53,7 +52,6 @@ ENTRY (BP_SYM (memset))
c6d234
 	movl	DEST(%esp), %edi
c6d234
 	cfi_rel_offset (edi, 0)
c6d234
 	movl	LEN(%esp), %edx
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%edi, DEST(%esp), %edx)
c6d234
 #if BZERO_P
c6d234
 	xorl	%eax, %eax	/* we fill with 0 */
c6d234
 #else
c6d234
@@ -111,13 +109,11 @@ L(2):	shrl	$2, %ecx	/* convert byte count to longword count */
c6d234
 #if !BZERO_P
c6d234
 	/* Load result (only if used as memset).  */
c6d234
 	movl DEST(%esp), %eax	/* start address of destination is result */
c6d234
-	RETURN_BOUNDED_POINTER (DEST(%esp))
c6d234
 #endif
c6d234
 	popl	%edi
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 #if BZERO_P
c6d234
 	ret
c6d234
 #else
c6d234
diff --git a/sysdeps/i386/i586/mul_1.S b/sysdeps/i386/i586/mul_1.S
c6d234
index 28670a24679d0e7e..c694afb42141b16f 100644
c6d234
--- a/sysdeps/i386/i586/mul_1.S
c6d234
+++ b/sysdeps/i386/i586/mul_1.S
c6d234
@@ -35,7 +35,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__mpn_mul_1))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl	%res_ptr
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -53,12 +52,6 @@ ENTRY (BP_SYM (__mpn_mul_1))
c6d234
 	movl	SIZE(%esp), %size
c6d234
 	movl	S2LIMB(%esp), %s2_limb
c6d234
 	cfi_rel_offset (s2_limb, 0)
c6d234
-#if __BOUNDED_POINTERS__
c6d234
-	shll	$2, %size	/* convert limbs to bytes */
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %size)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%s1_ptr, S1(%esp), %size)
c6d234
-	shrl	$2, %size
c6d234
-#endif
c6d234
 	leal	(%res_ptr,%size,4), %res_ptr
c6d234
 	leal	(%s1_ptr,%size,4), %s1_ptr
c6d234
 	negl	%size
c6d234
@@ -94,7 +87,6 @@ L(oop):	adcl	$0, %ebp
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (res_ptr)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 #undef size
c6d234
 END (BP_SYM (__mpn_mul_1))
c6d234
diff --git a/sysdeps/i386/i586/rshift.S b/sysdeps/i386/i586/rshift.S
c6d234
index caf0986d2f473c74..ce3710391193dbac 100644
c6d234
--- a/sysdeps/i386/i586/rshift.S
c6d234
+++ b/sysdeps/i386/i586/rshift.S
c6d234
@@ -29,7 +29,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__mpn_rshift))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl	%edi
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -48,12 +47,6 @@ ENTRY (BP_SYM (__mpn_rshift))
c6d234
 	movl	SIZE(%esp),%ebx
c6d234
 	cfi_rel_offset (ebx, 0)
c6d234
 	movl	CNT(%esp),%ecx
c6d234
-#if __BOUNDED_POINTERS__
c6d234
-	shll	$2, %ebx		/* convert limbs to bytes */
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ebx)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%esi, S(%esp), %ebx)
c6d234
-	shrl	$2, %ebx
c6d234
-#endif
c6d234
 
c6d234
 /* We can use faster code for shift-by-1 under certain conditions.  */
c6d234
 	cmp	$1,%ecx
c6d234
@@ -152,7 +145,6 @@ L(end2):
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 
c6d234
 /* We loop from least significant end of the arrays, which is only
c6d234
@@ -261,6 +253,5 @@ L(L1):	movl	%edx,(%edi)		/* store last limb */
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (__mpn_rshift))
c6d234
diff --git a/sysdeps/i386/i586/strchr.S b/sysdeps/i386/i586/strchr.S
c6d234
index 4c424763224ca839..35259fb085840180 100644
c6d234
--- a/sysdeps/i386/i586/strchr.S
c6d234
+++ b/sysdeps/i386/i586/strchr.S
c6d234
@@ -43,7 +43,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (strchr))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl %edi		/* Save callee-safe registers.  */
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
@@ -57,7 +56,6 @@ ENTRY (BP_SYM (strchr))
c6d234
 
c6d234
 	movl STR(%esp), %eax
c6d234
 	movl CHR(%esp), %edx
c6d234
-	CHECK_BOUNDS_LOW (%eax, STR(%esp))
c6d234
 
c6d234
 	movl %eax, %edi		/* duplicate string pointer for later */
c6d234
 	cfi_rel_offset (edi, 12)
c6d234
@@ -82,7 +80,7 @@ ENTRY (BP_SYM (strchr))
c6d234
 	jp L(0)			/* exactly two bits set */
c6d234
 
c6d234
 	xorb (%eax), %cl	/* is byte the one we are looking for? */
c6d234
-	jz L(2)			/* yes => return pointer */
c6d234
+	jz L(out)		/* yes => return pointer */
c6d234
 
c6d234
 	xorb %dl, %cl		/* load single byte and test for NUL */
c6d234
 	je L(3)			/* yes => return NULL */
c6d234
@@ -91,7 +89,7 @@ ENTRY (BP_SYM (strchr))
c6d234
 	incl %eax
c6d234
 
c6d234
 	cmpb %cl, %dl		/* is byte == C? */
c6d234
-	je L(2)			/* aligned => return pointer */
c6d234
+	je L(out)		/* aligned => return pointer */
c6d234
 
c6d234
 	cmpb $0, %cl		/* is byte NUL? */
c6d234
 	je L(3)			/* yes => return NULL */
c6d234
@@ -104,7 +102,7 @@ ENTRY (BP_SYM (strchr))
c6d234
 L(0):	movb (%eax), %cl	/* load single byte */
c6d234
 
c6d234
 	cmpb %cl, %dl		/* is byte == C? */
c6d234
-	je L(2)			/* aligned => return pointer */
c6d234
+	je L(out)		/* aligned => return pointer */
c6d234
 
c6d234
 	cmpb $0, %cl		/* is byte NUL? */
c6d234
 	je L(3)			/* yes => return NULL */
c6d234
@@ -274,23 +272,21 @@ L(1):	xorl %ecx, %ebp			/* (word^magic) */
c6d234
 L(5):	subl $4, %eax		/* adjust pointer */
c6d234
 	testb %bl, %bl		/* first byte == C? */
c6d234
 
c6d234
-	jz L(2)			/* yes => return pointer */
c6d234
+	jz L(out)		/* yes => return pointer */
c6d234
 
c6d234
 	incl %eax		/* increment pointer */
c6d234
 	testb %bh, %bh		/* second byte == C? */
c6d234
 
c6d234
-	jz L(2)			/* yes => return pointer */
c6d234
+	jz L(out)		/* yes => return pointer */
c6d234
 
c6d234
 	shrl $16, %ebx		/* make upper bytes accessible */
c6d234
 	incl %eax		/* increment pointer */
c6d234
 
c6d234
 	cmp $0, %bl		/* third byte == C */
c6d234
-	je L(2)			/* yes => return pointer */
c6d234
+	je L(out)		/* yes => return pointer */
c6d234
 
c6d234
 	incl %eax		/* increment pointer */
c6d234
 
c6d234
-L(2):	CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
c6d234
-	RETURN_BOUNDED_POINTER (STR(%esp))
c6d234
 L(out):	popl %ebp		/* restore saved registers */
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (ebp)
c6d234
@@ -305,7 +301,6 @@ L(out):	popl %ebp		/* restore saved registers */
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	RET_PTR
c6d234
 
c6d234
 	cfi_adjust_cfa_offset (16)
c6d234
@@ -318,7 +313,7 @@ L(out):	popl %ebp		/* restore saved registers */
c6d234
 L(4):	subl $4, %eax		/* adjust pointer */
c6d234
 	cmpb %dl, %cl		/* first byte == C? */
c6d234
 
c6d234
-	je L(2)			/* yes => return pointer */
c6d234
+	je L(out)		/* yes => return pointer */
c6d234
 
c6d234
 	cmpb $0, %cl		/* first byte == NUL? */
c6d234
 	je L(3)			/* yes => return NULL */
c6d234
@@ -326,7 +321,7 @@ L(4):	subl $4, %eax		/* adjust pointer */
c6d234
 	incl %eax		/* increment pointer */
c6d234
 
c6d234
 	cmpb %dl, %ch		/* second byte == C? */
c6d234
-	je L(2)			/* yes => return pointer */
c6d234
+	je L(out)		/* yes => return pointer */
c6d234
 
c6d234
 	cmpb $0, %ch		/* second byte == NUL? */
c6d234
 	je L(3)			/* yes => return NULL */
c6d234
@@ -335,7 +330,7 @@ L(4):	subl $4, %eax		/* adjust pointer */
c6d234
 	incl %eax		/* increment pointer */
c6d234
 
c6d234
 	cmpb %dl, %cl		/* third byte == C? */
c6d234
-	je L(2)			/* yes => return pointer */
c6d234
+	je L(out)		/* yes => return pointer */
c6d234
 
c6d234
 	cmpb $0, %cl		/* third byte == NUL? */
c6d234
 	je L(3)			/* yes => return NULL */
c6d234
@@ -344,10 +339,9 @@ L(4):	subl $4, %eax		/* adjust pointer */
c6d234
 
c6d234
 	/* The test four the fourth byte is necessary!  */
c6d234
 	cmpb %dl, %ch		/* fourth byte == C? */
c6d234
-	je L(2)			/* yes => return pointer */
c6d234
+	je L(out)		/* yes => return pointer */
c6d234
 
c6d234
 L(3):	xorl %eax, %eax
c6d234
-	RETURN_NULL_BOUNDED_POINTER
c6d234
 	jmp L(out)
c6d234
 END (BP_SYM (strchr))
c6d234
 
c6d234
diff --git a/sysdeps/i386/i586/strcpy.S b/sysdeps/i386/i586/strcpy.S
c6d234
index 50fc521dd2e28c17..edd21f135d46a732 100644
c6d234
--- a/sysdeps/i386/i586/strcpy.S
c6d234
+++ b/sysdeps/i386/i586/strcpy.S
c6d234
@@ -35,7 +35,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (STRCPY))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl	%edi
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -48,8 +47,6 @@ ENTRY (BP_SYM (STRCPY))
c6d234
 	cfi_rel_offset (edi, 8)
c6d234
 	movl	SRC(%esp), %esi
c6d234
 	cfi_rel_offset (esi, 4)
c6d234
-	CHECK_BOUNDS_LOW (%edi, DEST(%esp))
c6d234
-	CHECK_BOUNDS_LOW (%esi, SRC(%esp))
c6d234
 
c6d234
 	xorl	%eax, %eax
c6d234
 	leal	-1(%esi), %ecx
c6d234
@@ -158,7 +155,6 @@ L(end2):
c6d234
 #else
c6d234
 	movl	DEST(%esp), %eax
c6d234
 #endif
c6d234
-	RETURN_BOUNDED_POINTER (DEST(%esp))
c6d234
 	popl	%ebx
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (ebx)
c6d234
@@ -169,7 +165,6 @@ L(end2):
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	RET_PTR
c6d234
 END (BP_SYM (STRCPY))
c6d234
 #ifndef USE_AS_STPCPY
c6d234
diff --git a/sysdeps/i386/i586/strlen.S b/sysdeps/i386/i586/strlen.S
c6d234
index 29d81d6b8de51b87..323cb950790174e6 100644
c6d234
--- a/sysdeps/i386/i586/strlen.S
c6d234
+++ b/sysdeps/i386/i586/strlen.S
c6d234
@@ -41,10 +41,8 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (strlen))
c6d234
-	ENTER
c6d234
 
c6d234
 	movl STR(%esp), %eax
c6d234
-	CHECK_BOUNDS_LOW (%eax, STR(%esp))
c6d234
 	movl $3, %edx		/* load mask (= 3) */
c6d234
 
c6d234
 	andl %eax, %edx		/* separate last two bits of address */
c6d234
@@ -178,11 +176,9 @@ L(3):	subl $4, %eax		/* correct too early pointer increment */
c6d234
 
c6d234
 	incl %eax		/* increment pointer */
c6d234
 
c6d234
-L(2):	CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
c6d234
-	subl STR(%esp), %eax	/* now compute the length as difference
c6d234
+L(2):	subl STR(%esp), %eax	/* now compute the length as difference
c6d234
 				   between start and terminating NUL
c6d234
 				   character */
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (strlen))
c6d234
 libc_hidden_builtin_def (strlen)
c6d234
diff --git a/sysdeps/i386/i586/sub_n.S b/sysdeps/i386/i586/sub_n.S
c6d234
index 6ac1447be31f84f0..07bddca9a8ce7fe8 100644
c6d234
--- a/sysdeps/i386/i586/sub_n.S
c6d234
+++ b/sysdeps/i386/i586/sub_n.S
c6d234
@@ -30,7 +30,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__mpn_sub_n))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl	%edi
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -48,13 +47,6 @@ ENTRY (BP_SYM (__mpn_sub_n))
c6d234
 	movl	S2(%esp),%ebx
c6d234
 	cfi_rel_offset (ebx, 0)
c6d234
 	movl	SIZE(%esp),%ecx
c6d234
-#if __BOUNDED_POINTERS__
c6d234
-	shll	$2, %ecx	/* convert limbs to bytes */
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ecx)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%esi, S1(%esp), %ecx)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%ebx, S2(%esp), %ecx)
c6d234
-	shrl	$2, %ecx
c6d234
-#endif
c6d234
 	movl	(%ebx),%ebp
c6d234
 	cfi_rel_offset (ebp, 4)
c6d234
 
c6d234
@@ -149,6 +141,5 @@ L(end2):
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (__mpn_sub_n))
c6d234
diff --git a/sysdeps/i386/i586/submul_1.S b/sysdeps/i386/i586/submul_1.S
c6d234
index b08902562fbc8807..a028d7089798ceb7 100644
c6d234
--- a/sysdeps/i386/i586/submul_1.S
c6d234
+++ b/sysdeps/i386/i586/submul_1.S
c6d234
@@ -35,7 +35,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__mpn_submul_1))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl	%res_ptr
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -53,12 +52,6 @@ ENTRY (BP_SYM (__mpn_submul_1))
c6d234
 	movl	SIZE(%esp), %size
c6d234
 	movl	S2LIMB(%esp), %s2_limb
c6d234
 	cfi_rel_offset (s2_limb, 0)
c6d234
-#if __BOUNDED_POINTERS__
c6d234
-	shll	$2, %sizeP	/* convert limbs to bytes */
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %sizeP)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%s1_ptr, S1(%esp), %sizeP)
c6d234
-	shrl	$2, %sizeP
c6d234
-#endif
c6d234
 	leal	(%res_ptr,%size,4), %res_ptr
c6d234
 	leal	(%s1_ptr,%size,4), %s1_ptr
c6d234
 	negl	%size
c6d234
@@ -98,7 +91,6 @@ L(oop):	adcl	$0, %ebp
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (res_ptr)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 #undef size
c6d234
 END (BP_SYM (__mpn_submul_1))
c6d234
diff --git a/sysdeps/i386/i686/add_n.S b/sysdeps/i386/i686/add_n.S
c6d234
index ff4c66db354d6100..f645a21c1cad52ab 100644
c6d234
--- a/sysdeps/i386/i686/add_n.S
c6d234
+++ b/sysdeps/i386/i686/add_n.S
c6d234
@@ -34,7 +34,6 @@ L(1):	addl    (%esp), %eax
c6d234
 	ret
c6d234
 #endif
c6d234
 ENTRY (BP_SYM (__mpn_add_n))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl %edi
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -47,13 +46,6 @@ ENTRY (BP_SYM (__mpn_add_n))
c6d234
 	cfi_rel_offset (esi, 0)
c6d234
 	movl	S2(%esp),%edx
c6d234
 	movl	SIZE(%esp),%ecx
c6d234
-#if __BOUNDED_POINTERS__
c6d234
-	shll	$2, %ecx	/* convert limbs to bytes */
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ecx)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%esi, S1(%esp), %ecx)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%edx, S2(%esp), %ecx)
c6d234
-	shrl	$2, %ecx
c6d234
-#endif
c6d234
 	movl	%ecx,%eax
c6d234
 	shrl	$3,%ecx			/* compute count for unrolled loop */
c6d234
 	negl	%eax
c6d234
@@ -116,6 +108,5 @@ L(oop):	movl	(%esi),%eax
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (__mpn_add_n))
c6d234
diff --git a/sysdeps/i386/i686/memcmp.S b/sysdeps/i386/i686/memcmp.S
c6d234
index 9eab978983695f20..f45a22400242cb02 100644
c6d234
--- a/sysdeps/i386/i686/memcmp.S
c6d234
+++ b/sysdeps/i386/i686/memcmp.S
c6d234
@@ -26,9 +26,9 @@
c6d234
 #define BLK2		BLK1+PTR_SIZE
c6d234
 #define LEN		BLK2+PTR_SIZE
c6d234
 #define ENTRANCE	pushl %ebx; cfi_adjust_cfa_offset (4); \
c6d234
-			cfi_rel_offset (ebx, 0); ENTER
c6d234
+			cfi_rel_offset (ebx, 0)
c6d234
 #define RETURN		popl %ebx; cfi_adjust_cfa_offset (-4); \
c6d234
-			cfi_restore (ebx); LEAVE; ret
c6d234
+			cfi_restore (ebx); ret
c6d234
 
c6d234
 /* Load an entry in a jump table into EBX.  TABLE is a jump table
c6d234
    with relative offsets.  INDEX is a register contains the index
c6d234
diff --git a/sysdeps/i386/i686/memcpy.S b/sysdeps/i386/i686/memcpy.S
c6d234
index a720b02c3b2d5ed8..68f7e40031b1b4db 100644
c6d234
--- a/sysdeps/i386/i686/memcpy.S
c6d234
+++ b/sysdeps/i386/i686/memcpy.S
c6d234
@@ -38,7 +38,6 @@ ENTRY_CHK (__memcpy_chk)
c6d234
 END_CHK (__memcpy_chk)
c6d234
 #endif
c6d234
 ENTRY (BP_SYM (memcpy))
c6d234
-	ENTER
c6d234
 
c6d234
 	movl	%edi, %eax
c6d234
 	movl	DEST(%esp), %edi
c6d234
@@ -81,9 +80,7 @@ ENTRY (BP_SYM (memcpy))
c6d234
 .Lend:	movl	%eax, %edi
c6d234
 	movl	%edx, %esi
c6d234
 	movl	DEST(%esp), %eax
c6d234
-	RETURN_BOUNDED_POINTER (DEST(%esp))
c6d234
 
c6d234
-	LEAVE
c6d234
 	RET_PTR
c6d234
 
c6d234
 	/* When we come here the pointers do not have the same
c6d234
diff --git a/sysdeps/i386/i686/memmove.S b/sysdeps/i386/i686/memmove.S
c6d234
index 2de2a193eab59da7..683f45fc44fc4f4f 100644
c6d234
--- a/sysdeps/i386/i686/memmove.S
c6d234
+++ b/sysdeps/i386/i686/memmove.S
c6d234
@@ -47,7 +47,6 @@ END_CHK (__memmove_chk)
c6d234
 #endif
c6d234
 
c6d234
 ENTRY (BP_SYM (memmove))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl	%edi
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -58,8 +57,6 @@ ENTRY (BP_SYM (memmove))
c6d234
 	movl	%esi, %edx
c6d234
 	movl	SRC(%esp), %esi
c6d234
 	cfi_register (esi, edx)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%edi, DEST(%esp), %ecx)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%esi, SRC(%esp), %ecx)
c6d234
 
c6d234
 	movl	%edi, %eax
c6d234
 	subl	%esi, %eax
c6d234
@@ -79,14 +76,12 @@ ENTRY (BP_SYM (memmove))
c6d234
 	cfi_restore (esi)
c6d234
 #ifndef USE_AS_BCOPY
c6d234
 	movl	DEST(%esp), %eax
c6d234
-	RETURN_BOUNDED_POINTER (DEST(%esp))
c6d234
 #endif
c6d234
 
c6d234
 	popl	%edi
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	RET_PTR
c6d234
 
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -113,7 +108,6 @@ ENTRY (BP_SYM (memmove))
c6d234
 	cfi_restore (esi)
c6d234
 #ifndef USE_AS_BCOPY
c6d234
 	movl	DEST(%esp), %eax
c6d234
-	RETURN_BOUNDED_POINTER (DEST(%esp))
c6d234
 #endif
c6d234
 
c6d234
 	cld
c6d234
@@ -121,7 +115,6 @@ ENTRY (BP_SYM (memmove))
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	RET_PTR
c6d234
 END (BP_SYM (memmove))
c6d234
 #ifndef USE_AS_BCOPY
c6d234
diff --git a/sysdeps/i386/i686/mempcpy.S b/sysdeps/i386/i686/mempcpy.S
c6d234
index 402c415e06b3c80f..facff870e3f8f9de 100644
c6d234
--- a/sysdeps/i386/i686/mempcpy.S
c6d234
+++ b/sysdeps/i386/i686/mempcpy.S
c6d234
@@ -38,17 +38,14 @@ ENTRY_CHK (__mempcpy_chk)
c6d234
 END_CHK (__mempcpy_chk)
c6d234
 #endif
c6d234
 ENTRY (BP_SYM (__mempcpy))
c6d234
-	ENTER
c6d234
 
c6d234
 	movl	LEN(%esp), %ecx
c6d234
 	movl	%edi, %eax
c6d234
 	cfi_register (edi, eax)
c6d234
 	movl	DEST(%esp), %edi
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%edi, DEST(%esp), %ecx)
c6d234
 	movl	%esi, %edx
c6d234
 	cfi_register (esi, edx)
c6d234
 	movl	SRC(%esp), %esi
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%esi, SRC(%esp), %ecx)
c6d234
 	cld
c6d234
 	shrl	$1, %ecx
c6d234
 	jnc	1f
c6d234
@@ -62,9 +59,7 @@ ENTRY (BP_SYM (__mempcpy))
c6d234
 	cfi_restore (edi)
c6d234
 	movl	%edx, %esi
c6d234
 	cfi_restore (esi)
c6d234
-	RETURN_BOUNDED_POINTER (DEST(%esp))
c6d234
 
c6d234
-	LEAVE
c6d234
 	RET_PTR
c6d234
 END (BP_SYM (__mempcpy))
c6d234
 libc_hidden_def (BP_SYM (__mempcpy))
c6d234
diff --git a/sysdeps/i386/i686/memset.S b/sysdeps/i386/i686/memset.S
c6d234
index 429c5386ec0a395b..7db25497117b6258 100644
c6d234
--- a/sysdeps/i386/i686/memset.S
c6d234
+++ b/sysdeps/i386/i686/memset.S
c6d234
@@ -46,14 +46,12 @@ ENTRY_CHK (__memset_chk)
c6d234
 END_CHK (__memset_chk)
c6d234
 #endif
c6d234
 ENTRY (BP_SYM (memset))
c6d234
-	ENTER
c6d234
 
c6d234
 	cld
c6d234
 	pushl	%edi
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
 	movl	DEST(%esp), %edx
c6d234
 	movl	LEN(%esp), %ecx
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%edx, DEST(%esp), %ecx)
c6d234
 #if BZERO_P
c6d234
 	xorl	%eax, %eax	/* fill with 0 */
c6d234
 #else
c6d234
@@ -90,13 +88,11 @@ ENTRY (BP_SYM (memset))
c6d234
 1:
c6d234
 #if !BZERO_P
c6d234
 	movl DEST(%esp), %eax	/* start address of destination is result */
c6d234
-	RETURN_BOUNDED_POINTER (DEST(%esp))
c6d234
 #endif
c6d234
 	popl	%edi
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 #if BZERO_P
c6d234
 	ret
c6d234
 #else
c6d234
diff --git a/sysdeps/i386/i686/strcmp.S b/sysdeps/i386/i686/strcmp.S
c6d234
index 3dcc289bfc172db5..0423aac297358c77 100644
c6d234
--- a/sysdeps/i386/i686/strcmp.S
c6d234
+++ b/sysdeps/i386/i686/strcmp.S
c6d234
@@ -28,12 +28,9 @@
c6d234
 
c6d234
         .text
c6d234
 ENTRY (BP_SYM (strcmp))
c6d234
-	ENTER
c6d234
 
c6d234
 	movl	STR1(%esp), %ecx
c6d234
 	movl	STR2(%esp), %edx
c6d234
-	CHECK_BOUNDS_LOW (%ecx, STR1(%esp))
c6d234
-	CHECK_BOUNDS_LOW (%edx, STR2(%esp))
c6d234
 
c6d234
 L(oop):	movb	(%ecx), %al
c6d234
 	cmpb	(%edx), %al
c6d234
@@ -46,26 +43,12 @@ L(oop):	movb	(%ecx), %al
c6d234
 	xorl	%eax, %eax
c6d234
 	/* when strings are equal, pointers rest one beyond
c6d234
 	   the end of the NUL terminators.  */
c6d234
-	CHECK_BOUNDS_HIGH (%ecx, STR1(%esp), jbe)
c6d234
-	CHECK_BOUNDS_HIGH (%edx, STR2(%esp), jbe)
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 
c6d234
-#ifndef __BOUNDED_POINTERS__
c6d234
 L(neq):	movl	$1, %eax
c6d234
 	movl	$-1, %ecx
c6d234
 	cmovbl	%ecx, %eax
c6d234
-#else
c6d234
-L(neq):	movl	$1, %eax
c6d234
-	ja	L(chk)
c6d234
-	negl	%eax
c6d234
-	/* When strings differ, pointers rest on
c6d234
-	   the unequal characters.  */
c6d234
-L(chk):	CHECK_BOUNDS_HIGH (%ecx, STR1(%esp), jb)
c6d234
-	CHECK_BOUNDS_HIGH (%edx, STR2(%esp), jb)
c6d234
-#endif
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (strcmp))
c6d234
 libc_hidden_builtin_def (strcmp)
c6d234
diff --git a/sysdeps/i386/i686/strtok.S b/sysdeps/i386/i686/strtok.S
c6d234
index 5589ae5360918944..372352e32a84f81d 100644
c6d234
--- a/sysdeps/i386/i686/strtok.S
c6d234
+++ b/sysdeps/i386/i686/strtok.S
c6d234
@@ -46,11 +46,7 @@
c6d234
 	.type save_ptr, @object
c6d234
 	.size save_ptr, 4
c6d234
 save_ptr:
c6d234
-# if __BOUNDED_POINTERS__
c6d234
-	.space 12
c6d234
-# else
c6d234
 	.space 4
c6d234
-# endif
c6d234
 
c6d234
 # ifdef PIC
c6d234
 #  define SAVE_PTR save_ptr@GOTOFF(%ebx)
c6d234
@@ -81,7 +77,6 @@ save_ptr:
c6d234
 #endif
c6d234
 
c6d234
 ENTRY (BP_SYM (FUNCTION))
c6d234
-	ENTER
c6d234
 
c6d234
 #if !defined USE_AS_STRTOK_R && defined PIC
c6d234
 	pushl %ebx			/* Save PIC register.  */
c6d234
@@ -127,23 +122,7 @@ ENTRY (BP_SYM (FUNCTION))
c6d234
 	cmove %eax, %edx
c6d234
 	testl %edx, %edx
c6d234
 	jz L(returnNULL)
c6d234
-#if __BOUNDED_POINTERS__
c6d234
-# ifdef USE_AS_STRTOK_R
c6d234
-	movl SAVE(%esp), %ecx	/* borrow %ecx for a moment */
c6d234
-# endif
c6d234
-	je L(0)
c6d234
-	/* Save bounds of incoming non-NULL STR into save area.  */
c6d234
-	movl 4+STR(%esp), %eax
c6d234
-	movl %eax, 4+SAVE_PTR
c6d234
-	movl 8+STR(%esp), %eax
c6d234
-	movl %eax, 8+SAVE_PTR
c6d234
-L(0):	CHECK_BOUNDS_LOW (%edx, SAVE_PTR)
c6d234
-# ifdef USE_AS_STRTOK_R
c6d234
-	xorl %ecx, %ecx		/* restore %ecx to zero */
c6d234
-# endif
c6d234
-#endif
c6d234
 	movl DELIM(%esp), %eax		/* Get start of delimiter set.  */
c6d234
-	CHECK_BOUNDS_LOW (%eax, DELIM(%esp))
c6d234
 
c6d234
 /* For understanding the following code remember that %ecx == 0 now.
c6d234
    Although all the following instruction only modify %cl we always
c6d234
@@ -151,17 +130,17 @@ L(0):	CHECK_BOUNDS_LOW (%edx, SAVE_PTR)
c6d234
 
c6d234
 L(2):	movb (%eax), %cl	/* get byte from stopset */
c6d234
 	testb %cl, %cl		/* is NUL char? */
c6d234
-	jz L(1_1)		/* yes => start compare loop */
c6d234
+	jz L(1)			/* yes => start compare loop */
c6d234
 	movb %cl, (%esp,%ecx)	/* set corresponding byte in stopset table */
c6d234
 
c6d234
 	movb 1(%eax), %cl	/* get byte from stopset */
c6d234
 	testb $0xff, %cl	/* is NUL char? */
c6d234
-	jz L(1_2)		/* yes => start compare loop */
c6d234
+	jz L(1)			/* yes => start compare loop */
c6d234
 	movb %cl, (%esp,%ecx)	/* set corresponding byte in stopset table */
c6d234
 
c6d234
 	movb 2(%eax), %cl	/* get byte from stopset */
c6d234
 	testb $0xff, %cl	/* is NUL char? */
c6d234
-	jz L(1_3)		/* yes => start compare loop */
c6d234
+	jz L(1)			/* yes => start compare loop */
c6d234
 	movb %cl, (%esp,%ecx)	/* set corresponding byte in stopset table */
c6d234
 
c6d234
 	movb 3(%eax), %cl	/* get byte from stopset */
c6d234
@@ -170,16 +149,7 @@ L(2):	movb (%eax), %cl	/* get byte from stopset */
c6d234
 	testb $0xff, %cl	/* is NUL char? */
c6d234
 	jnz L(2)		/* no => process next dword from stopset */
c6d234
 
c6d234
-#if __BOUNDED_POINTERS__
c6d234
-	jmp L(1_0)		/* pointer is correct for bounds check */
c6d234
-L(1_3):	incl %eax		/* adjust pointer for bounds check */
c6d234
-L(1_2):	incl %eax		/* ditto */
c6d234
-L(1_1):	incl %eax		/* ditto */
c6d234
-L(1_0):	CHECK_BOUNDS_HIGH (%eax, DELIM(%esp), jbe)
c6d234
-#else
c6d234
-L(1_3):; L(1_2):; L(1_1):	/* fall through */
c6d234
-#endif
c6d234
-	leal -4(%edx), %eax	/* prepare loop */
c6d234
+L(1):	leal -4(%edx), %eax	/* prepare loop */
c6d234
 
c6d234
 	/* We use a neat trick for the following loop.  Normally we would
c6d234
 	   have to test for two termination conditions
c6d234
@@ -253,8 +223,6 @@ L(8):	cmpl %eax, %edx
c6d234
 	movl SAVE(%esp), %ecx
c6d234
 #endif
c6d234
 	movl %edx, SAVE_PTR
c6d234
-	CHECK_BOUNDS_HIGH (%edx, SAVE_PTR, jb)
c6d234
-	RETURN_BOUNDED_POINTER (SAVE_PTR)
c6d234
 
c6d234
 L(epilogue):
c6d234
 	/* Remove the stopset table.  */
c6d234
@@ -265,7 +233,6 @@ L(epilogue):
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (ebx)
c6d234
 #endif
c6d234
-	LEAVE
c6d234
 	RET_PTR
c6d234
 
c6d234
 L(returnNULL):
c6d234
@@ -274,7 +241,6 @@ L(returnNULL):
c6d234
 	movl SAVE(%esp), %ecx
c6d234
 #endif
c6d234
 	movl %edx, SAVE_PTR
c6d234
-	RETURN_NULL_BOUNDED_POINTER
c6d234
 	jmp L(epilogue)
c6d234
 
c6d234
 END (BP_SYM (FUNCTION))
c6d234
diff --git a/sysdeps/i386/lshift.S b/sysdeps/i386/lshift.S
c6d234
index eb2e6ffd3b92ba97..b40d4ddd3a0e7fd7 100644
c6d234
--- a/sysdeps/i386/lshift.S
c6d234
+++ b/sysdeps/i386/lshift.S
c6d234
@@ -29,7 +29,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__mpn_lshift))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl	%edi
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -44,12 +43,6 @@ ENTRY (BP_SYM (__mpn_lshift))
c6d234
 	cfi_rel_offset (esi, 4)
c6d234
 	movl	SIZE(%esp),%edx
c6d234
 	movl	CNT(%esp),%ecx
c6d234
-#if __BOUNDED_POINTERS__
c6d234
-	shll	$2, %edx		/* convert limbs to bytes */
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %edx)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%esi, S(%esp), %edx)
c6d234
-	shrl	$2, %edx
c6d234
-#endif
c6d234
 	subl	$4,%esi			/* adjust s_ptr */
c6d234
 
c6d234
 	movl	(%esi,%edx,4),%ebx	/* read most significant limb */
c6d234
@@ -92,7 +85,6 @@ L(1):	movl	(%esi,%edx,4),%eax
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 
c6d234
 	cfi_restore_state
c6d234
@@ -109,6 +101,5 @@ L(end):	shll	%cl,%ebx		/* compute least significant limb */
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (__mpn_lshift))
c6d234
diff --git a/sysdeps/i386/memchr.S b/sysdeps/i386/memchr.S
c6d234
index a3427c17dafd0cbf..88651e527650b26b 100644
c6d234
--- a/sysdeps/i386/memchr.S
c6d234
+++ b/sysdeps/i386/memchr.S
c6d234
@@ -39,7 +39,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__memchr))
c6d234
-	ENTER
c6d234
 
c6d234
 	/* Save callee-safe registers used in this function.  */
c6d234
 	pushl %esi
c6d234
@@ -53,7 +52,6 @@ ENTRY (BP_SYM (__memchr))
c6d234
 	movl CHR(%esp), %edx	/* c: byte we are looking for.  */
c6d234
 	movl LEN(%esp), %esi	/* len: length of memory block.  */
c6d234
 	cfi_rel_offset (esi, 4)
c6d234
-	CHECK_BOUNDS_LOW (%eax, STR(%esp))
c6d234
 
c6d234
 	/* If my must not test more than three characters test
c6d234
 	   them one by one.  This is especially true for 0.  */
c6d234
@@ -312,23 +310,13 @@ L(8):	testb %cl, %cl		/* test first byte in dword */
c6d234
 	incl %eax		/* increment source pointer */
c6d234
 
c6d234
 	/* No further test needed we we know it is one of the four bytes.  */
c6d234
-L(9):
c6d234
-#if __BOUNDED_POINTERS__
c6d234
-	CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
c6d234
-	/* If RTN pointer is phony, don't copy return value into it.  */
c6d234
-	movl RTN(%esp), %ecx
c6d234
-	testl %ecx, %ecx
c6d234
-	jz L(pop)
c6d234
-	RETURN_BOUNDED_POINTER (STR(%esp))
c6d234
-#endif
c6d234
-L(pop):	popl %edi		/* pop saved registers */
c6d234
+L(9):	popl %edi		/* pop saved registers */
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 	popl %esi
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (esi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	RET_PTR
c6d234
 END (BP_SYM (__memchr))
c6d234
 
c6d234
diff --git a/sysdeps/i386/memcmp.S b/sysdeps/i386/memcmp.S
c6d234
index e21e4916e6151c28..1d5535a29dd12b33 100644
c6d234
--- a/sysdeps/i386/memcmp.S
c6d234
+++ b/sysdeps/i386/memcmp.S
c6d234
@@ -28,7 +28,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (memcmp))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl %esi		/* Save callee-safe registers.  */
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -40,8 +39,6 @@ ENTRY (BP_SYM (memcmp))
c6d234
 	cfi_rel_offset (esi, 0)
c6d234
 	movl BLK2(%esp), %edi
c6d234
 	movl LEN(%esp), %ecx
c6d234
-	CHECK_BOUNDS_LOW (%esi, BLK1(%esp))
c6d234
-	CHECK_BOUNDS_LOW (%edi, BLK2(%esp))
c6d234
 
c6d234
 	cld			/* Set direction of comparison.  */
c6d234
 
c6d234
@@ -64,15 +61,12 @@ ENTRY (BP_SYM (memcmp))
c6d234
 	   Note that the following operation does not change 0xffffffff.  */
c6d234
 	orb $1, %al		/* Change 0 to 1.  */
c6d234
 
c6d234
-L(1):	CHECK_BOUNDS_HIGH (%esi, BLK1(%esp), jbe)
c6d234
-	CHECK_BOUNDS_HIGH (%edi, BLK2(%esp), jbe)
c6d234
-	popl %esi		/* Restore registers.  */
c6d234
+L(1):	popl %esi		/* Restore registers.  */
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (esi)
c6d234
 	movl %edx, %edi
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (memcmp))
c6d234
 
c6d234
diff --git a/sysdeps/i386/mul_1.S b/sysdeps/i386/mul_1.S
c6d234
index c599a96eade313e4..71f8dceb188057d9 100644
c6d234
--- a/sysdeps/i386/mul_1.S
c6d234
+++ b/sysdeps/i386/mul_1.S
c6d234
@@ -35,7 +35,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__mpn_mul_1))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl	%res_ptr
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -53,12 +52,6 @@ ENTRY (BP_SYM (__mpn_mul_1))
c6d234
 	movl	SIZE(%esp), %size
c6d234
 	movl	S2LIMB(%esp), %s2_limb
c6d234
 	cfi_rel_offset (s2_limb, 0)
c6d234
-#if __BOUNDED_POINTERS__
c6d234
-	shll	$2, %size	/* convert limbs to bytes */
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %size)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%s1_ptr, S1(%esp), %size)
c6d234
-	shrl	$2, %size
c6d234
-#endif
c6d234
 	leal	(%res_ptr,%size,4), %res_ptr
c6d234
 	leal	(%s1_ptr,%size,4), %s1_ptr
c6d234
 	negl	%size
c6d234
@@ -90,7 +83,6 @@ L(oop):
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (res_ptr)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 #undef size
c6d234
 END (BP_SYM (__mpn_mul_1))
c6d234
diff --git a/sysdeps/i386/rawmemchr.S b/sysdeps/i386/rawmemchr.S
c6d234
index 7479e3bd75b9d4a0..3ce497485e439c02 100644
c6d234
--- a/sysdeps/i386/rawmemchr.S
c6d234
+++ b/sysdeps/i386/rawmemchr.S
c6d234
@@ -38,7 +38,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__rawmemchr))
c6d234
-	ENTER
c6d234
 
c6d234
 	/* Save callee-safe register used in this function.  */
c6d234
 	pushl %edi
c6d234
@@ -48,7 +47,6 @@ ENTRY (BP_SYM (__rawmemchr))
c6d234
 	/* Load parameters into registers.  */
c6d234
 	movl STR(%esp), %eax
c6d234
 	movl CHR(%esp), %edx
c6d234
-	CHECK_BOUNDS_LOW (%eax, STR(%esp))
c6d234
 
c6d234
 	/* At the moment %edx contains C.  What we need for the
c6d234
 	   algorithm is C in all bytes of the dword.  Avoid
c6d234
@@ -215,13 +213,10 @@ L(8):	testb %cl, %cl		/* test first byte in dword */
c6d234
 	/* No further test needed we we know it is one of the four bytes.  */
c6d234
 
c6d234
 L(9):
c6d234
-	CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
c6d234
-	RETURN_BOUNDED_POINTER (STR(%esp))
c6d234
 	popl %edi		/* pop saved register */
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	RET_PTR
c6d234
 END (BP_SYM (__rawmemchr))
c6d234
 
c6d234
diff --git a/sysdeps/i386/rshift.S b/sysdeps/i386/rshift.S
c6d234
index c9ec570979f5b555..48f7c4f89dc408d3 100644
c6d234
--- a/sysdeps/i386/rshift.S
c6d234
+++ b/sysdeps/i386/rshift.S
c6d234
@@ -29,7 +29,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__mpn_rshift))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl	%edi
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -44,12 +43,6 @@ ENTRY (BP_SYM (__mpn_rshift))
c6d234
 	cfi_rel_offset (esi, 4)
c6d234
 	movl	SIZE(%esp),%edx
c6d234
 	movl	CNT(%esp),%ecx
c6d234
-#if __BOUNDED_POINTERS__
c6d234
-	shll	$2, %edx		/* convert limbs to bytes */
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %edx)
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%esi, S(%esp), %edx)
c6d234
-	shrl	$2, %edx
c6d234
-#endif
c6d234
 	leal	-4(%edi,%edx,4),%edi
c6d234
 	leal	(%esi,%edx,4),%esi
c6d234
 	negl	%edx
c6d234
@@ -94,7 +87,6 @@ L(1):	movl	(%esi,%edx,4),%eax
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 
c6d234
 	cfi_restore_state
c6d234
@@ -111,6 +103,5 @@ L(end):	shrl	%cl,%ebx		/* compute most significant limb */
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	ret
c6d234
 END (BP_SYM (__mpn_rshift))
c6d234
diff --git a/sysdeps/i386/setjmp.S b/sysdeps/i386/setjmp.S
c6d234
index 518f3c30c01893b5..8e7a4df25bd2ff8d 100644
c6d234
--- a/sysdeps/i386/setjmp.S
c6d234
+++ b/sysdeps/i386/setjmp.S
c6d234
@@ -28,10 +28,8 @@
c6d234
 #define SIGMSK	JMPBUF+PTR_SIZE
c6d234
 
c6d234
 ENTRY (BP_SYM (__sigsetjmp))
c6d234
-	ENTER
c6d234
 
c6d234
 	movl JMPBUF(%esp), %eax
c6d234
-	CHECK_BOUNDS_BOTH_WIDE (%eax, JMPBUF(%esp), $JB_SIZE)
c6d234
 
c6d234
      	/* Save registers.  */
c6d234
 	movl %ebx, (JB_BX*4)(%eax)
c6d234
@@ -48,7 +46,6 @@ ENTRY (BP_SYM (__sigsetjmp))
c6d234
 	PTR_MANGLE (%ecx)
c6d234
 #endif
c6d234
      	movl %ecx, (JB_PC*4)(%eax)
c6d234
-	LEAVE /* pop frame pointer to prepare for tail-call.  */
c6d234
 	movl %ebp, (JB_BP*4)(%eax) /* Save caller's frame pointer.  */
c6d234
 
c6d234
 #if IS_IN (rtld)
c6d234
diff --git a/sysdeps/i386/stpcpy.S b/sysdeps/i386/stpcpy.S
c6d234
index 472c315ce7a1ad00..3a01be3174ef0863 100644
c6d234
--- a/sysdeps/i386/stpcpy.S
c6d234
+++ b/sysdeps/i386/stpcpy.S
c6d234
@@ -34,12 +34,9 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__stpcpy))
c6d234
-	ENTER
c6d234
 
c6d234
 	movl DEST(%esp), %eax
c6d234
 	movl SRC(%esp), %ecx
c6d234
-	CHECK_BOUNDS_LOW (%eax, DEST(%esp))
c6d234
-	CHECK_BOUNDS_LOW (%ecx, SRC(%esp))
c6d234
 	subl %eax, %ecx		/* magic: reduce number of loop variants
c6d234
 				   to one using addressing mode */
c6d234
 
c6d234
@@ -85,10 +82,7 @@ L(1):	addl $4, %eax		/* increment loop counter */
c6d234
 L(4):	incl %eax
c6d234
 L(3):	incl %eax
c6d234
 L(2):
c6d234
-	CHECK_BOUNDS_HIGH (%eax, DEST(%esp), jb)
c6d234
-	RETURN_BOUNDED_POINTER (DEST(%esp))
c6d234
 
c6d234
-	LEAVE
c6d234
 	RET_PTR
c6d234
 END (BP_SYM (__stpcpy))
c6d234
 
c6d234
diff --git a/sysdeps/i386/stpncpy.S b/sysdeps/i386/stpncpy.S
c6d234
index d74a42f188d8d849..7f3ca8c3b775b42f 100644
c6d234
--- a/sysdeps/i386/stpncpy.S
c6d234
+++ b/sysdeps/i386/stpncpy.S
c6d234
@@ -36,7 +36,6 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (__stpncpy))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl %esi
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -45,8 +44,6 @@ ENTRY (BP_SYM (__stpncpy))
c6d234
 	movl SRC(%esp), %esi
c6d234
 	cfi_rel_offset (esi, 0)
c6d234
 	movl LEN(%esp), %ecx
c6d234
-	CHECK_BOUNDS_LOW (%eax, DEST(%esp))
c6d234
-	CHECK_BOUNDS_LOW (%esi, SRC(%esp))
c6d234
 
c6d234
 	subl %eax, %esi		/* magic: reduce number of loop variants
c6d234
 				   to one using addressing mode */
c6d234
@@ -141,18 +138,10 @@ L(8):
c6d234
 L(3):	decl %ecx		/* all bytes written? */
c6d234
 	jnz L(8)		/* no, then again */
c6d234
 
c6d234
-L(9):
c6d234
-#if __BOUNDED_POINTERS__
c6d234
-	addl %eax, %esi		/* undo magic: %esi now points beyond end of SRC */
c6d234
-	CHECK_BOUNDS_HIGH (%esi, SRC(%esp), jbe)
c6d234
-	CHECK_BOUNDS_HIGH (%eax, DEST(%esp), jbe)
c6d234
-	RETURN_BOUNDED_POINTER (DEST(%esp))
c6d234
-#endif
c6d234
-	popl %esi		/* restore saved register content */
c6d234
+L(9):	popl %esi		/* restore saved register content */
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (esi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	RET_PTR
c6d234
 END (BP_SYM (__stpncpy))
c6d234
 
c6d234
diff --git a/sysdeps/i386/strchr.S b/sysdeps/i386/strchr.S
c6d234
index 80a7bd825df55e87..aff95b9b7ee4c35c 100644
c6d234
--- a/sysdeps/i386/strchr.S
c6d234
+++ b/sysdeps/i386/strchr.S
c6d234
@@ -32,14 +32,12 @@
c6d234
 
c6d234
 	.text
c6d234
 ENTRY (BP_SYM (strchr))
c6d234
-	ENTER
c6d234
 
c6d234
 	pushl %edi		/* Save callee-safe registers used here.  */
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
 	cfi_rel_offset (edi, 0)
c6d234
 	movl STR(%esp), %eax
c6d234
 	movl CHR(%esp), %edx
c6d234
-	CHECK_BOUNDS_LOW (%eax, STR(%esp))
c6d234
 
c6d234
 	/* At the moment %edx contains C.  What we need for the
c6d234
 	   algorithm is C in all bytes of the dword.  Avoid
c6d234
@@ -244,12 +242,10 @@ L(11):	movl (%eax), %ecx	/* get word (= 4 bytes) in question */
c6d234
 
c6d234
 L(2):	/* Return NULL.  */
c6d234
 	xorl %eax, %eax
c6d234
-	RETURN_NULL_BOUNDED_POINTER
c6d234
 	popl %edi		/* restore saved register content */
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	RET_PTR
c6d234
 
c6d234
 	cfi_adjust_cfa_offset (4)
c6d234
@@ -286,13 +282,10 @@ L(7):	testb %cl, %cl		/* is first byte C? */
c6d234
 	incl %eax
c6d234
 
c6d234
 L(6):
c6d234
-	CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
c6d234
-	RETURN_BOUNDED_POINTER (STR(%esp))
c6d234
 	popl %edi		/* restore saved register content */
c6d234
 	cfi_adjust_cfa_offset (-4)
c6d234
 	cfi_restore (edi)
c6d234
 
c6d234
-	LEAVE
c6d234
 	RET_PTR
c6d234
 END (BP_SYM (strchr))
c6d234
 
c6d234
diff --git a/sysdeps/i386/strchrnul.S b/sysdeps/i386/strchrnul.S
index 65b950008f970a12..2aa7d82cc8af1dfc 100644
--- a/sysdeps/i386/strchrnul.S
+++ b/sysdeps/i386/strchrnul.S
@@ -32,7 +32,6 @@
 
 	.text
 ENTRY (BP_SYM (__strchrnul))
-	ENTER
 
 	pushl %edi		/* Save callee-safe registers used here.  */
 	cfi_adjust_cfa_offset (4)
@@ -40,7 +39,6 @@ ENTRY (BP_SYM (__strchrnul))
 
 	movl STR(%esp), %eax
 	movl CHR(%esp), %edx
-	CHECK_BOUNDS_LOW (%eax, STR(%esp))
 
 	/* At the moment %edx contains CHR.  What we need for the
 	   algorithm is CHR in all bytes of the dword.  Avoid
@@ -272,13 +270,10 @@ L(7):	testb %cl, %cl		/* is first byte CHR? */
 	/* It must be in the fourth byte and it cannot be NUL.  */
 	incl %eax
 
-L(6):	CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
-	RETURN_BOUNDED_POINTER (STR(%esp))
-	popl %edi		/* restore saved register content */
+L(6):	popl %edi		/* restore saved register content */
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (edi)
 
-	LEAVE
 	RET_PTR
 END (BP_SYM (__strchrnul))
 
diff --git a/sysdeps/i386/strcspn.S b/sysdeps/i386/strcspn.S
index 1934daffb0ec41d1..40b5207809f74530 100644
--- a/sysdeps/i386/strcspn.S
+++ b/sysdeps/i386/strcspn.S
@@ -31,11 +31,9 @@
 
 	.text
 ENTRY (BP_SYM (strcspn))
-	ENTER
 
 	movl STR(%esp), %edx
 	movl STOP(%esp), %eax
-	CHECK_BOUNDS_LOW (%edx, STR(%esp))
 
 	/* First we create a table with flags for all possible characters.
 	   For the ASCII (7bit/8bit) or ISO-8859-X character sets which are
@@ -236,11 +234,9 @@ L(5):	incl %eax
 
 L(4):	addl $256, %esp		/* remove stopset */
 	cfi_adjust_cfa_offset (-256)
-	CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
 	subl %edx, %eax		/* we have to return the number of valid
 				   characters, so compute distance to first
 				   non-valid character */
-	LEAVE
 	ret
 END (BP_SYM (strcspn))
 libc_hidden_builtin_def (strcspn)
diff --git a/sysdeps/i386/strpbrk.S b/sysdeps/i386/strpbrk.S
index 9696b9acd50b4885..ae35ba44f6a624a7 100644
--- a/sysdeps/i386/strpbrk.S
+++ b/sysdeps/i386/strpbrk.S
@@ -32,11 +32,9 @@
 
 	.text
 ENTRY (BP_SYM (strpbrk))
-	ENTER
 
 	movl STR(%esp), %edx
 	movl STOP(%esp), %eax
-	CHECK_BOUNDS_LOW (%edx, STR(%esp))
 
 	/* First we create a table with flags for all possible characters.
 	   For the ASCII (7bit/8bit) or ISO-8859-X character sets which are
@@ -238,18 +236,10 @@ L(5):	incl %eax
 L(4):	addl $256, %esp		/* remove stopset */
 	cfi_adjust_cfa_offset (-256)
 
-	CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
 	orb %cl, %cl		/* was last character NUL? */
 	jnz L(7)		/* no => return pointer */
 	xorl %eax, %eax
-	RETURN_NULL_BOUNDED_POINTER
 
-	LEAVE
-	RET_PTR
-
-L(7):	RETURN_BOUNDED_POINTER (STR(%esp))
-
-	LEAVE
-	RET_PTR
+L(7):	RET_PTR
 END (BP_SYM (strpbrk))
 libc_hidden_builtin_def (strpbrk)
diff --git a/sysdeps/i386/strrchr.S b/sysdeps/i386/strrchr.S
index f3fe8b75540c58e1..57931c165831aa55 100644
--- a/sysdeps/i386/strrchr.S
+++ b/sysdeps/i386/strrchr.S
@@ -31,7 +31,6 @@
 
 	.text
 ENTRY (BP_SYM (strrchr))
-	ENTER
 
 	pushl %edi		/* Save callee-safe registers used here.  */
 	cfi_adjust_cfa_offset (4)
@@ -43,7 +42,6 @@ ENTRY (BP_SYM (strrchr))
 	movl STR(%esp), %esi
 	cfi_rel_offset (esi, 0)
 	movl CHR(%esp), %ecx
-	CHECK_BOUNDS_LOW (%esi, STR(%esp))
 
 	/* At the moment %ecx contains C.  What we need for the
 	   algorithm is C in all bytes of the dword.  Avoid
@@ -324,16 +322,13 @@ L(26):	testb %dl, %dl		/* is third byte == NUL */
 	jne L(2)		/* no => skip */
 	leal 3(%esi), %eax	/* store address as result */
 
-L(2):	CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
-	RETURN_BOUNDED_POINTER (STR(%esp))
-	popl %esi		/* restore saved register content */
+L(2):	popl %esi		/* restore saved register content */
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (esi)
 	popl %edi
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (edi)
 
-	LEAVE
 	RET_PTR
 END (BP_SYM (strrchr))
 
diff --git a/sysdeps/i386/strspn.S b/sysdeps/i386/strspn.S
index 4193f376590fa48c..2b5c2a4c670bdfa2 100644
--- a/sysdeps/i386/strspn.S
+++ b/sysdeps/i386/strspn.S
@@ -31,11 +31,9 @@
 
 	.text
 ENTRY (BP_SYM (strspn))
-	ENTER
 
 	movl STR(%esp), %edx
 	movl SKIP(%esp), %eax
-	CHECK_BOUNDS_LOW (%edx, STR(%esp))
 
 	/* First we create a table with flags for all possible characters.
 	   For the ASCII (7bit/8bit) or ISO-8859-X character sets which are
@@ -236,11 +234,9 @@ L(5):	incl %eax
 
 L(4):	addl $256, %esp		/* remove stopset */
 	cfi_adjust_cfa_offset (-256)
-	CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
 	subl %edx, %eax		/* we have to return the number of valid
 				   characters, so compute distance to first
 				   non-valid character */
-	LEAVE
 	ret
 END (BP_SYM (strspn))
 libc_hidden_builtin_def (strspn)
diff --git a/sysdeps/i386/strtok.S b/sysdeps/i386/strtok.S
index 3b222aff3dd7d237..eb586928eb19bf21 100644
--- a/sysdeps/i386/strtok.S
+++ b/sysdeps/i386/strtok.S
@@ -46,11 +46,7 @@
 	.type save_ptr, @object
 	.size save_ptr, 4
 save_ptr:
-# if __BOUNDED_POINTERS__
-	.space 12
-# else
 	.space 4
-# endif
 
 # ifdef PIC
 #  define SAVE_PTR save_ptr@GOTOFF(%ebx)
@@ -69,11 +65,9 @@ save_ptr:
 
 	.text
 ENTRY (BP_SYM (FUNCTION))
-	ENTER
 
 	movl STR(%esp), %edx
 	movl DELIM(%esp), %eax
-	CHECK_BOUNDS_LOW (%eax, DELIM(%esp))
 
 #if !defined USE_AS_STRTOK_R && defined PIC
 	pushl %ebx			/* Save PIC register.  */
@@ -90,22 +84,7 @@ L(here):
 	/* If the pointer is NULL we have to use the stored value of
 	   the last run.  */
 	cmpl $0, %edx
-#if __BOUNDED_POINTERS__
-	movl SAVE(%esp), %ecx
-	je L(0)
-	/* Save bounds of incoming non-NULL STR into save area.  */
-	movl 4+STR(%esp), %eax
-	movl %eax, 4+SAVE_PTR
-	movl 8+STR(%esp), %eax
-	movl %eax, 8+SAVE_PTR
-	CHECK_BOUNDS_LOW (%edx, SAVE_PTR)
-	jmp L(1)
-L(0):	movl SAVE_PTR, %edx
-	CHECK_BOUNDS_LOW (%edx, SAVE_PTR)
-	jmp L(1)
-#else
 	jne L(1)
-#endif
 
 #ifdef USE_AS_STRTOK_R
 	/* The value is stored in the third argument.  */
@@ -267,12 +246,12 @@ L(2):	movb (%eax), %cl	/* get byte from stopset */
 
 	movb 1(%eax), %cl	/* get byte from stopset */
 	testb $0xff, %cl	/* is NUL char? */
-	jz L(1_2)		/* yes => start compare loop */
+	jz L(1_1)		/* yes => start compare loop */
 	movb %cl, (%esp,%ecx)	/* set corresponding byte in stopset table */
 
 	movb 2(%eax), %cl	/* get byte from stopset */
 	testb $0xff, %cl	/* is NUL char? */
-	jz L(1_3)		/* yes => start compare loop */
+	jz L(1_1)		/* yes => start compare loop */
 	movb %cl, (%esp,%ecx)	/* set corresponding byte in stopset table */
 
 	movb 3(%eax), %cl	/* get byte from stopset */
@@ -281,16 +260,7 @@ L(2):	movb (%eax), %cl	/* get byte from stopset */
 	testb $0xff, %cl	/* is NUL char? */
 	jnz L(2)		/* no => process next dword from stopset */
 
-#if __BOUNDED_POINTERS__
-	jmp L(1_0)		/* pointer is correct for bounds check */
-L(1_3):	incl %eax		/* adjust pointer for bounds check */
-L(1_2):	incl %eax		/* ditto */
-L(1_1):	incl %eax		/* ditto */
-L(1_0):	CHECK_BOUNDS_HIGH (%eax, DELIM(%esp), jbe)
-#else
-L(1_3):; L(1_2):; L(1_1):	/* fall through */
-#endif
-	leal -4(%edx), %eax	/* prepare loop */
+L(1_1):	leal -4(%edx), %eax	/* prepare loop */
 
 	/* We use a neat trick for the following loop.  Normally we would
 	   have to test for two termination conditions
@@ -370,8 +340,6 @@ L(11):
 	movl SAVE(%esp), %ecx
 #endif
 	movl %edx, SAVE_PTR
-	CHECK_BOUNDS_HIGH (%edx, SAVE_PTR, jb)
-	RETURN_BOUNDED_POINTER (SAVE_PTR)
 
 L(epilogue):
 #if !defined USE_AS_STRTOK_R && defined PIC
@@ -379,7 +347,6 @@ L(epilogue):
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (ebx)
 #endif
-	LEAVE
 	RET_PTR
 
 L(returnNULL):
@@ -388,7 +355,6 @@ L(returnNULL):
 	movl SAVE(%esp), %ecx
 #endif
 	movl %edx, SAVE_PTR
-	RETURN_NULL_BOUNDED_POINTER
 	jmp L(epilogue)
 
 END (BP_SYM (FUNCTION))
diff --git a/sysdeps/i386/sub_n.S b/sysdeps/i386/sub_n.S
index dbda4d4d4e7180f4..2a09256f9c71f93a 100644
--- a/sysdeps/i386/sub_n.S
+++ b/sysdeps/i386/sub_n.S
@@ -31,7 +31,6 @@
 
 	.text
 ENTRY (BP_SYM (__mpn_sub_n))
-	ENTER
 
 	pushl %edi
 	cfi_adjust_cfa_offset (4)
@@ -44,13 +43,6 @@ ENTRY (BP_SYM (__mpn_sub_n))
 	cfi_rel_offset (esi, 0)
 	movl	S2(%esp),%edx
 	movl	SIZE(%esp),%ecx
-#if __BOUNDED_POINTERS__
-	shll	$2, %ecx	/* convert limbs to bytes */
-	CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ecx)
-	CHECK_BOUNDS_BOTH_WIDE (%esi, S1(%esp), %ecx)
-	CHECK_BOUNDS_BOTH_WIDE (%edx, S2(%esp), %ecx)
-	shrl	$2, %ecx
-#endif
 	movl	%ecx,%eax
 	shrl	$3,%ecx			/* compute count for unrolled loop */
 	negl	%eax
@@ -118,6 +110,5 @@ L(oop):	movl	(%esi),%eax
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (edi)
 
-	LEAVE
 	ret
 END (BP_SYM (__mpn_sub_n))
diff --git a/sysdeps/i386/submul_1.S b/sysdeps/i386/submul_1.S
index beb0eb5f07f31a06..c7054e0f6bd48a5e 100644
--- a/sysdeps/i386/submul_1.S
+++ b/sysdeps/i386/submul_1.S
@@ -35,7 +35,6 @@
 
 	.text
 ENTRY (BP_SYM (__mpn_submul_1))
-	ENTER
 
 	pushl	%edi
 	cfi_adjust_cfa_offset (4)
@@ -54,12 +53,6 @@ ENTRY (BP_SYM (__mpn_submul_1))
 	movl	S1(%esp), %s1_ptr
 	movl	SIZE(%esp), %sizeP
 	movl	S2LIMB(%esp), %s2_limb
-#if __BOUNDED_POINTERS__
-	shll	$2, %sizeP	/* convert limbs to bytes */
-	CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %sizeP)
-	CHECK_BOUNDS_BOTH_WIDE (%s1_ptr, S1(%esp), %sizeP)
-	shrl	$2, %sizeP
-#endif
 	leal	(%res_ptr,%sizeP,4), %res_ptr
 	leal	(%s1_ptr,%sizeP,4), %s1_ptr
 	negl	%sizeP
@@ -91,6 +84,5 @@ L(oop):
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (edi)
 
-	LEAVE
 	ret
 END (BP_SYM (__mpn_submul_1))