|
|
179894 |
commit 813c6ec808556553be9d39e900a3fc97ceb32330
|
|
|
179894 |
Author: Pedro Franco de Carvalho <pedromfc@linux.ibm.com>
|
|
|
179894 |
Date: Wed Jun 30 12:36:07 2021 -0300
|
|
|
179894 |
|
|
|
179894 |
powerpc: optimize strcpy/stpcpy for POWER9/10
|
|
|
179894 |
|
|
|
179894 |
This patch modifies the current POWER9 implementation of strcpy and
|
|
|
179894 |
stpcpy to optimize it for POWER9/10.
|
|
|
179894 |
|
|
|
179894 |
Since no new POWER10 instructions are used, the original POWER9 strcpy is
|
|
|
179894 |
modified instead of creating a new implementation for POWER10. This
|
|
|
179894 |
implementation is based on both the original POWER9 implementation of
|
|
|
179894 |
strcpy and the preamble of the new POWER10 implementation of strlen.
|
|
|
179894 |
|
|
|
179894 |
The changes also affect stpcpy, which uses the same implementation with
|
|
|
179894 |
some additional code before returning.
|
|
|
179894 |
|
|
|
179894 |
On POWER9, averaging improvements across the benchmark
|
|
|
179894 |
inputs (length/source alignment/destination alignment), for an
|
|
|
179894 |
experiment that ran the benchmark five times, bench-strcpy showed an
|
|
|
179894 |
improvement of 5.23%, and bench-stpcpy showed an improvement of 6.59%.
|
|
|
179894 |
|
|
|
179894 |
On POWER10, bench-strcpy showed 13.16%, and bench-stpcpy showed 13.59%.
|
|
|
179894 |
|
|
|
179894 |
The changes are:
|
|
|
179894 |
|
|
|
179894 |
1. Removed the null string optimization.
|
|
|
179894 |
|
|
|
179894 |
Although this results in a few extra cycles for the null string, in
|
|
|
179894 |
combination with the second change, this resulted in improvements for
|
|
|
179894 |
other cases.
|
|
|
179894 |
|
|
|
179894 |
2. Adapted the preamble from strlen for POWER10.
|
|
|
179894 |
|
|
|
179894 |
This is the part of the function that handles up to the first 16 bytes
|
|
|
179894 |
of the string.
|
|
|
179894 |
|
|
|
179894 |
3. Increased number of unrolled iterations in the main loop to 6.
|
|
|
179894 |
|
|
|
179894 |
Reviewed-by: Matheus Castanho <msc@linux.ibm.com>
|
|
|
179894 |
Tested-by: Matheus Castanho <msc@linux.ibm.com>
|
|
|
179894 |
|
|
|
179894 |
diff --git a/sysdeps/powerpc/powerpc64/le/power9/strcpy.S b/sysdeps/powerpc/powerpc64/le/power9/strcpy.S
|
|
|
179894 |
index ce8f50329177fd06..9845a1d4cf0e1e5d 100644
|
|
|
179894 |
--- a/sysdeps/powerpc/powerpc64/le/power9/strcpy.S
|
|
|
179894 |
+++ b/sysdeps/powerpc/powerpc64/le/power9/strcpy.S
|
|
|
179894 |
@@ -45,91 +45,78 @@
|
|
|
179894 |
The implementation can load bytes past a null terminator, but only
|
|
|
179894 |
up to the next 16B boundary, so it never crosses a page. */
|
|
|
179894 |
|
|
|
179894 |
+/* Load quadword at addr+offset to vreg, check for null bytes,
|
|
|
179894 |
+ and branch to label if any are found. */
|
|
|
179894 |
+#define CHECK16(vreg,offset,addr,label) \
|
|
|
179894 |
+ lxv vreg+32,offset(addr); \
|
|
|
179894 |
+ vcmpequb. v6,vreg,v18; \
|
|
|
179894 |
+ bne cr6,L(label);
|
|
|
179894 |
+
|
|
|
179894 |
.machine power9
|
|
|
179894 |
ENTRY_TOCLESS (FUNC_NAME, 4)
|
|
|
179894 |
CALL_MCOUNT 2
|
|
|
179894 |
|
|
|
179894 |
- /* NULL string optimisation */
|
|
|
179894 |
- lbz r0,0(r4)
|
|
|
179894 |
- stb r0,0(r3)
|
|
|
179894 |
- cmpwi r0,0
|
|
|
179894 |
- beqlr
|
|
|
179894 |
-
|
|
|
179894 |
- addi r4,r4,1
|
|
|
179894 |
- addi r11,r3,1
|
|
|
179894 |
-
|
|
|
179894 |
vspltisb v18,0 /* Zeroes in v18 */
|
|
|
179894 |
+ vspltisb v19,-1 /* 0xFF bytes in v19 */
|
|
|
179894 |
|
|
|
179894 |
- neg r5,r4
|
|
|
179894 |
- rldicl r9,r5,0,60 /* How many bytes to get source 16B aligned? */
|
|
|
179894 |
+ /* Next 16B-aligned address. Prepare address for L(loop). */
|
|
|
179894 |
+ addi r5,r4,16
|
|
|
179894 |
+ clrrdi r5,r5,4
|
|
|
179894 |
+ subf r8,r4,r5
|
|
|
179894 |
+ add r11,r3,r8
|
|
|
179894 |
|
|
|
179894 |
- /* Get source 16B aligned */
|
|
|
179894 |
+ /* Align data and fill bytes not loaded with non matching char. */
|
|
|
179894 |
lvx v0,0,r4
|
|
|
179894 |
lvsr v1,0,r4
|
|
|
179894 |
- vperm v0,v18,v0,v1
|
|
|
179894 |
-
|
|
|
179894 |
- vcmpequb v6,v0,v18 /* 0xff if byte is NULL, 0x00 otherwise */
|
|
|
179894 |
- vctzlsbb r7,v6 /* Number of trailing zeroes */
|
|
|
179894 |
- addi r8,r7,1 /* Add null terminator */
|
|
|
179894 |
+ vperm v0,v19,v0,v1
|
|
|
179894 |
|
|
|
179894 |
- /* r8 = bytes including null
|
|
|
179894 |
- r9 = bytes to get source 16B aligned
|
|
|
179894 |
- if r8 > r9
|
|
|
179894 |
- no null, copy r9 bytes
|
|
|
179894 |
- else
|
|
|
179894 |
- there is a null, copy r8 bytes and return. */
|
|
|
179894 |
- cmpd r8,r9
|
|
|
179894 |
- bgt L(no_null)
|
|
|
179894 |
+ vcmpequb. v6,v0,v18 /* 0xff if byte is NULL, 0x00 otherwise */
|
|
|
179894 |
+ beq cr6,L(no_null)
|
|
|
179894 |
|
|
|
179894 |
- sldi r10,r8,56 /* stxvl wants size in top 8 bits */
|
|
|
179894 |
- stxvl 32+v0,r11,r10 /* Partial store */
|
|
|
179894 |
+ /* There's a null byte. */
|
|
|
179894 |
+ vctzlsbb r8,v6 /* Number of trailing zeroes */
|
|
|
179894 |
+ addi r9,r8,1 /* Add null byte. */
|
|
|
179894 |
+ sldi r10,r9,56 /* stxvl wants size in top 8 bits. */
|
|
|
179894 |
+ stxvl 32+v0,r3,r10 /* Partial store */
|
|
|
179894 |
|
|
|
179894 |
#ifdef USE_AS_STPCPY
|
|
|
179894 |
/* stpcpy returns the dest address plus the size not counting the
|
|
|
179894 |
final '\0'. */
|
|
|
179894 |
- add r3,r11,r7
|
|
|
179894 |
+ add r3,r3,r8
|
|
|
179894 |
#endif
|
|
|
179894 |
blr
|
|
|
179894 |
|
|
|
179894 |
L(no_null):
|
|
|
179894 |
- sldi r10,r9,56 /* stxvl wants size in top 8 bits */
|
|
|
179894 |
- stxvl 32+v0,r11,r10 /* Partial store */
|
|
|
179894 |
-
|
|
|
179894 |
- add r4,r4,r9
|
|
|
179894 |
- add r11,r11,r9
|
|
|
179894 |
+ sldi r10,r8,56 /* stxvl wants size in top 8 bits */
|
|
|
179894 |
+ stxvl 32+v0,r3,r10 /* Partial store */
|
|
|
179894 |
|
|
|
179894 |
+ .p2align 4
|
|
|
179894 |
L(loop):
|
|
|
179894 |
- lxv 32+v0,0(r4)
|
|
|
179894 |
- vcmpequb. v6,v0,v18 /* Any zero bytes? */
|
|
|
179894 |
- bne cr6,L(tail1)
|
|
|
179894 |
-
|
|
|
179894 |
- lxv 32+v1,16(r4)
|
|
|
179894 |
- vcmpequb. v6,v1,v18 /* Any zero bytes? */
|
|
|
179894 |
- bne cr6,L(tail2)
|
|
|
179894 |
-
|
|
|
179894 |
- lxv 32+v2,32(r4)
|
|
|
179894 |
- vcmpequb. v6,v2,v18 /* Any zero bytes? */
|
|
|
179894 |
- bne cr6,L(tail3)
|
|
|
179894 |
-
|
|
|
179894 |
- lxv 32+v3,48(r4)
|
|
|
179894 |
- vcmpequb. v6,v3,v18 /* Any zero bytes? */
|
|
|
179894 |
- bne cr6,L(tail4)
|
|
|
179894 |
+ CHECK16(v0,0,r5,tail1)
|
|
|
179894 |
+ CHECK16(v1,16,r5,tail2)
|
|
|
179894 |
+ CHECK16(v2,32,r5,tail3)
|
|
|
179894 |
+ CHECK16(v3,48,r5,tail4)
|
|
|
179894 |
+ CHECK16(v4,64,r5,tail5)
|
|
|
179894 |
+ CHECK16(v5,80,r5,tail6)
|
|
|
179894 |
|
|
|
179894 |
stxv 32+v0,0(r11)
|
|
|
179894 |
stxv 32+v1,16(r11)
|
|
|
179894 |
stxv 32+v2,32(r11)
|
|
|
179894 |
stxv 32+v3,48(r11)
|
|
|
179894 |
+ stxv 32+v4,64(r11)
|
|
|
179894 |
+ stxv 32+v5,80(r11)
|
|
|
179894 |
|
|
|
179894 |
- addi r4,r4,64
|
|
|
179894 |
- addi r11,r11,64
|
|
|
179894 |
+ addi r5,r5,96
|
|
|
179894 |
+ addi r11,r11,96
|
|
|
179894 |
|
|
|
179894 |
b L(loop)
|
|
|
179894 |
|
|
|
179894 |
+ .p2align 4
|
|
|
179894 |
L(tail1):
|
|
|
179894 |
- vctzlsbb r8,v6
|
|
|
179894 |
- addi r9,r8,1
|
|
|
179894 |
+ vctzlsbb r8,v6 /* Number of trailing zeroes */
|
|
|
179894 |
+ addi r9,r8,1 /* Add null terminator */
|
|
|
179894 |
sldi r9,r9,56 /* stxvl wants size in top 8 bits */
|
|
|
179894 |
- stxvl 32+v0,r11,r9
|
|
|
179894 |
+ stxvl 32+v0,r11,r9 /* Partial store */
|
|
|
179894 |
#ifdef USE_AS_STPCPY
|
|
|
179894 |
/* stpcpy returns the dest address plus the size not counting the
|
|
|
179894 |
final '\0'. */
|
|
|
179894 |
@@ -137,50 +124,81 @@ L(tail1):
|
|
|
179894 |
#endif
|
|
|
179894 |
blr
|
|
|
179894 |
|
|
|
179894 |
+ .p2align 4
|
|
|
179894 |
L(tail2):
|
|
|
179894 |
stxv 32+v0,0(r11)
|
|
|
179894 |
- vctzlsbb r8,v6 /* Number of trailing zeroes */
|
|
|
179894 |
- addi r9,r8,1 /* Add null terminator */
|
|
|
179894 |
- sldi r10,r9,56 /* stxvl wants size in top 8 bits */
|
|
|
179894 |
+ vctzlsbb r8,v6
|
|
|
179894 |
+ addi r9,r8,1
|
|
|
179894 |
+ sldi r9,r9,56
|
|
|
179894 |
addi r11,r11,16
|
|
|
179894 |
- stxvl 32+v1,r11,r10 /* Partial store */
|
|
|
179894 |
+ stxvl 32+v1,r11,r9
|
|
|
179894 |
#ifdef USE_AS_STPCPY
|
|
|
179894 |
- /* stpcpy returns the dest address plus the size not counting the
|
|
|
179894 |
- final '\0'. */
|
|
|
179894 |
add r3,r11,r8
|
|
|
179894 |
#endif
|
|
|
179894 |
blr
|
|
|
179894 |
|
|
|
179894 |
+ .p2align 4
|
|
|
179894 |
L(tail3):
|
|
|
179894 |
stxv 32+v0,0(r11)
|
|
|
179894 |
stxv 32+v1,16(r11)
|
|
|
179894 |
- vctzlsbb r8,v6 /* Number of trailing zeroes */
|
|
|
179894 |
- addi r9,r8,1 /* Add null terminator */
|
|
|
179894 |
- sldi r10,r9,56 /* stxvl wants size in top 8 bits */
|
|
|
179894 |
+ vctzlsbb r8,v6
|
|
|
179894 |
+ addi r9,r8,1
|
|
|
179894 |
+ sldi r9,r9,56
|
|
|
179894 |
addi r11,r11,32
|
|
|
179894 |
- stxvl 32+v2,r11,r10 /* Partial store */
|
|
|
179894 |
+ stxvl 32+v2,r11,r9
|
|
|
179894 |
#ifdef USE_AS_STPCPY
|
|
|
179894 |
- /* stpcpy returns the dest address plus the size not counting the
|
|
|
179894 |
- final '\0'. */
|
|
|
179894 |
add r3,r11,r8
|
|
|
179894 |
#endif
|
|
|
179894 |
blr
|
|
|
179894 |
|
|
|
179894 |
+ .p2align 4
|
|
|
179894 |
L(tail4):
|
|
|
179894 |
stxv 32+v0,0(r11)
|
|
|
179894 |
stxv 32+v1,16(r11)
|
|
|
179894 |
stxv 32+v2,32(r11)
|
|
|
179894 |
- vctzlsbb r8,v6 /* Number of trailing zeroes */
|
|
|
179894 |
- addi r9,r8,1 /* Add null terminator */
|
|
|
179894 |
- sldi r10,r9,56 /* stxvl wants size in top 8 bits */
|
|
|
179894 |
+ vctzlsbb r8,v6
|
|
|
179894 |
+ addi r9,r8,1
|
|
|
179894 |
+ sldi r9,r9,56
|
|
|
179894 |
addi r11,r11,48
|
|
|
179894 |
- stxvl 32+v3,r11,r10 /* Partial store */
|
|
|
179894 |
+ stxvl 32+v3,r11,r9
|
|
|
179894 |
#ifdef USE_AS_STPCPY
|
|
|
179894 |
- /* stpcpy returns the dest address plus the size not counting the
|
|
|
179894 |
- final '\0'. */
|
|
|
179894 |
add r3,r11,r8
|
|
|
179894 |
#endif
|
|
|
179894 |
blr
|
|
|
179894 |
+
|
|
|
179894 |
+ .p2align 4
|
|
|
179894 |
+L(tail5):
|
|
|
179894 |
+ stxv 32+v0,0(r11)
|
|
|
179894 |
+ stxv 32+v1,16(r11)
|
|
|
179894 |
+ stxv 32+v2,32(r11)
|
|
|
179894 |
+ stxv 32+v3,48(r11)
|
|
|
179894 |
+ vctzlsbb r8,v6
|
|
|
179894 |
+ addi r9,r8,1
|
|
|
179894 |
+ sldi r9,r9,56
|
|
|
179894 |
+ addi r11,r11,64
|
|
|
179894 |
+ stxvl 32+v4,r11,r9
|
|
|
179894 |
+#ifdef USE_AS_STPCPY
|
|
|
179894 |
+ add r3,r11,r8
|
|
|
179894 |
+#endif
|
|
|
179894 |
+ blr
|
|
|
179894 |
+
|
|
|
179894 |
+ .p2align 4
|
|
|
179894 |
+L(tail6):
|
|
|
179894 |
+ stxv 32+v0,0(r11)
|
|
|
179894 |
+ stxv 32+v1,16(r11)
|
|
|
179894 |
+ stxv 32+v2,32(r11)
|
|
|
179894 |
+ stxv 32+v3,48(r11)
|
|
|
179894 |
+ stxv 32+v4,64(r11)
|
|
|
179894 |
+ vctzlsbb r8,v6
|
|
|
179894 |
+ addi r9,r8,1
|
|
|
179894 |
+ sldi r9,r9,56
|
|
|
179894 |
+ addi r11,r11,80
|
|
|
179894 |
+ stxvl 32+v5,r11,r9
|
|
|
179894 |
+#ifdef USE_AS_STPCPY
|
|
|
179894 |
+ add r3,r11,r8
|
|
|
179894 |
+#endif
|
|
|
179894 |
+ blr
|
|
|
179894 |
+
|
|
|
179894 |
END (FUNC_NAME)
|
|
|
179894 |
#ifndef USE_AS_STPCPY
|
|
|
179894 |
libc_hidden_builtin_def (strcpy)
|