[v2,2/3] x86: Move and slightly improve memset_erms
Checks

Context               | Check   | Description
dj/TryBot-apply_patch | success | Patch applied to master at the time it was sent
Commit Message
Implementation-wise:
1. Remove the VZEROUPPER, as memset_{impl}_unaligned_erms does not
use the L(stosb) label that was previously defined.
2. Don't give the hot path (the fall-through) to the zero-size case
(see the C sketch after the diffstat below).
Code-positioning-wise:
Move memset_{chk}_erms to its own file. Leaving it in between the
memset_{impl}_unaligned implementations both adds unnecessary
complexity to the file and wastes space in a relatively hot cache
section.
---
sysdeps/x86_64/multiarch/Makefile | 1 +
sysdeps/x86_64/multiarch/memset-erms.S | 25 +++++++++++++++
.../multiarch/memset-vec-unaligned-erms.S | 31 -------------------
3 files changed, 26 insertions(+), 31 deletions(-)
create mode 100644 sysdeps/x86_64/multiarch/memset-erms.S
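
For context on item 2, here is a hedged C sketch of the branch-layout
change; the function and variable names are illustrative, not glibc's:

#include <stddef.h>

/* Before the patch, the len == 0 return sat on the fall-through path
   and the common case had to take a jump (jnz L(stosb)); after it,
   the common case falls through and only len == 0 branches out
   (jz L(stosb_return_zero)).  */
static void *
memset_sketch (void *dst, int c, size_t len)
{
  unsigned char *p = dst;

  if (len == 0)              /* Rare case takes the branch...  */
    return dst;
  while (len-- != 0)         /* ...the hot loop is the fall-through.  */
    *p++ = (unsigned char) c;
  return dst;
}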
Comments
On Wed, Jun 29, 2022 at 3:12 PM Noah Goldstein <goldstein.w.n@gmail.com> wrote:
>
> Implementation wise:
> 1. Remove the VZEROUPPER as memset_{impl}_unaligned_erms does not
> use the L(stosb) label that was previously defined.
>
> 2. Don't give the hotpath (fallthrough) to zero size.
>
> Code positioning wise:
>
> Move memset_{chk}_erms to its own file. Leaving it in between the
> memset_{impl}_unaligned both adds unnecessary complexity to the
> file and wastes space in a relatively hot cache section.
> ---
> sysdeps/x86_64/multiarch/Makefile | 1 +
> sysdeps/x86_64/multiarch/memset-erms.S | 25 +++++++++++++++
> .../multiarch/memset-vec-unaligned-erms.S | 31 -------------------
> 3 files changed, 26 insertions(+), 31 deletions(-)
> create mode 100644 sysdeps/x86_64/multiarch/memset-erms.S
>
> diff --git a/sysdeps/x86_64/multiarch/Makefile b/sysdeps/x86_64/multiarch/Makefile
> index 62a4d96fb8..18cea04423 100644
> --- a/sysdeps/x86_64/multiarch/Makefile
> +++ b/sysdeps/x86_64/multiarch/Makefile
> @@ -30,6 +30,7 @@ sysdep_routines += \
> memset-avx2-unaligned-erms-rtm \
> memset-avx512-no-vzeroupper \
> memset-avx512-unaligned-erms \
> + memset-erms \
> memset-evex-unaligned-erms \
> memset-sse2-unaligned-erms \
> rawmemchr-avx2 \
> diff --git a/sysdeps/x86_64/multiarch/memset-erms.S b/sysdeps/x86_64/multiarch/memset-erms.S
> new file mode 100644
> index 0000000000..1fce0c9fcc
> --- /dev/null
> +++ b/sysdeps/x86_64/multiarch/memset-erms.S
> @@ -0,0 +1,25 @@
Need copyright notice.
> +#include <sysdep.h>
> +
> +#if defined USE_MULTIARCH && IS_IN (libc)
> + .text
> +ENTRY (__memset_chk_erms)
> + cmp %RDX_LP, %RCX_LP
> + jb HIDDEN_JUMPTARGET (__chk_fail)
> +END (__memset_chk_erms)
> +
> +/* Only used to measure performance of REP STOSB. */
> +ENTRY (__memset_erms)
> + /* Skip zero length. */
> + test %RDX_LP, %RDX_LP
> + jz L(stosb_return_zero)
> + mov %RDX_LP, %RCX_LP
> + movzbl %sil, %eax
> + mov %RDI_LP, %RDX_LP
> + rep stosb
> + mov %RDX_LP, %RAX_LP
> + ret
> +L(stosb_return_zero):
> + movq %rdi, %rax
> + ret
> +END (__memset_erms)
> +#endif
> diff --git a/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S b/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
> index abc12d9cda..905d0fa464 100644
> --- a/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
> +++ b/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
> @@ -156,37 +156,6 @@ L(entry_from_wmemset):
> #if defined USE_MULTIARCH && IS_IN (libc)
> END (MEMSET_SYMBOL (__memset, unaligned))
>
> -# if VEC_SIZE == 16
> -ENTRY (__memset_chk_erms)
> - cmp %RDX_LP, %RCX_LP
> - jb HIDDEN_JUMPTARGET (__chk_fail)
> -END (__memset_chk_erms)
> -
> -/* Only used to measure performance of REP STOSB. */
> -ENTRY (__memset_erms)
> - /* Skip zero length. */
> - test %RDX_LP, %RDX_LP
> - jnz L(stosb)
> - movq %rdi, %rax
> - ret
> -# else
> -/* Provide a hidden symbol to debugger. */
> - .hidden MEMSET_SYMBOL (__memset, erms)
> -ENTRY (MEMSET_SYMBOL (__memset, erms))
> -# endif
> -L(stosb):
> - mov %RDX_LP, %RCX_LP
> - movzbl %sil, %eax
> - mov %RDI_LP, %RDX_LP
> - rep stosb
> - mov %RDX_LP, %RAX_LP
> - VZEROUPPER_RETURN
> -# if VEC_SIZE == 16
> -END (__memset_erms)
> -# else
> -END (MEMSET_SYMBOL (__memset, erms))
> -# endif
> -
> # if defined SHARED && IS_IN (libc)
> ENTRY_CHK (MEMSET_CHK_SYMBOL (__memset_chk, unaligned_erms))
> cmp %RDX_LP, %RCX_LP
> --
> 2.34.1
>
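
For readers unfamiliar with the REP STOSB register contract that the
__memset_erms body above relies on, here is a minimal sketch using GCC
extended inline asm (an illustration of the standard x86-64 semantics,
not the glibc sources):

#include <stddef.h>

/* REP STOSB stores AL to (%rdi) RCX times, advancing RDI and counting
   RCX down to zero -- which is why the assembly above stashes the
   original destination in RDX and moves it back into RAX before
   returning.  */
static void *
memset_stosb_sketch (void *dst, int c, size_t len)
{
  void *d = dst;
  asm volatile ("rep stosb"
                : "+D" (d), "+c" (len)   /* RDI and RCX are updated.  */
                : "a" (c)                /* AL holds the fill byte.  */
                : "memory");
  return dst;                            /* Original pointer.  */
}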
On Wed, Jun 29, 2022 at 3:19 PM H.J. Lu <hjl.tools@gmail.com> wrote:
>
> On Wed, Jun 29, 2022 at 3:12 PM Noah Goldstein <goldstein.w.n@gmail.com> wrote:
> >
> > Implementation wise:
> > 1. Remove the VZEROUPPER as memset_{impl}_unaligned_erms does not
> > use the L(stosb) label that was previously defined.
> >
> > 2. Don't give the hotpath (fallthrough) to zero size.
> >
> > Code positioning wise:
> >
> > Move memset_{chk}_erms to its own file. Leaving it in between the
> > memset_{impl}_unaligned both adds unnecessary complexity to the
> > file and wastes space in a relatively hot cache section.
> > ---
> > sysdeps/x86_64/multiarch/Makefile | 1 +
> > sysdeps/x86_64/multiarch/memset-erms.S | 25 +++++++++++++++
> > .../multiarch/memset-vec-unaligned-erms.S | 31 -------------------
> > 3 files changed, 26 insertions(+), 31 deletions(-)
> > create mode 100644 sysdeps/x86_64/multiarch/memset-erms.S
> >
> > diff --git a/sysdeps/x86_64/multiarch/Makefile b/sysdeps/x86_64/multiarch/Makefile
> > index 62a4d96fb8..18cea04423 100644
> > --- a/sysdeps/x86_64/multiarch/Makefile
> > +++ b/sysdeps/x86_64/multiarch/Makefile
> > @@ -30,6 +30,7 @@ sysdep_routines += \
> > memset-avx2-unaligned-erms-rtm \
> > memset-avx512-no-vzeroupper \
> > memset-avx512-unaligned-erms \
> > + memset-erms \
> > memset-evex-unaligned-erms \
> > memset-sse2-unaligned-erms \
> > rawmemchr-avx2 \
> > diff --git a/sysdeps/x86_64/multiarch/memset-erms.S b/sysdeps/x86_64/multiarch/memset-erms.S
> > new file mode 100644
> > index 0000000000..1fce0c9fcc
> > --- /dev/null
> > +++ b/sysdeps/x86_64/multiarch/memset-erms.S
> > @@ -0,0 +1,25 @@
>
> Need copyright notice.
Fixed in V3.
>
> > +#include <sysdep.h>
> > +
> > +#if defined USE_MULTIARCH && IS_IN (libc)
> > + .text
> > +ENTRY (__memset_chk_erms)
> > + cmp %RDX_LP, %RCX_LP
> > + jb HIDDEN_JUMPTARGET (__chk_fail)
> > +END (__memset_chk_erms)
> > +
> > +/* Only used to measure performance of REP STOSB. */
> > +ENTRY (__memset_erms)
> > + /* Skip zero length. */
> > + test %RDX_LP, %RDX_LP
> > + jz L(stosb_return_zero)
> > + mov %RDX_LP, %RCX_LP
> > + movzbl %sil, %eax
> > + mov %RDI_LP, %RDX_LP
> > + rep stosb
> > + mov %RDX_LP, %RAX_LP
> > + ret
> > +L(stosb_return_zero):
> > + movq %rdi, %rax
> > + ret
> > +END (__memset_erms)
> > +#endif
> > diff --git a/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S b/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
> > index abc12d9cda..905d0fa464 100644
> > --- a/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
> > +++ b/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
> > @@ -156,37 +156,6 @@ L(entry_from_wmemset):
> > #if defined USE_MULTIARCH && IS_IN (libc)
> > END (MEMSET_SYMBOL (__memset, unaligned))
> >
> > -# if VEC_SIZE == 16
> > -ENTRY (__memset_chk_erms)
> > - cmp %RDX_LP, %RCX_LP
> > - jb HIDDEN_JUMPTARGET (__chk_fail)
> > -END (__memset_chk_erms)
> > -
> > -/* Only used to measure performance of REP STOSB. */
> > -ENTRY (__memset_erms)
> > - /* Skip zero length. */
> > - test %RDX_LP, %RDX_LP
> > - jnz L(stosb)
> > - movq %rdi, %rax
> > - ret
> > -# else
> > -/* Provide a hidden symbol to debugger. */
> > - .hidden MEMSET_SYMBOL (__memset, erms)
> > -ENTRY (MEMSET_SYMBOL (__memset, erms))
> > -# endif
> > -L(stosb):
> > - mov %RDX_LP, %RCX_LP
> > - movzbl %sil, %eax
> > - mov %RDI_LP, %RDX_LP
> > - rep stosb
> > - mov %RDX_LP, %RAX_LP
> > - VZEROUPPER_RETURN
> > -# if VEC_SIZE == 16
> > -END (__memset_erms)
> > -# else
> > -END (MEMSET_SYMBOL (__memset, erms))
> > -# endif
> > -
> > # if defined SHARED && IS_IN (libc)
> > ENTRY_CHK (MEMSET_CHK_SYMBOL (__memset_chk, unaligned_erms))
> > cmp %RDX_LP, %RCX_LP
> > --
> > 2.34.1
> >
>
>
> --
> H.J.
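
As an aside on the moved __memset_chk_erms prologue (the
cmp %RDX_LP, %RCX_LP / jb __chk_fail pair): under _FORTIFY_SOURCE the
compiler passes the known destination size as a fourth argument, and
the call aborts before writing past it. A hedged C analogue, with a
stand-in abort where glibc really jumps to __chk_fail:

#include <stdlib.h>
#include <string.h>

static void *
memset_chk_sketch (void *dst, int c, size_t len, size_t dst_size)
{
  if (dst_size < len)   /* jb HIDDEN_JUMPTARGET (__chk_fail) */
    abort ();           /* Stand-in for __chk_fail.  */
  return memset (dst, c, len);
}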