x86-64: Use testl to check __x86_string_control
Checks
Context               | Check   | Description
dj/TryBot-apply_patch | success | Patch applied to master at the time it was sent
dj/TryBot-32bit       | success | Build for i686
Commit Message
Use testl, instead of andl, to check __x86_string_control to avoid
updating __x86_string_control.
---
sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
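For context: andl and testl compute the same bitwise AND and set EFLAGS
(including ZF) identically; the difference is that andl writes the result
back to its destination operand, while testl discards it. A minimal
standalone sketch (illustrative only; var is a stand-in for
__x86_string_control):

	.data
var:	.long	1		# stand-in for __x86_string_control
	.text
	.globl	check_bit
check_bit:
	andl	$1, var(%rip)	# read-modify-write: stores the AND result back to var
	testl	$1, var(%rip)	# read-only: sets ZF from the AND, writes nothing
	jz	1f		# taken when the tested bit is clear
1:	ret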
Comments
On 8/28/21 9:15 AM, H.J. Lu via Libc-alpha wrote:
> Use testl, instead of andl, to check __x86_string_control to avoid
> updating __x86_string_control.
__x86_string_control is a global variable that is hidden from external
linkage and used internally by various routines. Today the value is RW,
but in the future it could become RO (and probably should, after a call
to init_cacheinfo()). We don't want to do an idempotent update (we have
only one bit constant for now); we just want to check for the bit. The
old andl-based code would break when we get another bit, or when the
variable becomes RO.
LGTM.
Reviewed-by: Carlos O'Donell <carlos@redhat.com>
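To make those failure modes concrete, here is a hedged sketch; the second
bit is hypothetical, since only one X86_STRING_CONTROL bit exists at the
time of this patch:

	.data
var:	.long	3			# hypothetical: BIT1 (1) and BIT2 (2) both set
	.text
old_way:
	andl	$1, var(%rip)		# writes the AND result (1) back, silently
					# clearing BIT2; the store faults if var
					# is mapped read-only
new_way:
	testl	$1, var(%rip)		# never writes to memory; only EFLAGS change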
On Mon, Aug 30, 2021 at 10:35 AM Carlos O'Donell <carlos@redhat.com> wrote:
>
> On 8/28/21 9:15 AM, H.J. Lu via Libc-alpha wrote:
> > Use testl, instead of andl, to check __x86_string_control to avoid
> > updating __x86_string_control.
>
> __x86_string_control is a global variable that is hidden from external
> linkage and used internally by various routines. Today the value is RW,
> but in the future it could become RO (and probably should, after a call
> to init_cacheinfo()). We don't want to do an idempotent update (we have
> only one bit constant for now); we just want to check for the bit. The
> old andl-based code would break when we get another bit, or when the
> variable becomes RO.
>
> LGTM.
>
> Reviewed-by: Carlos O'Donell <carlos@redhat.com>
I am backporting it to release branches.
Thanks.
On Tue, Aug 31, 2021 at 8:11 AM H.J. Lu via Libc-alpha
<libc-alpha@sourceware.org> wrote:
>
> On Mon, Aug 30, 2021 at 10:35 AM Carlos O'Donell <carlos@redhat.com> wrote:
> >
> > On 8/28/21 9:15 AM, H.J. Lu via Libc-alpha wrote:
> > > Use testl, instead of andl, to check __x86_string_control to avoid
> > > updating __x86_string_control.
> >
> > __x86_string_control is a global variable that is hidden from external
> > linkage and used internally by various routines. Today the value is RW,
> > but in the future it could become RO (and probably should, after a call
> > to init_cacheinfo()). We don't want to do an idempotent update (we have
> > only one bit constant for now); we just want to check for the bit. The
> > old andl-based code would break when we get another bit, or when the
> > variable becomes RO.
> >
> > LGTM.
> >
> > Reviewed-by: Carlos O'Donell <carlos@redhat.com>
>
> I am backporting it to release branches.
>
> Thanks.
>
I would like to backport this patch to release branches.
Any comments or objections?
--Sunil
diff --git a/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S b/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
index 9f02624375..abde8438d4 100644
--- a/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
@@ -325,7 +325,7 @@ L(movsb):
/* Avoid slow backward REP MOVSB. */
jb L(more_8x_vec_backward)
# if AVOID_SHORT_DISTANCE_REP_MOVSB
- andl $X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB, __x86_string_control(%rip)
+ testl $X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB, __x86_string_control(%rip)
jz 3f
movq %rdi, %rcx
subq %rsi, %rcx
@@ -333,7 +333,7 @@ L(movsb):
# endif
1:
# if AVOID_SHORT_DISTANCE_REP_MOVSB
- andl $X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB, __x86_string_control(%rip)
+ testl $X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB, __x86_string_control(%rip)
jz 3f
movq %rsi, %rcx
subq %rdi, %rcx
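For readers skimming the hunk, here is an annotated sketch of the patched
forward-copy check. The threshold comparison after the subq lies outside
the hunk shown above, so its exact form here is an assumption about the
surrounding code:

	testl	$X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB, __x86_string_control(%rip)
	jz	3f			# bit clear: take the REP MOVSB path directly
	movq	%rdi, %rcx		# rcx = destination
	subq	%rsi, %rcx		# rcx = dst - src, the copy distance
	# ... the full source then compares %rcx against a small threshold
	# (not shown in the hunk); short forward distances branch back to
	# the vector loop instead of using REP MOVSB ...
3:					# REP MOVSB path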