x86-64: Use testl to check __x86_string_control

Message ID:   20210828131530.539387-1-hjl.tools@gmail.com
State:        Committed
Commit:       3c8b9879cab6d41787bc5b14c1748f62fd6d0e5f
Delegated to: Carlos O'Donell
Series:       x86-64: Use testl to check __x86_string_control

Checks

Context                Check    Description
dj/TryBot-apply_patch  success  Patch applied to master at the time it was sent
dj/TryBot-32bit        success  Build for i686

Commit Message

H.J. Lu Aug. 28, 2021, 1:15 p.m. UTC
Use testl, instead of andl, to check __x86_string_control, so that the
check does not also update __x86_string_control.
---
 sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
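For readers skimming the diff: with a memory destination, andl performs a
read-modify-write (load, AND, store back), while testl performs the same
AND purely to set EFLAGS and never stores the result. A minimal standalone
sketch; the flag variable and labels are made up for illustration and are
not code from the patch:

	.data
flag:	.long	1			# hypothetical variable

	.text
	.globl	example
example:
	# andl: loads flag, ANDs it with the mask, and stores the
	# result back to flag; a write even when the value is
	# unchanged.
	andl	$1, flag(%rip)

	# testl: computes flag & 1 only to set ZF; flag is never
	# written, so it could live in a read-only mapping.
	testl	$1, flag(%rip)
	jz	1f			# taken when the bit is clear
1:
	ret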

Comments

Carlos O'Donell Aug. 30, 2021, 5:35 p.m. UTC | #1
On 8/28/21 9:15 AM, H.J. Lu via Libc-alpha wrote:
> Use testl, instead of andl, to check __x86_string_control, so that the
> check does not also update __x86_string_control.

__x86_string_control is a global variable with hidden visibility, used
internally by various routines. Today the value is RW, but in the future
it could become RO (and probably should be, once init_cacheinfo() has
run). We don't want an update that is merely idempotent by accident (only
one bit constant is defined so far); we just want to test for the bit.
The old andl sequence would break as soon as a second bit is defined, or
as soon as the variable becomes RO; a sketch of that failure mode follows
this message.

LGTM.

Reviewed-by: Carlos O'Donell <carlos@redhat.com>
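To make the failure mode concrete, an editorial sketch with a hypothetical
second bit follows; BIT_A stands in for the existing
X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB constant, and BIT_B does
not exist in glibc:

	.set	BIT_A, 1 << 0		# stand-in for the existing bit
	.set	BIT_B, 1 << 1		# hypothetical future bit

	# Old form: read-modify-write.  If __x86_string_control held
	# BIT_A | BIT_B, it holds only BIT_A afterwards; BIT_B is
	# silently cleared, and the store would fault once the
	# variable is moved to a read-only page.
	andl	$BIT_A, __x86_string_control(%rip)

	# New form: the AND result only sets ZF; memory is untouched.
	testl	$BIT_A, __x86_string_control(%rip)
	jz	3f			# taken when BIT_A is clear
3: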

H.J. Lu Aug. 31, 2021, 3:10 p.m. UTC | #2
On Mon, Aug 30, 2021 at 10:35 AM Carlos O'Donell <carlos@redhat.com> wrote:
>
> [...]
>
> LGTM.
>
> Reviewed-by: Carlos O'Donell <carlos@redhat.com>

I am backporting it to release branches.

Thanks.

Patch

diff --git a/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S b/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
index 9f02624375..abde8438d4 100644
--- a/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
@@ -325,7 +325,7 @@ L(movsb):
 	/* Avoid slow backward REP MOVSB.  */
 	jb	L(more_8x_vec_backward)
 # if AVOID_SHORT_DISTANCE_REP_MOVSB
-	andl	$X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB, __x86_string_control(%rip)
+	testl	$X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB, __x86_string_control(%rip)
 	jz	3f
 	movq	%rdi, %rcx
 	subq	%rsi, %rcx
@@ -333,7 +333,7 @@ L(movsb):
 # endif
 1:
 # if AVOID_SHORT_DISTANCE_REP_MOVSB
-	andl	$X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB, __x86_string_control(%rip)
+	testl	$X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB, __x86_string_control(%rip)
 	jz	3f
 	movq	%rsi, %rcx
 	subq	%rdi, %rcx
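
For context on what the changed lines guard (the comments below are
editorial; the code between the two hunks is elided above): when the bit
is set, each path computes the distance between destination and source
before deciding whether to use REP MOVSB. Judging by the constant's name,
the intent is to avoid REP MOVSB when source and destination are close
together.

	testl	$X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB, __x86_string_control(%rip)
	jz	3f			# bit clear: skip the distance check
	movq	%rdi, %rcx		# %rcx = destination
	subq	%rsi, %rcx		# %rcx = destination - source

The second hunk is the mirror image for the opposite copy direction,
computing source minus destination (%rsi - %rdi) instead.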