x86_64: Update THREAD_SETMEM/THREAD_SETMEM_NC for IMM64

Message ID 20210202191209.4036619-1-hjl.tools@gmail.com
State Superseded
Delegated to: Carlos O'Donell
Headers
Series x86_64: Update THREAD_SETMEM/THREAD_SETMEM_NC for IMM64 |

Commit Message

H.J. Lu Feb. 2, 2021, 7:12 p.m. UTC
  Since there is only "movq imm32s, mem64" and no "movq imm64, mem64", use
"movq reg64, mem64" to store 64-bit constant in TCB.
---
 sysdeps/x86_64/nptl/tls.h | 11 +++++++++++
 1 file changed, 11 insertions(+)
  

Comments

Carlos O'Donell March 1, 2021, 1:30 p.m. UTC | #1
On 2/2/21 2:12 PM, H.J. Lu via Libc-alpha wrote:
> Since there is only "movq imm32s, mem64" and no "movq imm64, mem64", use
> "movq reg64, mem64" to store 64-bit constant in TCB.
> ---
>  sysdeps/x86_64/nptl/tls.h | 11 +++++++++++
>  1 file changed, 11 insertions(+)
> 
> diff --git a/sysdeps/x86_64/nptl/tls.h b/sysdeps/x86_64/nptl/tls.h
> index 20f0958780..9ec8703e45 100644
> --- a/sysdeps/x86_64/nptl/tls.h
> +++ b/sysdeps/x86_64/nptl/tls.h
> @@ -269,6 +269,11 @@ _Static_assert (offsetof (tcbhead_t, __glibc_unused2) == 0x80,
>         asm volatile ("movl %0,%%fs:%P1" :				      \
>  		     : IMM_MODE (value),				      \
>  		       "i" (offsetof (struct pthread, member)));	      \
> +     else if (__builtin_constant_p (value)				      \
> +	      && (int64_t) (int32_t) (uintptr_t) value != (uintptr_t) value)  \
> +       asm volatile ("movq %0,%%fs:%P1" :				      \
> +		     : "r" (value),					      \
> +		       "i" (offsetof (struct pthread, member)));	      \

(1) Move conditional into 'else /* 8 */' section.

The blocks of conditionals are predicated on the size of the member we are 
about to write to.

In the block below...

>       else /* 8 */							      \

... here, we are about to write value into a member that is size 8.

Your code changes the logical construction of the code, but in a way that makes
it more difficult to understand.

We previously had:

if (sizeof() == 1)

else if (sizeof() == 4)

else /* Assume 8 */

In your case we must already be in the 'else /* Assume 8 */' because otherwise
we'd be writing a 64-bit constant into a < 64-bit structure member.

I think we should put your code into the else clause.

272      else /* 8 */                                                             \
273        {                                                                      \

             if (__builtin_constant_p (value)
                 && ([the value is a >32-bit constant])
               asm volatile ([use movq reg64, mem64]);
             else

274          asm volatile ("movq %q0,%%fs:%P1" :                                  \
275                        : IMM_MODE ((uint64_t) cast_to_integer (value)),       \
276                          "i" (offsetof (struct pthread, member)));            \
277        }})

(2) What if gcc can't prove it's constant?

If the constant is >32-bit, but gcc can't prove it's constant, then don't we
try to put a >32-bit constant into a 32-bit immediate?

Shouldn't the code be structured the other way around?

e.g.

else /* 8 */
{
  if (__builtin_constant_p (value)
      && ([the value is a <= 32-bit constant])
    asm volatile ([use movq imm32, mem64]);
  else
    asm volatile ([use movq reg64, mem64]);
}

This way the code is always correct?

>         {								      \
>  	 asm volatile ("movq %q0,%%fs:%P1" :				      \
> @@ -294,6 +299,12 @@ _Static_assert (offsetof (tcbhead_t, __glibc_unused2) == 0x80,
>  		     : IMM_MODE (value),				      \
>  		       "i" (offsetof (struct pthread, member[0])),	      \
>  		       "r" (idx));					      \
> +     else if (__builtin_constant_p (value)				      \
> +	      && (int64_t) (int32_t) (uintptr_t) value != (uintptr_t) value)  \
> +       asm volatile ("movq %0,%%fs:%P1(,%q2,8)" :			      \
> +		     : "r" (value),					      \
> +		       "i" (offsetof (struct pthread, member[0])),	      \
> +		       "r" (idx));					      \
>       else /* 8 */							      \
>         {								      \
>  	 asm volatile ("movq %q0,%%fs:%P1(,%q2,8)" :			      \
>
  

Patch

diff --git a/sysdeps/x86_64/nptl/tls.h b/sysdeps/x86_64/nptl/tls.h
index 20f0958780..9ec8703e45 100644
--- a/sysdeps/x86_64/nptl/tls.h
+++ b/sysdeps/x86_64/nptl/tls.h
@@ -269,6 +269,11 @@  _Static_assert (offsetof (tcbhead_t, __glibc_unused2) == 0x80,
        asm volatile ("movl %0,%%fs:%P1" :				      \
 		     : IMM_MODE (value),				      \
 		       "i" (offsetof (struct pthread, member)));	      \
+     else if (__builtin_constant_p (value)				      \
+	      && (int64_t) (int32_t) (uintptr_t) value != (uintptr_t) value)  \
+       asm volatile ("movq %0,%%fs:%P1" :				      \
+		     : "r" (value),					      \
+		       "i" (offsetof (struct pthread, member)));	      \
      else /* 8 */							      \
        {								      \
 	 asm volatile ("movq %q0,%%fs:%P1" :				      \
@@ -294,6 +299,12 @@  _Static_assert (offsetof (tcbhead_t, __glibc_unused2) == 0x80,
 		     : IMM_MODE (value),				      \
 		       "i" (offsetof (struct pthread, member[0])),	      \
 		       "r" (idx));					      \
+     else if (__builtin_constant_p (value)				      \
+	      && (int64_t) (int32_t) (uintptr_t) value != (uintptr_t) value)  \
+       asm volatile ("movq %0,%%fs:%P1(,%q2,8)" :			      \
+		     : "r" (value),					      \
+		       "i" (offsetof (struct pthread, member[0])),	      \
+		       "r" (idx));					      \
      else /* 8 */							      \
        {								      \
 	 asm volatile ("movq %q0,%%fs:%P1(,%q2,8)" :			      \