[v2,2/2] aarch64: Set the syscall register right before doing the syscall.

Message ID 20230412211104.2609738-3-josimmon@redhat.com
State Superseded
Headers
Series x86_64: aarch64: Set call number just before syscall |

Checks

Context Check Description
dj/TryBot-apply_patch success Patch applied to master at the time it was sent
dj/TryBot-32bit success Build for i686

Commit Message

Joe Simmons-Talbott April 12, 2023, 9:11 p.m. UTC
  To make identifying syscalls easier during call tree analysis, load the
syscall number just before performing the syscall.
---
 sysdeps/unix/sysv/linux/aarch64/sysdep.h | 20 +++++++++++++++++---
 1 file changed, 17 insertions(+), 3 deletions(-)
  

Patch

diff --git a/sysdeps/unix/sysv/linux/aarch64/sysdep.h b/sysdeps/unix/sysv/linux/aarch64/sysdep.h
index e94d1703ad..b91656fdff 100644
--- a/sysdeps/unix/sysv/linux/aarch64/sysdep.h
+++ b/sysdeps/unix/sysv/linux/aarch64/sysdep.h
@@ -167,14 +167,28 @@ 
 
 # define HAVE_CLONE3_WRAPPER		1
 
+# define MSTR_HELPER(x) # x
+# define MSTR(x) MSTR_HELPER(x)
+
 # undef INTERNAL_SYSCALL_RAW
 # define INTERNAL_SYSCALL_RAW(name, nr, args...)		\
   ({ long _sys_result;						\
      {								\
        LOAD_ARGS_##nr (args)					\
-       register long _x8 asm ("x8") = (name);			\
-       asm volatile ("svc	0	// syscall " # name     \
-		     : "=r" (_x0) : "r"(_x8) ASM_ARGS_##nr : "memory");	\
+       if (__builtin_constant_p(name))				\
+         asm volatile ("mov	x8, %1\n"			\
+		       "svc	0	// syscall " # name	\
+                       : "=r" (_x0)				\
+		       : "i" (name) ASM_ARGS_##nr		\
+		       : "x8", "memory");			\
+       else							\
+         {							\
+           register long _x8 asm ("x8") = (name);		\
+           asm volatile ("svc	0	// syscall " # name     \
+		         : "=r" (_x0)				\
+		         : "r"(_x8) ASM_ARGS_##nr		\
+		         : "memory");				\
+         }							\
        _sys_result = _x0;					\
      }								\
      _sys_result; })