From patchwork Sun May 25 19:41:47 2014
Content-Type: text/plain; charset="utf-8"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
X-Patchwork-Submitter: Richard Henderson
X-Patchwork-Id: 1142
From: Richard Henderson
To: libc-alpha@sourceware.org
Cc: marcus.shawcroft@gmail.com, Richard Henderson
Subject: [PATCH v3 7/9] aarch64: Rely on syscalls preserving registers
Date: Sun, 25 May 2014 12:41:47 -0700
Message-Id: <1401046909-25821-8-git-send-email-rth@twiddle.net>
In-Reply-To: <1401046909-25821-1-git-send-email-rth@twiddle.net>
References: <1401046909-25821-1-git-send-email-rth@twiddle.net>

From: Richard Henderson

In several cases we've had asm routines rely on syscalls not clobbering
call-clobbered registers, and that's now deemed ABI.  So take advantage
of this in the INLINE_SYSCALL path as well.

Shrinks libc.so by about 1k.
---
 sysdeps/unix/sysv/linux/aarch64/sysdep.h | 36 +++++++++++---------------------
 1 file changed, 12 insertions(+), 24 deletions(-)

diff --git a/sysdeps/unix/sysv/linux/aarch64/sysdep.h b/sysdeps/unix/sysv/linux/aarch64/sysdep.h
index 8cce986..4686599 100644
--- a/sysdeps/unix/sysv/linux/aarch64/sysdep.h
+++ b/sysdeps/unix/sysv/linux/aarch64/sysdep.h
@@ -231,7 +231,7 @@
 	LOAD_ARGS_##nr (args)					\
 	asm volatile ("blr %1"					\
 		      : "=r" (_x0)				\
-		      : "r" (funcptr), ASM_ARGS_##nr		\
+		      : "r" (funcptr) ASM_ARGS_##nr		\
 		      : "x30", "memory");			\
     (long) _x0;							\
   })
@@ -254,17 +254,15 @@
 # undef INTERNAL_SYSCALL_RAW
 # define INTERNAL_SYSCALL_RAW(name, err, nr, args...)		\
-  ({ unsigned long _sys_result;					\
+  ({ long _sys_result;						\
     {								\
       LOAD_ARGS_##nr (args)					\
       register long _x8 asm ("x8") = (name);			\
       asm volatile ("svc 0 // syscall " # name			\
-		    : "+r" (_x0), "+r" (_x8)			\
-		    : ASM_ARGS_##nr				\
-		    : "memory", CLOBBER_ARGS_##nr);		\
+		    : "=r" (_x0) : "r"(_x8) ASM_ARGS_##nr : "memory"); \
       _sys_result = _x0;					\
     }								\
-    (long) _sys_result; })
+    _sys_result; })
 # undef INTERNAL_SYSCALL
 # define INTERNAL_SYSCALL(name, err, nr, args...)		\
@@ -281,54 +279,44 @@
 # undef INTERNAL_SYSCALL_ERRNO
 # define INTERNAL_SYSCALL_ERRNO(val, err)	(-(val))
 
-# define CLOBBER_ARGS_0	CLOBBER_ARGS_1
-# define CLOBBER_ARGS_1	"x1", CLOBBER_ARGS_2
-# define CLOBBER_ARGS_2	"x2", CLOBBER_ARGS_3
-# define CLOBBER_ARGS_3	"x3", CLOBBER_ARGS_4
-# define CLOBBER_ARGS_4	"x4", CLOBBER_ARGS_5
-# define CLOBBER_ARGS_5	"x5", CLOBBER_ARGS_6
-# define CLOBBER_ARGS_6	"x6", CLOBBER_ARGS_7
-# define CLOBBER_ARGS_7 \
-	"x7", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18"
-
 # define LOAD_ARGS_0()				\
   register long _x0 asm ("x0");
-
-# define ASM_ARGS_0
 # define LOAD_ARGS_1(x0)			\
   long _x0tmp = (long) (x0);			\
   LOAD_ARGS_0 ()				\
   _x0 = _x0tmp;
-# define ASM_ARGS_1	"r" (_x0)
 # define LOAD_ARGS_2(x0, x1)			\
   long _x1tmp = (long) (x1);			\
   LOAD_ARGS_1 (x0)				\
   register long _x1 asm ("x1") = _x1tmp;
-# define ASM_ARGS_2	ASM_ARGS_1, "r" (_x1)
 # define LOAD_ARGS_3(x0, x1, x2)		\
   long _x2tmp = (long) (x2);			\
   LOAD_ARGS_2 (x0, x1)				\
   register long _x2 asm ("x2") = _x2tmp;
-# define ASM_ARGS_3	ASM_ARGS_2, "r" (_x2)
 # define LOAD_ARGS_4(x0, x1, x2, x3)		\
   long _x3tmp = (long) (x3);			\
   LOAD_ARGS_3 (x0, x1, x2)			\
   register long _x3 asm ("x3") = _x3tmp;
-# define ASM_ARGS_4	ASM_ARGS_3, "r" (_x3)
 # define LOAD_ARGS_5(x0, x1, x2, x3, x4)	\
   long _x4tmp = (long) (x4);			\
   LOAD_ARGS_4 (x0, x1, x2, x3)			\
   register long _x4 asm ("x4") = _x4tmp;
-# define ASM_ARGS_5	ASM_ARGS_4, "r" (_x4)
 # define LOAD_ARGS_6(x0, x1, x2, x3, x4, x5)	\
   long _x5tmp = (long) (x5);			\
   LOAD_ARGS_5 (x0, x1, x2, x3, x4)		\
   register long _x5 asm ("x5") = _x5tmp;
-# define ASM_ARGS_6	ASM_ARGS_5, "r" (_x5)
 # define LOAD_ARGS_7(x0, x1, x2, x3, x4, x5, x6)\
   long _x6tmp = (long) (x6);			\
   LOAD_ARGS_6 (x0, x1, x2, x3, x4, x5)		\
   register long _x6 asm ("x6") = _x6tmp;
+
+# define ASM_ARGS_0
+# define ASM_ARGS_1	, "r" (_x0)
+# define ASM_ARGS_2	ASM_ARGS_1, "r" (_x1)
+# define ASM_ARGS_3	ASM_ARGS_2, "r" (_x2)
+# define ASM_ARGS_4	ASM_ARGS_3, "r" (_x3)
+# define ASM_ARGS_5	ASM_ARGS_4, "r" (_x4)
+# define ASM_ARGS_6	ASM_ARGS_5, "r" (_x5)
 # define ASM_ARGS_7	ASM_ARGS_6, "r" (_x6)
 
 # undef INTERNAL_SYSCALL_NCS
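
For anyone reading along, here is a rough standalone sketch, not part of the
patch and not the actual glibc macros, of what the simplified
INTERNAL_SYSCALL_RAW boils down to once LOAD_ARGS_##nr and ASM_ARGS_##nr are
expanded for a three-argument call.  The helper name and the hard-coded
syscall number below are made up for illustration.  The point of the change
is visible in the constraints: x0 is the only output, the argument registers
and the syscall number in x8 are plain inputs, and nothing beyond "memory" is
clobbered, because the kernel is relied upon to preserve every other register
across svc 0.

/* Illustrative sketch only -- mirrors the shape of the new
   INTERNAL_SYSCALL_RAW on aarch64 Linux; 64 is __NR_write there.  */
#include <stddef.h>

static long
example_raw_write (int fd, const void *buf, size_t len)
{
  register long _x0 asm ("x0") = fd;
  register long _x1 asm ("x1") = (long) buf;
  register long _x2 asm ("x2") = (long) len;
  register long _x8 asm ("x8") = 64;

  /* x0 is both the first argument (input) and the return value (output);
     x1, x2 and x8 are inputs, with no clobber list beyond "memory".  */
  asm volatile ("svc 0"
                : "=r" (_x0)
                : "r" (_x8), "r" (_x0), "r" (_x1), "r" (_x2)
                : "memory");
  return _x0;
}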