diff --git a/sysdeps/i386/i386-mcount.S b/sysdeps/i386/i386-mcount.S
index 94fb95e..f92f3ff 100644
--- a/sysdeps/i386/i386-mcount.S
+++ b/sysdeps/i386/i386-mcount.S
@@ -30,10 +30,17 @@
.type C_SYMBOL_NAME(_mcount), @function
.align ALIGNARG(4)
C_LABEL(_mcount)
+ cfi_startproc
/* Save the caller-clobbered registers. */
pushl %eax
+ cfi_adjust_cfa_offset (4)
pushl %ecx
+ cfi_adjust_cfa_offset (4)
pushl %edx
+ cfi_adjust_cfa_offset (4)
+ cfi_rel_offset (eax, 8)
+ cfi_rel_offset (ecx, 4)
+ cfi_rel_offset (edx, 0)
movl 12(%esp), %edx
movl 4(%ebp), %eax
@@ -45,9 +52,16 @@ C_LABEL(_mcount)
/* Pop the saved registers. Please note that `mcount' has no
return value. */
popl %edx
+ cfi_adjust_cfa_offset (-4)
+ cfi_restore (edx)
popl %ecx
+ cfi_adjust_cfa_offset (-4)
+ cfi_restore (ecx)
popl %eax
+ cfi_adjust_cfa_offset (-4)
+ cfi_restore (eax)
ret
+ cfi_endproc
ASM_SIZE_DIRECTIVE(C_SYMBOL_NAME(_mcount))
#undef mcount
@@ -58,10 +72,17 @@ weak_alias (_mcount, mcount)
.type C_SYMBOL_NAME(__fentry__), @function
.align ALIGNARG(4)
C_LABEL(__fentry__)
+ cfi_startproc
/* Save the caller-clobbered registers. */
pushl %eax
+ cfi_adjust_cfa_offset (4)
pushl %ecx
+ cfi_adjust_cfa_offset (4)
pushl %edx
+ cfi_adjust_cfa_offset (4)
+ cfi_rel_offset (eax, 8)
+ cfi_rel_offset (ecx, 4)
+ cfi_rel_offset (edx, 0)
movl 12(%esp), %edx
movl 16(%esp), %eax
@@ -73,7 +94,14 @@ C_LABEL(__fentry__)
/* Pop the saved registers. Please note that `__fentry__' has no
return value. */
popl %edx
+ cfi_adjust_cfa_offset (-4)
+ cfi_restore (edx)
popl %ecx
+ cfi_adjust_cfa_offset (-4)
+ cfi_restore (ecx)
popl %eax
+ cfi_adjust_cfa_offset (-4)
+ cfi_restore (eax)
ret
+ cfi_endproc
ASM_SIZE_DIRECTIVE(C_SYMBOL_NAME(__fentry__))
diff --git a/sysdeps/i386/nptl/pthread_spin_lock.S b/sysdeps/i386/nptl/pthread_spin_lock.S
index e311d95..ea552da 100644
--- a/sysdeps/i386/nptl/pthread_spin_lock.S
+++ b/sysdeps/i386/nptl/pthread_spin_lock.S
@@ -16,11 +16,9 @@
<http://www.gnu.org/licenses/>.  */
#include <lowlevellock.h>
+#include <sysdep.h>
- .globl pthread_spin_lock
- .type pthread_spin_lock,@function
- .align 16
-pthread_spin_lock:
+ENTRY(pthread_spin_lock)
mov 4(%esp), %eax
1: LOCK
decl 0(%eax)
@@ -34,4 +32,4 @@ pthread_spin_lock:
cmpl $0, 0(%eax)
jg 1b
jmp 2b
- .size pthread_spin_lock,.-pthread_spin_lock
+END(pthread_spin_lock)
diff --git a/sysdeps/i386/nptl/pthread_spin_unlock.S b/sysdeps/i386/nptl/pthread_spin_unlock.S
index a552cf9..ef75a50 100644
--- a/sysdeps/i386/nptl/pthread_spin_unlock.S
+++ b/sysdeps/i386/nptl/pthread_spin_unlock.S
@@ -16,15 +16,14 @@
License along with the GNU C Library; if not, see
<http://www.gnu.org/licenses/>.  */
- .globl pthread_spin_unlock
- .type pthread_spin_unlock,@function
- .align 16
-pthread_spin_unlock:
+#include <sysdep.h>
+
+ENTRY(pthread_spin_unlock)
movl 4(%esp), %eax
movl $1, (%eax)
xorl %eax, %eax
ret
- .size pthread_spin_unlock,.-pthread_spin_unlock
+END(pthread_spin_unlock)
/* The implementation of pthread_spin_init is identical. */
.globl pthread_spin_init
diff --git a/sysdeps/i386/pthread_spin_trylock.S b/sysdeps/i386/pthread_spin_trylock.S
index 36979bd..7d3dabb 100644
--- a/sysdeps/i386/pthread_spin_trylock.S
+++ b/sysdeps/i386/pthread_spin_trylock.S
@@ -17,7 +17,7 @@
<http://www.gnu.org/licenses/>.  */
#include <pthread-errnos.h>
-
+#include <sysdep.h>
#ifdef UP
# define LOCK
@@ -25,10 +25,7 @@
# define LOCK lock
#endif
- .globl pthread_spin_trylock
- .type pthread_spin_trylock,@function
- .align 16
-pthread_spin_trylock:
+ENTRY(pthread_spin_trylock)
movl 4(%esp), %edx
movl $1, %eax
xorl %ecx, %ecx
@@ -43,4 +40,4 @@ pthread_spin_trylock:
0:
#endif
ret
- .size pthread_spin_trylock,.-pthread_spin_trylock
+END(pthread_spin_trylock)
diff --git a/sysdeps/unix/sysv/linux/i386/_exit.S b/sysdeps/unix/sysv/linux/i386/_exit.S
index e1550e6..6193ff2 100644
--- a/sysdeps/unix/sysv/linux/i386/_exit.S
+++ b/sysdeps/unix/sysv/linux/i386/_exit.S
@@ -17,10 +17,7 @@
#include <sysdep.h>
- .text
- .type _exit,@function
- .global _exit
-_exit:
+ENTRY(_exit)
movl 4(%esp), %ebx
/* Try the new syscall first. */
@@ -37,7 +34,7 @@ _exit:
/* This must not fail. Be sure we don't return. */
hlt
- .size _exit,.-_exit
+END(_exit)
libc_hidden_def (_exit)
rtld_hidden_def (_exit)