kernel-fxtec-pro1x/arch/x86_64/lib/rwlock.S
Andi Kleen 9a0b26e6bc [PATCH] Clean up read write lock assembly
- Move the slow path fallbacks to their own assembly files
This makes them much easier to read and is needed for the next change.
- Add CFI annotations for unwinding (XXX need review)
- Remove constant case which can never happen with out of line spinlocks
- Use patchable LOCK prefixes
- Don't use lock sections anymore for inline code because they can't
be expressed by the unwinder (this adds one taken jump to the lock
fast path)

Cc: jbeulich@novell.com

Signed-off-by: Andi Kleen <ak@suse.de>
2006-09-26 10:52:28 +02:00


/* Slow paths of read/write spinlocks. */

#include <linux/linkage.h>
#include <asm/rwlock.h>
#include <asm/alternative-asm.i>
#include <asm/dwarf2.h>

/* rdi: pointer to rwlock_t */
ENTRY(__write_lock_failed)
        CFI_STARTPROC
        LOCK_PREFIX
        addl $RW_LOCK_BIAS,(%rdi)       /* undo the fast path's failed subtract */
1:      rep
        nop                             /* rep; nop is the PAUSE spin-wait hint */
        cmpl $RW_LOCK_BIAS,(%rdi)
        jne 1b                          /* spin until the lock word is fully free */
        LOCK_PREFIX
        subl $RW_LOCK_BIAS,(%rdi)       /* retry: claim the whole write bias */
        jnz __write_lock_failed         /* lost the race, start over */
        ret
        CFI_ENDPROC
END(__write_lock_failed)

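For readers new to the bias trick: the lock word starts at RW_LOCK_BIAS (0x01000000 in the x86 headers of that era). A writer subtracts the whole bias and owns the lock only if the result is zero; anything else means readers or another writer are present, and control lands in __write_lock_failed above, which undoes the subtraction, spins until the word returns to the bias value, and retries. Below is a minimal user-space sketch of that loop; the function names, the demo in main(), and the use of C11 atomics in place of LOCK-prefixed instructions are illustrative only, not kernel code.

#include <stdatomic.h>
#include <stdio.h>

#define RW_LOCK_BIAS 0x01000000             /* value of an unlocked word */

/* Counterpart of __write_lock_failed: the caller has already subtracted
 * RW_LOCK_BIAS from the lock word and seen a non-zero result. */
static void write_lock_failed(atomic_int *lock)
{
        do {
                atomic_fetch_add(lock, RW_LOCK_BIAS);      /* addl $RW_LOCK_BIAS,(%rdi) */
                while (atomic_load(lock) != RW_LOCK_BIAS)  /* 1: rep; nop; cmpl; jne 1b */
                        ;
        } while (atomic_fetch_sub(lock, RW_LOCK_BIAS)      /* subl $RW_LOCK_BIAS,(%rdi) */
                 != RW_LOCK_BIAS);                         /* jnz __write_lock_failed   */
}

/* Inline fast path: one locked subtract plus a branch to the slow path. */
static void write_lock(atomic_int *lock)
{
        if (atomic_fetch_sub(lock, RW_LOCK_BIAS) != RW_LOCK_BIAS)
                write_lock_failed(lock);
}

static void write_unlock(atomic_int *lock)
{
        atomic_fetch_add(lock, RW_LOCK_BIAS);
}

int main(void)
{
        atomic_int lock = RW_LOCK_BIAS;
        write_lock(&lock);
        write_unlock(&lock);
        printf("lock word: 0x%08x\n", (unsigned)atomic_load(&lock));
        return 0;
}

The undo-then-read-only spin mirrors the assembly's intent: while waiting, the cache line is only read, rather than hammered with locked read-modify-write operations.
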
/* rdi: pointer to rwlock_t */
ENTRY(__read_lock_failed)
        CFI_STARTPROC
        LOCK_PREFIX
        incl (%rdi)                     /* undo the fast path's failed decrement */
1:      rep
        nop                             /* rep; nop is the PAUSE spin-wait hint */
        cmpl $1,(%rdi)
        js 1b                           /* spin while a writer holds the lock */
        LOCK_PREFIX
        decl (%rdi)                     /* retry: take one reader count */
        js __read_lock_failed           /* a writer got in first, start over */
        ret
        CFI_ENDPROC
END(__read_lock_failed)
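
The reader side is the same scheme with a step of one: each reader removes a single count, so the word only goes negative when a writer has taken the whole bias away. A matching user-space sketch, again with illustrative names and C11 atomics standing in for the LOCK-prefixed instructions:

#include <stdatomic.h>
#include <stdio.h>

#define RW_LOCK_BIAS 0x01000000             /* value of an unlocked word */

/* Counterpart of __read_lock_failed: the caller has already decremented
 * the lock word and seen a negative result, i.e. a writer holds it. */
static void read_lock_failed(atomic_int *lock)
{
        do {
                atomic_fetch_add(lock, 1);            /* incl (%rdi)                 */
                while (atomic_load(lock) < 1)         /* 1: rep; nop; cmpl $1; js 1b */
                        ;
        } while (atomic_fetch_sub(lock, 1) <= 0);     /* decl (%rdi); js retry       */
}

/* Inline fast path: one locked decrement plus a branch on the sign. */
static void read_lock(atomic_int *lock)
{
        if (atomic_fetch_sub(lock, 1) <= 0)
                read_lock_failed(lock);
}

static void read_unlock(atomic_int *lock)
{
        atomic_fetch_add(lock, 1);
}

int main(void)
{
        atomic_int lock = RW_LOCK_BIAS;
        read_lock(&lock);                  /* first reader */
        read_lock(&lock);                  /* readers nest; only a writer needs the full bias */
        read_unlock(&lock);
        read_unlock(&lock);
        printf("lock word: 0x%08x\n", (unsigned)atomic_load(&lock));
        return 0;
}

In the kernel itself the fast paths are inlined elsewhere (this file only carries the out-of-line fallbacks, as the commit message notes), and they reach these entry points with the address of the lock word in %rdi, per the comments above.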