locking/x86: Use named operands in rwsem.h

Since GCC version 3.1, it is possible to specify input and output
operands using symbolic names, which can be referenced within the
assembler code.

Converting to named operands makes it easier to understand and maintain
the code in the future.

Update operands in asm/rwsem.h accordingly.

Signed-off-by: Miguel Bernal Marin <miguel.bernal.marin@linux.intel.com>
Cc: Andy Lutomirski <luto@kernel.org>
Cc: Borislav Petkov <bp@alien8.de>
Cc: Brian Gerst <brgerst@gmail.com>
Cc: Denys Vlasenko <dvlasenk@redhat.com>
Cc: H. Peter Anvin <hpa@zytor.com>
Cc: Josh Poimboeuf <jpoimboe@redhat.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Link: http://lkml.kernel.org/r/20170925230349.18834-1-miguel.bernal.marin@linux.intel.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
This commit is contained in:
Miguel Bernal Marin 2017-09-25 18:03:49 -05:00 committed by Ingo Molnar
parent 9cd6681cb1
commit 30c23f29d2

View file

@@ -63,14 +63,14 @@
static inline void __down_read(struct rw_semaphore *sem) static inline void __down_read(struct rw_semaphore *sem)
{ {
asm volatile("# beginning down_read\n\t" asm volatile("# beginning down_read\n\t"
LOCK_PREFIX _ASM_INC "(%1)\n\t" LOCK_PREFIX _ASM_INC "(%[sem])\n\t"
/* adds 0x00000001 */ /* adds 0x00000001 */
" jns 1f\n" " jns 1f\n"
" call call_rwsem_down_read_failed\n" " call call_rwsem_down_read_failed\n"
"1:\n\t" "1:\n\t"
"# ending down_read\n\t" "# ending down_read\n\t"
: "+m" (sem->count) : "+m" (sem->count)
: "a" (sem) : [sem] "a" (sem)
: "memory", "cc"); : "memory", "cc");
} }
@@ -81,17 +81,18 @@ static inline bool __down_read_trylock(struct rw_semaphore *sem)
{ {
long result, tmp; long result, tmp;
asm volatile("# beginning __down_read_trylock\n\t" asm volatile("# beginning __down_read_trylock\n\t"
" mov %0,%1\n\t" " mov %[count],%[result]\n\t"
"1:\n\t" "1:\n\t"
" mov %1,%2\n\t" " mov %[result],%[tmp]\n\t"
" add %3,%2\n\t" " add %[inc],%[tmp]\n\t"
" jle 2f\n\t" " jle 2f\n\t"
LOCK_PREFIX " cmpxchg %2,%0\n\t" LOCK_PREFIX " cmpxchg %[tmp],%[count]\n\t"
" jnz 1b\n\t" " jnz 1b\n\t"
"2:\n\t" "2:\n\t"
"# ending __down_read_trylock\n\t" "# ending __down_read_trylock\n\t"
: "+m" (sem->count), "=&a" (result), "=&r" (tmp) : [count] "+m" (sem->count), [result] "=&a" (result),
: "i" (RWSEM_ACTIVE_READ_BIAS) [tmp] "=&r" (tmp)
: [inc] "i" (RWSEM_ACTIVE_READ_BIAS)
: "memory", "cc"); : "memory", "cc");
return result >= 0; return result >= 0;
} }
@@ -105,7 +106,7 @@ static inline bool __down_read_trylock(struct rw_semaphore *sem)
struct rw_semaphore* ret; \ struct rw_semaphore* ret; \
\ \
asm volatile("# beginning down_write\n\t" \ asm volatile("# beginning down_write\n\t" \
LOCK_PREFIX " xadd %1,(%4)\n\t" \ LOCK_PREFIX " xadd %[tmp],(%[sem])\n\t" \
/* adds 0xffff0001, returns the old value */ \ /* adds 0xffff0001, returns the old value */ \
" test " __ASM_SEL(%w1,%k1) "," __ASM_SEL(%w1,%k1) "\n\t" \ " test " __ASM_SEL(%w1,%k1) "," __ASM_SEL(%w1,%k1) "\n\t" \
/* was the active mask 0 before? */\ /* was the active mask 0 before? */\
@@ -113,9 +114,9 @@ static inline bool __down_read_trylock(struct rw_semaphore *sem)
" call " slow_path "\n" \ " call " slow_path "\n" \
"1:\n" \ "1:\n" \
"# ending down_write" \ "# ending down_write" \
: "+m" (sem->count), "=d" (tmp), \ : "+m" (sem->count), [tmp] "=d" (tmp), \
"=a" (ret), ASM_CALL_CONSTRAINT \ "=a" (ret), ASM_CALL_CONSTRAINT \
: "a" (sem), "1" (RWSEM_ACTIVE_WRITE_BIAS) \ : [sem] "a" (sem), "[tmp]" (RWSEM_ACTIVE_WRITE_BIAS) \
: "memory", "cc"); \ : "memory", "cc"); \
ret; \ ret; \
}) })
@@ -141,21 +142,21 @@ static inline bool __down_write_trylock(struct rw_semaphore *sem)
bool result; bool result;
long tmp0, tmp1; long tmp0, tmp1;
asm volatile("# beginning __down_write_trylock\n\t" asm volatile("# beginning __down_write_trylock\n\t"
" mov %0,%1\n\t" " mov %[count],%[tmp0]\n\t"
"1:\n\t" "1:\n\t"
" test " __ASM_SEL(%w1,%k1) "," __ASM_SEL(%w1,%k1) "\n\t" " test " __ASM_SEL(%w1,%k1) "," __ASM_SEL(%w1,%k1) "\n\t"
/* was the active mask 0 before? */ /* was the active mask 0 before? */
" jnz 2f\n\t" " jnz 2f\n\t"
" mov %1,%2\n\t" " mov %[tmp0],%[tmp1]\n\t"
" add %4,%2\n\t" " add %[inc],%[tmp1]\n\t"
LOCK_PREFIX " cmpxchg %2,%0\n\t" LOCK_PREFIX " cmpxchg %[tmp1],%[count]\n\t"
" jnz 1b\n\t" " jnz 1b\n\t"
"2:\n\t" "2:\n\t"
CC_SET(e) CC_SET(e)
"# ending __down_write_trylock\n\t" "# ending __down_write_trylock\n\t"
: "+m" (sem->count), "=&a" (tmp0), "=&r" (tmp1), : [count] "+m" (sem->count), [tmp0] "=&a" (tmp0),
CC_OUT(e) (result) [tmp1] "=&r" (tmp1), CC_OUT(e) (result)
: "er" (RWSEM_ACTIVE_WRITE_BIAS) : [inc] "er" (RWSEM_ACTIVE_WRITE_BIAS)
: "memory"); : "memory");
return result; return result;
} }
@@ -167,14 +168,14 @@ static inline void __up_read(struct rw_semaphore *sem)
{ {
long tmp; long tmp;
asm volatile("# beginning __up_read\n\t" asm volatile("# beginning __up_read\n\t"
LOCK_PREFIX " xadd %1,(%2)\n\t" LOCK_PREFIX " xadd %[tmp],(%[sem])\n\t"
/* subtracts 1, returns the old value */ /* subtracts 1, returns the old value */
" jns 1f\n\t" " jns 1f\n\t"
" call call_rwsem_wake\n" /* expects old value in %edx */ " call call_rwsem_wake\n" /* expects old value in %edx */
"1:\n" "1:\n"
"# ending __up_read\n" "# ending __up_read\n"
: "+m" (sem->count), "=d" (tmp) : "+m" (sem->count), [tmp] "=d" (tmp)
: "a" (sem), "1" (-RWSEM_ACTIVE_READ_BIAS) : [sem] "a" (sem), "[tmp]" (-RWSEM_ACTIVE_READ_BIAS)
: "memory", "cc"); : "memory", "cc");
} }
@@ -185,14 +186,14 @@ static inline void __up_write(struct rw_semaphore *sem)
{ {
long tmp; long tmp;
asm volatile("# beginning __up_write\n\t" asm volatile("# beginning __up_write\n\t"
LOCK_PREFIX " xadd %1,(%2)\n\t" LOCK_PREFIX " xadd %[tmp],(%[sem])\n\t"
/* subtracts 0xffff0001, returns the old value */ /* subtracts 0xffff0001, returns the old value */
" jns 1f\n\t" " jns 1f\n\t"
" call call_rwsem_wake\n" /* expects old value in %edx */ " call call_rwsem_wake\n" /* expects old value in %edx */
"1:\n\t" "1:\n\t"
"# ending __up_write\n" "# ending __up_write\n"
: "+m" (sem->count), "=d" (tmp) : "+m" (sem->count), [tmp] "=d" (tmp)
: "a" (sem), "1" (-RWSEM_ACTIVE_WRITE_BIAS) : [sem] "a" (sem), "[tmp]" (-RWSEM_ACTIVE_WRITE_BIAS)
: "memory", "cc"); : "memory", "cc");
} }
@@ -202,7 +203,7 @@ static inline void __up_write(struct rw_semaphore *sem)
static inline void __downgrade_write(struct rw_semaphore *sem) static inline void __downgrade_write(struct rw_semaphore *sem)
{ {
asm volatile("# beginning __downgrade_write\n\t" asm volatile("# beginning __downgrade_write\n\t"
LOCK_PREFIX _ASM_ADD "%2,(%1)\n\t" LOCK_PREFIX _ASM_ADD "%[inc],(%[sem])\n\t"
/* /*
* transitions 0xZZZZ0001 -> 0xYYYY0001 (i386) * transitions 0xZZZZ0001 -> 0xYYYY0001 (i386)
* 0xZZZZZZZZ00000001 -> 0xYYYYYYYY00000001 (x86_64) * 0xZZZZZZZZ00000001 -> 0xYYYYYYYY00000001 (x86_64)
@@ -212,7 +213,7 @@ static inline void __downgrade_write(struct rw_semaphore *sem)
"1:\n\t" "1:\n\t"
"# ending __downgrade_write\n" "# ending __downgrade_write\n"
: "+m" (sem->count) : "+m" (sem->count)
: "a" (sem), "er" (-RWSEM_WAITING_BIAS) : [sem] "a" (sem), [inc] "er" (-RWSEM_WAITING_BIAS)
: "memory", "cc"); : "memory", "cc");
} }