| author | Ralf Baechle <ralf@linux-mips.org> | 2006-08-31 14:16:06 +0100 |
|---|---|---|
| committer | Ralf Baechle <ralf@linux-mips.org> | 2006-09-27 13:37:39 +0100 |
| commit | 65316fd13ad9d82560edbad0a940d684380f7461 (patch) | |
| tree | 9b3bf885035525a42143a49952b5eabb45fab65e /include/asm-mips | |
| parent | 09f451bfb9fe5ceea763a9ebd6ec73dd05e4faf8 (diff) | |
| download | linux-65316fd13ad9d82560edbad0a940d684380f7461.tar.bz2 | |
[MIPS] Replace generic__raw_read_trylock usage
generic__raw_read_trylock() is a defective generic function that actually does
a __raw_read_lock ...
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
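
For context, and consistent with the commit message above, the generic helper being replaced had roughly the following shape (a from-memory sketch, not a verbatim copy of the kernel source of that era; it assumes the usual raw_rwlock_t and __raw_read_lock definitions from <asm/spinlock.h>): it acquires the read lock unconditionally and always reports success, so a caller can never observe a "lock busy" result, which defeats the purpose of a trylock.

/* Approximate shape of the generic helper this commit replaces (sketch only):
 * it simply takes the read lock and claims success unconditionally, so it can
 * spin/block and never reports failure -- not trylock semantics. */
int generic__raw_read_trylock(raw_rwlock_t *lock)
{
	__raw_read_lock(lock);	/* may spin until the lock becomes free */
	return 1;		/* always "succeeds" */
}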
Diffstat (limited to 'include/asm-mips')
-rw-r--r-- | include/asm-mips/spinlock.h | 47 |
1 file changed, 46 insertions(+), 1 deletion(-)
diff --git a/include/asm-mips/spinlock.h b/include/asm-mips/spinlock.h
index 669b8e349ff2..4c1a1b53aeaf 100644
--- a/include/asm-mips/spinlock.h
+++ b/include/asm-mips/spinlock.h
@@ -239,7 +239,51 @@ static inline void __raw_write_unlock(raw_rwlock_t *rw)
 	: "memory");
 }
 
-#define __raw_read_trylock(lock)	generic__raw_read_trylock(lock)
+static inline int __raw_read_trylock(raw_rwlock_t *rw)
+{
+	unsigned int tmp;
+	int ret;
+
+	if (R10000_LLSC_WAR) {
+		__asm__ __volatile__(
+		"	.set	noreorder	# __raw_read_trylock	\n"
+		"	li	%2, 0					\n"
+		"1:	ll	%1, %3					\n"
+		"	bnez	%1, 2f					\n"
+		"	 addu	%1, 1					\n"
+		"	sc	%1, %0					\n"
+		"	beqzl	%1, 1b					\n"
+		"	.set	reorder					\n"
+#ifdef CONFIG_SMP
+		"	sync						\n"
+#endif
+		"	li	%2, 1					\n"
+		"2:							\n"
+		: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
+		: "m" (rw->lock)
+		: "memory");
+	} else {
+		__asm__ __volatile__(
+		"	.set	noreorder	# __raw_read_trylock	\n"
+		"	li	%2, 0					\n"
+		"1:	ll	%1, %3					\n"
+		"	bnez	%1, 2f					\n"
+		"	 addu	%1, 1					\n"
+		"	sc	%1, %0					\n"
+		"	beqz	%1, 1b					\n"
+		"	.set	reorder					\n"
+#ifdef CONFIG_SMP
+		"	sync						\n"
+#endif
+		"	li	%2, 1					\n"
+		"2:							\n"
+		: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
+		: "m" (rw->lock)
+		: "memory");
+	}
+
+	return ret;
+}
 
 static inline int __raw_write_trylock(raw_rwlock_t *rw)
 {
@@ -283,4 +327,5 @@ static inline int __raw_write_trylock(raw_rwlock_t *rw)
 
 	return ret;
 }
+
 #endif /* _ASM_SPINLOCK_H */
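
At the C level, the new inline-assembly trylock above behaves roughly like the following sketch (names such as sketch_rwlock and sketch_read_trylock are hypothetical, introduced only for illustration): the ll/sc pair is modelled with a C11 compare-and-swap, and the SMP "sync" barrier is approximated by acquire ordering on success. Like the assembly, this only succeeds when the lock word is exactly 0, i.e. nobody currently holds the rwlock; the two assembly variants in the diff differ only in using the branch-likely beqzl (R10000_LLSC_WAR path) versus beqz for the sc retry.

#include <stdatomic.h>

/* Hypothetical stand-in for raw_rwlock_t, for illustration only. */
struct sketch_rwlock {
	atomic_uint lock;
};

static inline int sketch_read_trylock(struct sketch_rwlock *rw)
{
	unsigned int old;

	do {
		/* "ll %1, %3": load the current lock word */
		old = atomic_load_explicit(&rw->lock, memory_order_relaxed);
		if (old != 0)		/* "bnez %1, 2f": lock held */
			return 0;	/* busy: ret stays 0 */
		/* "addu %1, 1" then "sc %1, %0"; on sc failure retry from 1: */
	} while (!atomic_compare_exchange_weak_explicit(&rw->lock, &old, old + 1,
							memory_order_acquire,
							memory_order_relaxed));

	return 1;	/* corresponds to "li %2, 1" after the SMP "sync" */
}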