mirror of
https://github.com/adulau/aha.git
synced 2024-12-28 19:56:18 +00:00
[PATCH] s390: spinlock corner case
On s390 the lock value used for spinlocks consists of the lower 32 bits of the PSW that holds the lock. If this address happens to be on a four gigabyte boundary the lock is left unlocked. This allows other cpus to grab the same lock and enter a lock protected code path concurrently. In theory this can happen if the vmalloc area for the code of a module crosses a 4 GB boundary. Signed-off-by: Heiko Carstens <heiko.carstens@de.ibm.com> Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com> Signed-off-by: Andrew Morton <akpm@osdl.org> Signed-off-by: Linus Torvalds <torvalds@osdl.org>
This commit is contained in:
parent
c563077e52
commit
9513e5e3f5
1 changed file with 2 additions and 2 deletions
|
@@ -47,7 +47,7 @@ extern int _raw_spin_trylock_retry(spinlock_t *lp, unsigned int pc);
|
|||
|
||||
static inline void _raw_spin_lock(spinlock_t *lp)
|
||||
{
|
||||
unsigned long pc = (unsigned long) __builtin_return_address(0);
|
||||
unsigned long pc = 1 | (unsigned long) __builtin_return_address(0);
|
||||
|
||||
if (unlikely(_raw_compare_and_swap(&lp->lock, 0, pc) != 0))
|
||||
_raw_spin_lock_wait(lp, pc);
|
||||
|
@@ -55,7 +55,7 @@ static inline void _raw_spin_lock(spinlock_t *lp)
|
|||
|
||||
static inline int _raw_spin_trylock(spinlock_t *lp)
|
||||
{
|
||||
unsigned long pc = (unsigned long) __builtin_return_address(0);
|
||||
unsigned long pc = 1 | (unsigned long) __builtin_return_address(0);
|
||||
|
||||
if (likely(_raw_compare_and_swap(&lp->lock, 0, pc) == 0))
|
||||
return 1;
|
||||
|
|
Loading…
Reference in a new issue