Diffstat (limited to 'lib/atomic64.c')
-rw-r--r--	lib/atomic64.c	78
1 file changed, 48 insertions(+), 30 deletions(-)
diff --git a/lib/atomic64.c b/lib/atomic64.c
index caf895789a1e..1a72bba36d24 100644
--- a/lib/atomic64.c
+++ b/lib/atomic64.c
@@ -25,15 +25,15 @@
  * Ensure each lock is in a separate cacheline.
  */
 static union {
-	raw_spinlock_t lock;
+	arch_spinlock_t lock;
 	char pad[L1_CACHE_BYTES];
 } atomic64_lock[NR_LOCKS] __cacheline_aligned_in_smp = {
 	[0 ... (NR_LOCKS - 1)] = {
-		.lock =  __RAW_SPIN_LOCK_UNLOCKED(atomic64_lock.lock),
+		.lock =  __ARCH_SPIN_LOCK_UNLOCKED,
 	},
 };
 
-static inline raw_spinlock_t *lock_addr(const atomic64_t *v)
+static inline arch_spinlock_t *lock_addr(const atomic64_t *v)
 {
 	unsigned long addr = (unsigned long) v;
 
@@ -45,12 +45,14 @@ static inline raw_spinlock_t *lock_addr(const atomic64_t *v)
 s64 generic_atomic64_read(const atomic64_t *v)
 {
 	unsigned long flags;
-	raw_spinlock_t *lock = lock_addr(v);
+	arch_spinlock_t *lock = lock_addr(v);
 	s64 val;
 
-	raw_spin_lock_irqsave(lock, flags);
+	local_irq_save(flags);
+	arch_spin_lock(lock);
 	val = v->counter;
-	raw_spin_unlock_irqrestore(lock, flags);
+	arch_spin_unlock(lock);
+	local_irq_restore(flags);
 	return val;
 }
 EXPORT_SYMBOL(generic_atomic64_read);
@@ -58,11 +60,13 @@ EXPORT_SYMBOL(generic_atomic64_read);
 void generic_atomic64_set(atomic64_t *v, s64 i)
 {
 	unsigned long flags;
-	raw_spinlock_t *lock = lock_addr(v);
+	arch_spinlock_t *lock = lock_addr(v);
 
-	raw_spin_lock_irqsave(lock, flags);
+	local_irq_save(flags);
+	arch_spin_lock(lock);
 	v->counter = i;
-	raw_spin_unlock_irqrestore(lock, flags);
+	arch_spin_unlock(lock);
+	local_irq_restore(flags);
 }
 EXPORT_SYMBOL(generic_atomic64_set);
 
@@ -70,11 +74,13 @@ EXPORT_SYMBOL(generic_atomic64_set);
 void generic_atomic64_##op(s64 a, atomic64_t *v)			\
 {									\
 	unsigned long flags;						\
-	raw_spinlock_t *lock = lock_addr(v);				\
+	arch_spinlock_t *lock = lock_addr(v);				\
 									\
-	raw_spin_lock_irqsave(lock, flags);				\
+	local_irq_save(flags);						\
+	arch_spin_lock(lock);						\
 	v->counter c_op a;						\
-	raw_spin_unlock_irqrestore(lock, flags);			\
+	arch_spin_unlock(lock);						\
+	local_irq_restore(flags);					\
 }									\
 EXPORT_SYMBOL(generic_atomic64_##op);
 
@@ -82,12 +88,14 @@ EXPORT_SYMBOL(generic_atomic64_##op);
 s64 generic_atomic64_##op##_return(s64 a, atomic64_t *v)		\
 {									\
 	unsigned long flags;						\
-	raw_spinlock_t *lock = lock_addr(v);				\
+	arch_spinlock_t *lock = lock_addr(v);				\
 	s64 val;							\
 									\
-	raw_spin_lock_irqsave(lock, flags);				\
+	local_irq_save(flags);						\
+	arch_spin_lock(lock);						\
 	val = (v->counter c_op a);					\
-	raw_spin_unlock_irqrestore(lock, flags);			\
+	arch_spin_unlock(lock);						\
+	local_irq_restore(flags);					\
 	return val;							\
 }									\
 EXPORT_SYMBOL(generic_atomic64_##op##_return);
@@ -96,13 +104,15 @@ EXPORT_SYMBOL(generic_atomic64_##op##_return);
 s64 generic_atomic64_fetch_##op(s64 a, atomic64_t *v)			\
 {									\
 	unsigned long flags;						\
-	raw_spinlock_t *lock = lock_addr(v);				\
+	arch_spinlock_t *lock = lock_addr(v);				\
 	s64 val;							\
 									\
-	raw_spin_lock_irqsave(lock, flags);				\
+	local_irq_save(flags);						\
+	arch_spin_lock(lock);						\
 	val = v->counter;						\
 	v->counter c_op a;						\
-	raw_spin_unlock_irqrestore(lock, flags);			\
+	arch_spin_unlock(lock);						\
+	local_irq_restore(flags);					\
 	return val;							\
 }									\
 EXPORT_SYMBOL(generic_atomic64_fetch_##op);
@@ -131,14 +141,16 @@ ATOMIC64_OPS(xor, ^=)
 s64 generic_atomic64_dec_if_positive(atomic64_t *v)
 {
 	unsigned long flags;
-	raw_spinlock_t *lock = lock_addr(v);
+	arch_spinlock_t *lock = lock_addr(v);
 	s64 val;
 
-	raw_spin_lock_irqsave(lock, flags);
+	local_irq_save(flags);
+	arch_spin_lock(lock);
 	val = v->counter - 1;
 	if (val >= 0)
 		v->counter = val;
-	raw_spin_unlock_irqrestore(lock, flags);
+	arch_spin_unlock(lock);
+	local_irq_restore(flags);
 	return val;
 }
 EXPORT_SYMBOL(generic_atomic64_dec_if_positive);
@@ -146,14 +158,16 @@ EXPORT_SYMBOL(generic_atomic64_dec_if_positive);
 s64 generic_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n)
 {
 	unsigned long flags;
-	raw_spinlock_t *lock = lock_addr(v);
+	arch_spinlock_t *lock = lock_addr(v);
 	s64 val;
 
-	raw_spin_lock_irqsave(lock, flags);
+	local_irq_save(flags);
+	arch_spin_lock(lock);
 	val = v->counter;
 	if (val == o)
 		v->counter = n;
-	raw_spin_unlock_irqrestore(lock, flags);
+	arch_spin_unlock(lock);
+	local_irq_restore(flags);
 	return val;
 }
 EXPORT_SYMBOL(generic_atomic64_cmpxchg);
@@ -161,13 +175,15 @@ EXPORT_SYMBOL(generic_atomic64_cmpxchg);
 s64 generic_atomic64_xchg(atomic64_t *v, s64 new)
 {
 	unsigned long flags;
-	raw_spinlock_t *lock = lock_addr(v);
+	arch_spinlock_t *lock = lock_addr(v);
 	s64 val;
 
-	raw_spin_lock_irqsave(lock, flags);
+	local_irq_save(flags);
+	arch_spin_lock(lock);
 	val = v->counter;
 	v->counter = new;
-	raw_spin_unlock_irqrestore(lock, flags);
+	arch_spin_unlock(lock);
+	local_irq_restore(flags);
 	return val;
 }
 EXPORT_SYMBOL(generic_atomic64_xchg);
@@ -175,14 +191,16 @@ EXPORT_SYMBOL(generic_atomic64_xchg);
 s64 generic_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
 {
 	unsigned long flags;
-	raw_spinlock_t *lock = lock_addr(v);
+	arch_spinlock_t *lock = lock_addr(v);
 	s64 val;
 
-	raw_spin_lock_irqsave(lock, flags);
+	local_irq_save(flags);
+	arch_spin_lock(lock);
 	val = v->counter;
 	if (val != u)
 		v->counter += a;
-	raw_spin_unlock_irqrestore(lock, flags);
+	arch_spin_unlock(lock);
+	local_irq_restore(flags);
 	return val;
 }
 EXPORT_SYMBOL(generic_atomic64_fetch_add_unless);
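As background for the diff above, the sketch below reimplements the file's hashed-lock pattern in ordinary userspace C so it can be compiled and run outside the kernel: a small, fixed pool of cacheline-padded spinlocks, with the lock for a given 64-bit variable chosen by hashing its address. The names NR_LOCKS, L1_CACHE_BYTES, atomic64_lock and lock_addr mirror the diff; the pthread locks, the 64-byte cacheline size and the shift-based hash are illustrative assumptions, not the kernel's exact implementation.

/* Userspace sketch of the hashed-lock scheme used by lib/atomic64.c.
 * Build with: cc demo.c -o demo -lpthread
 * Constants and the hash function are assumptions for illustration. */
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

#define L1_CACHE_BYTES	64	/* assumed cacheline size */
#define NR_LOCKS	16	/* pool size; must be a power of two */

typedef struct { int64_t counter; } atomic64_t;

/* Pad each lock to a full cacheline so no two locks share one
 * (avoids false sharing between CPUs spinning on different locks). */
static union {
	pthread_spinlock_t lock;
	char pad[L1_CACHE_BYTES];
} atomic64_lock[NR_LOCKS] __attribute__((aligned(L1_CACHE_BYTES)));

/* Hash the variable's address down to one lock in the pool. */
static pthread_spinlock_t *lock_addr(const atomic64_t *v)
{
	uintptr_t addr = (uintptr_t)v;

	addr >>= 6;		/* drop the in-cacheline offset bits */
	return &atomic64_lock[addr & (NR_LOCKS - 1)].lock;
}

/* Same shape as generic_atomic64_add_return() in the diff:
 * take the hashed lock, update the counter, return the new value. */
static int64_t atomic64_add_return(int64_t a, atomic64_t *v)
{
	pthread_spinlock_t *lock = lock_addr(v);
	int64_t val;

	pthread_spin_lock(lock);
	val = (v->counter += a);
	pthread_spin_unlock(lock);
	return val;
}

int main(void)
{
	atomic64_t x = { 0 };

	for (int i = 0; i < NR_LOCKS; i++)
		pthread_spin_init(&atomic64_lock[i].lock, PTHREAD_PROCESS_PRIVATE);

	printf("add_return(5) -> %lld\n", (long long)atomic64_add_return(5, &x));
	return 0;
}

The design trades one lock per variable for a constant-size pool: any number of atomic64_t objects share NR_LOCKS locks, and hashing by address keeps unrelated variables from always colliding on the same lock.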
