Diffstat (limited to 'include/asm-mips/atomic.h')
-rw-r--r--  include/asm-mips/atomic.h | 57
1 file changed, 35 insertions(+), 22 deletions(-)
diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index 55c37c106ef0..2c8b853376c9 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -24,10 +24,9 @@
#define _ASM_ATOMIC_H
#include <asm/cpu-features.h>
+#include <asm/interrupt.h>
#include <asm/war.h>
-extern spinlock_t atomic_lock;
-
typedef struct { volatile int counter; } atomic_t;
#define ATOMIC_INIT(i) { (i) }
@@ -85,9 +84,9 @@ static __inline__ void atomic_add(int i, atomic_t * v)
} else {
unsigned long flags;
- spin_lock_irqsave(&atomic_lock, flags);
+ local_irq_save(flags);
v->counter += i;
- spin_unlock_irqrestore(&atomic_lock, flags);
+ local_irq_restore(flags);
}
}
@@ -127,9 +126,9 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
} else {
unsigned long flags;
- spin_lock_irqsave(&atomic_lock, flags);
+ local_irq_save(flags);
v->counter -= i;
- spin_unlock_irqrestore(&atomic_lock, flags);
+ local_irq_restore(flags);
}
}
@@ -173,11 +172,11 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
} else {
unsigned long flags;
- spin_lock_irqsave(&atomic_lock, flags);
+ local_irq_save(flags);
result = v->counter;
result += i;
v->counter = result;
- spin_unlock_irqrestore(&atomic_lock, flags);
+ local_irq_restore(flags);
}
return result;
@@ -220,11 +219,11 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
} else {
unsigned long flags;
- spin_lock_irqsave(&atomic_lock, flags);
+ local_irq_save(flags);
result = v->counter;
result -= i;
v->counter = result;
- spin_unlock_irqrestore(&atomic_lock, flags);
+ local_irq_restore(flags);
}
return result;
@@ -251,7 +250,10 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
" subu %0, %1, %3 \n"
" bltz %0, 1f \n"
" sc %0, %2 \n"
+ " .set noreorder \n"
" beqzl %0, 1b \n"
+ " subu %0, %1, %3 \n"
+ " .set reorder \n"
" sync \n"
"1: \n"
" .set mips0 \n"
@@ -267,7 +269,10 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
" subu %0, %1, %3 \n"
" bltz %0, 1f \n"
" sc %0, %2 \n"
+ " .set noreorder \n"
" beqz %0, 1b \n"
+ " subu %0, %1, %3 \n"
+ " .set reorder \n"
" sync \n"
"1: \n"
" .set mips0 \n"
@@ -277,18 +282,19 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
} else {
unsigned long flags;
- spin_lock_irqsave(&atomic_lock, flags);
+ local_irq_save(flags);
result = v->counter;
result -= i;
if (result >= 0)
v->counter = result;
- spin_unlock_irqrestore(&atomic_lock, flags);
+ local_irq_restore(flags);
}
return result;
}
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
/**
* atomic_add_unless - add unless the number is a given value
@@ -432,9 +438,9 @@ static __inline__ void atomic64_add(long i, atomic64_t * v)
} else {
unsigned long flags;
- spin_lock_irqsave(&atomic_lock, flags);
+ local_irq_save(flags);
v->counter += i;
- spin_unlock_irqrestore(&atomic_lock, flags);
+ local_irq_restore(flags);
}
}
@@ -474,9 +480,9 @@ static __inline__ void atomic64_sub(long i, atomic64_t * v)
} else {
unsigned long flags;
- spin_lock_irqsave(&atomic_lock, flags);
+ local_irq_save(flags);
v->counter -= i;
- spin_unlock_irqrestore(&atomic_lock, flags);
+ local_irq_restore(flags);
}
}
@@ -520,11 +526,11 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
} else {
unsigned long flags;
- spin_lock_irqsave(&atomic_lock, flags);
+ local_irq_save(flags);
result = v->counter;
result += i;
v->counter = result;
- spin_unlock_irqrestore(&atomic_lock, flags);
+ local_irq_restore(flags);
}
return result;
@@ -567,11 +573,11 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
} else {
unsigned long flags;
- spin_lock_irqsave(&atomic_lock, flags);
+ local_irq_save(flags);
result = v->counter;
result -= i;
v->counter = result;
- spin_unlock_irqrestore(&atomic_lock, flags);
+ local_irq_restore(flags);
}
return result;
@@ -598,7 +604,10 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
" dsubu %0, %1, %3 \n"
" bltz %0, 1f \n"
" scd %0, %2 \n"
+ " .set noreorder \n"
" beqzl %0, 1b \n"
+ " dsubu %0, %1, %3 \n"
+ " .set reorder \n"
" sync \n"
"1: \n"
" .set mips0 \n"
@@ -614,7 +623,10 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
" dsubu %0, %1, %3 \n"
" bltz %0, 1f \n"
" scd %0, %2 \n"
+ " .set noreorder \n"
" beqz %0, 1b \n"
+ " dsubu %0, %1, %3 \n"
+ " .set reorder \n"
" sync \n"
"1: \n"
" .set mips0 \n"
@@ -624,12 +636,12 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
} else {
unsigned long flags;
- spin_lock_irqsave(&atomic_lock, flags);
+ local_irq_save(flags);
result = v->counter;
result -= i;
if (result >= 0)
v->counter = result;
- spin_unlock_irqrestore(&atomic_lock, flags);
+ local_irq_restore(flags);
}
return result;
@@ -713,4 +725,5 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
#define smp_mb__before_atomic_inc() smp_mb()
#define smp_mb__after_atomic_inc() smp_mb()
+#include <asm-generic/atomic.h>
#endif /* _ASM_ATOMIC_H */