config SYS_HAS_CPU_SB1
bool
+#
+# CPU may reorder R->R, R->W, W->R, W->W
+# Reordering beyond LL and SC is handled by WEAK_REORDERING_BEYOND_LLSC
+#
config WEAK_ORDERING
bool
+
+#
+# CPU may reorder reads and writes beyond LL/SC
+# CPU may reorder R->LL, W->LL, R->SC, W->SC
+#
+config WEAK_REORDERING_BEYOND_LLSC
+ bool
endmenu
#
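In short, WEAK_ORDERING gates the ordinary smp_mb()/smp_rmb()/smp_wmb() barriers, while WEAK_REORDERING_BEYOND_LLSC additionally gates the new smp_llsc_mb() family introduced in barrier.h below for LL/SC sequences. A hedged C sketch of the plain case the first comment describes (R->R/W->W reordering); the variables and functions are invented and assume the usual kernel barrier declarations:

/* Hedged illustration only; "msg" and "ready" are hypothetical shared data. */
static int msg;
static int ready;

static void sender(void)
{
        msg = 42;
        smp_wmb();              /* a sync only on CONFIG_WEAK_ORDERING SMP kernels */
        ready = 1;
}

static int receiver(void)
{
        if (ready) {
                smp_rmb();      /* pairs with the smp_wmb() in sender() */
                return msg;     /* observes 42 once ready == 1 is seen */
        }
        return -1;              /* not ready yet */
}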
{
unsigned long result;
- smp_mb();
+ smp_llsc_mb();
if (cpu_has_llsc && R10000_LLSC_WAR) {
unsigned long temp;
raw_local_irq_restore(flags);
}
- smp_mb();
+ smp_llsc_mb();
return result;
}
{
unsigned long result;
- smp_mb();
+ smp_llsc_mb();
if (cpu_has_llsc && R10000_LLSC_WAR) {
unsigned long temp;
raw_local_irq_restore(flags);
}
- smp_mb();
+ smp_llsc_mb();
return result;
}
{
unsigned long result;
- smp_mb();
+ smp_llsc_mb();
if (cpu_has_llsc && R10000_LLSC_WAR) {
unsigned long temp;
raw_local_irq_restore(flags);
}
- smp_mb();
+ smp_llsc_mb();
return result;
}
{
unsigned long result;
- smp_mb();
+ smp_llsc_mb();
if (cpu_has_llsc && R10000_LLSC_WAR) {
unsigned long temp;
raw_local_irq_restore(flags);
}
- smp_mb();
+ smp_llsc_mb();
return result;
}
{
unsigned long result;
- smp_mb();
+ smp_llsc_mb();
if (cpu_has_llsc && R10000_LLSC_WAR) {
unsigned long temp;
raw_local_irq_restore(flags);
}
- smp_mb();
+ smp_llsc_mb();
return result;
}
{
unsigned long result;
- smp_mb();
+ smp_llsc_mb();
if (cpu_has_llsc && R10000_LLSC_WAR) {
unsigned long temp;
raw_local_irq_restore(flags);
}
- smp_mb();
+ smp_llsc_mb();
return result;
}
 * atomic*_return operations are serializing, but the non-*_return
 * versions are not.
*/
-#define smp_mb__before_atomic_dec() smp_mb()
-#define smp_mb__after_atomic_dec() smp_mb()
-#define smp_mb__before_atomic_inc() smp_mb()
-#define smp_mb__after_atomic_inc() smp_mb()
+#define smp_mb__before_atomic_dec() smp_llsc_mb()
+#define smp_mb__after_atomic_dec() smp_llsc_mb()
+#define smp_mb__before_atomic_inc() smp_llsc_mb()
+#define smp_mb__after_atomic_inc() smp_llsc_mb()
#include <asm-generic/atomic.h>
+
#endif /* _ASM_ATOMIC_H */
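As the comment above says, only the value-returning atomic operations are serializing; a bare atomic_dec() or atomic_inc() is not, which is what these wrapper macros are for. A hedged usage sketch, with an invented structure and field names, assuming <asm/atomic.h>:

struct my_obj {
        int state;              /* hypothetical per-object state */
        atomic_t users;         /* hypothetical reference count */
};

static void my_obj_put(struct my_obj *obj)
{
        obj->state = 0;                 /* must be visible before the count drops */
        smp_mb__before_atomic_dec();    /* expands to smp_llsc_mb() after this patch */
        atomic_dec(&obj->users);        /* not serializing on its own */
}

On CPUs that do not select WEAK_REORDERING_BEYOND_LLSC this now costs nothing more than a compiler barrier.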
#else
#define __WEAK_ORDERING_MB " \n"
#endif
+#if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
+#define __WEAK_LLSC_MB " sync \n"
+#else
+#define __WEAK_LLSC_MB " \n"
+#endif
#define smp_mb() __asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
#define smp_rmb() __asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
#define set_mb(var, value) \
do { var = value; smp_mb(); } while (0)
+#define smp_llsc_mb() __asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
+#define smp_llsc_rmb() __asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
+#define smp_llsc_wmb() __asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
+
#endif /* __ASM_BARRIER_H */
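With these definitions smp_llsc_mb(), smp_llsc_rmb() and smp_llsc_wmb() emit a sync only when both CONFIG_WEAK_REORDERING_BEYOND_LLSC and CONFIG_SMP are set, and collapse to pure compiler barriers otherwise. The rest of the patch places them around LL/SC sequences; a hedged sketch modelled on the kernel's atomic LL/SC loops (the function name is invented and the R10000_LLSC_WAR branch-likely variant is omitted):

static inline void my_llsc_inc(volatile int *p)
{
        int tmp;

        __asm__ __volatile__(
        "       .set    mips3                           \n"
        "1:     ll      %0, %1          # load linked   \n"
        "       addiu   %0, %0, 1                       \n"
        "       sc      %0, %1          # store cond.   \n"
        "       beqz    %0, 1b                          \n"
        "       .set    mips0                           \n"
        : "=&r" (tmp), "=m" (*p)
        : "m" (*p));

        smp_llsc_mb();  /* orders the SC against later accesses where needed */
}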
/*
 * clear_bit() doesn't provide any barrier for the compiler or the CPU.
*/
-#define smp_mb__before_clear_bit() smp_mb()
-#define smp_mb__after_clear_bit() smp_mb()
+#define smp_mb__before_clear_bit() smp_llsc_mb()
+#define smp_mb__after_clear_bit() smp_llsc_mb()
/*
* set_bit - Atomically set a bit in memory
raw_local_irq_restore(flags);
}
- smp_mb();
+ smp_llsc_mb();
return res != 0;
}
raw_local_irq_restore(flags);
}
- smp_mb();
+ smp_llsc_mb();
return res != 0;
}
raw_local_irq_restore(flags);
}
- smp_mb();
+ smp_llsc_mb();
return res != 0;
}
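clear_bit() and the other non-value-returning bitops are not barriers on their own, so code that uses a bit as a lock or completion flag has to add the ordering explicitly. A hedged sketch with invented names:

#define MY_BUSY_BIT     0               /* hypothetical bit number */

static unsigned long my_flags;          /* hypothetical flag word */

static void my_mark_done(void)
{
        /* Stores made while "busy" must be visible before other CPUs
         * can observe the bit as clear. */
        smp_mb__before_clear_bit();
        clear_bit(MY_BUSY_BIT, &my_flags);
}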
" .set mips3 \n" \
"2: sc $1, %2 \n" \
" beqzl $1, 1b \n" \
- __WEAK_ORDERING_MB \
+ __WEAK_LLSC_MB \
"3: \n" \
" .set pop \n" \
" .set mips0 \n" \
" .set mips3 \n" \
"2: sc $1, %2 \n" \
" beqz $1, 1b \n" \
- __WEAK_ORDERING_MB \
+ __WEAK_LLSC_MB \
"3: \n" \
" .set pop \n" \
" .set mips0 \n" \
" .set mips3 \n"
"2: sc $1, %1 \n"
" beqzl $1, 1b \n"
- __WEAK_ORDERING_MB
+ __WEAK_LLSC_MB
"3: \n"
" .set pop \n"
" .section .fixup,\"ax\" \n"
" .set mips3 \n"
"2: sc $1, %1 \n"
" beqz $1, 1b \n"
- __WEAK_ORDERING_MB
+ __WEAK_LLSC_MB
"3: \n"
" .set pop \n"
" .section .fixup,\"ax\" \n"
: "memory");
}
- smp_mb();
+ smp_llsc_mb();
}
static inline void __raw_spin_unlock(raw_spinlock_t *lock)
: "memory");
}
- smp_mb();
+ smp_llsc_mb();
return res == 0;
}
: "memory");
}
- smp_mb();
+ smp_llsc_mb();
}
/* Note the use of sub, not subu which will make the kernel die with an
{
unsigned int tmp;
- smp_mb();
+ smp_llsc_mb();
if (R10000_LLSC_WAR) {
__asm__ __volatile__(
: "memory");
}
- smp_mb();
+ smp_llsc_mb();
}
static inline void __raw_write_unlock(raw_rwlock_t *rw)
" .set reorder \n"
" beqzl %1, 1b \n"
" nop \n"
- __WEAK_ORDERING_MB
+ __WEAK_LLSC_MB
" li %2, 1 \n"
"2: \n"
: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
" beqz %1, 1b \n"
" nop \n"
" .set reorder \n"
- __WEAK_ORDERING_MB
+ __WEAK_LLSC_MB
" li %2, 1 \n"
"2: \n"
: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
" sc %1, %0 \n"
" beqzl %1, 1b \n"
" nop \n"
- __WEAK_ORDERING_MB
+ __WEAK_LLSC_MB
" li %2, 1 \n"
" .set reorder \n"
"2: \n"
" beqz %1, 3f \n"
" li %2, 1 \n"
"2: \n"
- __WEAK_ORDERING_MB
+ __WEAK_LLSC_MB
" .subsection 2 \n"
"3: b 1b \n"
" li %2, 0 \n"
raw_local_irq_restore(flags); /* implies memory barrier */
}
- smp_mb();
+ smp_llsc_mb();
return retval;
}
raw_local_irq_restore(flags); /* implies memory barrier */
}
- smp_mb();
+ smp_llsc_mb();
return retval;
}
raw_local_irq_restore(flags); /* implies memory barrier */
}
- smp_mb();
+ smp_llsc_mb();
return retval;
}
raw_local_irq_restore(flags); /* implies memory barrier */
}
- smp_mb();
+ smp_llsc_mb();
return retval;
}
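The xchg()/cmpxchg() helpers these final hunks come from follow the same pattern: a trailing smp_llsc_mb() covers the LL/SC paths, and the interrupts-off fallback relies on raw_local_irq_restore() implying a barrier, as its comment notes. A hedged usage sketch with an invented owner word:

static unsigned long my_owner;          /* hypothetical; 0 means free */

static int my_try_claim(unsigned long me)
{
        /* cmpxchg() returns the old value; the trailing smp_llsc_mb() in its
         * implementation orders the update against later accesses. */
        return cmpxchg(&my_owner, 0UL, me) == 0UL;
}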