From 5bb29275df7a7aab8b6c29686109cc5cb1015850 Mon Sep 17 00:00:00 2001
From: Paul Burton
Date: Tue, 1 Oct 2019 21:53:35 +0000
Subject: [PATCH] MIPS: bitops: Emit Loongson3 sync workarounds within asm

Generate the sync instructions required to work around Loongson3 LL/SC
errata within the inline asm blocks themselves. This feels a little
safer than doing it from C, where strictly speaking the compiler would
be well within its rights to insert a memory access between the
separate asm statements we previously had, which contained the sync &
ll instructions respectively.

Signed-off-by: Paul Burton
Cc: linux-mips@vger.kernel.org
Cc: Huacai Chen
Cc: Jiaxun Yang
Cc: linux-kernel@vger.kernel.org
---
 arch/mips/include/asm/bitops.h | 11 ++---------
 1 file changed, 2 insertions(+), 9 deletions(-)

diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index d39fca2def60..c08b6d225f10 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -31,6 +31,7 @@
 	asm volatile(					\
 	"	.set	push				\n"	\
 	"	.set	" MIPS_ISA_LEVEL "		\n"	\
+	"	" __SYNC(full, loongson3_war) "		\n"	\
 	"1:	" __LL "%0, %1				\n"	\
 	"	" insn "				\n"	\
 	"	" __SC "%0, %1				\n"	\
@@ -47,6 +48,7 @@
 	asm volatile(					\
 	"	.set	push				\n"	\
 	"	.set	" MIPS_ISA_LEVEL "		\n"	\
+	"	" __SYNC(full, loongson3_war) "		\n"	\
 	"1:	" __LL ll_dst ", %2			\n"	\
 	"	" insn "				\n"	\
 	"	" __SC "%1, %2				\n"	\
@@ -96,12 +98,10 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 	}
 
 	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit) && (bit >= 16)) {
-		loongson_llsc_mb();
 		__bit_op(*m, __INS "%0, %3, %2, 1", "i"(bit), "r"(~0));
 		return;
 	}
 
-	loongson_llsc_mb();
 	__bit_op(*m, "or\t%0, %2", "ir"(BIT(bit)));
 }
 
@@ -126,12 +126,10 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 	}
 
 	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit)) {
-		loongson_llsc_mb();
 		__bit_op(*m, __INS "%0, $0, %2, 1", "i"(bit));
 		return;
 	}
 
-	loongson_llsc_mb();
 	__bit_op(*m, "and\t%0, %2", "ir"(~BIT(bit)));
 }
 
@@ -168,7 +166,6 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 		return;
 	}
 
-	loongson_llsc_mb();
 	__bit_op(*m, "xor\t%0, %2", "ir"(BIT(bit)));
 }
 
@@ -190,7 +187,6 @@ static inline int test_and_set_bit_lock(unsigned long nr,
 	if (!kernel_uses_llsc) {
 		res = __mips_test_and_set_bit_lock(nr, addr);
 	} else {
-		loongson_llsc_mb();
 		orig = __test_bit_op(*m, "%0",
 				     "or\t%1, %0, %3",
 				     "ir"(BIT(bit)));
@@ -237,13 +233,11 @@ static inline int test_and_clear_bit(unsigned long nr,
 	if (!kernel_uses_llsc) {
 		res = __mips_test_and_clear_bit(nr, addr);
 	} else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) {
-		loongson_llsc_mb();
 		res = __test_bit_op(*m, "%1",
 				    __EXT "%0, %1, %3, 1;"
 				    __INS "%1, $0, %3, 1",
 				    "i"(bit));
 	} else {
-		loongson_llsc_mb();
 		orig = __test_bit_op(*m, "%0",
 				     "or\t%1, %0, %3;"
 				     "xor\t%1, %1, %3",
@@ -276,7 +270,6 @@ static inline int test_and_change_bit(unsigned long nr,
 	if (!kernel_uses_llsc) {
 		res = __mips_test_and_change_bit(nr, addr);
 	} else {
-		loongson_llsc_mb();
 		orig = __test_bit_op(*m, "%0",
 				     "xor\t%1, %0, %3",
 				     "ir"(BIT(bit)));
-- 
2.30.2
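
For illustration, a minimal sketch of the hazard this patch closes. It is
not taken from the kernel: the function names are hypothetical, the plain
"+m" operand constraint stands in for the kernel's GCC_OFF_SMALL_ASM()
constraint, and a bare "sync" stands in for loongson_llsc_mb() /
__SYNC(full, loongson3_war). Assumes a 32-bit MIPS target so that ll/sc
match the unsigned int operand width.

/*
 * Pre-patch pattern: the Loongson3 barrier is emitted by its own asm
 * statement. The compiler treats the two statements as independent and
 * may legally schedule a memory access (e.g. a register spill) between
 * the sync and the ll, defeating the workaround.
 */
static void set_mask_split(volatile unsigned int *p, unsigned int mask)
{
	unsigned int tmp;

	__asm__ __volatile__("sync" : : : "memory");	/* was loongson_llsc_mb() */
	__asm__ __volatile__(
	"1:	ll	%0, %1		\n"	/* load-linked */
	"	or	%0, %0, %2	\n"	/* set the mask bits */
	"	sc	%0, %1		\n"	/* store-conditional */
	"	beqz	%0, 1b		\n"	/* retry if sc failed */
	: "=&r"(tmp), "+m"(*p)
	: "r"(mask)
	: "memory");
}

/*
 * Post-patch pattern: the sync is part of the same asm block as the
 * LL/SC loop, so nothing can be scheduled between it and the ll.
 */
static void set_mask_fused(volatile unsigned int *p, unsigned int mask)
{
	unsigned int tmp;

	__asm__ __volatile__(
	"	sync			\n"	/* __SYNC(full, loongson3_war) */
	"1:	ll	%0, %1		\n"
	"	or	%0, %0, %2	\n"
	"	sc	%0, %1		\n"
	"	beqz	%0, 1b		\n"
	: "=&r"(tmp), "+m"(*p)
	: "r"(mask)
	: "memory");
}

The two variants differ only in where the sync lives: emitted as a separate
statement it is merely ordered before the LL/SC block, while emitted inside
the block it is guaranteed to immediately precede the ll, which is what the
Loongson3 workaround requires.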