#define ATOMIC_OP_ADD_RETURN(name, mb, cl...) \
static inline int __lse_atomic_add_return##name(int i, atomic_t *v) \
{ \
+ u32 tmp; \
+ \
asm volatile( \
- " ldadd" #mb " %w[i], w30, %[v]\n" \
- " add %w[i], %w[i], w30" \
- : [i] "+r" (i), [v] "+Q" (v->counter) \
+ " ldadd" #mb " %w[i], %w[tmp], %[v]\n" \
+ " add %w[i], %w[i], %w[tmp]" \
+ : [i] "+r" (i), [v] "+Q" (v->counter), [tmp] "=&r" (tmp) \
: "r" (v) \
- : "x30", ##cl); \
+ : cl); \
\
return i; \
}
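
These macros are instantiated once per memory-ordering variant; the instantiation lines sit outside this hunk. For reference, the pattern in mainline atomic_lse.h at the time looks like the sketch below: the bare LDADD is relaxed, and the a/l/al suffixes give the acquire, release, and fully ordered forms (which also clobber "memory"):

ATOMIC_OP_ADD_RETURN(_relaxed,   )
ATOMIC_OP_ADD_RETURN(_acquire,  a, "memory")
ATOMIC_OP_ADD_RETURN(_release,  l, "memory")
ATOMIC_OP_ADD_RETURN(        , al, "memory")

With an empty cl, the variadic clobber list expands to nothing and leaves a legal empty clobber section, which is why the new ": cl);" form no longer needs the GNU ", ##cl" comma-swallowing extension that the removed ": "x30", ##cl);" relied on.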
#define ATOMIC_OP_SUB_RETURN(name, mb, cl...) \
static inline int __lse_atomic_sub_return##name(int i, atomic_t *v) \
{ \
+ u32 tmp; \
+ \
asm volatile( \
" neg %w[i], %w[i]\n" \
- " ldadd" #mb " %w[i], w30, %[v]\n" \
- " add %w[i], %w[i], w30" \
- : [i] "+&r" (i), [v] "+Q" (v->counter) \
+ " ldadd" #mb " %w[i], %w[tmp], %[v]\n" \
+ " add %w[i], %w[i], %w[tmp]" \
+ : [i] "+&r" (i), [v] "+Q" (v->counter), [tmp] "=&r" (tmp) \
: "r" (v) \
- : "x30", ##cl); \
+ : cl); \
\
return i; \
}
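
Unlike add_return, [i] keeps the "+&r" earlyclobber here because the leading NEG writes %w[i] before the asm has finished reading its input operands, so the register allocator must not overlap it with an input. Semantically the sequence is a negate-then-fetch-add. A minimal userspace model using GCC's __atomic builtins (illustrative names, not the kernel API):

/* Illustrative model of sub_return: the fetch-add returns the old
 * value, and the new value is reconstructed as old + (-i), just as
 * the trailing ADD does in the asm above. */
static inline int model_sub_return(int i, int *counter)
{
	return __atomic_fetch_add(counter, -i, __ATOMIC_SEQ_CST) - i;
}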
#define ATOMIC64_OP_ADD_RETURN(name, mb, cl...) \
static inline long __lse_atomic64_add_return##name(s64 i, atomic64_t *v) \
{ \
+ unsigned long tmp; \
+ \
asm volatile( \
- " ldadd" #mb " %[i], x30, %[v]\n" \
- " add %[i], %[i], x30" \
- : [i] "+r" (i), [v] "+Q" (v->counter) \
+ " ldadd" #mb " %[i], %x[tmp], %[v]\n" \
+ " add %[i], %[i], %x[tmp]" \
+ : [i] "+r" (i), [v] "+Q" (v->counter), [tmp] "=&r" (tmp) \
: "r" (v) \
- : "x30", ##cl); \
+ : cl); \
\
return i; \
}
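
Expanding the 64-bit macro by hand may make the operand changes easier to see. A hypothetical rendering of the fully ordered variant, assuming instantiation with an empty name, mb=al, and cl="memory" (as in mainline):

static inline long lse_atomic64_add_return_sketch(s64 i, atomic64_t *v)
{
	unsigned long tmp;

	asm volatile(
	"	ldaddal	%[i], %x[tmp], %[v]\n"
	"	add	%[i], %[i], %x[tmp]"
	: [i] "+r" (i), [v] "+Q" (v->counter), [tmp] "=&r" (tmp)
	: "r" (v)
	: "memory");

	return i;
}

LDADDAL atomically adds %[i] to the counter and returns the old value in %x[tmp]; the final ADD turns that back into the new value for the caller.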
#define ATOMIC64_OP_SUB_RETURN(name, mb, cl...) \
static inline long __lse_atomic64_sub_return##name(s64 i, atomic64_t *v) \
{ \
+ unsigned long tmp; \
+ \
asm volatile( \
" neg %[i], %[i]\n" \
- " ldadd" #mb " %[i], x30, %[v]\n" \
- " add %[i], %[i], x30" \
- : [i] "+&r" (i), [v] "+Q" (v->counter) \
+ " ldadd" #mb " %[i], %x[tmp], %[v]\n" \
+ " add %[i], %[i], %x[tmp]" \
+ : [i] "+&r" (i), [v] "+Q" (v->counter), [tmp] "=&r" (tmp) \
: "r" (v) \
- : "x30", ##cl); \
+ : cl); \
\
return i; \
}
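
The rewrite leans on GCC's AArch64 operand modifiers: %w[tmp] prints the 32-bit w view and %x[tmp] the 64-bit x view of whichever register the allocator picks for tmp, which is exactly what lets a compiler-chosen temporary replace the hard-coded w30/x30. A tiny demo of the %w modifier (illustrative, not from the patch):

/* Illustrative only: %w makes GCC print the w-register name, so this
 * emits "mov wN, wN", which zero-extends the low 32 bits into xN. */
static inline unsigned long wreg_demo(unsigned long val)
{
	asm("mov %w[v], %w[v]" : [v] "+r" (val));
	return val;
}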
static inline s64 __lse_atomic64_dec_if_positive(atomic64_t *v)
{
+ unsigned long tmp;
+
asm volatile(
- "1: ldr x30, %[v]\n"
- " subs %[ret], x30, #1\n"
+ "1: ldr %x[tmp], %[v]\n"
+ " subs %[ret], %x[tmp], #1\n"
" b.lt 2f\n"
- " casal x30, %[ret], %[v]\n"
- " sub x30, x30, #1\n"
- " sub x30, x30, %[ret]\n"
- " cbnz x30, 1b\n"
+ " casal %x[tmp], %[ret], %[v]\n"
+ " sub %x[tmp], %x[tmp], #1\n"
+ " sub %x[tmp], %x[tmp], %[ret]\n"
+ " cbnz %x[tmp], 1b\n"
"2:"
- : [ret] "+&r" (v), [v] "+Q" (v->counter)
+ : [ret] "+&r" (v), [v] "+Q" (v->counter), [tmp] "=&r" (tmp)
:
- : "x30", "cc", "memory");
+ : "cc", "memory");
return (long)v;
}
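
For reference, the CASAL loop implements the dec_if_positive contract: decrement only if the result stays non-negative, and return old - 1 whether or not the store happened, so callers test the return value for < 0. A hedged userspace model using the __atomic builtins (illustrative, not the kernel's generic fallback):

static inline long long model_dec_if_positive(long long *counter)
{
	long long old = __atomic_load_n(counter, __ATOMIC_RELAXED);

	/* Retry the CAS until it succeeds or the decrement would go
	 * negative; on failure, 'old' is refreshed with the observed
	 * value, mirroring the CBNZ retry above. */
	while (old - 1 >= 0 &&
	       !__atomic_compare_exchange_n(counter, &old, old - 1, 0,
					    __ATOMIC_SEQ_CST,
					    __ATOMIC_RELAXED))
		;
	return old - 1;	/* may be negative: the "don't store" case */
}

Note also the pre-existing trick in the output list: [ret] reuses the register holding the v pointer itself ("+&r" (v)), which is why the function returns (long)v rather than a separate variable.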
register unsigned long x0 asm ("x0") = (unsigned long)ptr; \
register u##sz x1 asm ("x1") = old; \
register u##sz x2 asm ("x2") = new; \
+ unsigned long tmp; \
\
asm volatile( \
- " mov " #w "30, %" #w "[old]\n" \
- " cas" #mb #sfx "\t" #w "30, %" #w "[new], %[v]\n" \
- " mov %" #w "[ret], " #w "30" \
- : [ret] "+r" (x0), [v] "+Q" (*(unsigned long *)ptr) \
+ " mov %" #w "[tmp], %" #w "[old]\n" \
+ " cas" #mb #sfx "\t%" #w "[tmp], %" #w "[new], %[v]\n" \
+ " mov %" #w "[ret], %" #w "[tmp]" \
+ : [ret] "+r" (x0), [v] "+Q" (*(unsigned long *)ptr), \
+ [tmp] "=&r" (tmp) \
: [old] "r" (x1), [new] "r" (x2) \
- : "x30", ##cl); \
+ : cl); \
\
return x0; \
}
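
The enclosing __CMPXCHG_CASE macro header and function signature are outside this hunk. CAS writes the value it observed at %[v] back into its first register, so the result is staged in the earlyclobbered [tmp] and only then moved into [ret] (x0); writing it straight into [ret] would risk clobbering a register the compiler may also have chosen to address the %[v] memory operand. A hedged model of the returned-value contract (illustrative userspace code):

/* Illustrative model: cmpxchg returns the value found at ptr; the
 * store happens only if that value equaled 'old'. On failure the
 * builtin rewrites 'old' with the observed value, so returning it
 * matches the contract either way. */
static inline unsigned int model_cmpxchg32(unsigned int *ptr,
					   unsigned int old,
					   unsigned int new)
{
	__atomic_compare_exchange_n(ptr, &old, new, 0,
				    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
	return old;
}

A caller knows the exchange succeeded exactly when the return value equals the 'old' it passed in.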