#include <asm/alternative.h>
-#define ADDR (*(volatile long *) addr)
+#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 1)
+/* Technically wrong, but this avoids compilation errors on some gcc
+ versions. */
+#define ADDR "=m" (*(volatile long *) addr)
+#else
+#define ADDR "+m" (*(volatile long *) addr)
+#endif
/**
* set_bit - Atomically set a bit in memory
{
__asm__ __volatile__( LOCK_PREFIX
"btsl %1,%0"
- :"+m" (ADDR)
+ :ADDR
:"dIr" (nr) : "memory");
}
{
__asm__ volatile(
"btsl %1,%0"
- :"+m" (ADDR)
+ :ADDR
:"dIr" (nr) : "memory");
}
{
__asm__ __volatile__( LOCK_PREFIX
"btrl %1,%0"
- :"+m" (ADDR)
+ :ADDR
:"dIr" (nr));
}
{
__asm__ __volatile__(
"btrl %1,%0"
- :"+m" (ADDR)
+ :ADDR
:"dIr" (nr));
}
{
__asm__ __volatile__(
"btcl %1,%0"
- :"+m" (ADDR)
+ :ADDR
:"dIr" (nr));
}
{
__asm__ __volatile__( LOCK_PREFIX
"btcl %1,%0"
- :"+m" (ADDR)
+ :ADDR
:"dIr" (nr));
}
__asm__ __volatile__( LOCK_PREFIX
"btsl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"+m" (ADDR)
+ :"=r" (oldbit),ADDR
:"dIr" (nr) : "memory");
return oldbit;
}
__asm__(
"btsl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"+m" (ADDR)
+ :"=r" (oldbit),ADDR
:"dIr" (nr));
return oldbit;
}
__asm__ __volatile__( LOCK_PREFIX
"btrl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"+m" (ADDR)
+ :"=r" (oldbit),ADDR
:"dIr" (nr) : "memory");
return oldbit;
}
__asm__(
"btrl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"+m" (ADDR)
+ :"=r" (oldbit),ADDR
:"dIr" (nr));
return oldbit;
}
__asm__ __volatile__(
"btcl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"+m" (ADDR)
+ :"=r" (oldbit),ADDR
:"dIr" (nr) : "memory");
return oldbit;
}
__asm__ __volatile__( LOCK_PREFIX
"btcl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"+m" (ADDR)
+ :"=r" (oldbit),ADDR
:"dIr" (nr) : "memory");
return oldbit;
}
__asm__ __volatile__(
"btl %2,%1\n\tsbbl %0,%0"
:"=r" (oldbit)
- :"m" (ADDR),"dIr" (nr));
+ :"m" (*(volatile long *)addr),"dIr" (nr));
return oldbit;
}
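
For reference (this is not part of the patch), below is a minimal standalone sketch of how the new constraint-carrying ADDR macro is meant to expand inside set_bit().  LOCK_PREFIX is reduced to a plain "lock ; " string here (in the kernel it comes from <asm/alternative.h>), and the set_bit() signature plus the main() driver are assumptions for illustration only.  The "=m" branch is what the in-code comment calls "technically wrong": it declares the operand output-only even though btsl both reads and writes it, but per that comment "+m" caused compile errors on some pre-4.1 gcc versions, so the patch tolerates the weaker constraint there.  Note also the last hunk above: the bit-test case only reads memory, so it keeps a plain "m" input constraint and spells out the lvalue instead of using ADDR, which now bundles an output constraint.

/* Standalone sketch, not from the patch: LOCK_PREFIX is simplified and
 * the set_bit() signature and main() are assumed for illustration. */
#define LOCK_PREFIX "lock ; "	/* kernel gets this from <asm/alternative.h> */

#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 1)
/* Output-only constraint: technically wrong for a read-modify-write
 * instruction, but accepted by older gcc. */
#define ADDR "=m" (*(volatile long *) addr)
#else
/* gcc >= 4.1: "+m" correctly marks the operand as both read and written. */
#define ADDR "+m" (*(volatile long *) addr)
#endif

static inline void set_bit(int nr, volatile void *addr)
{
	__asm__ __volatile__( LOCK_PREFIX
		"btsl %1,%0"
		:ADDR			/* expands to constraint + lvalue */
		:"dIr" (nr) : "memory");
}

int main(void)
{
	volatile long word = 0;
	set_bit(3, &word);	/* sets bit 3, so word becomes 0x8 */
	return word == 0x8 ? 0 : 1;
}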