void *newval)
{
void* prev;
-#ifndef __x86_64__
- __asm__ __volatile__("lock ; cmpxchgl %1,%2"
+#if (defined(__x86_64__) && !defined(__ILP32__))
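+    /* LP64 x86-64: pointers are 8 bytes, so the compare-and-swap must be
+     * the quadword (cmpxchgq) form. */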
+ __asm__ __volatile__("lock ; cmpxchgq %1,%2"
: "=a" (prev)
: "q" (newval), "m" (a->value), "0" (oldval)
: "memory");
-#else
- __asm__ __volatile__("lock ; cmpxchgq %1,%2"
+#elif (defined(__x86_64__) && defined(__ILP32__)) || defined(__i386__)
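+    /* x32 (the ILP32 ABI on x86-64) and plain i386: pointers are 4 bytes,
+     * so the longword (cmpxchgl) form matches the pointer width. */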
+ __asm__ __volatile__("lock ; cmpxchgl %1,%2"
: "=a" (prev)
: "q" (newval), "m" (a->value), "0" (oldval)
: "memory");
+#else
+# error Cannot detect whether this is a 32-bit or 64-bit x86 build.
#endif
return prev == oldval;
}
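+/* Typical use of the CAS above is a retry loop that re-reads, recomputes,
+ * and retries until the swap succeeds. A minimal sketch of a lock-free
+ * push, with illustrative names (`head` is a tMPI_Atomic_ptr_t*, `node`
+ * has a `next` field; tMPI_Atomic_ptr_get() is assumed to read the
+ * current value of head->value):
+ *
+ *     void *old;
+ *     do
+ *     {
+ *         old        = tMPI_Atomic_ptr_get(head);
+ *         node->next = old;
+ *     }
+ *     while (!tMPI_Atomic_ptr_cas(head, old, node));
+ */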
static inline void *tMPI_Atomic_ptr_swap(tMPI_Atomic_ptr_t *a, void *b)
{
void *volatile *ret = (void* volatile*)b;
-#ifndef __LP64__
- __asm__ __volatile__("\txchgl %0, %1;"
+#if (defined(__x86_64__) && !defined(__ILP32__))
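+    /* LP64 x86-64: exchange the full 8-byte pointer. xchg with a memory
+     * operand is implicitly locked on x86, so no lock prefix is needed. */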
+ __asm__ __volatile__("\txchgq %0, %1;"
: "+r" (ret), "+m" (a->value)
:
: "memory");
-
-#else
- __asm__ __volatile__("\txchgq %0, %1;"
+#elif (defined(__x86_64__) && defined(__ILP32__)) || defined(__i386__)
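+    /* x32 and i386: 4-byte pointers, so the longword (xchgl) exchange is
+     * the right width. */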
+ __asm__ __volatile__("\txchgl %0, %1;"
: "+r" (ret), "+m" (a->value)
:
: "memory");
+#else
+# error Cannot detect whether this is a 32-bit or 64-bit x86 build.
#endif
return (void*)ret;
}
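+/* Unlike the CAS above, the unconditional swap always succeeds, so no
+ * retry loop is needed; with illustrative names,
+ *
+ *     void *old_buf = tMPI_Atomic_ptr_swap(&shared_buf, new_buf);
+ *
+ * atomically publishes new_buf and returns the pointer it replaced. */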