Diffstat (limited to 'Utilities/cmlibuv/src/unix/atomic-ops.h')
-rw-r--r--  Utilities/cmlibuv/src/unix/atomic-ops.h  |  15
1 file changed, 8 insertions(+), 7 deletions(-)
diff --git a/Utilities/cmlibuv/src/unix/atomic-ops.h b/Utilities/cmlibuv/src/unix/atomic-ops.h
index 63d8268..2b58162 100644
--- a/Utilities/cmlibuv/src/unix/atomic-ops.h
+++ b/Utilities/cmlibuv/src/unix/atomic-ops.h
@@ -43,12 +43,11 @@ UV_UNUSED(static int cmpxchgi(int* ptr, int oldval, int newval)) {
   __compare_and_swap((volatile int*)ptr, &oldval, newval);
   return oldval;
 #elif defined(__MVS__)
-  unsigned int op4;
-  if (__plo_CSST(ptr, (unsigned int*) &oldval, newval,
-                 (unsigned int*) ptr, *ptr, &op4))
-    return oldval;
-  else
-    return op4;
+  /* Use hand-rolled assembly because codegen from builtin __plo_CSST results in
+   * a runtime bug.
+   */
+  __asm(" cs %0,%2,%1 \n " : "+r"(oldval), "+m"(*ptr) : "r"(newval) :);
+  return oldval;
 #elif defined(__SUNPRO_C) || defined(__SUNPRO_CC)
   return atomic_cas_uint((uint_t *)ptr, (uint_t)oldval, (uint_t)newval);
 #else
@@ -61,7 +60,9 @@ UV_UNUSED(static void cpu_relax(void)) {
   __asm__ __volatile__ ("rep; nop" ::: "memory");  /* a.k.a. PAUSE */
 #elif (defined(__arm__) && __ARM_ARCH >= 7) || defined(__aarch64__)
   __asm__ __volatile__ ("yield" ::: "memory");
-#elif defined(__powerpc64__) || defined(__ppc64__) || defined(__PPC64__)
+#elif (defined(__ppc__) || defined(__ppc64__)) && defined(__APPLE__)
+  __asm volatile ("" : : : "memory");
+#elif !defined(__APPLE__) && (defined(__powerpc64__) || defined(__ppc64__) || defined(__PPC64__))
   __asm__ __volatile__ ("or 1,1,1; or 2,2,2" ::: "memory");
 #endif
 }
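
For context (not part of the commit): cmpxchgi() is the header's portable atomic compare-and-swap helper and returns the value *ptr held before the attempt, so a caller detects success by comparing the return value with the expected old value; cpu_relax() is the busy-wait hint used while spinning. The minimal sketch below shows how the two are typically combined into a spin lock, assuming atomic-ops.h is included; the lock_t, spin_lock, and spin_unlock names are hypothetical and used only for illustration.

/* Illustrative sketch only; assumes atomic-ops.h is included.
 * lock_t, spin_lock, and spin_unlock are hypothetical names. */
typedef struct { int locked; } lock_t;          /* 0 = free, 1 = held */

static void spin_lock(lock_t* l) {
  /* cmpxchgi() returns the previous value of l->locked; the swap succeeded
   * only when that previous value equals the expected 0. */
  while (cmpxchgi(&l->locked, 0, 1) != 0)
    cpu_relax();                                /* spin-wait hint to the CPU */
}

static void spin_unlock(lock_t* l) {
  cmpxchgi(&l->locked, 1, 0);                   /* release the lock */
}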