author | Noah Misch <noah@leadboat.com> | 2019-09-13 19:34:06 -0700
committer | Noah Misch <noah@leadboat.com> | 2019-09-13 19:34:19 -0700
commit | 75941f257aacbc68b08e1cbf2e55110d00a06fff (patch)
tree | c4b9182d7bd6cf01f9a76e8d14666e4141f4ba21 /src
parent | 4737d3a75b725b6babf2d402c767751a105b67e9 (diff)
download | postgresql-75941f257aacbc68b08e1cbf2e55110d00a06fff.tar.gz, postgresql-75941f257aacbc68b08e1cbf2e55110d00a06fff.zip
Replace xlc __fetch_and_add() with inline asm.
PostgreSQL has been unusable when built with xlc 13 and newer, which are
incompatible with our use of __fetch_and_add(). Back-patch to 9.5,
which introduced pg_atomic_fetch_add_u32().
Reviewed by Tom Lane.
Discussion: https://postgr.es/m/20190831071157.GA3251746@rfd.leadboat.com
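The replacement implements fetch-and-add with a PowerPC load-reserve/store-conditional loop bracketed by sync/isync, as the diff below shows. Here is a minimal standalone sketch of that pattern (not part of the commit), assuming a PowerPC target and a GCC-style inline-asm compiler; it uses a local label `1:`/`1b` where the committed code uses xlc's `$-12` branch offset, and the function and variable names are illustrative only.

```c
#include <stdio.h>

/*
 * Hedged sketch of a sequentially consistent fetch-and-add on PowerPC,
 * mirroring the lwarx/stwcx. loop the commit installs for xlc builds.
 * Compiles only for PowerPC targets with GCC-style inline asm.
 */
static unsigned int
fetch_and_add_u32(volatile unsigned int *ptr, int add)
{
	unsigned int tmp;	/* new value written back */
	unsigned int old;	/* value observed before the add */

	__asm__ __volatile__(
		"	sync			\n"	/* full barrier before the update */
		"1:	lwarx	%1,0,%4		\n"	/* load word and reserve */
		"	add	%0,%1,%3	\n"	/* tmp = old + add */
		"	stwcx.	%0,0,%4		\n"	/* store conditionally */
		"	bne	1b		\n"	/* reservation lost: retry */
		"	isync			\n"	/* barrier after success */
		: "=&r"(tmp), "=&r"(old), "+m"(*ptr)
		: "r"(add), "r"(ptr)
		: "memory", "cc");

	return old;
}

int
main(void)
{
	volatile unsigned int counter = 40;

	printf("before: %u\n", fetch_and_add_u32(&counter, 2));	/* 40 */
	printf("after:  %u\n", counter);				/* 42 */
	return 0;
}
```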
Diffstat (limited to 'src')
-rw-r--r-- | src/include/port/atomics/generic-xlc.h | 39
1 file changed, 35 insertions, 4 deletions
diff --git a/src/include/port/atomics/generic-xlc.h b/src/include/port/atomics/generic-xlc.h
index 24bd7690675..5f7bd87b710 100644
--- a/src/include/port/atomics/generic-xlc.h
+++ b/src/include/port/atomics/generic-xlc.h
@@ -79,11 +79,27 @@ pg_atomic_compare_exchange_u32_impl(volatile pg_atomic_uint32 *ptr,
 static inline uint32
 pg_atomic_fetch_add_u32_impl(volatile pg_atomic_uint32 *ptr, int32 add_)
 {
+	uint32 _t;
+	uint32 res;
+
 	/*
-	 * __fetch_and_add() emits a leading "sync" and trailing "isync", thereby
-	 * providing sequential consistency.  This is undocumented.
+	 * xlc has a no-longer-documented __fetch_and_add() intrinsic.  In xlc
+	 * 12.01.0000.0000, it emits a leading "sync" and trailing "isync".  In
+	 * xlc 13.01.0003.0004, it emits neither.  Hence, using the intrinsic
+	 * would add redundant syncs on xlc 12.
 	 */
-	return __fetch_and_add((volatile int *)&ptr->value, add_);
+	__asm__ __volatile__(
+		"	sync			\n"
+		"	lwarx	%1,0,%4		\n"
+		"	add	%0,%1,%3	\n"
+		"	stwcx.	%0,0,%4		\n"
+		"	bne	$-12		\n"	/* branch to lwarx */
+		"	isync			\n"
+:		"=&r"(_t), "=&r"(res), "+m"(ptr->value)
+:		"r"(add_), "r"(&ptr->value)
+:		"memory", "cc");
+
+	return res;
 }
 
 #ifdef PG_HAVE_ATOMIC_U64_SUPPORT
@@ -109,7 +125,22 @@ pg_atomic_compare_exchange_u64_impl(volatile pg_atomic_uint64 *ptr,
 static inline uint64
 pg_atomic_fetch_add_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
 {
-	return __fetch_and_addlp((volatile long *)&ptr->value, add_);
+	uint64 _t;
+	uint64 res;
+
+	/* Like u32, but s/lwarx/ldarx/; s/stwcx/stdcx/ */
+	__asm__ __volatile__(
+		"	sync			\n"
+		"	ldarx	%1,0,%4		\n"
+		"	add	%0,%1,%3	\n"
+		"	stdcx.	%0,0,%4		\n"
+		"	bne	$-12		\n"	/* branch to ldarx */
+		"	isync			\n"
+:		"=&r"(_t), "=&r"(res), "+m"(ptr->value)
+:		"r"(add_), "r"(&ptr->value)
+:		"memory", "cc");
+
+	return res;
 }
 
 #endif	/* PG_HAVE_ATOMIC_U64_SUPPORT */
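Callers never invoke pg_atomic_fetch_add_u32_impl() directly; they reach it through the wrappers in src/include/port/atomics.h, which dispatch to generic-xlc.h on xlc builds. Below is a hedged usage sketch, assuming it is compiled inside the PostgreSQL backend; the counter and function names are illustrative only.

```c
#include "postgres.h"
#include "port/atomics.h"

/* Hypothetical counter; in real use this would live in shared memory. */
static pg_atomic_uint32 demo_counter;

void
demo_counter_bump(void)
{
	uint32		before;

	pg_atomic_init_u32(&demo_counter, 0);

	/* On xlc/POWER builds this now expands to the lwarx/stwcx. loop above. */
	before = pg_atomic_fetch_add_u32(&demo_counter, 1);

	elog(LOG, "counter was %u, is now %u",
		 before, pg_atomic_read_u32(&demo_counter));
}
```

pg_atomic_fetch_add_u32() returns the value the counter held before the addition, which is why the sketch logs both the return value and a fresh pg_atomic_read_u32().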