| field | value | date |
|---|---|---|
| author | Noah Misch <noah@leadboat.com> | 2019-09-13 19:34:06 -0700 |
| committer | Noah Misch <noah@leadboat.com> | 2019-09-13 19:34:06 -0700 |
| commit | dd50f1a43290bb3c67e24e808d4d8656e9532c9a (patch) | |
| tree | 049952454a8af2144db2e9dccdd92845d15437da | |
| parent | f380c5190134a4e928bf30a4ff9fec775cdcc692 (diff) | |
| download | postgresql-dd50f1a43290bb3c67e24e808d4d8656e9532c9a.tar.gz, postgresql-dd50f1a43290bb3c67e24e808d4d8656e9532c9a.zip | |
Replace xlc __fetch_and_add() with inline asm.
PostgreSQL has been unusable when built with xlc 13 and newer, which are
incompatible with our use of __fetch_and_add(). Back-patch to 9.5,
which introduced pg_atomic_fetch_add_u32().
Reviewed by Tom Lane.
Discussion: https://postgr.es/m/20190831071157.GA3251746@rfd.leadboat.com
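For orientation (an explanatory aside, not part of the commit): pg_atomic_fetch_add_u32() is expected to act as a full memory barrier and to return the value the variable held before the addition, which is the contract the replaced __fetch_and_add() call was relied on to provide. A minimal sketch of that contract in portable C11 atomics, assuming nothing about PostgreSQL's headers:

/* Illustrative sketch only, not PostgreSQL code: the full-barrier
 * fetch-and-add contract that the xlc implementation must satisfy. */
#include <stdatomic.h>
#include <stdint.h>

static inline uint32_t
fetch_add_u32_seq_cst(_Atomic uint32_t *ptr, int32_t add_)
{
    /* seq_cst ordering plays the role of the leading "sync" and trailing
     * "isync"; the return value is the counter *before* the addition. */
    return atomic_fetch_add_explicit(ptr, (uint32_t) add_, memory_order_seq_cst);
}

A caller that starts from 40 and adds 2 should see 40 returned and 42 stored afterwards; xlc 13's __fetch_and_add() still returns the old value, but no longer emits the fences, which is why the commit below open-codes them.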
-rw-r--r-- | src/include/port/atomics/generic-xlc.h | 39 |
1 file changed, 35 insertions, 4 deletions
diff --git a/src/include/port/atomics/generic-xlc.h b/src/include/port/atomics/generic-xlc.h
index 5ddccff5532..8b5c7329706 100644
--- a/src/include/port/atomics/generic-xlc.h
+++ b/src/include/port/atomics/generic-xlc.h
@@ -73,11 +73,27 @@ pg_atomic_compare_exchange_u32_impl(volatile pg_atomic_uint32 *ptr,
 static inline uint32
 pg_atomic_fetch_add_u32_impl(volatile pg_atomic_uint32 *ptr, int32 add_)
 {
+    uint32 _t;
+    uint32 res;
+
     /*
-     * __fetch_and_add() emits a leading "sync" and trailing "isync", thereby
-     * providing sequential consistency.  This is undocumented.
+     * xlc has a no-longer-documented __fetch_and_add() intrinsic.  In xlc
+     * 12.01.0000.0000, it emits a leading "sync" and trailing "isync".  In
+     * xlc 13.01.0003.0004, it emits neither.  Hence, using the intrinsic
+     * would add redundant syncs on xlc 12.
      */
-    return __fetch_and_add((volatile int *)&ptr->value, add_);
+    __asm__ __volatile__(
+        "    sync            \n"
+        "    lwarx   %1,0,%4 \n"
+        "    add     %0,%1,%3 \n"
+        "    stwcx.  %0,0,%4 \n"
+        "    bne     $-12    \n"        /* branch to lwarx */
+        "    isync           \n"
+:        "=&r"(_t), "=&r"(res), "+m"(ptr->value)
+:        "r"(add_), "r"(&ptr->value)
+:        "memory", "cc");
+
+    return res;
 }
 
 #ifdef PG_HAVE_ATOMIC_U64_SUPPORT
@@ -103,7 +119,22 @@ pg_atomic_compare_exchange_u64_impl(volatile pg_atomic_uint64 *ptr,
 static inline uint64
 pg_atomic_fetch_add_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
 {
-    return __fetch_and_addlp((volatile long *)&ptr->value, add_);
+    uint64 _t;
+    uint64 res;
+
+    /* Like u32, but s/lwarx/ldarx/; s/stwcx/stdcx/ */
+    __asm__ __volatile__(
+        "    sync            \n"
+        "    ldarx   %1,0,%4 \n"
+        "    add     %0,%1,%3 \n"
+        "    stdcx.  %0,0,%4 \n"
+        "    bne     $-12    \n"        /* branch to ldarx */
+        "    isync           \n"
+:        "=&r"(_t), "=&r"(res), "+m"(ptr->value)
+:        "r"(add_), "r"(&ptr->value)
+:        "memory", "cc");
+
+    return res;
 }
 
 #endif /* PG_HAVE_ATOMIC_U64_SUPPORT */
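To read the new implementation without knowing PowerPC assembly: sync / lwarx / add / stwcx. / bne / isync is a load-reserve, store-conditional retry loop bracketed by full barriers, with the pre-addition value returned in %1 (res). A rough standalone analogue (illustrative only, not PostgreSQL code), using a C11 compare-exchange loop in place of lwarx/stwcx.:

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

/* Retry until the conditional store succeeds, mirroring the "bne" that
 * branches back to lwarx; seq_cst stands in for the sync/isync fences. */
static uint32_t
fetch_add_u32_retry_loop(_Atomic uint32_t *ptr, int32_t add_)
{
    uint32_t old = atomic_load_explicit(ptr, memory_order_relaxed);

    while (!atomic_compare_exchange_weak_explicit(ptr, &old,
                                                  old + (uint32_t) add_,
                                                  memory_order_seq_cst,
                                                  memory_order_relaxed))
        ;                       /* "old" is refreshed on failure, like re-running lwarx */

    return old;                 /* value before the addition */
}

int
main(void)
{
    _Atomic uint32_t counter = 40;
    uint32_t before = fetch_add_u32_retry_loop(&counter, 2);

    printf("before=%u after=%u\n", before, (unsigned) atomic_load(&counter));
    return 0;                   /* prints before=40 after=42 */
}

The real asm needs no explicit compare: stwcx. simply fails and the bne loops back whenever the reservation taken by lwarx was lost to another processor's store.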