@@ -46,19 +46,23 @@ static inline bool
 pg_atomic_compare_exchange_u32_impl(volatile pg_atomic_uint32 *ptr,
 									uint32 *expected, uint32 newval)
 {
+	/*
+	 * XXX: __compare_and_swap is defined to take signed parameters, but that
+	 * shouldn't matter since we don't perform any arithmetic operations.
+	 */
+	bool		ret = __compare_and_swap((volatile int *) &ptr->value,
+										 (int *) expected, (int) newval);
+
 	/*
 	 * xlc's documentation tells us:
 	 * "If __compare_and_swap is used as a locking primitive, insert a call to
 	 * the __isync built-in function at the start of any critical sections."
+	 *
+	 * The critical section begins immediately after __compare_and_swap().
 	 */
 	__isync();
 
-	/*
-	 * XXX: __compare_and_swap is defined to take signed parameters, but that
-	 * shouldn't matter since we don't perform any arithmetic operations.
-	 */
-	return __compare_and_swap((volatile int *) &ptr->value,
-							  (int *) expected, (int) newval);
+	return ret;
 }
 
 #define PG_HAVE_ATOMIC_FETCH_ADD_U32
@@ -75,10 +79,12 @@ static inline bool
 pg_atomic_compare_exchange_u64_impl(volatile pg_atomic_uint64 *ptr,
 									uint64 *expected, uint64 newval)
 {
+	bool		ret = __compare_and_swaplp((volatile long *) &ptr->value,
+											(long *) expected, (long) newval);
+
 	__isync();
 
-	return __compare_and_swaplp((volatile long *) &ptr->value,
-								(long *) expected, (long) newval);;
+	return ret;
 }
 
 #define PG_HAVE_ATOMIC_FETCH_ADD_U64
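
For readers without access to xlc, the following is a rough C11 analogue (my own sketch, not part of the patch) of the ordering the new code establishes: a compare-and-swap used as a lock-acquire primitive must be ordered before the reads of the critical section, which is what running __isync() after (rather than before) __compare_and_swap() accomplishes. The lock, shared_data, try_lock, and read_locked names below are hypothetical and exist only for illustration.

/*
 * Sketch only: an acquire-ordered CAS in C11 corresponds, in spirit, to
 * __compare_and_swap() followed by __isync() -- reads inside the critical
 * section cannot be reordered ahead of the successful exchange.
 */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

static _Atomic uint32_t lock;	/* 0 = free, 1 = held */
static int	shared_data;

static bool
try_lock(void)
{
	uint32_t	expected = 0;

	return atomic_compare_exchange_strong_explicit(&lock, &expected, 1,
													memory_order_acquire,
													memory_order_relaxed);
}

static int
read_locked(void)
{
	int			val;

	while (!try_lock())
		;						/* spin until the lock is acquired */
	val = shared_data;			/* critical section: ordered after the CAS */
	atomic_store_explicit(&lock, 0, memory_order_release);
	return val;
}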