#ifndef INCLUDED_BSLS_ATOMICOPERATIONS_ALL_ALL_GCCINTRINSICS
#define INCLUDED_BSLS_ATOMICOPERATIONS_ALL_ALL_GCCINTRINSICS

#include <bsls_atomicoperations_default.h>
#include <bsls_platform.h>
#include <bsls_types.h>

#if defined(BSLS_PLATFORM_CMP_GNU) && BSLS_PLATFORM_CMP_VERSION >= 40700

namespace BloombergLP {
namespace bsls {

struct AtomicOperations_ALL_ALL_GCCIntrinsics;
typedef AtomicOperations_ALL_ALL_GCCIntrinsics AtomicOperations_Imp;

template <>
struct Atomic_TypeTraits<AtomicOperations_ALL_ALL_GCCIntrinsics>
{
    // Each wrapper 'struct' holds a single 'd_value' member and is aligned
    // on the size of the underlying fundamental type.

    struct __attribute__((__aligned__(sizeof(int)))) Int
    {
        int d_value;
    };

    struct __attribute__((__aligned__(sizeof(unsigned int)))) Uint
    {
        unsigned int d_value;
    };

    struct __attribute__((__aligned__(sizeof(Types::Int64)))) Int64
    {
        Types::Int64 d_value;
    };

    struct __attribute__((__aligned__(sizeof(Types::Uint64)))) Uint64
    {
        Types::Uint64 d_value;
    };

    struct __attribute__((__aligned__(sizeof(void *)))) Pointer
    {
        void *d_value;
    };
};

struct AtomicOperations_ALL_ALL_GCCIntrinsics
#ifdef BSLS_PLATFORM_CPU_64_BIT
    : AtomicOperations_Default64<AtomicOperations_ALL_ALL_GCCIntrinsics>
#else
    : AtomicOperations_Default32<AtomicOperations_ALL_ALL_GCCIntrinsics>
#endif
{
    typedef Atomic_TypeTraits<AtomicOperations_ALL_ALL_GCCIntrinsics>
                                                                   AtomicTypes;

    typedef char AtomicInt_SizeCheck[sizeof(int) == 4 ? 1 : -1];
        // compile-time assert that 'int' is 32 bits wide; the array size is
        // negative (and the typedef ill-formed) otherwise

        // *** atomic functions for int ***

    static void initInt(AtomicTypes::Int *atomicInt, int value);

    static int getInt(const AtomicTypes::Int *atomicInt);

    static int getIntAcquire(const AtomicTypes::Int *atomicInt);

    static int getIntRelaxed(const AtomicTypes::Int *atomicInt);

    static void setInt(AtomicTypes::Int *atomicInt, int value);

    static void setIntRelease(AtomicTypes::Int *atomicInt, int value);

    static void setIntRelaxed(AtomicTypes::Int *atomicInt, int value);

    static int swapInt(AtomicTypes::Int *atomicInt, int swapValue);

    static int swapIntAcqRel(AtomicTypes::Int *atomicInt, int swapValue);

    static int testAndSwapInt(AtomicTypes::Int *atomicInt,
                              int               compareValue,
                              int               swapValue);

    static int testAndSwapIntAcqRel(AtomicTypes::Int *atomicInt,
                                    int               compareValue,
                                    int               swapValue);

    static int addIntNv(AtomicTypes::Int *atomicInt, int value);

    static int addIntNvAcqRel(AtomicTypes::Int *atomicInt, int value);

    static int addIntNvRelaxed(AtomicTypes::Int *atomicInt, int value);

        // *** atomic functions for Int64 ***

    static void initInt64(AtomicTypes::Int64 *atomicInt, Types::Int64 value);

    static Types::Int64 getInt64(const AtomicTypes::Int64 *atomicInt);

    static Types::Int64 getInt64Acquire(const AtomicTypes::Int64 *atomicInt);

    static Types::Int64 getInt64Relaxed(const AtomicTypes::Int64 *atomicInt);

    static void setInt64(AtomicTypes::Int64 *atomicInt, Types::Int64 value);

    static void setInt64Release(AtomicTypes::Int64 *atomicInt,
                                Types::Int64        value);

    static void setInt64Relaxed(AtomicTypes::Int64 *atomicInt,
                                Types::Int64        value);

    static Types::Int64 swapInt64(AtomicTypes::Int64 *atomicInt,
                                  Types::Int64        swapValue);

    static Types::Int64 swapInt64AcqRel(AtomicTypes::Int64 *atomicInt,
                                        Types::Int64        swapValue);

    static Types::Int64 testAndSwapInt64(AtomicTypes::Int64 *atomicInt,
                                         Types::Int64        compareValue,
                                         Types::Int64        swapValue);

    static Types::Int64 testAndSwapInt64AcqRel(
                                             AtomicTypes::Int64 *atomicInt,
                                             Types::Int64        compareValue,
                                             Types::Int64        swapValue);

    static Types::Int64 addInt64Nv(AtomicTypes::Int64 *atomicInt,
                                   Types::Int64        value);

    static Types::Int64 addInt64NvAcqRel(AtomicTypes::Int64 *atomicInt,
                                         Types::Int64        value);

    static Types::Int64 addInt64NvRelaxed(AtomicTypes::Int64 *atomicInt,
                                          Types::Int64        value);
};
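
// A minimal usage sketch of the 'int' operations (illustrative only; the
// 'BloombergLP::bsls' namespaces in which BDE components are defined are
// assumed here):
//..
//  typedef BloombergLP::bsls::AtomicOperations_Imp Ops;
//
//  Ops::AtomicTypes::Int counter;
//  Ops::initInt(&counter, 0);                   // relaxed store of 0
//  int newValue = Ops::addIntNv(&counter, 5);   // newValue == 5
//  int oldValue = Ops::swapInt(&counter, 7);    // oldValue == 5
//  int current  = Ops::getInt(&counter);        // current  == 7
//..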

// ============================================================================
//                        INLINE FUNCTION DEFINITIONS
// ============================================================================

inline
void AtomicOperations_ALL_ALL_GCCIntrinsics::
    initInt(AtomicTypes::Int *atomicInt, int value)
{
    __atomic_store_n(&atomicInt->d_value, value, __ATOMIC_RELAXED);
}

inline
int AtomicOperations_ALL_ALL_GCCIntrinsics::
    getInt(const AtomicTypes::Int *atomicInt)
{
    return __atomic_load_n(&atomicInt->d_value, __ATOMIC_SEQ_CST);
}

inline
int AtomicOperations_ALL_ALL_GCCIntrinsics::
    getIntAcquire(const AtomicTypes::Int *atomicInt)
{
    return __atomic_load_n(&atomicInt->d_value, __ATOMIC_ACQUIRE);
}

inline
int AtomicOperations_ALL_ALL_GCCIntrinsics::
    getIntRelaxed(const AtomicTypes::Int *atomicInt)
{
    return __atomic_load_n(&atomicInt->d_value, __ATOMIC_RELAXED);
}

inline
void AtomicOperations_ALL_ALL_GCCIntrinsics::
    setInt(AtomicTypes::Int *atomicInt, int value)
{
    __atomic_store_n(&atomicInt->d_value, value, __ATOMIC_SEQ_CST);
}

inline
void AtomicOperations_ALL_ALL_GCCIntrinsics::
    setIntRelease(AtomicTypes::Int *atomicInt, int value)
{
    __atomic_store_n(&atomicInt->d_value, value, __ATOMIC_RELEASE);
}

inline
void AtomicOperations_ALL_ALL_GCCIntrinsics::
    setIntRelaxed(AtomicTypes::Int *atomicInt, int value)
{
    __atomic_store_n(&atomicInt->d_value, value, __ATOMIC_RELAXED);
}

inline
int AtomicOperations_ALL_ALL_GCCIntrinsics::
    swapInt(AtomicTypes::Int *atomicInt, int swapValue)
{
    return
        __atomic_exchange_n(&atomicInt->d_value, swapValue, __ATOMIC_SEQ_CST);
}

inline
int AtomicOperations_ALL_ALL_GCCIntrinsics::
    swapIntAcqRel(AtomicTypes::Int *atomicInt, int swapValue)
{
    return
        __atomic_exchange_n(&atomicInt->d_value, swapValue, __ATOMIC_ACQ_REL);
}

inline
int AtomicOperations_ALL_ALL_GCCIntrinsics::
    testAndSwapInt(AtomicTypes::Int *atomicInt,
                   int               compareValue,
                   int               swapValue)
{
    // On failure '__atomic_compare_exchange_n' writes the observed value back
    // into 'compareValue'; on success 'compareValue' already equals the
    // previous value, so returning it always yields the pre-operation value.
    __atomic_compare_exchange_n(&atomicInt->d_value, &compareValue, swapValue,
                                0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);

    return compareValue;
}

inline
int AtomicOperations_ALL_ALL_GCCIntrinsics::
    testAndSwapIntAcqRel(AtomicTypes::Int *atomicInt,
                         int               compareValue,
                         int               swapValue)
{
    // A failed compare-exchange performs no store, so the failure ordering
    // drops the release component ('__ATOMIC_ACQUIRE', not '__ATOMIC_ACQ_REL').
    __atomic_compare_exchange_n(&atomicInt->d_value, &compareValue, swapValue,
                                0, __ATOMIC_ACQ_REL, __ATOMIC_ACQUIRE);

    return compareValue;
}

inline
int AtomicOperations_ALL_ALL_GCCIntrinsics::
    addIntNv(AtomicTypes::Int *atomicInt, int value)
{
    return __atomic_add_fetch(&atomicInt->d_value, value, __ATOMIC_SEQ_CST);
}

inline
int AtomicOperations_ALL_ALL_GCCIntrinsics::
    addIntNvAcqRel(AtomicTypes::Int *atomicInt, int value)
{
    return __atomic_add_fetch(&atomicInt->d_value, value, __ATOMIC_ACQ_REL);
}

inline
int AtomicOperations_ALL_ALL_GCCIntrinsics::
    addIntNvRelaxed(AtomicTypes::Int *atomicInt, int value)
{
    return __atomic_add_fetch(&atomicInt->d_value, value, __ATOMIC_RELAXED);
}
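
// Note: throughout these definitions the method-name suffix selects the
// '__atomic' memory-order constant: no suffix maps to '__ATOMIC_SEQ_CST',
// 'Acquire' to '__ATOMIC_ACQUIRE', 'Release' to '__ATOMIC_RELEASE', 'AcqRel'
// to '__ATOMIC_ACQ_REL', and 'Relaxed' to '__ATOMIC_RELAXED'; the 'init*'
// functions store with '__ATOMIC_RELAXED'.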

inline
void AtomicOperations_ALL_ALL_GCCIntrinsics::
    initInt64(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
{
    __atomic_store_n(&atomicInt->d_value, value, __ATOMIC_RELAXED);
}

inline
Types::Int64 AtomicOperations_ALL_ALL_GCCIntrinsics::
    getInt64(const AtomicTypes::Int64 *atomicInt)
{
    return __atomic_load_n(&atomicInt->d_value, __ATOMIC_SEQ_CST);
}

inline
Types::Int64 AtomicOperations_ALL_ALL_GCCIntrinsics::
    getInt64Acquire(const AtomicTypes::Int64 *atomicInt)
{
    return __atomic_load_n(&atomicInt->d_value, __ATOMIC_ACQUIRE);
}

inline
Types::Int64 AtomicOperations_ALL_ALL_GCCIntrinsics::
    getInt64Relaxed(const AtomicTypes::Int64 *atomicInt)
{
    return __atomic_load_n(&atomicInt->d_value, __ATOMIC_RELAXED);
}

inline
void AtomicOperations_ALL_ALL_GCCIntrinsics::
    setInt64(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
{
    __atomic_store_n(&atomicInt->d_value, value, __ATOMIC_SEQ_CST);
}

inline
void AtomicOperations_ALL_ALL_GCCIntrinsics::
    setInt64Release(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
{
    __atomic_store_n(&atomicInt->d_value, value, __ATOMIC_RELEASE);
}

inline
void AtomicOperations_ALL_ALL_GCCIntrinsics::
    setInt64Relaxed(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
{
    __atomic_store_n(&atomicInt->d_value, value, __ATOMIC_RELAXED);
}

inline
Types::Int64 AtomicOperations_ALL_ALL_GCCIntrinsics::
    swapInt64(AtomicTypes::Int64 *atomicInt, Types::Int64 swapValue)
{
    return
        __atomic_exchange_n(&atomicInt->d_value, swapValue, __ATOMIC_SEQ_CST);
}

inline
Types::Int64 AtomicOperations_ALL_ALL_GCCIntrinsics::
    swapInt64AcqRel(AtomicTypes::Int64 *atomicInt, Types::Int64 swapValue)
{
    return
        __atomic_exchange_n(&atomicInt->d_value, swapValue, __ATOMIC_ACQ_REL);
}

inline
Types::Int64 AtomicOperations_ALL_ALL_GCCIntrinsics::
    testAndSwapInt64(AtomicTypes::Int64 *atomicInt,
                     Types::Int64        compareValue,
                     Types::Int64        swapValue)
{
    __atomic_compare_exchange_n(&atomicInt->d_value, &compareValue, swapValue,
                                0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);

    return compareValue;
}

inline
Types::Int64 AtomicOperations_ALL_ALL_GCCIntrinsics::
    testAndSwapInt64AcqRel(AtomicTypes::Int64 *atomicInt,
                           Types::Int64        compareValue,
                           Types::Int64        swapValue)
{
    __atomic_compare_exchange_n(&atomicInt->d_value, &compareValue, swapValue,
                                0, __ATOMIC_ACQ_REL, __ATOMIC_ACQUIRE);

    return compareValue;
}
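
// A sketch of the usual retry loop built on 'testAndSwapInt64' (illustrative
// only; the 'BloombergLP::bsls' namespaces are assumed):
//..
//  typedef BloombergLP::bsls::AtomicOperations_Imp Ops;
//  typedef BloombergLP::bsls::Types::Int64         Int64;
//
//  Ops::AtomicTypes::Int64 total;
//  Ops::initInt64(&total, 0);
//
//  // Atomically double 'total', retrying if another thread intervened
//  // between the load and the compare-and-swap.
//  Int64 expected = Ops::getInt64(&total);
//  for (;;) {
//      Int64 previous =
//                   Ops::testAndSwapInt64(&total, expected, expected * 2);
//      if (previous == expected) {
//          break;               // the swap took effect
//      }
//      expected = previous;     // lost the race; retry with the fresh value
//  }
//..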

inline
Types::Int64 AtomicOperations_ALL_ALL_GCCIntrinsics::
    addInt64Nv(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
{
    return __atomic_add_fetch(&atomicInt->d_value, value, __ATOMIC_SEQ_CST);
}

inline
Types::Int64 AtomicOperations_ALL_ALL_GCCIntrinsics::
    addInt64NvAcqRel(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
{
    return __atomic_add_fetch(&atomicInt->d_value, value, __ATOMIC_ACQ_REL);
}

inline
Types::Int64 AtomicOperations_ALL_ALL_GCCIntrinsics::
    addInt64NvRelaxed(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
{
    return __atomic_add_fetch(&atomicInt->d_value, value, __ATOMIC_RELAXED);
}

}  // close package namespace
}  // close enterprise namespace

#endif  // GCC 4.7+ '__atomic' intrinsics

#endif  // INCLUDED_BSLS_ATOMICOPERATIONS_ALL_ALL_GCCINTRINSICS