#ifndef INCLUDED_BSLS_ATOMICOPERATIONS_POWERPC_ALL_GCC
#define INCLUDED_BSLS_ATOMICOPERATIONS_POWERPC_ALL_GCC

#include <bsls_atomicoperations_default.h>
#include <bsls_platform.h>
#include <bsls_types.h>

#if defined(BSLS_PLATFORM_CPU_POWERPC) && defined(BSLS_PLATFORM_CMP_GNU)

namespace BloombergLP {
namespace bsls {
struct AtomicOperations_POWERPC_ALL_GCC;
typedef AtomicOperations_POWERPC_ALL_GCC AtomicOperations_Imp;
          // ==========================================================
          // struct Atomic_TypeTraits<AtomicOperations_POWERPC_ALL_GCC>
          // ==========================================================

template <>
struct Atomic_TypeTraits<AtomicOperations_POWERPC_ALL_GCC>
{
    struct __attribute__((__aligned__(sizeof(int)))) Int
    {
        int d_value;
    };

    struct __attribute__((__aligned__(sizeof(Types::Int64)))) Int64
    {
        Types::Int64 d_value;
    };

    struct __attribute__((__aligned__(sizeof(unsigned int)))) Uint
    {
        unsigned int d_value;
    };

    struct __attribute__((__aligned__(sizeof(Types::Uint64)))) Uint64
    {
        Types::Uint64 d_value;
    };

    struct __attribute__((__aligned__(sizeof(void *)))) Pointer
    {
        void *d_value;
    };
};
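// Each wrapper above forces natural alignment on its payload via
// '__attribute__((__aligned__))'; naturally aligned 4- and 8-byte accesses
// are single-copy atomic on POWER, which the inline assembly below relies
// on.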
               // =======================================
               // struct AtomicOperations_POWERPC_ALL_GCC
               // =======================================

struct AtomicOperations_POWERPC_ALL_GCC
#ifdef BSLS_PLATFORM_CPU_64_BIT
    : AtomicOperations_Default64<AtomicOperations_POWERPC_ALL_GCC>
#else
    : AtomicOperations_Default32<AtomicOperations_POWERPC_ALL_GCC>
#endif
{
    typedef Atomic_TypeTraits<AtomicOperations_POWERPC_ALL_GCC> AtomicTypes;

    typedef char AtomicInt_SizeCheck[sizeof(int) == 4 ? 1 : -1];
        // compile-time assert: this implementation requires a 4-byte 'int'
        // *** atomic functions for int ***

    static void initInt(AtomicTypes::Int *atomicInt, int value);

    static int getInt(const AtomicTypes::Int *atomicInt);

    static int getIntAcquire(const AtomicTypes::Int *atomicInt);

    static int getIntRelaxed(const AtomicTypes::Int *atomicInt);

    static void setInt(AtomicTypes::Int *atomicInt, int value);

    static void setIntRelease(AtomicTypes::Int *atomicInt, int value);

    static void setIntRelaxed(AtomicTypes::Int *atomicInt, int value);

    static int swapInt(AtomicTypes::Int *atomicInt, int swapValue);

    static int swapIntAcqRel(AtomicTypes::Int *atomicInt, int swapValue);

    static int testAndSwapInt(AtomicTypes::Int *atomicInt,
                              int compareValue,
                              int swapValue);

    static int testAndSwapIntAcqRel(AtomicTypes::Int *atomicInt,
                                    int compareValue,
                                    int swapValue);

    static int addIntNv(AtomicTypes::Int *atomicInt, int value);

    static int addIntNvAcqRel(AtomicTypes::Int *atomicInt, int value);

    static int addIntNvRelaxed(AtomicTypes::Int *atomicInt, int value);
        // *** atomic functions for Int64 ***

    static void initInt64(AtomicTypes::Int64 *atomicInt, Types::Int64 value);

    static Types::Int64 getInt64(const AtomicTypes::Int64 *atomicInt);

    static Types::Int64 getInt64Acquire(const AtomicTypes::Int64 *atomicInt);

    static Types::Int64 getInt64Relaxed(const AtomicTypes::Int64 *atomicInt);

    static void setInt64(AtomicTypes::Int64 *atomicInt, Types::Int64 value);

    static void setInt64Release(AtomicTypes::Int64 *atomicInt,
                                Types::Int64 value);

    static void setInt64Relaxed(AtomicTypes::Int64 *atomicInt,
                                Types::Int64 value);

    static Types::Int64 swapInt64(AtomicTypes::Int64 *atomicInt,
                                  Types::Int64 swapValue);

    static Types::Int64 swapInt64AcqRel(AtomicTypes::Int64 *atomicInt,
                                        Types::Int64 swapValue);

    static Types::Int64 testAndSwapInt64(AtomicTypes::Int64 *atomicInt,
                                         Types::Int64 compareValue,
                                         Types::Int64 swapValue);

    static Types::Int64 testAndSwapInt64AcqRel(AtomicTypes::Int64 *atomicInt,
                                               Types::Int64 compareValue,
                                               Types::Int64 swapValue);

    static Types::Int64 addInt64Nv(AtomicTypes::Int64 *atomicInt,
                                   Types::Int64 value);

    static Types::Int64 addInt64NvAcqRel(AtomicTypes::Int64 *atomicInt,
                                         Types::Int64 value);

    static Types::Int64 addInt64NvRelaxed(AtomicTypes::Int64 *atomicInt,
                                          Types::Int64 value);
};
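// A minimal usage sketch (illustrative only; production code would normally
// reach these operations through the portable 'bsls::AtomicOperations'
// facade rather than this platform class directly):
//..
//  typedef AtomicOperations_Imp Imp;
//
//  Imp::AtomicTypes::Int counter;
//  Imp::initInt(&counter, 0);
//  int newValue = Imp::addIntNv(&counter, 1);   // full-barrier increment
//  int observed = Imp::getInt(&counter);        // seq-cst load
//..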
// ===========================================================================
//                        INLINE FUNCTION DEFINITIONS
// ===========================================================================

inline
void AtomicOperations_POWERPC_ALL_GCC::
    initInt(AtomicTypes::Int *atomicInt, int value)
{
    __asm__ __volatile__ (
        "stw%U0%X0 %1,%0"                   // plain 32-bit store
        : "=m" (atomicInt->d_value)
        : "r" (value));
}
inline
int AtomicOperations_POWERPC_ALL_GCC::
    getInt(const AtomicTypes::Int *atomicInt)
{
    int ret;

    __asm__ __volatile__ ("sync" ::: "memory");
    __asm__ __volatile__ (
        "lwz%U1%X1 %0,%1"                   // 32-bit load
        : "=r" (ret)
        : "m" (atomicInt->d_value));
    __asm__ __volatile__ ("lwsync" ::: "memory");

    return ret;
}
inline
int AtomicOperations_POWERPC_ALL_GCC::
    getIntAcquire(const AtomicTypes::Int *atomicInt)
{
    int ret;

    __asm__ __volatile__ (
        "lwz%U1%X1 %0,%1"
        : "=r" (ret)
        : "m" (atomicInt->d_value));
    __asm__ __volatile__ ("lwsync" ::: "memory");

    return ret;
}
inline
int AtomicOperations_POWERPC_ALL_GCC::
    getIntRelaxed(const AtomicTypes::Int *atomicInt)
{
    int ret;

    __asm__ __volatile__ (
        "lwz%U1%X1 %0,%1"                   // atomic, but unordered, load
        : "=r" (ret)
        : "m" (atomicInt->d_value));

    return ret;
}
inline
void AtomicOperations_POWERPC_ALL_GCC::
    setInt(AtomicTypes::Int *atomicInt, int value)
{
    __asm__ __volatile__ ("sync" ::: "memory");
    __asm__ __volatile__ (
        "stw%U0%X0 %1,%0"
        : "=m" (atomicInt->d_value)
        : "r" (value));
}
inline
void AtomicOperations_POWERPC_ALL_GCC::
    setIntRelease(AtomicTypes::Int *atomicInt, int value)
{
    __asm__ __volatile__ ("lwsync" ::: "memory");
    __asm__ __volatile__ (
        "stw%U0%X0 %1,%0"
        : "=m" (atomicInt->d_value)
        : "r" (value));
}
inline
void AtomicOperations_POWERPC_ALL_GCC::
    setIntRelaxed(AtomicTypes::Int *atomicInt, int value)
{
    __asm__ __volatile__ (
        "stw%U0%X0 %1,%0"                   // atomic, but unordered, store
        : "=m" (atomicInt->d_value)
        : "r" (value));
}
inline
int AtomicOperations_POWERPC_ALL_GCC::
    swapInt(AtomicTypes::Int *atomicInt, int swapValue)
{
    __asm__ __volatile__ ("sync" ::: "memory");
    return __sync_lock_test_and_set(&atomicInt->d_value, swapValue);
}
inline
int AtomicOperations_POWERPC_ALL_GCC::
    swapIntAcqRel(AtomicTypes::Int *atomicInt, int swapValue)
{
    __asm__ __volatile__ ("lwsync" ::: "memory");
    return __sync_lock_test_and_set(&atomicInt->d_value, swapValue);
}
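// Per the GCC documentation, '__sync_lock_test_and_set' is an atomic
// exchange with acquire semantics only; the 'sync' or 'lwsync' issued
// immediately before it is what upgrades 'swapInt' to a full barrier and
// 'swapIntAcqRel' to acquire/release ordering.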
inline
int AtomicOperations_POWERPC_ALL_GCC::
    testAndSwapInt(AtomicTypes::Int *atomicInt,
                   int compareValue,
                   int swapValue)
{
    __asm__ __volatile__ ("sync" ::: "memory");
    return __sync_val_compare_and_swap(&atomicInt->d_value,
                                       compareValue,
                                       swapValue);
}
inline
int AtomicOperations_POWERPC_ALL_GCC::
    testAndSwapIntAcqRel(AtomicTypes::Int *atomicInt,
                         int compareValue,
                         int swapValue)
{
    __asm__ __volatile__ ("lwsync" ::: "memory");
    return __sync_val_compare_and_swap(&atomicInt->d_value,
                                       compareValue,
                                       swapValue);
}
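// A typical retry loop built on the compare-and-swap above (a sketch only;
// 'transform' is a hypothetical pure function supplied by the caller):
//..
//  int expected = AtomicOperations_Imp::getIntRelaxed(&atomic);
//  for (;;) {
//      int desired = transform(expected);
//      int actual  = AtomicOperations_Imp::testAndSwapInt(&atomic,
//                                                         expected,
//                                                         desired);
//      if (actual == expected) {
//          break;                  // CAS succeeded
//      }
//      expected = actual;          // lost the race; retry with new value
//  }
//..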
inline
int AtomicOperations_POWERPC_ALL_GCC::
    addIntNv(AtomicTypes::Int *atomicInt, int value)
{
    int rv;

    __asm__ __volatile__ ("sync" ::: "memory");
    rv = __sync_add_and_fetch(&atomicInt->d_value, value);
    __asm__ __volatile__ ("lwsync" ::: "memory");

    return rv;
}
inline
int AtomicOperations_POWERPC_ALL_GCC::
    addIntNvAcqRel(AtomicTypes::Int *atomicInt, int value)
{
    int rv;

    __asm__ __volatile__ ("lwsync" ::: "memory");
    rv = __sync_add_and_fetch(&atomicInt->d_value, value);
    __asm__ __volatile__ ("lwsync" ::: "memory");

    return rv;
}
inline
int AtomicOperations_POWERPC_ALL_GCC::
    addIntNvRelaxed(AtomicTypes::Int *atomicInt, int value)
{
    return __sync_add_and_fetch(&atomicInt->d_value, value);
}
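// Note: the GCC manual describes the '__sync_*' builtins as full barriers,
// yet explicit fences are still issued around them above; presumably this
// pins down the exact 'sync'/'lwsync' sequence independently of how a given
// GCC release expands the builtins on POWER.  With a GCC recent enough for
// the '__atomic' builtins (4.7+), the relaxed variant could be written as:
//..
//  return __atomic_add_fetch(&atomicInt->d_value, value, __ATOMIC_RELAXED);
//..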
inline
void AtomicOperations_POWERPC_ALL_GCC::
    initInt64(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
{
    __asm__ __volatile__ (
        "std%U0%X0 %1,%0"                   // plain 64-bit store
        : "=m" (atomicInt->d_value)
        : "r" (value));
}
inline
Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
    getInt64(const AtomicTypes::Int64 *atomicInt)
{
    Types::Int64 ret;

    __asm__ __volatile__ ("sync" ::: "memory");
    __asm__ __volatile__ (
        "ld%U1%X1 %0,%1"                    // 64-bit load
        : "=r" (ret)
        : "m" (atomicInt->d_value));
    __asm__ __volatile__ ("lwsync" ::: "memory");

    return ret;
}
inline
Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
    getInt64Acquire(const AtomicTypes::Int64 *atomicInt)
{
    Types::Int64 ret;

    __asm__ __volatile__ (
        "ld%U1%X1 %0,%1"
        : "=r" (ret)
        : "m" (atomicInt->d_value));
    __asm__ __volatile__ ("lwsync" ::: "memory");

    return ret;
}
inline
Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
    getInt64Relaxed(const AtomicTypes::Int64 *atomicInt)
{
    Types::Int64 ret;

    __asm__ __volatile__ (
        "ld%U1%X1 %0,%1"                    // atomic, but unordered, load
        : "=r" (ret)
        : "m" (atomicInt->d_value));

    return ret;
}
inline
void AtomicOperations_POWERPC_ALL_GCC::
    setInt64(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
{
    __asm__ __volatile__ ("sync" ::: "memory");
    __asm__ __volatile__ (
        "std%U0%X0 %1,%0"
        : "=m" (atomicInt->d_value)
        : "r" (value));
}
inline
void AtomicOperations_POWERPC_ALL_GCC::
    setInt64Release(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
{
    __asm__ __volatile__ ("lwsync" ::: "memory");
    __asm__ __volatile__ (
        "std%U0%X0 %1,%0"
        : "=m" (atomicInt->d_value)
        : "r" (value));
}
inline
void AtomicOperations_POWERPC_ALL_GCC::
    setInt64Relaxed(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
{
    __asm__ __volatile__ (
        "std%U0%X0 %1,%0"                   // atomic, but unordered, store
        : "=m" (atomicInt->d_value)
        : "r" (value));
}
inline
Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
    swapInt64(AtomicTypes::Int64 *atomicInt, Types::Int64 swapValue)
{
    __asm__ __volatile__ ("sync" ::: "memory");
    return __sync_lock_test_and_set(&atomicInt->d_value, swapValue);
}
inline
Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
    swapInt64AcqRel(AtomicTypes::Int64 *atomicInt, Types::Int64 swapValue)
{
    __asm__ __volatile__ ("lwsync" ::: "memory");
    return __sync_lock_test_and_set(&atomicInt->d_value, swapValue);
}
inline
Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
    testAndSwapInt64(AtomicTypes::Int64 *atomicInt,
                     Types::Int64 compareValue,
                     Types::Int64 swapValue)
{
    __asm__ __volatile__ ("sync" ::: "memory");
    return __sync_val_compare_and_swap(&atomicInt->d_value,
                                       compareValue,
                                       swapValue);
}
inline
Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
    testAndSwapInt64AcqRel(AtomicTypes::Int64 *atomicInt,
                           Types::Int64 compareValue,
                           Types::Int64 swapValue)
{
    __asm__ __volatile__ ("lwsync" ::: "memory");
    return __sync_val_compare_and_swap(&atomicInt->d_value,
                                       compareValue,
                                       swapValue);
}
inline
Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
    addInt64Nv(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
{
    Types::Int64 rv;

    __asm__ __volatile__ ("sync" ::: "memory");
    rv = __sync_add_and_fetch(&atomicInt->d_value, value);
    __asm__ __volatile__ ("lwsync" ::: "memory");

    return rv;
}
inline
Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
    addInt64NvAcqRel(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
{
    Types::Int64 rv;

    __asm__ __volatile__ ("lwsync" ::: "memory");
    rv = __sync_add_and_fetch(&atomicInt->d_value, value);
    __asm__ __volatile__ ("lwsync" ::: "memory");

    return rv;
}
inline
Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
    addInt64NvRelaxed(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
{
    return __sync_add_and_fetch(&atomicInt->d_value, value);
}
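// Note: the 'Int64' variants use the doubleword 'ld'/'std' instructions and
// the 8-byte '__sync_*' builtins, which presumes the target can perform
// atomic 8-byte accesses (true for 64-bit POWER builds).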
}  // close package namespace
}  // close enterprise namespace

#endif  // BSLS_PLATFORM_CPU_POWERPC && BSLS_PLATFORM_CMP_GNU

#endif  // INCLUDED_BSLS_ATOMICOPERATIONS_POWERPC_ALL_GCC