BDE 4.14.0 Production release
Loading...
Searching...
No Matches
bsls_atomicoperations_powerpc_all_gcc.h
Go to the documentation of this file.
1/// @file bsls_atomicoperations_powerpc_all_gcc.h
2///
3/// The content of this file has been pre-processed for Doxygen.
4///
5
6
7// bsls_atomicoperations_powerpc_all_gcc.h -*-C++-*-
8#ifndef INCLUDED_BSLS_ATOMICOPERATIONS_POWERPC_ALL_GCC
9#define INCLUDED_BSLS_ATOMICOPERATIONS_POWERPC_ALL_GCC
10
11#include <bsls_ident.h>
12BSLS_IDENT("$Id: $")
13
14/// @defgroup bsls_atomicoperations_powerpc_all_gcc bsls_atomicoperations_powerpc_all_gcc
15/// @brief Provide implementations of atomic operations for gcc on PowerPC
16/// @addtogroup bsl
17/// @{
18/// @addtogroup bsls
19/// @{
20/// @addtogroup bsls_atomicoperations_powerpc_all_gcc
21/// @{
22///
23/// <h1> Outline </h1>
24/// * <a href="#bsls_atomicoperations_powerpc_all_gcc-purpose"> Purpose</a>
25/// * <a href="#bsls_atomicoperations_powerpc_all_gcc-classes"> Classes </a>
26/// * <a href="#bsls_atomicoperations_powerpc_all_gcc-description"> Description </a>
27///
28/// # Purpose {#bsls_atomicoperations_powerpc_all_gcc-purpose}
29/// Provide implementations of atomic operations for gcc on PowerPC
30///
31/// # Classes {#bsls_atomicoperations_powerpc_all_gcc-classes}
32///
33/// - bsls::AtomicOperations_POWERPC_ALL_GCC: atomics for gcc on PowerPC
34///
35/// # Description {#bsls_atomicoperations_powerpc_all_gcc-description}
36/// This component provides classes necessary to implement atomics
37/// on the PowerPC platform in 32bit/64bit mode with the GCC compiler. The
38/// classes are for private use only. See @ref bsls_atomicoperations and
39/// @ref bsls_atomic for the public interface to atomics.
40///
41/// `bsls_atomicoperations_all_all_gccintrinsics.h` is used for gcc 4.7+ and
42/// provides C++11 atomics. The implementation herein is intended for earlier
43/// versions of gcc and will work correctly for 64-bit programs, as well as for
44/// 32-bit programs on AIX 6 or better, where AIX kernel saves and restores
45/// 64-bit registers across context switches and interrupts, even in 32-bit
46/// programs. According to IBM developers, this guarantee is not provided by
47/// the 32-bit ABI when running Linux kernel on POWER hardware, and therefore
48/// 64-bit atomic operations using gcc __sync_* intrinsics might not operate
49/// properly in some circumstances on Linux on POWER. If running on Linux on
50/// POWER, it is highly recommended that gcc 4.7+ be used, such as the IBM
51/// Advanced Toolchain (AT) which currently provides gcc 4.8.3 for POWER, or the
52/// RedHat Developer Toolset (DTS) for POWER.
53///
/// IMPLEMENTATION NOTE: there are likely excess explicit barriers, since the
/// gcc __sync_* intrinsics may provide their own barriers.
56/// @}
57/** @} */
58/** @} */
59
60/** @addtogroup bsl
61 * @{
62 */
63/** @addtogroup bsls
64 * @{
65 */
66/** @addtogroup bsls_atomicoperations_powerpc_all_gcc
67 * @{
68 */
69
70#include <bsls_atomicoperations_default.h>
71#include <bsls_platform.h>
72#include <bsls_types.h>
73
74#if defined(BSLS_PLATFORM_CPU_POWERPC) && defined(BSLS_PLATFORM_CMP_GNU)
75
76
77
78namespace bsls {
79
80struct AtomicOperations_POWERPC_ALL_GCC;
81typedef AtomicOperations_POWERPC_ALL_GCC AtomicOperations_Imp;
82
83 // ==========================================================
84 // struct Atomic_TypeTraits<AtomicOperations_POWERPC_ALL_GCC>
85 // ==========================================================
86
87template <>
88struct Atomic_TypeTraits<AtomicOperations_POWERPC_ALL_GCC>
89{
90 struct __attribute__((__aligned__(sizeof(int)))) Int
91 {
92 int d_value;
93 };
94
95 struct __attribute__((__aligned__(sizeof(Types::Int64)))) Int64
96 {
97 Types::Int64 d_value;
98 };
99
100 struct __attribute__((__aligned__(sizeof(unsigned int)))) Uint
101 {
102 unsigned int d_value;
103 };
104
105 struct __attribute__((__aligned__(sizeof(Types::Uint64)))) Uint64
106 {
107 Types::Uint64 d_value;
108 };
109
110 struct __attribute__((__aligned__(sizeof(void *)))) Pointer
111 {
112 void * d_value;
113 };
114};
115
116 // =======================================
117 // struct AtomicOperations_POWERPC_ALL_GCC
118 // =======================================
119
120struct AtomicOperations_POWERPC_ALL_GCC
121 #ifdef BSLS_PLATFORM_CPU_64_BIT
122 : AtomicOperations_Default64<AtomicOperations_POWERPC_ALL_GCC>
123 #else
124 : AtomicOperations_Default32<AtomicOperations_POWERPC_ALL_GCC>
125 #endif
126{
127 typedef Atomic_TypeTraits<AtomicOperations_POWERPC_ALL_GCC> AtomicTypes;
128
129 typedef char AtomicInt_SizeCheck[sizeof(int) == 4 ? 1 : -1];
130 // compile-time assert
131
132 // *** atomic functions for int ***
133
134 static void initInt(AtomicTypes::Int *atomicInt, int value);
135
136 static int getInt(const AtomicTypes::Int *atomicInt);
137
138 static int getIntAcquire(const AtomicTypes::Int *atomicInt);
139
140 static int getIntRelaxed(const AtomicTypes::Int *atomicInt);
141
142 static void setInt(AtomicTypes::Int *atomicInt, int value);
143
144 static void setIntRelease(AtomicTypes::Int *atomicInt, int value);
145
146 static void setIntRelaxed(AtomicTypes::Int *atomicInt, int value);
147
148 static int swapInt(AtomicTypes::Int *atomicInt, int swapValue);
149
150 static int swapIntAcqRel(AtomicTypes::Int *atomicInt, int swapValue);
151
152 static int testAndSwapInt(AtomicTypes::Int *atomicInt,
153 int compareValue,
154 int swapValue);
155
156 static int testAndSwapIntAcqRel(AtomicTypes::Int *atomicInt,
157 int compareValue,
158 int swapValue);
159
160 static int addIntNv(AtomicTypes::Int *atomicInt, int value);
161
162 static int addIntNvAcqRel(AtomicTypes::Int *atomicInt, int value);
163
164 static int addIntNvRelaxed(AtomicTypes::Int *atomicInt, int value);
165
166 // *** atomic functions for Int64 ***
167
168 static void initInt64(AtomicTypes::Int64 *atomicInt, Types::Int64 value);
169
170 static Types::Int64 getInt64(const AtomicTypes::Int64 *atomicInt);
171
172 static Types::Int64 getInt64Acquire(const AtomicTypes::Int64 *atomicInt);
173
174 static Types::Int64 getInt64Relaxed(const AtomicTypes::Int64 *atomicInt);
175
176 static void setInt64(AtomicTypes::Int64 *atomicInt, Types::Int64 value);
177
178 static void setInt64Release(AtomicTypes::Int64 *atomicInt,
179 Types::Int64 value);
180
181 static void setInt64Relaxed(AtomicTypes::Int64 *atomicInt,
182 Types::Int64 value);
183
184 static Types::Int64 swapInt64(AtomicTypes::Int64 *atomicInt,
185 Types::Int64 swapValue);
186
187 static Types::Int64 swapInt64AcqRel(AtomicTypes::Int64 *atomicInt,
188 Types::Int64 swapValue);
189
190 static Types::Int64 testAndSwapInt64(AtomicTypes::Int64 *atomicInt,
191 Types::Int64 compareValue,
192 Types::Int64 swapValue);
193
194 static Types::Int64 testAndSwapInt64AcqRel(AtomicTypes::Int64 *atomicInt,
195 Types::Int64 compareValue,
196 Types::Int64 swapValue);
197
198 static Types::Int64 addInt64Nv(AtomicTypes::Int64 *atomicInt,
199 Types::Int64 value);
200
201 static Types::Int64 addInt64NvAcqRel(AtomicTypes::Int64 *atomicInt,
202 Types::Int64 value);
203
204 static Types::Int64 addInt64NvRelaxed(AtomicTypes::Int64 *atomicInt,
205 Types::Int64 value);
206};
207
208// ===========================================================================
209// INLINE FUNCTION DEFINITIONS
210// ===========================================================================
211
212 // ---------------------------------------
213 // struct AtomicOperations_POWERPC_ALL_GCC
214 // ---------------------------------------
215
216inline
217void AtomicOperations_POWERPC_ALL_GCC::
218 initInt(AtomicTypes::Int *atomicInt, int value)
219{
220 __asm__ __volatile__ ("stw%U0%X0 %1,%0"
221 :"=m"(atomicInt->d_value)
222 :"r"(value));
223}
224
225inline
226int AtomicOperations_POWERPC_ALL_GCC::
227 getInt(const AtomicTypes::Int *atomicInt)
228{
229 int rv;
230 __asm__ __volatile__ ("sync":::"memory");
231 __asm__ __volatile__ ("lwz%U1%X1 %0,%1"
232 :"=r"(rv)
233 :"m"(atomicInt->d_value));
234 __asm__ __volatile__ ("lwsync":::"memory");
235 return rv;
236}
237
238inline
239int AtomicOperations_POWERPC_ALL_GCC::
240 getIntAcquire(const AtomicTypes::Int *atomicInt)
241{
242 int rv;
243 __asm__ __volatile__ ("lwz%U1%X1 %0,%1"
244 :"=r"(rv)
245 :"m"(atomicInt->d_value));
246 __asm__ __volatile__ ("lwsync":::"memory");
247 return rv;
248}
249
250inline
251int AtomicOperations_POWERPC_ALL_GCC::
252 getIntRelaxed(const AtomicTypes::Int *atomicInt)
253{
254 int rv;
255 __asm__ __volatile__ ("lwz%U1%X1 %0,%1"
256 :"=r"(rv)
257 :"m"(atomicInt->d_value));
258 return rv;
259}
260
261inline
262void AtomicOperations_POWERPC_ALL_GCC::
263 setInt(AtomicTypes::Int *atomicInt, int value)
264{
265 __asm__ __volatile__ ("sync":::"memory");
266 __asm__ __volatile__ ("stw%U0%X0 %1,%0"
267 :"=m"(atomicInt->d_value)
268 :"r"(value));
269}
270
271inline
272void AtomicOperations_POWERPC_ALL_GCC::
273 setIntRelease(AtomicTypes::Int *atomicInt, int value)
274{
275 __asm__ __volatile__ ("lwsync":::"memory");
276 __asm__ __volatile__ ("stw%U0%X0 %1,%0"
277 :"=m"(atomicInt->d_value)
278 :"r"(value));
279}
280
281inline
282void AtomicOperations_POWERPC_ALL_GCC::
283 setIntRelaxed(AtomicTypes::Int *atomicInt, int value)
284{
285 __asm__ __volatile__ ("stw%U0%X0 %1,%0"
286 :"=m"(atomicInt->d_value)
287 :"r"(value));
288}
289
290inline
291int AtomicOperations_POWERPC_ALL_GCC::
292 swapInt(AtomicTypes::Int *atomicInt, int swapValue)
293{
294 __asm__ __volatile__ ("sync":::"memory");
295 return __sync_lock_test_and_set(&atomicInt->d_value, swapValue);
296}
297
298inline
299int AtomicOperations_POWERPC_ALL_GCC::
300 swapIntAcqRel(AtomicTypes::Int *atomicInt, int swapValue)
301{
302 __asm__ __volatile__ ("lwsync":::"memory");
303 return __sync_lock_test_and_set(&atomicInt->d_value, swapValue);
304}
305
306inline
307int AtomicOperations_POWERPC_ALL_GCC::
308 testAndSwapInt(AtomicTypes::Int *atomicInt,
309 int compareValue, int swapValue)
310{
311 __asm__ __volatile__ ("sync":::"memory");
312 return __sync_val_compare_and_swap(&atomicInt->d_value,
313 compareValue, swapValue);
314}
315
316inline
317int AtomicOperations_POWERPC_ALL_GCC::
318 testAndSwapIntAcqRel(AtomicTypes::Int *atomicInt,
319 int compareValue, int swapValue)
320{
321 __asm__ __volatile__ ("lwsync":::"memory");
322 return __sync_val_compare_and_swap(&atomicInt->d_value,
323 compareValue, swapValue);
324}
325
326inline
327int AtomicOperations_POWERPC_ALL_GCC::
328 addIntNv(AtomicTypes::Int *atomicInt, int value)
329{
330 int rv;
331 __asm__ __volatile__ ("sync":::"memory");
332 rv = __sync_add_and_fetch(&atomicInt->d_value, value);
333 __asm__ __volatile__ ("lwsync":::"memory");
334 return rv;
335}
336
337inline
338int AtomicOperations_POWERPC_ALL_GCC::
339 addIntNvAcqRel(AtomicTypes::Int *atomicInt, int value)
340{
341 int rv;
342 __asm__ __volatile__ ("lwsync":::"memory");
343 rv = __sync_add_and_fetch(&atomicInt->d_value, value);
344 __asm__ __volatile__ ("lwsync":::"memory");
345 return rv;
346}
347
348inline
349int AtomicOperations_POWERPC_ALL_GCC::
350 addIntNvRelaxed(AtomicTypes::Int *atomicInt, int value)
351{
352 return __sync_add_and_fetch(&atomicInt->d_value, value);
353}
354
355inline
356void AtomicOperations_POWERPC_ALL_GCC::
357 initInt64(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
358{
359 __asm__ __volatile__ ("std%U0%X0 %1,%0"
360 :"=m"(atomicInt->d_value)
361 :"r"(value));
362}
363
364inline
365Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
366 getInt64(const AtomicTypes::Int64 *atomicInt)
367{
368 Types::Int64 rv;
369 __asm__ __volatile__ ("sync":::"memory");
370 __asm__ __volatile__ ("ld%U1%X1 %0,%1"
371 :"=r"(rv)
372 :"m"(atomicInt->d_value));
373 __asm__ __volatile__ ("lwsync":::"memory");
374 return rv;
375}
376
377inline
378Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
379 getInt64Acquire(const AtomicTypes::Int64 *atomicInt)
380{
381 Types::Int64 rv;
382 __asm__ __volatile__ ("ld%U1%X1 %0,%1"
383 :"=r"(rv)
384 :"m"(atomicInt->d_value));
385 __asm__ __volatile__ ("lwsync":::"memory");
386 return rv;
387}
388
389inline
390Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
391 getInt64Relaxed(const AtomicTypes::Int64 *atomicInt)
392{
393 Types::Int64 rv;
394 __asm__ __volatile__ ("ld%U1%X1 %0,%1"
395 :"=r"(rv)
396 :"m"(atomicInt->d_value));
397 return rv;
398}
399
400inline
401void AtomicOperations_POWERPC_ALL_GCC::
402 setInt64(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
403{
404 __asm__ __volatile__ ("sync":::"memory");
405 __asm__ __volatile__ ("std%U0%X0 %1,%0"
406 :"=m"(atomicInt->d_value)
407 :"r"(value));
408}
409
410inline
411void AtomicOperations_POWERPC_ALL_GCC::
412 setInt64Release(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
413{
414 __asm__ __volatile__ ("lwsync":::"memory");
415 __asm__ __volatile__ ("std%U0%X0 %1,%0"
416 :"=m"(atomicInt->d_value)
417 :"r"(value));
418}
419
420inline
421void AtomicOperations_POWERPC_ALL_GCC::
422 setInt64Relaxed(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
423{
424 __asm__ __volatile__ ("std%U0%X0 %1,%0"
425 :"=m"(atomicInt->d_value)
426 :"r"(value));
427}
428
429inline
430Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
431 swapInt64(AtomicTypes::Int64 *atomicInt, Types::Int64 swapValue)
432{
433 __asm__ __volatile__ ("sync":::"memory");
434 return __sync_lock_test_and_set(&atomicInt->d_value, swapValue);
435}
436
437inline
438Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
439 swapInt64AcqRel(AtomicTypes::Int64 *atomicInt, Types::Int64 swapValue)
440{
441 __asm__ __volatile__ ("lwsync":::"memory");
442 return __sync_lock_test_and_set(&atomicInt->d_value, swapValue);
443}
444
445inline
446Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
447 testAndSwapInt64(AtomicTypes::Int64 *atomicInt,
448 Types::Int64 compareValue, Types::Int64 swapValue)
449{
450 __asm__ __volatile__ ("sync":::"memory");
451 return __sync_val_compare_and_swap(&atomicInt->d_value,
452 compareValue, swapValue);
453}
454
455inline
456Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
457 testAndSwapInt64AcqRel(AtomicTypes::Int64 *atomicInt,
458 Types::Int64 compareValue, Types::Int64 swapValue)
459{
460 __asm__ __volatile__ ("lwsync":::"memory");
461 return __sync_val_compare_and_swap(&atomicInt->d_value,
462 compareValue, swapValue);
463}
464
465inline
466Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
467 addInt64Nv(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
468{
469 Types::Int64 rv;
470 __asm__ __volatile__ ("sync":::"memory");
471 rv = __sync_add_and_fetch(&atomicInt->d_value, value);
472 __asm__ __volatile__ ("lwsync":::"memory");
473 return rv;
474}
475
476inline
477Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
478 addInt64NvAcqRel(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
479{
480 Types::Int64 rv;
481 __asm__ __volatile__ ("lwsync":::"memory");
482 rv = __sync_add_and_fetch(&atomicInt->d_value, value);
483 __asm__ __volatile__ ("lwsync":::"memory");
484 return rv;
485}
486
487inline
488Types::Int64 AtomicOperations_POWERPC_ALL_GCC::
489 addInt64NvRelaxed(AtomicTypes::Int64 *atomicInt, Types::Int64 value)
490{
491 return __sync_add_and_fetch(&atomicInt->d_value, value);
492}
493
494} // close package namespace
495
496
497
498#endif // BSLS_PLATFORM_CPU_POWERPC && BSLS_PLATFORM_CMP_GNU
499
500#endif
501
502// ----------------------------------------------------------------------------
503// Copyright 2013 Bloomberg Finance L.P.
504//
505// Licensed under the Apache License, Version 2.0 (the "License");
506// you may not use this file except in compliance with the License.
507// You may obtain a copy of the License at
508//
509// http://www.apache.org/licenses/LICENSE-2.0
510//
511// Unless required by applicable law or agreed to in writing, software
512// distributed under the License is distributed on an "AS IS" BASIS,
513// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
514// See the License for the specific language governing permissions and
515// limitations under the License.
516// ----------------------------- END-OF-FILE ----------------------------------
517
518/** @} */
519/** @} */
520/** @} */
#define BSLS_IDENT(str)
Definition bsls_ident.h:195
Definition bdlt_iso8601util.h:691
long long Int64
Definition bsls_types.h:132