source: rtems-libbsd/rtemsbsd/include/machine/atomic.h @ 70555d5

Branches: 5-freebsd-12, 6-freebsd-12
Last change on this file since 70555d5 was bcdce02, checked in by Sebastian Huber <sebastian.huber@…>, on 08/21/18 at 11:47:02

Update to FreeBSD head 2018-06-01

Git mirror commit fb63610a69b0eb7f69a201ba05c4c1a7a2739cf9.

Update #3472.

  • Property mode set to 100644
File size: 38.8 KB
Line 
1/**
2 * @file
3 *
4 * @ingroup rtems_bsd_machine
5 *
6 * @brief TODO.
7 */
8
9/*
10 * Copyright (c) 2009, 2015 embedded brains GmbH.  All rights reserved.
11 *
12 *  embedded brains GmbH
13 *  Dornierstr. 4
14 *  82178 Puchheim
15 *  Germany
16 *  <rtems@embedded-brains.de>
17 *
18 * Redistribution and use in source and binary forms, with or without
19 * modification, are permitted provided that the following conditions
20 * are met:
21 * 1. Redistributions of source code must retain the above copyright
22 *    notice, this list of conditions and the following disclaimer.
23 * 2. Redistributions in binary form must reproduce the above copyright
24 *    notice, this list of conditions and the following disclaimer in the
25 *    documentation and/or other materials provided with the distribution.
26 *
27 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
28 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
29 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
30 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
31 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
32 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
33 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
34 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
35 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
36 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
37 * SUCH DAMAGE.
38 */
39
40#ifndef _RTEMS_BSD_MACHINE_ATOMIC_H_
41#define _RTEMS_BSD_MACHINE_ATOMIC_H_
42
43#include <rtems.h>
44
45#ifdef RTEMS_SMP
46  #if defined(__cplusplus) \
47    && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9))
48    /*
49     * The GCC 4.9 ships its own <stdatomic.h> which is not C++ compatible.  The
50     * suggested solution was to include <atomic> in case C++ is used.  This works
51     * at least with GCC 4.9.  See also:
52     *
53     * http://gcc.gnu.org/bugzilla/show_bug.cgi?id=60932
54     * http://gcc.gnu.org/bugzilla/show_bug.cgi?id=60940
55     */
56    #include <atomic>
57    #define _RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC
58  #else
59    #include <stdatomic.h>
60    #define _RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC
61  #endif
62#endif
63
/* Full (sequentially consistent) memory barrier. */
static inline void
mb(void)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_thread_fence(std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_thread_fence(memory_order_seq_cst);
#else
        /* Non-SMP: a compiler barrier is sufficient on a uniprocessor */
        RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}
75
/* Write (release) memory barrier. */
static inline void
wmb(void)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_thread_fence(std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_thread_fence(memory_order_release);
#else
        /* Non-SMP: a compiler barrier is sufficient on a uniprocessor */
        RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}
87
/* Read (acquire) memory barrier. */
static inline void
rmb(void)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_thread_fence(std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_thread_fence(memory_order_acquire);
#else
        /* Non-SMP: a compiler barrier is sufficient on a uniprocessor */
        RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}
99
100static inline void
101atomic_add_int(volatile int *p, int v)
102{
103#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
104        std::atomic_int *q =
105            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
106
107        q->fetch_add(v, std::memory_order_seq_cst);
108#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
109        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
110
111        atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
112#else
113        rtems_interrupt_level level;
114
115        rtems_interrupt_disable(level);
116        *p += v;
117        rtems_interrupt_enable(level);
118#endif
119}
120
/* Atomically perform *p += v; acquire ordering in the SMP paths. */
static inline void
atomic_add_acq_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_add(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_add_explicit(q, v, memory_order_acquire);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p += v;
        rtems_interrupt_enable(level);
#endif
}
141
/* Atomically perform *p += v; release ordering in the SMP paths. */
static inline void
atomic_add_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_add(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_add_explicit(q, v, memory_order_release);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p += v;
        rtems_interrupt_enable(level);
#endif
}
162
163static inline void
164atomic_subtract_int(volatile int *p, int v)
165{
166#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
167        std::atomic_int *q =
168            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
169
170        q->fetch_sub(v, std::memory_order_seq_cst);
171#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
172        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
173
174        atomic_fetch_sub_explicit(q, v, memory_order_seq_cst);
175#else
176        rtems_interrupt_level level;
177
178        rtems_interrupt_disable(level);
179        *p -= v;
180        rtems_interrupt_enable(level);
181#endif
182}
183
/* Atomically perform *p -= v; acquire ordering in the SMP paths. */
static inline void
atomic_subtract_acq_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_sub(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_sub_explicit(q, v, memory_order_acquire);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p -= v;
        rtems_interrupt_enable(level);
#endif
}
204
/* Atomically perform *p -= v; release ordering in the SMP paths. */
static inline void
atomic_subtract_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_sub(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_sub_explicit(q, v, memory_order_release);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p -= v;
        rtems_interrupt_enable(level);
#endif
}
225
226static inline void
227atomic_set_int(volatile int *p, int v)
228{
229#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
230        std::atomic_int *q =
231            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
232
233        q->fetch_or(v, std::memory_order_seq_cst);
234#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
235        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
236
237        atomic_fetch_or_explicit(q, v, memory_order_seq_cst);
238#else
239        rtems_interrupt_level level;
240
241        rtems_interrupt_disable(level);
242        *p |= v;
243        rtems_interrupt_enable(level);
244#endif
245}
246
/* Atomically perform *p |= v; acquire ordering in the SMP paths. */
static inline void
atomic_set_acq_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_or(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_or_explicit(q, v, memory_order_acquire);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p |= v;
        rtems_interrupt_enable(level);
#endif
}
267
/* Atomically perform *p |= v; release ordering in the SMP paths. */
static inline void
atomic_set_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_or(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_or_explicit(q, v, memory_order_release);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p |= v;
        rtems_interrupt_enable(level);
#endif
}
288
289static inline void
290atomic_clear_int(volatile int *p, int v)
291{
292#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
293        std::atomic_int *q =
294            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
295
296        q->fetch_and(~v, std::memory_order_seq_cst);
297#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
298        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
299
300        atomic_fetch_and_explicit(q, ~v, memory_order_seq_cst);
301#else
302        rtems_interrupt_level level;
303
304        rtems_interrupt_disable(level);
305        *p &= ~v;
306        rtems_interrupt_enable(level);
307#endif
308}
309
/* Atomically perform *p &= ~v; acquire ordering in the SMP paths. */
static inline void
atomic_clear_acq_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_and(~v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_and_explicit(q, ~v, memory_order_acquire);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p &= ~v;
        rtems_interrupt_enable(level);
#endif
}
330
/* Atomically perform *p &= ~v; release ordering in the SMP paths. */
static inline void
atomic_clear_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_and(~v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_and_explicit(q, ~v, memory_order_release);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p &= ~v;
        rtems_interrupt_enable(level);
#endif
}
351
/*
 * Compare-and-set: if *p == cmp, store set into *p and return nonzero;
 * otherwise leave *p unchanged and return 0.  Sequentially consistent on
 * success, relaxed on failure.
 */
static inline int
atomic_cmpset_int(volatile int *p, int cmp, int set)
{
        int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        /* cmp is a by-value copy, so the failure write-back is discarded */
        rv = q->compare_exchange_strong(cmp, set, std::memory_order_seq_cst,
            std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
            memory_order_seq_cst, memory_order_relaxed);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        rv = *p == cmp;
        if (rv) {
                *p = set;
        }
        rtems_interrupt_enable(level);
#endif

        return (rv);
}
381
/*
 * Fetching compare-and-set: like atomic_cmpset_int(), but on failure the
 * observed value of *p is written back into *cmp (FreeBSD fcmpset
 * contract), which lets callers retry without reloading.
 */
static inline int
atomic_fcmpset_int(volatile int *p, int *cmp, int set)
{
        int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        rv = q->compare_exchange_strong(*cmp, set, std::memory_order_seq_cst,
            std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        rv = atomic_compare_exchange_strong_explicit(q, cmp, set,
            memory_order_seq_cst, memory_order_relaxed);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;
        int actual;

        rtems_interrupt_disable(level);
        actual = *p;
        rv = actual == *cmp;
        /* Always publish the observed value, as fcmpset requires */
        *cmp = actual;
        if (rv) {
                *p = set;
        }
        rtems_interrupt_enable(level);
#endif

        return (rv);
}
414
/*
 * Compare-and-set with acquire ordering on success (relaxed on failure);
 * returns nonzero iff the swap happened.
 */
static inline int
atomic_cmpset_acq_int(volatile int *p, int cmp, int set)
{
        int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        rv = q->compare_exchange_strong(cmp, set, std::memory_order_acquire,
            std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
            memory_order_acquire, memory_order_relaxed);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        rv = *p == cmp;
        if (rv) {
                *p = set;
        }
        rtems_interrupt_enable(level);
#endif

        return (rv);
}
444
/*
 * Compare-and-set with release ordering on success (relaxed on failure);
 * returns nonzero iff the swap happened.
 */
static inline int
atomic_cmpset_rel_int(volatile int *p, int cmp, int set)
{
        int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        rv = q->compare_exchange_strong(cmp, set, std::memory_order_release,
            std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
            memory_order_release, memory_order_relaxed);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        rv = *p == cmp;
        if (rv) {
                *p = set;
        }
        rtems_interrupt_enable(level);
#endif

        return (rv);
}
474
475static inline int
476atomic_fetchadd_int(volatile int *p, int v)
477{
478        int tmp;
479
480#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
481        std::atomic_int *q =
482            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
483
484        tmp = q->fetch_add(v, std::memory_order_seq_cst);
485#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
486        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
487
488        tmp = atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
489#else
490        rtems_interrupt_level level;
491
492        rtems_interrupt_disable(level);
493        tmp = *p;
494        *p += v;
495        rtems_interrupt_enable(level);
496#endif
497
498        return (tmp);
499}
500
501static inline int
502atomic_readandclear_int(volatile int *p)
503{
504        int tmp;
505
506#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
507        std::atomic_int *q =
508            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
509
510        tmp = q->exchange(0, std::memory_order_seq_cst);
511#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
512        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
513
514        tmp = atomic_exchange_explicit(q, 0, memory_order_seq_cst);
515#else
516        rtems_interrupt_level level;
517
518        rtems_interrupt_disable(level);
519        tmp = *p;
520        *p = 0;
521        rtems_interrupt_enable(level);
522#endif
523
524        return (tmp);
525}
526
/* Atomically load and return *p with relaxed (no ordering) semantics. */
static inline int
atomic_load_int(volatile int *p)
{
        int tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        tmp = q->load(std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        tmp = atomic_load_explicit(q, memory_order_relaxed);
#else
        /* Non-SMP: a plain volatile read is sufficient */
        tmp = *p;
#endif

        return (tmp);
}
547
/*
 * Atomically load and return *p with acquire semantics: no subsequent
 * load or store may be reordered before this load.
 */
static inline int
atomic_load_acq_int(volatile int *p)
{
        int tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        tmp = q->load(std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        tmp = atomic_load_explicit(q, memory_order_acquire);
#else
        /*
         * Acquire requires the compiler barrier AFTER the load so later
         * accesses cannot be hoisted above it.  The previous code placed
         * the barrier before the load, which gives no acquire ordering.
         */
        tmp = *p;
        RTEMS_COMPILER_MEMORY_BARRIER();
#endif

        return (tmp);
}
569
/* Atomically store v into *p with relaxed (no ordering) semantics. */
static inline void
atomic_store_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->store(v, std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_store_explicit(q, v, memory_order_relaxed);
#else
        /* Non-SMP: a plain volatile write is sufficient */
        *p = v;
#endif
}
586
/*
 * Atomically store v into *p with release semantics: no prior load or
 * store may be reordered after this store.
 */
static inline void
atomic_store_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->store(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_store_explicit(q, v, memory_order_release);
#else
        /*
         * Release requires the compiler barrier BEFORE the store so that
         * earlier accesses complete first.  The previous code emitted the
         * barrier after the store, which gives no release ordering.
         */
        RTEMS_COMPILER_MEMORY_BARRIER();
        *p = v;
#endif
}
604
/* Atomically perform *p += v on a uint32_t, sequentially consistent. */
static inline void
atomic_add_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_add(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p += v;
        rtems_interrupt_enable(level);
#endif
}
625
/* Atomically perform *p += v on a uint32_t; acquire ordering (SMP). */
static inline void
atomic_add_acq_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_add(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_add_explicit(q, v, memory_order_acquire);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p += v;
        rtems_interrupt_enable(level);
#endif
}
646
/* Atomically perform *p += v on a uint32_t; release ordering (SMP). */
static inline void
atomic_add_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_add(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_add_explicit(q, v, memory_order_release);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p += v;
        rtems_interrupt_enable(level);
#endif
}
667
/* Atomically perform *p -= v on a uint32_t, sequentially consistent. */
static inline void
atomic_subtract_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_sub(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_sub_explicit(q, v, memory_order_seq_cst);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p -= v;
        rtems_interrupt_enable(level);
#endif
}
688
/* Atomically perform *p -= v on a uint32_t; acquire ordering (SMP). */
static inline void
atomic_subtract_acq_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_sub(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_sub_explicit(q, v, memory_order_acquire);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p -= v;
        rtems_interrupt_enable(level);
#endif
}
709
/* Atomically perform *p -= v on a uint32_t; release ordering (SMP). */
static inline void
atomic_subtract_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_sub(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_sub_explicit(q, v, memory_order_release);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p -= v;
        rtems_interrupt_enable(level);
#endif
}
730
/* Atomically perform *p |= v on a uint32_t, sequentially consistent. */
static inline void
atomic_set_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_or(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_or_explicit(q, v, memory_order_seq_cst);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p |= v;
        rtems_interrupt_enable(level);
#endif
}
751
/* Atomically perform *p |= v on a uint32_t; acquire ordering (SMP). */
static inline void
atomic_set_acq_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_or(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_or_explicit(q, v, memory_order_acquire);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p |= v;
        rtems_interrupt_enable(level);
#endif
}
772
/* Atomically perform *p |= v on a uint32_t; release ordering (SMP). */
static inline void
atomic_set_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_or(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_or_explicit(q, v, memory_order_release);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p |= v;
        rtems_interrupt_enable(level);
#endif
}
793
/* Atomically perform *p &= ~v on a uint32_t, sequentially consistent. */
static inline void
atomic_clear_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_and(~v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_and_explicit(q, ~v, memory_order_seq_cst);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p &= ~v;
        rtems_interrupt_enable(level);
#endif
}
814
/* Atomically perform *p &= ~v on a uint32_t; acquire ordering (SMP). */
static inline void
atomic_clear_acq_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_and(~v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_and_explicit(q, ~v, memory_order_acquire);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p &= ~v;
        rtems_interrupt_enable(level);
#endif
}
835
/* Atomically perform *p &= ~v on a uint32_t; release ordering (SMP). */
static inline void
atomic_clear_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_and(~v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_and_explicit(q, ~v, memory_order_release);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p &= ~v;
        rtems_interrupt_enable(level);
#endif
}
856
/*
 * Compare-and-set on a uint32_t: if *p == cmp, store set and return
 * nonzero; else return 0.  Seq_cst on success, relaxed on failure.
 */
static inline int
atomic_cmpset_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
{
        int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        /* cmp is a by-value copy, so the failure write-back is discarded */
        rv = q->compare_exchange_strong(cmp, set, std::memory_order_seq_cst,
            std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
            memory_order_seq_cst, memory_order_relaxed);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        rv = *p == cmp;
        if (rv) {
                *p = set;
        }
        rtems_interrupt_enable(level);
#endif

        return (rv);
}
886
/*
 * Compare-and-set on a uint32_t with acquire ordering on success
 * (relaxed on failure); returns nonzero iff the swap happened.
 */
static inline int
atomic_cmpset_acq_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
{
        int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        rv = q->compare_exchange_strong(cmp, set, std::memory_order_acquire,
            std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
            memory_order_acquire, memory_order_relaxed);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        rv = *p == cmp;
        if (rv) {
                *p = set;
        }
        rtems_interrupt_enable(level);
#endif

        return (rv);
}
916
/*
 * Compare-and-set on a uint32_t with release ordering on success
 * (relaxed on failure); returns nonzero iff the swap happened.
 */
static inline int
atomic_cmpset_rel_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
{
        int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        rv = q->compare_exchange_strong(cmp, set, std::memory_order_release,
            std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
            memory_order_release, memory_order_relaxed);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        rv = *p == cmp;
        if (rv) {
                *p = set;
        }
        rtems_interrupt_enable(level);
#endif

        return (rv);
}
946
947static inline uint32_t
948atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
949{
950        uint32_t tmp;
951
952#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
953        std::atomic_uint_least32_t *q =
954            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
955
956        tmp = q->fetch_add(v, std::memory_order_seq_cst);
957#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
958        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
959
960        tmp = atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
961#else
962        rtems_interrupt_level level;
963
964        rtems_interrupt_disable(level);
965        tmp = *p;
966        *p += v;
967        rtems_interrupt_enable(level);
968#endif
969
970        return (tmp);
971}
972
/* Atomically exchange *p with 0 and return the previous value (seq_cst). */
static inline uint32_t
atomic_readandclear_32(volatile uint32_t *p)
{
        uint32_t tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        tmp = q->exchange(0, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        tmp = atomic_exchange_explicit(q, 0, memory_order_seq_cst);
#else
        /* Non-SMP: interrupt-disabled critical section */
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        tmp = *p;
        *p = 0;
        rtems_interrupt_enable(level);
#endif

        return (tmp);
}
998
999static inline uint32_t
1000atomic_load_acq_32(volatile uint32_t *p)
1001{
1002        uint32_t tmp;
1003
1004#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1005        std::atomic_uint_least32_t *q =
1006            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
1007
1008        tmp = q->load(std::memory_order_acquire);
1009#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1010        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
1011
1012        tmp = atomic_load_explicit(q, memory_order_acquire);
1013#else
1014        RTEMS_COMPILER_MEMORY_BARRIER();
1015        tmp = *p;
1016#endif
1017
1018        return (tmp);
1019}
1020
1021static inline void
1022atomic_store_rel_32(volatile uint32_t *p, uint32_t v)
1023{
1024#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1025        std::atomic_uint_least32_t *q =
1026            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
1027
1028        q->store(v, std::memory_order_release);
1029#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1030        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
1031
1032        atomic_store_explicit(q, v, memory_order_release);
1033#else
1034        *p = v;
1035        RTEMS_COMPILER_MEMORY_BARRIER();
1036#endif
1037}
1038
1039static inline void
1040atomic_add_long(volatile long *p, long v)
1041{
1042#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1043        std::atomic_long *q =
1044            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1045
1046        q->fetch_add(v, std::memory_order_seq_cst);
1047#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1048        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1049
1050        atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
1051#else
1052        rtems_interrupt_level level;
1053
1054        rtems_interrupt_disable(level);
1055        *p += v;
1056        rtems_interrupt_enable(level);
1057#endif
1058}
1059
1060static inline void
1061atomic_add_acq_long(volatile long *p, long v)
1062{
1063#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1064        std::atomic_long *q =
1065            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1066
1067        q->fetch_add(v, std::memory_order_acquire);
1068#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1069        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1070
1071        atomic_fetch_add_explicit(q, v, memory_order_acquire);
1072#else
1073        rtems_interrupt_level level;
1074
1075        rtems_interrupt_disable(level);
1076        *p += v;
1077        rtems_interrupt_enable(level);
1078#endif
1079}
1080
1081static inline void
1082atomic_add_rel_long(volatile long *p, long v)
1083{
1084#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1085        std::atomic_long *q =
1086            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1087
1088        q->fetch_add(v, std::memory_order_release);
1089#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1090        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1091
1092        atomic_fetch_add_explicit(q, v, memory_order_release);
1093#else
1094        rtems_interrupt_level level;
1095
1096        rtems_interrupt_disable(level);
1097        *p += v;
1098        rtems_interrupt_enable(level);
1099#endif
1100}
1101
1102static inline void
1103atomic_subtract_long(volatile long *p, long v)
1104{
1105#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1106        std::atomic_long *q =
1107            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1108
1109        q->fetch_sub(v, std::memory_order_seq_cst);
1110#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1111        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1112
1113        atomic_fetch_sub_explicit(q, v, memory_order_seq_cst);
1114#else
1115        rtems_interrupt_level level;
1116
1117        rtems_interrupt_disable(level);
1118        *p -= v;
1119        rtems_interrupt_enable(level);
1120#endif
1121}
1122
1123static inline void
1124atomic_subtract_acq_long(volatile long *p, long v)
1125{
1126#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1127        std::atomic_long *q =
1128            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1129
1130        q->fetch_sub(v, std::memory_order_acquire);
1131#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1132        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1133
1134        atomic_fetch_sub_explicit(q, v, memory_order_acquire);
1135#else
1136        rtems_interrupt_level level;
1137
1138        rtems_interrupt_disable(level);
1139        *p -= v;
1140        rtems_interrupt_enable(level);
1141#endif
1142}
1143
1144static inline void
1145atomic_subtract_rel_long(volatile long *p, long v)
1146{
1147#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1148        std::atomic_long *q =
1149            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1150
1151        q->fetch_sub(v, std::memory_order_release);
1152#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1153        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1154
1155        atomic_fetch_sub_explicit(q, v, memory_order_release);
1156#else
1157        rtems_interrupt_level level;
1158
1159        rtems_interrupt_disable(level);
1160        *p -= v;
1161        rtems_interrupt_enable(level);
1162#endif
1163}
1164
1165static inline void
1166atomic_set_long(volatile long *p, long v)
1167{
1168#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1169        std::atomic_long *q =
1170            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1171
1172        q->fetch_or(v, std::memory_order_seq_cst);
1173#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1174        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1175
1176        atomic_fetch_or_explicit(q, v, memory_order_seq_cst);
1177#else
1178        rtems_interrupt_level level;
1179
1180        rtems_interrupt_disable(level);
1181        *p |= v;
1182        rtems_interrupt_enable(level);
1183#endif
1184}
1185
1186static inline void
1187atomic_set_acq_long(volatile long *p, long v)
1188{
1189#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1190        std::atomic_long *q =
1191            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1192
1193        q->fetch_or(v, std::memory_order_acquire);
1194#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1195        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1196
1197        atomic_fetch_or_explicit(q, v, memory_order_acquire);
1198#else
1199        rtems_interrupt_level level;
1200
1201        rtems_interrupt_disable(level);
1202        *p |= v;
1203        rtems_interrupt_enable(level);
1204#endif
1205}
1206
1207static inline void
1208atomic_set_rel_long(volatile long *p, long v)
1209{
1210#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1211        std::atomic_long *q =
1212            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1213
1214        q->fetch_or(v, std::memory_order_release);
1215#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1216        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1217
1218        atomic_fetch_or_explicit(q, v, memory_order_release);
1219#else
1220        rtems_interrupt_level level;
1221
1222        rtems_interrupt_disable(level);
1223        *p |= v;
1224        rtems_interrupt_enable(level);
1225#endif
1226}
1227
1228static inline void
1229atomic_clear_long(volatile long *p, long v)
1230{
1231#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1232        std::atomic_long *q =
1233            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1234
1235        q->fetch_and(~v, std::memory_order_seq_cst);
1236#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1237        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1238
1239        atomic_fetch_and_explicit(q, ~v, memory_order_seq_cst);
1240#else
1241        rtems_interrupt_level level;
1242
1243        rtems_interrupt_disable(level);
1244        *p &= ~v;
1245        rtems_interrupt_enable(level);
1246#endif
1247}
1248
1249static inline void
1250atomic_clear_acq_long(volatile long *p, long v)
1251{
1252#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1253        std::atomic_long *q =
1254            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1255
1256        q->fetch_and(~v, std::memory_order_acquire);
1257#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1258        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1259
1260        atomic_fetch_and_explicit(q, ~v, memory_order_acquire);
1261#else
1262        rtems_interrupt_level level;
1263
1264        rtems_interrupt_disable(level);
1265        *p &= ~v;
1266        rtems_interrupt_enable(level);
1267#endif
1268}
1269
1270static inline void
1271atomic_clear_rel_long(volatile long *p, long v)
1272{
1273#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1274        std::atomic_long *q =
1275            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1276
1277        q->fetch_and(~v, std::memory_order_release);
1278#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1279        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1280
1281        atomic_fetch_and_explicit(q, ~v, memory_order_release);
1282#else
1283        rtems_interrupt_level level;
1284
1285        rtems_interrupt_disable(level);
1286        *p &= ~v;
1287        rtems_interrupt_enable(level);
1288#endif
1289}
1290
1291static inline int
1292atomic_cmpset_long(volatile long *p, long cmp, long set)
1293{
1294        int rv;
1295
1296#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1297        std::atomic_long *q =
1298            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1299
1300        rv = q->compare_exchange_strong(cmp, set, std::memory_order_seq_cst,
1301            std::memory_order_relaxed);
1302#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1303        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1304
1305        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
1306            memory_order_seq_cst, memory_order_relaxed);
1307#else
1308        rtems_interrupt_level level;
1309
1310        rtems_interrupt_disable(level);
1311        rv = *p == cmp;
1312        if (rv) {
1313                *p = set;
1314        }
1315        rtems_interrupt_enable(level);
1316#endif
1317
1318        return (rv);
1319}
1320
1321static inline int
1322atomic_cmpset_acq_long(volatile long *p, long cmp, long set)
1323{
1324        int rv;
1325
1326#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1327        std::atomic_long *q =
1328            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1329
1330        rv = q->compare_exchange_strong(cmp, set, std::memory_order_acquire,
1331            std::memory_order_relaxed);
1332#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1333        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1334
1335        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
1336            memory_order_acquire, memory_order_relaxed);
1337#else
1338        rtems_interrupt_level level;
1339
1340        rtems_interrupt_disable(level);
1341        rv = *p == cmp;
1342        if (rv) {
1343                *p = set;
1344        }
1345        rtems_interrupt_enable(level);
1346#endif
1347
1348        return (rv);
1349}
1350
1351static inline int
1352atomic_cmpset_rel_long(volatile long *p, long cmp, long set)
1353{
1354        int rv;
1355
1356#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1357        std::atomic_long *q =
1358            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1359
1360        rv = q->compare_exchange_strong(cmp, set, std::memory_order_release,
1361            std::memory_order_relaxed);
1362#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1363        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1364
1365        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
1366            memory_order_release, memory_order_relaxed);
1367#else
1368        rtems_interrupt_level level;
1369
1370        rtems_interrupt_disable(level);
1371        rv = *p == cmp;
1372        if (rv) {
1373                *p = set;
1374        }
1375        rtems_interrupt_enable(level);
1376#endif
1377
1378        return (rv);
1379}
1380
1381static inline long
1382atomic_fetchadd_long(volatile long *p, long v)
1383{
1384        long tmp;
1385
1386#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1387        std::atomic_long *q =
1388            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1389
1390        tmp = q->fetch_add(v, std::memory_order_seq_cst);
1391#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1392        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1393
1394        tmp = atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
1395#else
1396        rtems_interrupt_level level;
1397
1398        rtems_interrupt_disable(level);
1399        tmp = *p;
1400        *p += v;
1401        rtems_interrupt_enable(level);
1402#endif
1403
1404        return (tmp);
1405}
1406
1407static inline long
1408atomic_readandclear_long(volatile long *p)
1409{
1410        long tmp;
1411
1412#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1413        std::atomic_long *q =
1414            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1415
1416        tmp = q->exchange(0, std::memory_order_seq_cst);
1417#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1418        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1419
1420        tmp = atomic_exchange_explicit(q, 0, memory_order_seq_cst);
1421#else
1422        rtems_interrupt_level level;
1423
1424        rtems_interrupt_disable(level);
1425        tmp = *p;
1426        *p = 0;
1427        rtems_interrupt_enable(level);
1428#endif
1429
1430        return (tmp);
1431}
1432
/*
 * Atomically load "*p" with acquire semantics: no memory access after
 * this load in program order may be reordered before it.
 *
 * @param p location to load from.
 *
 * @return the observed value.
 */
static inline long
atomic_load_acq_long(volatile long *p)
{
	long tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_long *q =
	    reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));

	tmp = q->load(std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);

	tmp = atomic_load_explicit(q, memory_order_acquire);
#else
	tmp = *p;
	/*
	 * Acquire semantics require the barrier AFTER the load, so that
	 * subsequent accesses cannot be hoisted before it.  The previous
	 * code placed the barrier before the load, which provides no
	 * acquire guarantee (compare FreeBSD's non-SMP atomic fallbacks).
	 */
	RTEMS_COMPILER_MEMORY_BARRIER();
#endif

	return (tmp);
}
1454
/*
 * Atomically store "v" into "*p" with release semantics: no memory
 * access before this store in program order may be reordered after it.
 *
 * @param p location to store to.
 * @param v value to store.
 */
static inline void
atomic_store_rel_long(volatile long *p, long v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_long *q =
	    reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));

	q->store(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);

	atomic_store_explicit(q, v, memory_order_release);
#else
	/*
	 * Release semantics require the barrier BEFORE the store, so that
	 * earlier accesses cannot be delayed past it.  The previous code
	 * placed the barrier after the store, which provides no release
	 * guarantee (compare FreeBSD's non-SMP atomic fallbacks).
	 */
	RTEMS_COMPILER_MEMORY_BARRIER();
	*p = v;
#endif
}
1472
/*
 * Issue an acquire thread fence.  In the fallback configuration only a
 * compiler barrier is available; this suffices on uniprocessor targets
 * where no hardware reordering is visible between threads.
 */
static inline void
atomic_thread_fence_acq(void)
{

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_thread_fence(std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_thread_fence(memory_order_acquire);
#else
	RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}
1485
/*
 * Issue a release thread fence.  In the fallback configuration only a
 * compiler barrier is available; this suffices on uniprocessor targets
 * where no hardware reordering is visible between threads.
 */
static inline void
atomic_thread_fence_rel(void)
{

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_thread_fence(std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_thread_fence(memory_order_release);
#else
	RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}
1498
/*
 * Issue an acquire-release thread fence.  In the fallback configuration
 * only a compiler barrier is available; this suffices on uniprocessor
 * targets where no hardware reordering is visible between threads.
 */
static inline void
atomic_thread_fence_acq_rel(void)
{

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_thread_fence(std::memory_order_acq_rel);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_thread_fence(memory_order_acq_rel);
#else
	RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}
1511
/*
 * Issue a sequentially consistent thread fence.  In the fallback
 * configuration only a compiler barrier is available; this suffices on
 * uniprocessor targets where no hardware reordering is visible between
 * threads.
 */
static inline void
atomic_thread_fence_seq_cst(void)
{

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_thread_fence(std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_thread_fence(memory_order_seq_cst);
#else
	RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}
1524
1525#endif /* _RTEMS_BSD_MACHINE_ATOMIC_H_ */
Note: See TracBrowser for help on using the repository browser.