source: rtems-libbsd/rtemsbsd/include/machine/atomic.h @ e5a0175

4.1155-freebsd-126-freebsd-12freebsd-9.3
Last change on this file since e5a0175 was e5a0175, checked in by Sebastian Huber <sebastian.huber@…>, on 02/13/15 at 15:30:47

atomic.h: Use <stdatomic.h> or <atomic> for SMP

  • Property mode set to 100644
File size: 36.1 KB
Line 
1/**
2 * @file
3 *
4 * @ingroup rtems_bsd_machine
5 *
6 * @brief TODO.
7 */
8
9/*
10 * Copyright (c) 2009, 2015 embedded brains GmbH.  All rights reserved.
11 *
12 *  embedded brains GmbH
13 *  Dornierstr. 4
14 *  82178 Puchheim
15 *  Germany
16 *  <rtems@embedded-brains.de>
17 *
18 * Redistribution and use in source and binary forms, with or without
19 * modification, are permitted provided that the following conditions
20 * are met:
21 * 1. Redistributions of source code must retain the above copyright
22 *    notice, this list of conditions and the following disclaimer.
23 * 2. Redistributions in binary form must reproduce the above copyright
24 *    notice, this list of conditions and the following disclaimer in the
25 *    documentation and/or other materials provided with the distribution.
26 *
27 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
28 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
29 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
30 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
31 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
32 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
33 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
34 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
35 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
36 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
37 * SUCH DAMAGE.
38 */
39
40#ifndef _RTEMS_BSD_MACHINE_ATOMIC_H_
41#define _RTEMS_BSD_MACHINE_ATOMIC_H_
42
43#ifndef _RTEMS_BSD_MACHINE_RTEMS_BSD_KERNEL_SPACE_H_
44#error "the header file <machine/rtems-bsd-kernel-space.h> must be included first"
45#endif
46
47#include <rtems.h>
48
49#ifdef RTEMS_SMP
50  #if defined(__cplusplus) && __GNUC__ >= 4 && __GNUC_MINOR__ >= 9
51    /*
52     * The GCC 4.9 ships its own <stdatomic.h> which is not C++ compatible.  The
53     * suggested solution was to include <atomic> in case C++ is used.  This works
54     * at least with GCC 4.9.  See also:
55     *
56     * http://gcc.gnu.org/bugzilla/show_bug.cgi?id=60932
57     * http://gcc.gnu.org/bugzilla/show_bug.cgi?id=60940
58     */
59    #include <atomic>
60    #define _RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC
61  #else
62    #include <stdatomic.h>
63    #define _RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC
64  #endif
65#endif
66
/*
 * Memory barriers (FreeBSD <machine/atomic.h> interface).
 *
 * Each function below has three build flavors selected by the
 * preprocessor:
 *  - C++ with <atomic> (SMP): std:: thread fences,
 *  - C11 <stdatomic.h> (SMP): atomic_thread_fence(),
 *  - uniprocessor fallback: a compiler memory barrier only, which is
 *    sufficient because there is no second processor to order against.
 */

/* Full (sequentially consistent) memory barrier. */
static inline void
mb(void)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_thread_fence(std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_thread_fence(memory_order_seq_cst);
#else
        RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}

/* Write memory barrier (release fence). */
static inline void
wmb(void)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_thread_fence(std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_thread_fence(memory_order_release);
#else
        RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}

/* Read memory barrier (acquire fence). */
static inline void
rmb(void)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_thread_fence(std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_thread_fence(memory_order_acquire);
#else
        RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}
102
/*
 * Atomic read-modify-write operations on plain int.
 *
 * Each operation comes in three memory-order flavors: unsuffixed
 * (sequentially consistent), _acq_ (acquire) and _rel_ (release).  On
 * SMP configurations the volatile pointer is cast to the corresponding
 * C11/C++11 atomic type and a fetch-and-modify with an explicit memory
 * order is performed.  In the uniprocessor fallback the plain
 * read-modify-write is done with interrupts disabled, which makes it
 * atomic with respect to interrupt handlers and preemption on a single
 * CPU.
 */

/* Atomically *p += v, sequentially consistent. */
static inline void
atomic_add_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_add(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p += v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically *p += v, acquire semantics. */
static inline void
atomic_add_acq_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_add(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_add_explicit(q, v, memory_order_acquire);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p += v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically *p += v, release semantics. */
static inline void
atomic_add_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_add(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_add_explicit(q, v, memory_order_release);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p += v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically *p -= v, sequentially consistent. */
static inline void
atomic_subtract_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_sub(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_sub_explicit(q, v, memory_order_seq_cst);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p -= v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically *p -= v, acquire semantics. */
static inline void
atomic_subtract_acq_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_sub(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_sub_explicit(q, v, memory_order_acquire);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p -= v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically *p -= v, release semantics. */
static inline void
atomic_subtract_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_sub(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_sub_explicit(q, v, memory_order_release);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p -= v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically set bits: *p |= v, sequentially consistent. */
static inline void
atomic_set_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_or(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_or_explicit(q, v, memory_order_seq_cst);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p |= v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically set bits: *p |= v, acquire semantics. */
static inline void
atomic_set_acq_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_or(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_or_explicit(q, v, memory_order_acquire);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p |= v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically set bits: *p |= v, release semantics. */
static inline void
atomic_set_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_or(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_or_explicit(q, v, memory_order_release);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p |= v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically clear bits: *p &= ~v, sequentially consistent. */
static inline void
atomic_clear_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_and(~v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_and_explicit(q, ~v, memory_order_seq_cst);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p &= ~v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically clear bits: *p &= ~v, acquire semantics. */
static inline void
atomic_clear_acq_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_and(~v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_and_explicit(q, ~v, memory_order_acquire);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p &= ~v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically clear bits: *p &= ~v, release semantics. */
static inline void
atomic_clear_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->fetch_and(~v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_fetch_and_explicit(q, ~v, memory_order_release);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p &= ~v;
        rtems_interrupt_enable(level);
#endif
}
354
/*
 * Compare-and-set on int: if *p == cmp, store set into *p.  Returns
 * non-zero iff the store happened (FreeBSD atomic_cmpset semantics).
 * Note that cmp is taken by value, so the "expected" value updated by
 * compare_exchange on failure is simply discarded — only the success
 * flag is reported, which matches the FreeBSD interface.
 */

/* Compare-and-set, sequentially consistent on success. */
static inline int
atomic_cmpset_int(volatile int *p, int cmp, int set)
{
        int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        rv = q->compare_exchange_strong(cmp, set, std::memory_order_seq_cst,
            std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
            memory_order_seq_cst, memory_order_relaxed);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        rv = *p == cmp;
        if (rv) {
                *p = set;
        }
        rtems_interrupt_enable(level);
#endif

        return (rv);
}

/* Compare-and-set, acquire semantics on success. */
static inline int
atomic_cmpset_acq_int(volatile int *p, int cmp, int set)
{
        int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        rv = q->compare_exchange_strong(cmp, set, std::memory_order_acquire,
            std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
            memory_order_acquire, memory_order_relaxed);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        rv = *p == cmp;
        if (rv) {
                *p = set;
        }
        rtems_interrupt_enable(level);
#endif

        return (rv);
}

/* Compare-and-set, release semantics on success. */
static inline int
atomic_cmpset_rel_int(volatile int *p, int cmp, int set)
{
        int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        rv = q->compare_exchange_strong(cmp, set, std::memory_order_release,
            std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
            memory_order_release, memory_order_relaxed);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        rv = *p == cmp;
        if (rv) {
                *p = set;
        }
        rtems_interrupt_enable(level);
#endif

        return (rv);
}
444
/*
 * Atomically add v to *p and return the PREVIOUS value of *p
 * (sequentially consistent).
 */
static inline int
atomic_fetchadd_int(volatile int *p, int v)
{
        int tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        tmp = q->fetch_add(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        tmp = atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        tmp = *p;
        *p += v;
        rtems_interrupt_enable(level);
#endif

        return (tmp);
}

/*
 * Atomically exchange *p with 0 and return the previous value
 * (sequentially consistent).
 */
static inline int
atomic_readandclear_int(volatile int *p)
{
        int tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        tmp = q->exchange(0, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        tmp = atomic_exchange_explicit(q, 0, memory_order_seq_cst);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        tmp = *p;
        *p = 0;
        rtems_interrupt_enable(level);
#endif

        return (tmp);
}
496
/*
 * Load *p with acquire semantics.
 *
 * NOTE(review): in the uniprocessor fallback the compiler barrier is
 * placed BEFORE the load; textbook acquire placement would put it
 * after the load.  On a single CPU this only affects compiler
 * reordering against interrupt handlers — verify intent against
 * upstream rtems-libbsd before changing.
 */
static inline int
atomic_load_acq_int(volatile int *p)
{
        int tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        tmp = q->load(std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        tmp = atomic_load_explicit(q, memory_order_acquire);
#else
        RTEMS_COMPILER_MEMORY_BARRIER();
        tmp = *p;
#endif

        return (tmp);
}

/*
 * Store v into *p with release semantics.
 *
 * NOTE(review): the fallback places the compiler barrier AFTER the
 * store; textbook release placement would put it before.  See the
 * matching note on atomic_load_acq_int above.
 */
static inline void
atomic_store_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_int *q =
            reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

        q->store(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

        atomic_store_explicit(q, v, memory_order_release);
#else
        *p = v;
        RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}
536
/*
 * Atomic read-modify-write operations on uint32_t.  These mirror the
 * int variants above; the SMP paths use (std::)atomic_uint_least32_t,
 * which is assumed to share the representation of uint32_t on the
 * supported targets.
 */

/* Atomically *p += v, sequentially consistent. */
static inline void
atomic_add_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_add(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p += v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically *p += v, acquire semantics. */
static inline void
atomic_add_acq_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_add(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_add_explicit(q, v, memory_order_acquire);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p += v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically *p += v, release semantics. */
static inline void
atomic_add_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_add(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_add_explicit(q, v, memory_order_release);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p += v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically *p -= v, sequentially consistent. */
static inline void
atomic_subtract_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_sub(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_sub_explicit(q, v, memory_order_seq_cst);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p -= v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically *p -= v, acquire semantics. */
static inline void
atomic_subtract_acq_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_sub(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_sub_explicit(q, v, memory_order_acquire);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p -= v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically *p -= v, release semantics. */
static inline void
atomic_subtract_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_sub(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_sub_explicit(q, v, memory_order_release);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p -= v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically set bits: *p |= v, sequentially consistent. */
static inline void
atomic_set_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_or(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_or_explicit(q, v, memory_order_seq_cst);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p |= v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically set bits: *p |= v, acquire semantics. */
static inline void
atomic_set_acq_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_or(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_or_explicit(q, v, memory_order_acquire);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p |= v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically set bits: *p |= v, release semantics. */
static inline void
atomic_set_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_or(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_or_explicit(q, v, memory_order_release);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p |= v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically clear bits: *p &= ~v, sequentially consistent. */
static inline void
atomic_clear_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_and(~v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_and_explicit(q, ~v, memory_order_seq_cst);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p &= ~v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically clear bits: *p &= ~v, acquire semantics. */
static inline void
atomic_clear_acq_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_and(~v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_and_explicit(q, ~v, memory_order_acquire);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p &= ~v;
        rtems_interrupt_enable(level);
#endif
}

/* Atomically clear bits: *p &= ~v, release semantics. */
static inline void
atomic_clear_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->fetch_and(~v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_fetch_and_explicit(q, ~v, memory_order_release);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        *p &= ~v;
        rtems_interrupt_enable(level);
#endif
}
788
/*
 * Compare-and-set on uint32_t: if *p == cmp, store set into *p.
 * Returns non-zero iff the store happened.  As with the int variants,
 * cmp is passed by value and the updated "expected" value on failure
 * is discarded; only the success flag is reported.
 */

/* Compare-and-set, sequentially consistent on success. */
static inline int
atomic_cmpset_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
{
        int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        rv = q->compare_exchange_strong(cmp, set, std::memory_order_seq_cst,
            std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
            memory_order_seq_cst, memory_order_relaxed);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        rv = *p == cmp;
        if (rv) {
                *p = set;
        }
        rtems_interrupt_enable(level);
#endif

        return (rv);
}

/* Compare-and-set, acquire semantics on success. */
static inline int
atomic_cmpset_acq_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
{
        int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        rv = q->compare_exchange_strong(cmp, set, std::memory_order_acquire,
            std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
            memory_order_acquire, memory_order_relaxed);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        rv = *p == cmp;
        if (rv) {
                *p = set;
        }
        rtems_interrupt_enable(level);
#endif

        return (rv);
}

/* Compare-and-set, release semantics on success. */
static inline int
atomic_cmpset_rel_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
{
        int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        rv = q->compare_exchange_strong(cmp, set, std::memory_order_release,
            std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
            memory_order_release, memory_order_relaxed);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        rv = *p == cmp;
        if (rv) {
                *p = set;
        }
        rtems_interrupt_enable(level);
#endif

        return (rv);
}
878
/*
 * Atomically add v to *p and return the PREVIOUS value of *p
 * (sequentially consistent).
 */
static inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
        uint32_t tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        tmp = q->fetch_add(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        tmp = atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        tmp = *p;
        *p += v;
        rtems_interrupt_enable(level);
#endif

        return (tmp);
}

/*
 * Atomically exchange *p with 0 and return the previous value
 * (sequentially consistent).
 */
static inline uint32_t
atomic_readandclear_32(volatile uint32_t *p)
{
        uint32_t tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        tmp = q->exchange(0, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        tmp = atomic_exchange_explicit(q, 0, memory_order_seq_cst);
#else
        rtems_interrupt_level level;

        rtems_interrupt_disable(level);
        tmp = *p;
        *p = 0;
        rtems_interrupt_enable(level);
#endif

        return (tmp);
}
930
/*
 * Load *p with acquire semantics.
 *
 * NOTE(review): as with atomic_load_acq_int, the uniprocessor fallback
 * places the compiler barrier before the load rather than after it —
 * verify intent against upstream rtems-libbsd before changing.
 */
static inline uint32_t
atomic_load_acq_32(volatile uint32_t *p)
{
        uint32_t tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        tmp = q->load(std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        tmp = atomic_load_explicit(q, memory_order_acquire);
#else
        RTEMS_COMPILER_MEMORY_BARRIER();
        tmp = *p;
#endif

        return (tmp);
}

/*
 * Store v into *p with release semantics.
 *
 * NOTE(review): the fallback places the compiler barrier after the
 * store rather than before it — see atomic_load_acq_32 above.
 */
static inline void
atomic_store_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->store(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_store_explicit(q, v, memory_order_release);
#else
        *p = v;
        RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}
970
971static inline void
972atomic_add_long(volatile long *p, long v)
973{
974#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
975        std::atomic_long *q =
976            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
977
978        q->fetch_add(v, std::memory_order_seq_cst);
979#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
980        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
981
982        atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
983#else
984        rtems_interrupt_level level;
985
986        rtems_interrupt_disable(level);
987        *p += v;
988        rtems_interrupt_enable(level);
989#endif
990}
991
992static inline void
993atomic_add_acq_long(volatile long *p, long v)
994{
995#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
996        std::atomic_long *q =
997            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
998
999        q->fetch_add(v, std::memory_order_acquire);
1000#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1001        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1002
1003        atomic_fetch_add_explicit(q, v, memory_order_acquire);
1004#else
1005        rtems_interrupt_level level;
1006
1007        rtems_interrupt_disable(level);
1008        *p += v;
1009        rtems_interrupt_enable(level);
1010#endif
1011}
1012
1013static inline void
1014atomic_add_rel_long(volatile long *p, long v)
1015{
1016#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1017        std::atomic_long *q =
1018            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1019
1020        q->fetch_add(v, std::memory_order_release);
1021#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1022        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1023
1024        atomic_fetch_add_explicit(q, v, memory_order_release);
1025#else
1026        rtems_interrupt_level level;
1027
1028        rtems_interrupt_disable(level);
1029        *p += v;
1030        rtems_interrupt_enable(level);
1031#endif
1032}
1033
1034static inline void
1035atomic_subtract_long(volatile long *p, long v)
1036{
1037#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1038        std::atomic_long *q =
1039            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1040
1041        q->fetch_sub(v, std::memory_order_seq_cst);
1042#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1043        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1044
1045        atomic_fetch_sub_explicit(q, v, memory_order_seq_cst);
1046#else
1047        rtems_interrupt_level level;
1048
1049        rtems_interrupt_disable(level);
1050        *p -= v;
1051        rtems_interrupt_enable(level);
1052#endif
1053}
1054
1055static inline void
1056atomic_subtract_acq_long(volatile long *p, long v)
1057{
1058#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1059        std::atomic_long *q =
1060            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1061
1062        q->fetch_sub(v, std::memory_order_acquire);
1063#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1064        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1065
1066        atomic_fetch_sub_explicit(q, v, memory_order_acquire);
1067#else
1068        rtems_interrupt_level level;
1069
1070        rtems_interrupt_disable(level);
1071        *p -= v;
1072        rtems_interrupt_enable(level);
1073#endif
1074}
1075
1076static inline void
1077atomic_subtract_rel_long(volatile long *p, long v)
1078{
1079#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1080        std::atomic_long *q =
1081            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1082
1083        q->fetch_sub(v, std::memory_order_release);
1084#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1085        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1086
1087        atomic_fetch_sub_explicit(q, v, memory_order_release);
1088#else
1089        rtems_interrupt_level level;
1090
1091        rtems_interrupt_disable(level);
1092        *p -= v;
1093        rtems_interrupt_enable(level);
1094#endif
1095}
1096
1097static inline void
1098atomic_set_long(volatile long *p, long v)
1099{
1100#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1101        std::atomic_long *q =
1102            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1103
1104        q->fetch_or(v, std::memory_order_seq_cst);
1105#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1106        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1107
1108        atomic_fetch_or_explicit(q, v, memory_order_seq_cst);
1109#else
1110        rtems_interrupt_level level;
1111
1112        rtems_interrupt_disable(level);
1113        *p |= v;
1114        rtems_interrupt_enable(level);
1115#endif
1116}
1117
1118static inline void
1119atomic_set_acq_long(volatile long *p, long v)
1120{
1121#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1122        std::atomic_long *q =
1123            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1124
1125        q->fetch_or(v, std::memory_order_acquire);
1126#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1127        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1128
1129        atomic_fetch_or_explicit(q, v, memory_order_acquire);
1130#else
1131        rtems_interrupt_level level;
1132
1133        rtems_interrupt_disable(level);
1134        *p |= v;
1135        rtems_interrupt_enable(level);
1136#endif
1137}
1138
1139static inline void
1140atomic_set_rel_long(volatile long *p, long v)
1141{
1142#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1143        std::atomic_long *q =
1144            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1145
1146        q->fetch_or(v, std::memory_order_release);
1147#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1148        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1149
1150        atomic_fetch_or_explicit(q, v, memory_order_release);
1151#else
1152        rtems_interrupt_level level;
1153
1154        rtems_interrupt_disable(level);
1155        *p |= v;
1156        rtems_interrupt_enable(level);
1157#endif
1158}
1159
1160static inline void
1161atomic_clear_long(volatile long *p, long v)
1162{
1163#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1164        std::atomic_long *q =
1165            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1166
1167        q->fetch_and(~v, std::memory_order_seq_cst);
1168#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1169        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1170
1171        atomic_fetch_and_explicit(q, ~v, memory_order_seq_cst);
1172#else
1173        rtems_interrupt_level level;
1174
1175        rtems_interrupt_disable(level);
1176        *p &= ~v;
1177        rtems_interrupt_enable(level);
1178#endif
1179}
1180
1181static inline void
1182atomic_clear_acq_long(volatile long *p, long v)
1183{
1184#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1185        std::atomic_long *q =
1186            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1187
1188        q->fetch_and(~v, std::memory_order_acquire);
1189#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1190        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1191
1192        atomic_fetch_and_explicit(q, ~v, memory_order_acquire);
1193#else
1194        rtems_interrupt_level level;
1195
1196        rtems_interrupt_disable(level);
1197        *p &= ~v;
1198        rtems_interrupt_enable(level);
1199#endif
1200}
1201
1202static inline void
1203atomic_clear_rel_long(volatile long *p, long v)
1204{
1205#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1206        std::atomic_long *q =
1207            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1208
1209        q->fetch_and(~v, std::memory_order_release);
1210#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1211        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1212
1213        atomic_fetch_and_explicit(q, ~v, memory_order_release);
1214#else
1215        rtems_interrupt_level level;
1216
1217        rtems_interrupt_disable(level);
1218        *p &= ~v;
1219        rtems_interrupt_enable(level);
1220#endif
1221}
1222
1223static inline int
1224atomic_cmpset_long(volatile long *p, long cmp, long set)
1225{
1226        int rv;
1227
1228#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1229        std::atomic_long *q =
1230            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1231
1232        rv = q->compare_exchange_strong(cmp, set, std::memory_order_seq_cst,
1233            std::memory_order_relaxed);
1234#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1235        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1236
1237        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
1238            memory_order_seq_cst, memory_order_relaxed);
1239#else
1240        rtems_interrupt_level level;
1241
1242        rtems_interrupt_disable(level);
1243        rv = *p == cmp;
1244        if (rv) {
1245                *p = set;
1246        }
1247        rtems_interrupt_enable(level);
1248#endif
1249
1250        return (rv);
1251}
1252
1253static inline int
1254atomic_cmpset_acq_long(volatile long *p, long cmp, long set)
1255{
1256        int rv;
1257
1258#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1259        std::atomic_long *q =
1260            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1261
1262        rv = q->compare_exchange_strong(cmp, set, std::memory_order_acquire,
1263            std::memory_order_relaxed);
1264#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1265        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1266
1267        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
1268            memory_order_acquire, memory_order_relaxed);
1269#else
1270        rtems_interrupt_level level;
1271
1272        rtems_interrupt_disable(level);
1273        rv = *p == cmp;
1274        if (rv) {
1275                *p = set;
1276        }
1277        rtems_interrupt_enable(level);
1278#endif
1279
1280        return (rv);
1281}
1282
1283static inline int
1284atomic_cmpset_rel_long(volatile long *p, long cmp, long set)
1285{
1286        int rv;
1287
1288#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1289        std::atomic_long *q =
1290            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1291
1292        rv = q->compare_exchange_strong(cmp, set, std::memory_order_release,
1293            std::memory_order_relaxed);
1294#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1295        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1296
1297        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
1298            memory_order_release, memory_order_relaxed);
1299#else
1300        rtems_interrupt_level level;
1301
1302        rtems_interrupt_disable(level);
1303        rv = *p == cmp;
1304        if (rv) {
1305                *p = set;
1306        }
1307        rtems_interrupt_enable(level);
1308#endif
1309
1310        return (rv);
1311}
1312
1313static inline long
1314atomic_fetchadd_long(volatile long *p, long v)
1315{
1316        long tmp;
1317
1318#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1319        std::atomic_long *q =
1320            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1321
1322        tmp = q->fetch_add(v, std::memory_order_seq_cst);
1323#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1324        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1325
1326        tmp = atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
1327#else
1328        rtems_interrupt_level level;
1329
1330        rtems_interrupt_disable(level);
1331        tmp = *p;
1332        *p += v;
1333        rtems_interrupt_enable(level);
1334#endif
1335
1336        return (tmp);
1337}
1338
1339static inline long
1340atomic_readandclear_long(volatile long *p)
1341{
1342        long tmp;
1343
1344#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1345        std::atomic_long *q =
1346            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1347
1348        tmp = q->exchange(0, std::memory_order_seq_cst);
1349#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1350        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1351
1352        tmp = atomic_exchange_explicit(q, 0, memory_order_seq_cst);
1353#else
1354        rtems_interrupt_level level;
1355
1356        rtems_interrupt_disable(level);
1357        tmp = *p;
1358        *p = 0;
1359        rtems_interrupt_enable(level);
1360#endif
1361
1362        return (tmp);
1363}
1364
/*
 * Atomically load *p with acquire semantics: no later memory access
 * may be reordered before this load.
 */
static inline long
atomic_load_acq_long(volatile long *p)
{
	long tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_long *q =
	    reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));

	tmp = q->load(std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);

	tmp = atomic_load_explicit(q, memory_order_acquire);
#else
	/*
	 * Acquire needs the compiler barrier AFTER the load so that
	 * subsequent accesses cannot be hoisted above it (cf. FreeBSD's
	 * !SMP atomic.h).  The previous barrier-before-load order gave
	 * release-style ordering instead.
	 */
	tmp = *p;
	RTEMS_COMPILER_MEMORY_BARRIER();
#endif

	return (tmp);
}
1386
/*
 * Atomically store v into *p with release semantics: no earlier memory
 * access may be reordered after this store.
 */
static inline void
atomic_store_rel_long(volatile long *p, long v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_long *q =
	    reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));

	q->store(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);

	atomic_store_explicit(q, v, memory_order_release);
#else
	/*
	 * Release needs the compiler barrier BEFORE the store so that
	 * prior accesses cannot sink below it (cf. FreeBSD's !SMP
	 * atomic.h).  The previous store-then-barrier order gave
	 * acquire-style ordering instead.
	 */
	RTEMS_COMPILER_MEMORY_BARRIER();
	*p = v;
#endif
}
1404
1405#endif /* _RTEMS_BSD_MACHINE_ATOMIC_H_ */