source: rtems-libbsd/rtemsbsd/include/machine/atomic.h @ 390e133

Branches: 5, 5-freebsd-12, 6-freebsd-12, freebsd-9.3
Last change on this file since 390e133 was 390e133, checked in by Sebastian Huber <sebastian.huber@…>, on 10/24/16 at 06:02:13

Make <machine/atomic.h> available to user space

Enables use of <sys/refcount.h> in user space.

  • Property mode set to 100644
File size: 36.0 KB
Line 
/**
 * @file
 *
 * @ingroup rtems_bsd_machine
 *
 * @brief FreeBSD-compatible atomic operations (atomic(9)) for RTEMS.
 */
8
9/*
10 * Copyright (c) 2009, 2015 embedded brains GmbH.  All rights reserved.
11 *
12 *  embedded brains GmbH
13 *  Dornierstr. 4
14 *  82178 Puchheim
15 *  Germany
16 *  <rtems@embedded-brains.de>
17 *
18 * Redistribution and use in source and binary forms, with or without
19 * modification, are permitted provided that the following conditions
20 * are met:
21 * 1. Redistributions of source code must retain the above copyright
22 *    notice, this list of conditions and the following disclaimer.
23 * 2. Redistributions in binary form must reproduce the above copyright
24 *    notice, this list of conditions and the following disclaimer in the
25 *    documentation and/or other materials provided with the distribution.
26 *
27 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
28 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
29 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
30 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
31 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
32 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
33 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
34 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
35 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
36 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
37 * SUCH DAMAGE.
38 */
39
40#ifndef _RTEMS_BSD_MACHINE_ATOMIC_H_
41#define _RTEMS_BSD_MACHINE_ATOMIC_H_
42
43#include <rtems.h>
44
45#ifdef RTEMS_SMP
46  #if defined(__cplusplus) \
47    && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9))
48    /*
49     * The GCC 4.9 ships its own <stdatomic.h> which is not C++ compatible.  The
50     * suggested solution was to include <atomic> in case C++ is used.  This works
51     * at least with GCC 4.9.  See also:
52     *
53     * http://gcc.gnu.org/bugzilla/show_bug.cgi?id=60932
54     * http://gcc.gnu.org/bugzilla/show_bug.cgi?id=60940
55     */
56    #include <atomic>
57    #define _RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC
58  #else
59    #include <stdatomic.h>
60    #define _RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC
61  #endif
62#endif
63
/*
 * Full memory fence: sequentially consistent thread fence on SMP
 * builds, a compiler-only memory barrier otherwise.
 */
static inline void
mb(void)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_thread_fence(std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_thread_fence(memory_order_seq_cst);
#else
	RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}
75
/*
 * Write memory fence: mapped to a release fence on SMP builds, a
 * compiler-only memory barrier otherwise.
 */
static inline void
wmb(void)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_thread_fence(std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_thread_fence(memory_order_release);
#else
	RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}
87
/*
 * Read memory fence: mapped to an acquire fence on SMP builds, a
 * compiler-only memory barrier otherwise.
 */
static inline void
rmb(void)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_thread_fence(std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_thread_fence(memory_order_acquire);
#else
	RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}
99
/*
 * Atomic *p += v, sequentially consistent (FreeBSD atomic(9) compat).
 * Non-SMP builds fall back to an interrupt-disabled critical section.
 */
static inline void
atomic_add_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_add(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p += v;
	rtems_interrupt_enable(level);
#endif
}
120
/*
 * Atomic *p += v with acquire ordering (FreeBSD atomic(9) compat).
 * Non-SMP builds fall back to an interrupt-disabled critical section.
 */
static inline void
atomic_add_acq_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_add(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_add_explicit(q, v, memory_order_acquire);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p += v;
	rtems_interrupt_enable(level);
#endif
}
141
/*
 * Atomic *p += v with release ordering (FreeBSD atomic(9) compat).
 * Non-SMP builds fall back to an interrupt-disabled critical section.
 */
static inline void
atomic_add_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_add(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_add_explicit(q, v, memory_order_release);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p += v;
	rtems_interrupt_enable(level);
#endif
}
162
/*
 * Atomic *p -= v, sequentially consistent (FreeBSD atomic(9) compat).
 * Non-SMP builds fall back to an interrupt-disabled critical section.
 */
static inline void
atomic_subtract_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_sub(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_sub_explicit(q, v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p -= v;
	rtems_interrupt_enable(level);
#endif
}
183
/*
 * Atomic *p -= v with acquire ordering (FreeBSD atomic(9) compat).
 * Non-SMP builds fall back to an interrupt-disabled critical section.
 */
static inline void
atomic_subtract_acq_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_sub(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_sub_explicit(q, v, memory_order_acquire);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p -= v;
	rtems_interrupt_enable(level);
#endif
}
204
/*
 * Atomic *p -= v with release ordering (FreeBSD atomic(9) compat).
 * Non-SMP builds fall back to an interrupt-disabled critical section.
 */
static inline void
atomic_subtract_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_sub(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_sub_explicit(q, v, memory_order_release);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p -= v;
	rtems_interrupt_enable(level);
#endif
}
225
/*
 * Atomically set the bits of v in *p (*p |= v), sequentially
 * consistent (FreeBSD atomic(9) compat).  Non-SMP builds fall back to
 * an interrupt-disabled critical section.
 */
static inline void
atomic_set_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_or(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_or_explicit(q, v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p |= v;
	rtems_interrupt_enable(level);
#endif
}
246
/*
 * Atomically set the bits of v in *p (*p |= v) with acquire ordering
 * (FreeBSD atomic(9) compat).
 */
static inline void
atomic_set_acq_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_or(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_or_explicit(q, v, memory_order_acquire);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p |= v;
	rtems_interrupt_enable(level);
#endif
}
267
/*
 * Atomically set the bits of v in *p (*p |= v) with release ordering
 * (FreeBSD atomic(9) compat).
 */
static inline void
atomic_set_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_or(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_or_explicit(q, v, memory_order_release);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p |= v;
	rtems_interrupt_enable(level);
#endif
}
288
/*
 * Atomically clear the bits of v in *p (*p &= ~v), sequentially
 * consistent (FreeBSD atomic(9) compat).
 */
static inline void
atomic_clear_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_and(~v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_and_explicit(q, ~v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p &= ~v;
	rtems_interrupt_enable(level);
#endif
}
309
/*
 * Atomically clear the bits of v in *p (*p &= ~v) with acquire
 * ordering (FreeBSD atomic(9) compat).
 */
static inline void
atomic_clear_acq_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_and(~v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_and_explicit(q, ~v, memory_order_acquire);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p &= ~v;
	rtems_interrupt_enable(level);
#endif
}
330
/*
 * Atomically clear the bits of v in *p (*p &= ~v) with release
 * ordering (FreeBSD atomic(9) compat).
 */
static inline void
atomic_clear_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_and(~v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_and_explicit(q, ~v, memory_order_release);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p &= ~v;
	rtems_interrupt_enable(level);
#endif
}
351
/*
 * Compare-and-set: if *p == cmp, atomically store set into *p
 * (sequentially consistent) and return nonzero; otherwise leave *p
 * unchanged and return zero (FreeBSD atomic(9) compat).
 */
static inline int
atomic_cmpset_int(volatile int *p, int cmp, int set)
{
	int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	/* cmp is a by-value parameter, so its update on failure is lost */
	rv = q->compare_exchange_strong(cmp, set, std::memory_order_seq_cst,
	    std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
	    memory_order_seq_cst, memory_order_relaxed);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	rv = *p == cmp;
	if (rv) {
		*p = set;
	}
	rtems_interrupt_enable(level);
#endif

	return (rv);
}
381
/*
 * Compare-and-set with acquire ordering on success: if *p == cmp,
 * atomically store set into *p and return nonzero; otherwise return
 * zero (FreeBSD atomic(9) compat).
 */
static inline int
atomic_cmpset_acq_int(volatile int *p, int cmp, int set)
{
	int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	rv = q->compare_exchange_strong(cmp, set, std::memory_order_acquire,
	    std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
	    memory_order_acquire, memory_order_relaxed);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	rv = *p == cmp;
	if (rv) {
		*p = set;
	}
	rtems_interrupt_enable(level);
#endif

	return (rv);
}
411
/*
 * Compare-and-set with release ordering on success: if *p == cmp,
 * atomically store set into *p and return nonzero; otherwise return
 * zero (FreeBSD atomic(9) compat).
 */
static inline int
atomic_cmpset_rel_int(volatile int *p, int cmp, int set)
{
	int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	rv = q->compare_exchange_strong(cmp, set, std::memory_order_release,
	    std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
	    memory_order_release, memory_order_relaxed);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	rv = *p == cmp;
	if (rv) {
		*p = set;
	}
	rtems_interrupt_enable(level);
#endif

	return (rv);
}
441
/*
 * Atomically add v to *p (sequentially consistent) and return the
 * value *p held before the addition (FreeBSD atomic(9) compat).
 */
static inline int
atomic_fetchadd_int(volatile int *p, int v)
{
	int tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	tmp = q->fetch_add(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	tmp = atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	tmp = *p;
	*p += v;
	rtems_interrupt_enable(level);
#endif

	return (tmp);
}
467
/*
 * Atomically exchange *p with zero (sequentially consistent) and
 * return the previous value (FreeBSD atomic(9) compat).
 */
static inline int
atomic_readandclear_int(volatile int *p)
{
	int tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	tmp = q->exchange(0, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	tmp = atomic_exchange_explicit(q, 0, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	tmp = *p;
	*p = 0;
	rtems_interrupt_enable(level);
#endif

	return (tmp);
}
493
/*
 * Atomically load *p with acquire ordering and return the loaded value
 * (FreeBSD atomic(9) compat): later memory accesses may not be
 * reordered before this load.
 */
static inline int
atomic_load_acq_int(volatile int *p)
{
	int tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	tmp = q->load(std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	tmp = atomic_load_explicit(q, memory_order_acquire);
#else
	/*
	 * Acquire requires the barrier AFTER the load so that later
	 * accesses cannot be hoisted above it (the previous code placed
	 * the barrier before the load, which orders the wrong side;
	 * compare FreeBSD's atomic_load_acq_*() implementations).
	 */
	tmp = *p;
	RTEMS_COMPILER_MEMORY_BARRIER();
#endif

	return (tmp);
}
515
/*
 * Atomically store v into *p with release ordering (FreeBSD atomic(9)
 * compat): earlier memory accesses may not be reordered after this
 * store.
 */
static inline void
atomic_store_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->store(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_store_explicit(q, v, memory_order_release);
#else
	/*
	 * Release requires the barrier BEFORE the store so that earlier
	 * accesses cannot sink below it (the previous code placed the
	 * barrier after the store; compare FreeBSD's
	 * atomic_store_rel_*() implementations).
	 */
	RTEMS_COMPILER_MEMORY_BARRIER();
	*p = v;
#endif
}
533
/*
 * Atomic *p += v for uint32_t, sequentially consistent (FreeBSD
 * atomic(9) compat).  Non-SMP builds use an interrupt-disabled
 * critical section.
 */
static inline void
atomic_add_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_add(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p += v;
	rtems_interrupt_enable(level);
#endif
}
554
/*
 * Atomic *p += v for uint32_t with acquire ordering (FreeBSD
 * atomic(9) compat).
 */
static inline void
atomic_add_acq_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_add(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_add_explicit(q, v, memory_order_acquire);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p += v;
	rtems_interrupt_enable(level);
#endif
}
575
/*
 * Atomic *p += v for uint32_t with release ordering (FreeBSD
 * atomic(9) compat).
 */
static inline void
atomic_add_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_add(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_add_explicit(q, v, memory_order_release);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p += v;
	rtems_interrupt_enable(level);
#endif
}
596
/*
 * Atomic *p -= v for uint32_t, sequentially consistent (FreeBSD
 * atomic(9) compat).
 */
static inline void
atomic_subtract_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_sub(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_sub_explicit(q, v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p -= v;
	rtems_interrupt_enable(level);
#endif
}
617
/*
 * Atomic *p -= v for uint32_t with acquire ordering (FreeBSD
 * atomic(9) compat).
 */
static inline void
atomic_subtract_acq_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_sub(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_sub_explicit(q, v, memory_order_acquire);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p -= v;
	rtems_interrupt_enable(level);
#endif
}
638
/*
 * Atomic *p -= v for uint32_t with release ordering (FreeBSD
 * atomic(9) compat).
 */
static inline void
atomic_subtract_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_sub(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_sub_explicit(q, v, memory_order_release);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p -= v;
	rtems_interrupt_enable(level);
#endif
}
659
/*
 * Atomically set the bits of v in *p (*p |= v) for uint32_t,
 * sequentially consistent (FreeBSD atomic(9) compat).
 */
static inline void
atomic_set_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_or(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_or_explicit(q, v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p |= v;
	rtems_interrupt_enable(level);
#endif
}
680
/*
 * Atomically set the bits of v in *p (*p |= v) for uint32_t with
 * acquire ordering (FreeBSD atomic(9) compat).
 */
static inline void
atomic_set_acq_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_or(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_or_explicit(q, v, memory_order_acquire);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p |= v;
	rtems_interrupt_enable(level);
#endif
}
701
/*
 * Atomically set the bits of v in *p (*p |= v) for uint32_t with
 * release ordering (FreeBSD atomic(9) compat).
 */
static inline void
atomic_set_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_or(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_or_explicit(q, v, memory_order_release);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p |= v;
	rtems_interrupt_enable(level);
#endif
}
722
/*
 * Atomically clear the bits of v in *p (*p &= ~v) for uint32_t,
 * sequentially consistent (FreeBSD atomic(9) compat).
 */
static inline void
atomic_clear_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_and(~v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_and_explicit(q, ~v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p &= ~v;
	rtems_interrupt_enable(level);
#endif
}
743
/*
 * Atomically clear the bits of v in *p (*p &= ~v) for uint32_t with
 * acquire ordering (FreeBSD atomic(9) compat).
 */
static inline void
atomic_clear_acq_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_and(~v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_and_explicit(q, ~v, memory_order_acquire);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p &= ~v;
	rtems_interrupt_enable(level);
#endif
}
764
/*
 * Atomically clear the bits of v in *p (*p &= ~v) for uint32_t with
 * release ordering (FreeBSD atomic(9) compat).
 */
static inline void
atomic_clear_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_and(~v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_and_explicit(q, ~v, memory_order_release);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p &= ~v;
	rtems_interrupt_enable(level);
#endif
}
785
/*
 * Compare-and-set for uint32_t: if *p == cmp, atomically store set
 * into *p (sequentially consistent) and return nonzero; otherwise
 * return zero (FreeBSD atomic(9) compat).
 */
static inline int
atomic_cmpset_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
{
	int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	rv = q->compare_exchange_strong(cmp, set, std::memory_order_seq_cst,
	    std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
	    memory_order_seq_cst, memory_order_relaxed);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	rv = *p == cmp;
	if (rv) {
		*p = set;
	}
	rtems_interrupt_enable(level);
#endif

	return (rv);
}
815
/*
 * Compare-and-set for uint32_t with acquire ordering on success
 * (FreeBSD atomic(9) compat).  Returns nonzero iff the swap happened.
 */
static inline int
atomic_cmpset_acq_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
{
	int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	rv = q->compare_exchange_strong(cmp, set, std::memory_order_acquire,
	    std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
	    memory_order_acquire, memory_order_relaxed);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	rv = *p == cmp;
	if (rv) {
		*p = set;
	}
	rtems_interrupt_enable(level);
#endif

	return (rv);
}
845
/*
 * Compare-and-set for uint32_t with release ordering on success
 * (FreeBSD atomic(9) compat).  Returns nonzero iff the swap happened.
 */
static inline int
atomic_cmpset_rel_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
{
	int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	rv = q->compare_exchange_strong(cmp, set, std::memory_order_release,
	    std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
	    memory_order_release, memory_order_relaxed);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	rv = *p == cmp;
	if (rv) {
		*p = set;
	}
	rtems_interrupt_enable(level);
#endif

	return (rv);
}
875
/*
 * Atomically add v to *p (sequentially consistent) and return the
 * value *p held before the addition (FreeBSD atomic(9) compat).
 */
static inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	tmp = q->fetch_add(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	tmp = atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	tmp = *p;
	*p += v;
	rtems_interrupt_enable(level);
#endif

	return (tmp);
}
901
/*
 * Atomically exchange *p with zero (sequentially consistent) and
 * return the previous value (FreeBSD atomic(9) compat).
 */
static inline uint32_t
atomic_readandclear_32(volatile uint32_t *p)
{
	uint32_t tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	tmp = q->exchange(0, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	tmp = atomic_exchange_explicit(q, 0, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	tmp = *p;
	*p = 0;
	rtems_interrupt_enable(level);
#endif

	return (tmp);
}
927
/*
 * Atomically load *p with acquire ordering and return the loaded value
 * (FreeBSD atomic(9) compat): later memory accesses may not be
 * reordered before this load.
 */
static inline uint32_t
atomic_load_acq_32(volatile uint32_t *p)
{
	uint32_t tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	tmp = q->load(std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	tmp = atomic_load_explicit(q, memory_order_acquire);
#else
	/*
	 * Acquire requires the barrier AFTER the load so that later
	 * accesses cannot be hoisted above it (the previous code placed
	 * the barrier before the load, which orders the wrong side;
	 * compare FreeBSD's atomic_load_acq_*() implementations).
	 */
	tmp = *p;
	RTEMS_COMPILER_MEMORY_BARRIER();
#endif

	return (tmp);
}
949
/*
 * Atomically store v into *p with release ordering (FreeBSD atomic(9)
 * compat): earlier memory accesses may not be reordered after this
 * store.
 */
static inline void
atomic_store_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->store(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_store_explicit(q, v, memory_order_release);
#else
	/*
	 * Release requires the barrier BEFORE the store so that earlier
	 * accesses cannot sink below it (the previous code placed the
	 * barrier after the store; compare FreeBSD's
	 * atomic_store_rel_*() implementations).
	 */
	RTEMS_COMPILER_MEMORY_BARRIER();
	*p = v;
#endif
}
967
/*
 * Atomic *p += v for long, sequentially consistent (FreeBSD
 * atomic(9) compat).  Non-SMP builds use an interrupt-disabled
 * critical section.
 */
static inline void
atomic_add_long(volatile long *p, long v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_long *q =
	    reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));

	q->fetch_add(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);

	atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p += v;
	rtems_interrupt_enable(level);
#endif
}
988
989static inline void
990atomic_add_acq_long(volatile long *p, long v)
991{
992#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
993        std::atomic_long *q =
994            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
995
996        q->fetch_add(v, std::memory_order_acquire);
997#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
998        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
999
1000        atomic_fetch_add_explicit(q, v, memory_order_acquire);
1001#else
1002        rtems_interrupt_level level;
1003
1004        rtems_interrupt_disable(level);
1005        *p += v;
1006        rtems_interrupt_enable(level);
1007#endif
1008}
1009
1010static inline void
1011atomic_add_rel_long(volatile long *p, long v)
1012{
1013#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1014        std::atomic_long *q =
1015            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1016
1017        q->fetch_add(v, std::memory_order_release);
1018#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1019        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1020
1021        atomic_fetch_add_explicit(q, v, memory_order_release);
1022#else
1023        rtems_interrupt_level level;
1024
1025        rtems_interrupt_disable(level);
1026        *p += v;
1027        rtems_interrupt_enable(level);
1028#endif
1029}
1030
1031static inline void
1032atomic_subtract_long(volatile long *p, long v)
1033{
1034#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1035        std::atomic_long *q =
1036            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1037
1038        q->fetch_sub(v, std::memory_order_seq_cst);
1039#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1040        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1041
1042        atomic_fetch_sub_explicit(q, v, memory_order_seq_cst);
1043#else
1044        rtems_interrupt_level level;
1045
1046        rtems_interrupt_disable(level);
1047        *p -= v;
1048        rtems_interrupt_enable(level);
1049#endif
1050}
1051
1052static inline void
1053atomic_subtract_acq_long(volatile long *p, long v)
1054{
1055#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1056        std::atomic_long *q =
1057            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1058
1059        q->fetch_sub(v, std::memory_order_acquire);
1060#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1061        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1062
1063        atomic_fetch_sub_explicit(q, v, memory_order_acquire);
1064#else
1065        rtems_interrupt_level level;
1066
1067        rtems_interrupt_disable(level);
1068        *p -= v;
1069        rtems_interrupt_enable(level);
1070#endif
1071}
1072
1073static inline void
1074atomic_subtract_rel_long(volatile long *p, long v)
1075{
1076#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1077        std::atomic_long *q =
1078            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1079
1080        q->fetch_sub(v, std::memory_order_release);
1081#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1082        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1083
1084        atomic_fetch_sub_explicit(q, v, memory_order_release);
1085#else
1086        rtems_interrupt_level level;
1087
1088        rtems_interrupt_disable(level);
1089        *p -= v;
1090        rtems_interrupt_enable(level);
1091#endif
1092}
1093
1094static inline void
1095atomic_set_long(volatile long *p, long v)
1096{
1097#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1098        std::atomic_long *q =
1099            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1100
1101        q->fetch_or(v, std::memory_order_seq_cst);
1102#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1103        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1104
1105        atomic_fetch_or_explicit(q, v, memory_order_seq_cst);
1106#else
1107        rtems_interrupt_level level;
1108
1109        rtems_interrupt_disable(level);
1110        *p |= v;
1111        rtems_interrupt_enable(level);
1112#endif
1113}
1114
1115static inline void
1116atomic_set_acq_long(volatile long *p, long v)
1117{
1118#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1119        std::atomic_long *q =
1120            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1121
1122        q->fetch_or(v, std::memory_order_acquire);
1123#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1124        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1125
1126        atomic_fetch_or_explicit(q, v, memory_order_acquire);
1127#else
1128        rtems_interrupt_level level;
1129
1130        rtems_interrupt_disable(level);
1131        *p |= v;
1132        rtems_interrupt_enable(level);
1133#endif
1134}
1135
1136static inline void
1137atomic_set_rel_long(volatile long *p, long v)
1138{
1139#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1140        std::atomic_long *q =
1141            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1142
1143        q->fetch_or(v, std::memory_order_release);
1144#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1145        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1146
1147        atomic_fetch_or_explicit(q, v, memory_order_release);
1148#else
1149        rtems_interrupt_level level;
1150
1151        rtems_interrupt_disable(level);
1152        *p |= v;
1153        rtems_interrupt_enable(level);
1154#endif
1155}
1156
1157static inline void
1158atomic_clear_long(volatile long *p, long v)
1159{
1160#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1161        std::atomic_long *q =
1162            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1163
1164        q->fetch_and(~v, std::memory_order_seq_cst);
1165#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1166        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1167
1168        atomic_fetch_and_explicit(q, ~v, memory_order_seq_cst);
1169#else
1170        rtems_interrupt_level level;
1171
1172        rtems_interrupt_disable(level);
1173        *p &= ~v;
1174        rtems_interrupt_enable(level);
1175#endif
1176}
1177
1178static inline void
1179atomic_clear_acq_long(volatile long *p, long v)
1180{
1181#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1182        std::atomic_long *q =
1183            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1184
1185        q->fetch_and(~v, std::memory_order_acquire);
1186#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1187        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1188
1189        atomic_fetch_and_explicit(q, ~v, memory_order_acquire);
1190#else
1191        rtems_interrupt_level level;
1192
1193        rtems_interrupt_disable(level);
1194        *p &= ~v;
1195        rtems_interrupt_enable(level);
1196#endif
1197}
1198
1199static inline void
1200atomic_clear_rel_long(volatile long *p, long v)
1201{
1202#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1203        std::atomic_long *q =
1204            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1205
1206        q->fetch_and(~v, std::memory_order_release);
1207#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1208        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1209
1210        atomic_fetch_and_explicit(q, ~v, memory_order_release);
1211#else
1212        rtems_interrupt_level level;
1213
1214        rtems_interrupt_disable(level);
1215        *p &= ~v;
1216        rtems_interrupt_enable(level);
1217#endif
1218}
1219
1220static inline int
1221atomic_cmpset_long(volatile long *p, long cmp, long set)
1222{
1223        int rv;
1224
1225#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1226        std::atomic_long *q =
1227            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1228
1229        rv = q->compare_exchange_strong(cmp, set, std::memory_order_seq_cst,
1230            std::memory_order_relaxed);
1231#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1232        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1233
1234        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
1235            memory_order_seq_cst, memory_order_relaxed);
1236#else
1237        rtems_interrupt_level level;
1238
1239        rtems_interrupt_disable(level);
1240        rv = *p == cmp;
1241        if (rv) {
1242                *p = set;
1243        }
1244        rtems_interrupt_enable(level);
1245#endif
1246
1247        return (rv);
1248}
1249
1250static inline int
1251atomic_cmpset_acq_long(volatile long *p, long cmp, long set)
1252{
1253        int rv;
1254
1255#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1256        std::atomic_long *q =
1257            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1258
1259        rv = q->compare_exchange_strong(cmp, set, std::memory_order_acquire,
1260            std::memory_order_relaxed);
1261#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1262        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1263
1264        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
1265            memory_order_acquire, memory_order_relaxed);
1266#else
1267        rtems_interrupt_level level;
1268
1269        rtems_interrupt_disable(level);
1270        rv = *p == cmp;
1271        if (rv) {
1272                *p = set;
1273        }
1274        rtems_interrupt_enable(level);
1275#endif
1276
1277        return (rv);
1278}
1279
1280static inline int
1281atomic_cmpset_rel_long(volatile long *p, long cmp, long set)
1282{
1283        int rv;
1284
1285#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1286        std::atomic_long *q =
1287            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1288
1289        rv = q->compare_exchange_strong(cmp, set, std::memory_order_release,
1290            std::memory_order_relaxed);
1291#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1292        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1293
1294        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
1295            memory_order_release, memory_order_relaxed);
1296#else
1297        rtems_interrupt_level level;
1298
1299        rtems_interrupt_disable(level);
1300        rv = *p == cmp;
1301        if (rv) {
1302                *p = set;
1303        }
1304        rtems_interrupt_enable(level);
1305#endif
1306
1307        return (rv);
1308}
1309
1310static inline long
1311atomic_fetchadd_long(volatile long *p, long v)
1312{
1313        long tmp;
1314
1315#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1316        std::atomic_long *q =
1317            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1318
1319        tmp = q->fetch_add(v, std::memory_order_seq_cst);
1320#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1321        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1322
1323        tmp = atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
1324#else
1325        rtems_interrupt_level level;
1326
1327        rtems_interrupt_disable(level);
1328        tmp = *p;
1329        *p += v;
1330        rtems_interrupt_enable(level);
1331#endif
1332
1333        return (tmp);
1334}
1335
1336static inline long
1337atomic_readandclear_long(volatile long *p)
1338{
1339        long tmp;
1340
1341#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1342        std::atomic_long *q =
1343            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1344
1345        tmp = q->exchange(0, std::memory_order_seq_cst);
1346#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1347        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1348
1349        tmp = atomic_exchange_explicit(q, 0, memory_order_seq_cst);
1350#else
1351        rtems_interrupt_level level;
1352
1353        rtems_interrupt_disable(level);
1354        tmp = *p;
1355        *p = 0;
1356        rtems_interrupt_enable(level);
1357#endif
1358
1359        return (tmp);
1360}
1361
/*
 * Atomically load *p with acquire ordering and return the value.
 */
static inline long
atomic_load_acq_long(volatile long *p)
{
	long tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_long *q =
	    reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));

	tmp = q->load(std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);

	tmp = atomic_load_explicit(q, memory_order_acquire);
#else
	/*
	 * Acquire semantics: the load must be ordered before all subsequent
	 * memory accesses, so the compiler barrier belongs AFTER the load.
	 * The previous placement (barrier before the load) gave release-style
	 * ordering and allowed the compiler to hoist later accesses above the
	 * load; cf. the FreeBSD atomic(9) "v = *p; mb();" fallback pattern.
	 */
	tmp = *p;
	RTEMS_COMPILER_MEMORY_BARRIER();
#endif

	return (tmp);
}
1383
/*
 * Atomically store v into *p with release ordering.
 */
static inline void
atomic_store_rel_long(volatile long *p, long v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_long *q =
	    reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));

	q->store(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);

	atomic_store_explicit(q, v, memory_order_release);
#else
	/*
	 * Release semantics: all prior memory accesses must be ordered before
	 * the store, so the compiler barrier belongs BEFORE the store.  The
	 * previous placement (barrier after the store) let the compiler sink
	 * earlier data stores below this "publishing" store; cf. the FreeBSD
	 * atomic(9) "mb(); *p = v;" fallback pattern.
	 */
	RTEMS_COMPILER_MEMORY_BARRIER();
	*p = v;
#endif
}
1401
1402#endif /* _RTEMS_BSD_MACHINE_ATOMIC_H_ */
Note: See TracBrowser for help on using the repository browser.