source: rtems-libbsd/rtemsbsd/include/machine/atomic.h @ e5724f7

4.1155-freebsd-126-freebsd-12freebsd-9.3
Last change on this file since e5724f7 was e5724f7, checked in by Sebastian Huber <sebastian.huber@…>, on 03/09/15 at 13:20:20

atomic.h: Fix for GCC version 5 and later

  • Property mode set to 100644
File size: 36.2 KB
Line 
1/**
2 * @file
3 *
4 * @ingroup rtems_bsd_machine
5 *
6 * @brief TODO.
7 */
8
9/*
10 * Copyright (c) 2009, 2015 embedded brains GmbH.  All rights reserved.
11 *
12 *  embedded brains GmbH
13 *  Dornierstr. 4
14 *  82178 Puchheim
15 *  Germany
16 *  <rtems@embedded-brains.de>
17 *
18 * Redistribution and use in source and binary forms, with or without
19 * modification, are permitted provided that the following conditions
20 * are met:
21 * 1. Redistributions of source code must retain the above copyright
22 *    notice, this list of conditions and the following disclaimer.
23 * 2. Redistributions in binary form must reproduce the above copyright
24 *    notice, this list of conditions and the following disclaimer in the
25 *    documentation and/or other materials provided with the distribution.
26 *
27 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
28 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
29 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
30 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
31 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
32 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
33 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
34 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
35 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
36 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
37 * SUCH DAMAGE.
38 */
39
40#ifndef _RTEMS_BSD_MACHINE_ATOMIC_H_
41#define _RTEMS_BSD_MACHINE_ATOMIC_H_
42
43#ifndef _RTEMS_BSD_MACHINE_RTEMS_BSD_KERNEL_SPACE_H_
44#error "the header file <machine/rtems-bsd-kernel-space.h> must be included first"
45#endif
46
47#include <rtems.h>
48
49#ifdef RTEMS_SMP
50  #if defined(__cplusplus) \
51    && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9))
52    /*
53     * The GCC 4.9 ships its own <stdatomic.h> which is not C++ compatible.  The
54     * suggested solution was to include <atomic> in case C++ is used.  This works
55     * at least with GCC 4.9.  See also:
56     *
57     * http://gcc.gnu.org/bugzilla/show_bug.cgi?id=60932
58     * http://gcc.gnu.org/bugzilla/show_bug.cgi?id=60940
59     */
60    #include <atomic>
61    #define _RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC
62  #else
63    #include <stdatomic.h>
64    #define _RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC
65  #endif
66#endif
67
/*
 * Full (sequentially consistent) memory barrier, FreeBSD mb() semantics.
 * On SMP a real fence is issued; otherwise only compiler reordering is
 * inhibited (single CPU needs no hardware fence).
 */
static inline void
mb(void)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_thread_fence(std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_thread_fence(memory_order_seq_cst);
#else
	RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}
79
/*
 * Write memory barrier, FreeBSD wmb() semantics.  Implemented as a
 * release fence on SMP; compiler barrier only on uniprocessor builds.
 */
static inline void
wmb(void)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_thread_fence(std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_thread_fence(memory_order_release);
#else
	RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}
91
/*
 * Read memory barrier, FreeBSD rmb() semantics.  Implemented as an
 * acquire fence on SMP; compiler barrier only on uniprocessor builds.
 */
static inline void
rmb(void)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_thread_fence(std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_thread_fence(memory_order_acquire);
#else
	RTEMS_COMPILER_MEMORY_BARRIER();
#endif
}
103
/*
 * Atomically perform "*p += v" (FreeBSD atomic_add_int), sequentially
 * consistent ordering on SMP.  Non-SMP fallback serializes the
 * read-modify-write against interrupts on the executing CPU.
 */
static inline void
atomic_add_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	/* C++ build: GCC >= 4.9 <stdatomic.h> is not C++ compatible, use <atomic> */
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_add(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p += v;
	rtems_interrupt_enable(level);
#endif
}
124
/*
 * Atomically perform "*p += v" with acquire ordering
 * (FreeBSD atomic_add_acq_int).
 */
static inline void
atomic_add_acq_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_add(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_add_explicit(q, v, memory_order_acquire);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p += v;
	rtems_interrupt_enable(level);
#endif
}
145
/*
 * Atomically perform "*p += v" with release ordering
 * (FreeBSD atomic_add_rel_int).
 */
static inline void
atomic_add_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_add(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_add_explicit(q, v, memory_order_release);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p += v;
	rtems_interrupt_enable(level);
#endif
}
166
/*
 * Atomically perform "*p -= v" (FreeBSD atomic_subtract_int),
 * sequentially consistent ordering on SMP.
 */
static inline void
atomic_subtract_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_sub(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_sub_explicit(q, v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p -= v;
	rtems_interrupt_enable(level);
#endif
}
187
/*
 * Atomically perform "*p -= v" with acquire ordering
 * (FreeBSD atomic_subtract_acq_int).
 */
static inline void
atomic_subtract_acq_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_sub(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_sub_explicit(q, v, memory_order_acquire);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p -= v;
	rtems_interrupt_enable(level);
#endif
}
208
/*
 * Atomically perform "*p -= v" with release ordering
 * (FreeBSD atomic_subtract_rel_int).
 */
static inline void
atomic_subtract_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_sub(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_sub_explicit(q, v, memory_order_release);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p -= v;
	rtems_interrupt_enable(level);
#endif
}
229
/*
 * Atomically OR the bits of v into *p (FreeBSD atomic_set_int),
 * sequentially consistent ordering on SMP.
 */
static inline void
atomic_set_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_or(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_or_explicit(q, v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p |= v;
	rtems_interrupt_enable(level);
#endif
}
250
/*
 * Atomically OR the bits of v into *p with acquire ordering
 * (FreeBSD atomic_set_acq_int).
 */
static inline void
atomic_set_acq_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_or(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_or_explicit(q, v, memory_order_acquire);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p |= v;
	rtems_interrupt_enable(level);
#endif
}
271
/*
 * Atomically OR the bits of v into *p with release ordering
 * (FreeBSD atomic_set_rel_int).
 */
static inline void
atomic_set_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_or(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_or_explicit(q, v, memory_order_release);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p |= v;
	rtems_interrupt_enable(level);
#endif
}
292
/*
 * Atomically clear the bits set in v from *p, i.e. "*p &= ~v"
 * (FreeBSD atomic_clear_int), sequentially consistent ordering on SMP.
 */
static inline void
atomic_clear_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_and(~v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_and_explicit(q, ~v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p &= ~v;
	rtems_interrupt_enable(level);
#endif
}
313
/*
 * Atomically clear the bits set in v from *p with acquire ordering
 * (FreeBSD atomic_clear_acq_int).
 */
static inline void
atomic_clear_acq_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_and(~v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_and_explicit(q, ~v, memory_order_acquire);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p &= ~v;
	rtems_interrupt_enable(level);
#endif
}
334
/*
 * Atomically clear the bits set in v from *p with release ordering
 * (FreeBSD atomic_clear_rel_int).
 */
static inline void
atomic_clear_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->fetch_and(~v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_fetch_and_explicit(q, ~v, memory_order_release);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p &= ~v;
	rtems_interrupt_enable(level);
#endif
}
355
/*
 * Atomic compare-and-set (FreeBSD atomic_cmpset_int): if *p == cmp,
 * store set and return non-zero; otherwise leave *p unchanged and
 * return zero.  Success uses sequentially consistent ordering; the
 * failure ordering is relaxed.
 */
static inline int
atomic_cmpset_int(volatile int *p, int cmp, int set)
{
	int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	/* cmp is a by-value copy, so the value observed on failure is discarded */
	rv = q->compare_exchange_strong(cmp, set, std::memory_order_seq_cst,
	    std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
	    memory_order_seq_cst, memory_order_relaxed);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	rv = *p == cmp;
	if (rv) {
		*p = set;
	}
	rtems_interrupt_enable(level);
#endif

	return (rv);
}
385
/*
 * Atomic compare-and-set with acquire ordering on success
 * (FreeBSD atomic_cmpset_acq_int); relaxed on failure.  Returns
 * non-zero iff the swap was performed.
 */
static inline int
atomic_cmpset_acq_int(volatile int *p, int cmp, int set)
{
	int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	rv = q->compare_exchange_strong(cmp, set, std::memory_order_acquire,
	    std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
	    memory_order_acquire, memory_order_relaxed);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	rv = *p == cmp;
	if (rv) {
		*p = set;
	}
	rtems_interrupt_enable(level);
#endif

	return (rv);
}
415
/*
 * Atomic compare-and-set with release ordering on success
 * (FreeBSD atomic_cmpset_rel_int); relaxed on failure.  Returns
 * non-zero iff the swap was performed.
 */
static inline int
atomic_cmpset_rel_int(volatile int *p, int cmp, int set)
{
	int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	rv = q->compare_exchange_strong(cmp, set, std::memory_order_release,
	    std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
	    memory_order_release, memory_order_relaxed);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	rv = *p == cmp;
	if (rv) {
		*p = set;
	}
	rtems_interrupt_enable(level);
#endif

	return (rv);
}
445
/*
 * Atomically perform "*p += v" and return the PREVIOUS value of *p
 * (FreeBSD atomic_fetchadd_int), sequentially consistent ordering.
 */
static inline int
atomic_fetchadd_int(volatile int *p, int v)
{
	int tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	tmp = q->fetch_add(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	tmp = atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	tmp = *p;
	*p += v;
	rtems_interrupt_enable(level);
#endif

	return (tmp);
}
471
/*
 * Atomically read *p, set it to zero, and return the old value
 * (FreeBSD atomic_readandclear_int); implemented as an exchange
 * with 0, sequentially consistent ordering.
 */
static inline int
atomic_readandclear_int(volatile int *p)
{
	int tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	tmp = q->exchange(0, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	tmp = atomic_exchange_explicit(q, 0, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	tmp = *p;
	*p = 0;
	rtems_interrupt_enable(level);
#endif

	return (tmp);
}
497
/*
 * Atomically load *p with acquire ordering (FreeBSD atomic_load_acq_int):
 * no subsequent memory access may be reordered before the load.
 *
 * BUGFIX: the non-SMP fallback previously issued the compiler barrier
 * BEFORE the load, which provides no acquire guarantee; acquire
 * semantics require load-then-barrier (cf. FreeBSD's UP implementation
 * "res = *p; __compiler_membar();").
 */
static inline int
atomic_load_acq_int(volatile int *p)
{
	int tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	tmp = q->load(std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	tmp = atomic_load_explicit(q, memory_order_acquire);
#else
	/* Acquire: the load must be ordered before later accesses */
	tmp = *p;
	RTEMS_COMPILER_MEMORY_BARRIER();
#endif

	return (tmp);
}
519
/*
 * Atomically store v to *p with release ordering
 * (FreeBSD atomic_store_rel_int): no preceding memory access may be
 * reordered after the store.
 *
 * BUGFIX: the non-SMP fallback previously issued the compiler barrier
 * AFTER the store, which provides no release guarantee; release
 * semantics require barrier-then-store (cf. FreeBSD's UP implementation
 * "__compiler_membar(); *p = v;").
 */
static inline void
atomic_store_rel_int(volatile int *p, int v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_int *q =
	    reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));

	q->store(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);

	atomic_store_explicit(q, v, memory_order_release);
#else
	/* Release: earlier accesses must be ordered before the store */
	RTEMS_COMPILER_MEMORY_BARRIER();
	*p = v;
#endif
}
537
/*
 * Atomically perform "*p += v" on a uint32_t (FreeBSD atomic_add_32),
 * sequentially consistent ordering on SMP.  atomic_uint_least32_t is
 * used since C11/C++11 guarantee its existence (uint32_t may lack a
 * dedicated atomic type name).
 */
static inline void
atomic_add_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_add(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p += v;
	rtems_interrupt_enable(level);
#endif
}
558
/*
 * Atomically perform "*p += v" on a uint32_t with acquire ordering
 * (FreeBSD atomic_add_acq_32).
 */
static inline void
atomic_add_acq_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_add(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_add_explicit(q, v, memory_order_acquire);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p += v;
	rtems_interrupt_enable(level);
#endif
}
579
/*
 * Atomically perform "*p += v" on a uint32_t with release ordering
 * (FreeBSD atomic_add_rel_32).
 */
static inline void
atomic_add_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_add(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_add_explicit(q, v, memory_order_release);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p += v;
	rtems_interrupt_enable(level);
#endif
}
600
/*
 * Atomically perform "*p -= v" on a uint32_t (FreeBSD atomic_subtract_32),
 * sequentially consistent ordering on SMP.
 */
static inline void
atomic_subtract_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_sub(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_sub_explicit(q, v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p -= v;
	rtems_interrupt_enable(level);
#endif
}
621
/*
 * Atomically perform "*p -= v" on a uint32_t with acquire ordering
 * (FreeBSD atomic_subtract_acq_32).
 */
static inline void
atomic_subtract_acq_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_sub(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_sub_explicit(q, v, memory_order_acquire);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p -= v;
	rtems_interrupt_enable(level);
#endif
}
642
/*
 * Atomically perform "*p -= v" on a uint32_t with release ordering
 * (FreeBSD atomic_subtract_rel_32).
 */
static inline void
atomic_subtract_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_sub(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_sub_explicit(q, v, memory_order_release);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p -= v;
	rtems_interrupt_enable(level);
#endif
}
663
/*
 * Atomically OR the bits of v into *p on a uint32_t
 * (FreeBSD atomic_set_32), sequentially consistent ordering on SMP.
 */
static inline void
atomic_set_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_or(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_or_explicit(q, v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p |= v;
	rtems_interrupt_enable(level);
#endif
}
684
/*
 * Atomically OR the bits of v into *p with acquire ordering
 * (FreeBSD atomic_set_acq_32).
 */
static inline void
atomic_set_acq_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_or(v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_or_explicit(q, v, memory_order_acquire);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p |= v;
	rtems_interrupt_enable(level);
#endif
}
705
/*
 * Atomically OR the bits of v into *p with release ordering
 * (FreeBSD atomic_set_rel_32).
 */
static inline void
atomic_set_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_or(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_or_explicit(q, v, memory_order_release);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p |= v;
	rtems_interrupt_enable(level);
#endif
}
726
/*
 * Atomically clear the bits set in v from *p, i.e. "*p &= ~v"
 * (FreeBSD atomic_clear_32), sequentially consistent ordering on SMP.
 */
static inline void
atomic_clear_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_and(~v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_and_explicit(q, ~v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p &= ~v;
	rtems_interrupt_enable(level);
#endif
}
747
/*
 * Atomically clear the bits set in v from *p with acquire ordering
 * (FreeBSD atomic_clear_acq_32).
 */
static inline void
atomic_clear_acq_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_and(~v, std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_and_explicit(q, ~v, memory_order_acquire);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p &= ~v;
	rtems_interrupt_enable(level);
#endif
}
768
/*
 * Atomically clear the bits set in v from *p with release ordering
 * (FreeBSD atomic_clear_rel_32).
 */
static inline void
atomic_clear_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	q->fetch_and(~v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	atomic_fetch_and_explicit(q, ~v, memory_order_release);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	*p &= ~v;
	rtems_interrupt_enable(level);
#endif
}
789
/*
 * Atomic compare-and-set on a uint32_t (FreeBSD atomic_cmpset_32):
 * if *p == cmp, store set and return non-zero; otherwise return zero.
 * Success uses sequentially consistent ordering; failure is relaxed.
 */
static inline int
atomic_cmpset_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
{
	int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	/* cmp is a by-value copy, so the value observed on failure is discarded */
	rv = q->compare_exchange_strong(cmp, set, std::memory_order_seq_cst,
	    std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
	    memory_order_seq_cst, memory_order_relaxed);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	rv = *p == cmp;
	if (rv) {
		*p = set;
	}
	rtems_interrupt_enable(level);
#endif

	return (rv);
}
819
/*
 * Atomic compare-and-set on a uint32_t with acquire ordering on success
 * (FreeBSD atomic_cmpset_acq_32); relaxed on failure.  Returns non-zero
 * iff the swap was performed.
 */
static inline int
atomic_cmpset_acq_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
{
	int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	rv = q->compare_exchange_strong(cmp, set, std::memory_order_acquire,
	    std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
	    memory_order_acquire, memory_order_relaxed);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	rv = *p == cmp;
	if (rv) {
		*p = set;
	}
	rtems_interrupt_enable(level);
#endif

	return (rv);
}
849
/*
 * Atomic compare-and-set on a uint32_t with release ordering on success
 * (FreeBSD atomic_cmpset_rel_32); relaxed on failure.  Returns non-zero
 * iff the swap was performed.
 */
static inline int
atomic_cmpset_rel_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
{
	int rv;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	rv = q->compare_exchange_strong(cmp, set, std::memory_order_release,
	    std::memory_order_relaxed);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
	    memory_order_release, memory_order_relaxed);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	rv = *p == cmp;
	if (rv) {
		*p = set;
	}
	rtems_interrupt_enable(level);
#endif

	return (rv);
}
879
/*
 * Atomically perform "*p += v" and return the PREVIOUS value of *p
 * (FreeBSD atomic_fetchadd_32), sequentially consistent ordering.
 */
static inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	tmp = q->fetch_add(v, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	tmp = atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	tmp = *p;
	*p += v;
	rtems_interrupt_enable(level);
#endif

	return (tmp);
}
905
/*
 * Atomically read *p, set it to zero, and return the old value
 * (FreeBSD atomic_readandclear_32); implemented as an exchange with 0,
 * sequentially consistent ordering.
 */
static inline uint32_t
atomic_readandclear_32(volatile uint32_t *p)
{
	uint32_t tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
	std::atomic_uint_least32_t *q =
	    reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

	tmp = q->exchange(0, std::memory_order_seq_cst);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
	atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

	tmp = atomic_exchange_explicit(q, 0, memory_order_seq_cst);
#else
	rtems_interrupt_level level;

	rtems_interrupt_disable(level);
	tmp = *p;
	*p = 0;
	rtems_interrupt_enable(level);
#endif

	return (tmp);
}
931
932static inline uint32_t
933atomic_load_acq_32(volatile uint32_t *p)
934{
935        uint32_t tmp;
936
937#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
938        std::atomic_uint_least32_t *q =
939            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
940
941        tmp = q->load(std::memory_order_acquire);
942#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
943        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
944
945        tmp = atomic_load_explicit(q, memory_order_acquire);
946#else
947        RTEMS_COMPILER_MEMORY_BARRIER();
948        tmp = *p;
949#endif
950
951        return (tmp);
952}
953
/*
 * Atomically store v into *p with release ordering: all memory accesses
 * issued before this store are ordered before it.
 */
static inline void
atomic_store_rel_32(volatile uint32_t *p, uint32_t v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_uint_least32_t *q =
            reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));

        q->store(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);

        atomic_store_explicit(q, v, memory_order_release);
#else
        /*
         * Release semantics require prior accesses to be ordered before the
         * store, so the compiler barrier must precede the store.  The
         * original code issued the barrier after the store, which does not
         * provide release ordering.
         */
        RTEMS_COMPILER_MEMORY_BARRIER();
        *p = v;
#endif
}
971
972static inline void
973atomic_add_long(volatile long *p, long v)
974{
975#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
976        std::atomic_long *q =
977            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
978
979        q->fetch_add(v, std::memory_order_seq_cst);
980#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
981        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
982
983        atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
984#else
985        rtems_interrupt_level level;
986
987        rtems_interrupt_disable(level);
988        *p += v;
989        rtems_interrupt_enable(level);
990#endif
991}
992
993static inline void
994atomic_add_acq_long(volatile long *p, long v)
995{
996#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
997        std::atomic_long *q =
998            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
999
1000        q->fetch_add(v, std::memory_order_acquire);
1001#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1002        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1003
1004        atomic_fetch_add_explicit(q, v, memory_order_acquire);
1005#else
1006        rtems_interrupt_level level;
1007
1008        rtems_interrupt_disable(level);
1009        *p += v;
1010        rtems_interrupt_enable(level);
1011#endif
1012}
1013
1014static inline void
1015atomic_add_rel_long(volatile long *p, long v)
1016{
1017#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1018        std::atomic_long *q =
1019            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1020
1021        q->fetch_add(v, std::memory_order_release);
1022#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1023        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1024
1025        atomic_fetch_add_explicit(q, v, memory_order_release);
1026#else
1027        rtems_interrupt_level level;
1028
1029        rtems_interrupt_disable(level);
1030        *p += v;
1031        rtems_interrupt_enable(level);
1032#endif
1033}
1034
1035static inline void
1036atomic_subtract_long(volatile long *p, long v)
1037{
1038#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1039        std::atomic_long *q =
1040            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1041
1042        q->fetch_sub(v, std::memory_order_seq_cst);
1043#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1044        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1045
1046        atomic_fetch_sub_explicit(q, v, memory_order_seq_cst);
1047#else
1048        rtems_interrupt_level level;
1049
1050        rtems_interrupt_disable(level);
1051        *p -= v;
1052        rtems_interrupt_enable(level);
1053#endif
1054}
1055
1056static inline void
1057atomic_subtract_acq_long(volatile long *p, long v)
1058{
1059#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1060        std::atomic_long *q =
1061            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1062
1063        q->fetch_sub(v, std::memory_order_acquire);
1064#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1065        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1066
1067        atomic_fetch_sub_explicit(q, v, memory_order_acquire);
1068#else
1069        rtems_interrupt_level level;
1070
1071        rtems_interrupt_disable(level);
1072        *p -= v;
1073        rtems_interrupt_enable(level);
1074#endif
1075}
1076
1077static inline void
1078atomic_subtract_rel_long(volatile long *p, long v)
1079{
1080#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1081        std::atomic_long *q =
1082            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1083
1084        q->fetch_sub(v, std::memory_order_release);
1085#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1086        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1087
1088        atomic_fetch_sub_explicit(q, v, memory_order_release);
1089#else
1090        rtems_interrupt_level level;
1091
1092        rtems_interrupt_disable(level);
1093        *p -= v;
1094        rtems_interrupt_enable(level);
1095#endif
1096}
1097
1098static inline void
1099atomic_set_long(volatile long *p, long v)
1100{
1101#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1102        std::atomic_long *q =
1103            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1104
1105        q->fetch_or(v, std::memory_order_seq_cst);
1106#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1107        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1108
1109        atomic_fetch_or_explicit(q, v, memory_order_seq_cst);
1110#else
1111        rtems_interrupt_level level;
1112
1113        rtems_interrupt_disable(level);
1114        *p |= v;
1115        rtems_interrupt_enable(level);
1116#endif
1117}
1118
1119static inline void
1120atomic_set_acq_long(volatile long *p, long v)
1121{
1122#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1123        std::atomic_long *q =
1124            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1125
1126        q->fetch_or(v, std::memory_order_acquire);
1127#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1128        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1129
1130        atomic_fetch_or_explicit(q, v, memory_order_acquire);
1131#else
1132        rtems_interrupt_level level;
1133
1134        rtems_interrupt_disable(level);
1135        *p |= v;
1136        rtems_interrupt_enable(level);
1137#endif
1138}
1139
1140static inline void
1141atomic_set_rel_long(volatile long *p, long v)
1142{
1143#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1144        std::atomic_long *q =
1145            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1146
1147        q->fetch_or(v, std::memory_order_release);
1148#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1149        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1150
1151        atomic_fetch_or_explicit(q, v, memory_order_release);
1152#else
1153        rtems_interrupt_level level;
1154
1155        rtems_interrupt_disable(level);
1156        *p |= v;
1157        rtems_interrupt_enable(level);
1158#endif
1159}
1160
1161static inline void
1162atomic_clear_long(volatile long *p, long v)
1163{
1164#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1165        std::atomic_long *q =
1166            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1167
1168        q->fetch_and(~v, std::memory_order_seq_cst);
1169#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1170        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1171
1172        atomic_fetch_and_explicit(q, ~v, memory_order_seq_cst);
1173#else
1174        rtems_interrupt_level level;
1175
1176        rtems_interrupt_disable(level);
1177        *p &= ~v;
1178        rtems_interrupt_enable(level);
1179#endif
1180}
1181
1182static inline void
1183atomic_clear_acq_long(volatile long *p, long v)
1184{
1185#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1186        std::atomic_long *q =
1187            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1188
1189        q->fetch_and(~v, std::memory_order_acquire);
1190#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1191        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1192
1193        atomic_fetch_and_explicit(q, ~v, memory_order_acquire);
1194#else
1195        rtems_interrupt_level level;
1196
1197        rtems_interrupt_disable(level);
1198        *p &= ~v;
1199        rtems_interrupt_enable(level);
1200#endif
1201}
1202
1203static inline void
1204atomic_clear_rel_long(volatile long *p, long v)
1205{
1206#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1207        std::atomic_long *q =
1208            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1209
1210        q->fetch_and(~v, std::memory_order_release);
1211#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1212        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1213
1214        atomic_fetch_and_explicit(q, ~v, memory_order_release);
1215#else
1216        rtems_interrupt_level level;
1217
1218        rtems_interrupt_disable(level);
1219        *p &= ~v;
1220        rtems_interrupt_enable(level);
1221#endif
1222}
1223
1224static inline int
1225atomic_cmpset_long(volatile long *p, long cmp, long set)
1226{
1227        int rv;
1228
1229#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1230        std::atomic_long *q =
1231            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1232
1233        rv = q->compare_exchange_strong(cmp, set, std::memory_order_seq_cst,
1234            std::memory_order_relaxed);
1235#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1236        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1237
1238        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
1239            memory_order_seq_cst, memory_order_relaxed);
1240#else
1241        rtems_interrupt_level level;
1242
1243        rtems_interrupt_disable(level);
1244        rv = *p == cmp;
1245        if (rv) {
1246                *p = set;
1247        }
1248        rtems_interrupt_enable(level);
1249#endif
1250
1251        return (rv);
1252}
1253
1254static inline int
1255atomic_cmpset_acq_long(volatile long *p, long cmp, long set)
1256{
1257        int rv;
1258
1259#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1260        std::atomic_long *q =
1261            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1262
1263        rv = q->compare_exchange_strong(cmp, set, std::memory_order_acquire,
1264            std::memory_order_relaxed);
1265#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1266        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1267
1268        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
1269            memory_order_acquire, memory_order_relaxed);
1270#else
1271        rtems_interrupt_level level;
1272
1273        rtems_interrupt_disable(level);
1274        rv = *p == cmp;
1275        if (rv) {
1276                *p = set;
1277        }
1278        rtems_interrupt_enable(level);
1279#endif
1280
1281        return (rv);
1282}
1283
1284static inline int
1285atomic_cmpset_rel_long(volatile long *p, long cmp, long set)
1286{
1287        int rv;
1288
1289#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1290        std::atomic_long *q =
1291            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1292
1293        rv = q->compare_exchange_strong(cmp, set, std::memory_order_release,
1294            std::memory_order_relaxed);
1295#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1296        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1297
1298        rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
1299            memory_order_release, memory_order_relaxed);
1300#else
1301        rtems_interrupt_level level;
1302
1303        rtems_interrupt_disable(level);
1304        rv = *p == cmp;
1305        if (rv) {
1306                *p = set;
1307        }
1308        rtems_interrupt_enable(level);
1309#endif
1310
1311        return (rv);
1312}
1313
1314static inline long
1315atomic_fetchadd_long(volatile long *p, long v)
1316{
1317        long tmp;
1318
1319#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1320        std::atomic_long *q =
1321            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1322
1323        tmp = q->fetch_add(v, std::memory_order_seq_cst);
1324#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1325        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1326
1327        tmp = atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
1328#else
1329        rtems_interrupt_level level;
1330
1331        rtems_interrupt_disable(level);
1332        tmp = *p;
1333        *p += v;
1334        rtems_interrupt_enable(level);
1335#endif
1336
1337        return (tmp);
1338}
1339
1340static inline long
1341atomic_readandclear_long(volatile long *p)
1342{
1343        long tmp;
1344
1345#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
1346        std::atomic_long *q =
1347            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
1348
1349        tmp = q->exchange(0, std::memory_order_seq_cst);
1350#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
1351        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
1352
1353        tmp = atomic_exchange_explicit(q, 0, memory_order_seq_cst);
1354#else
1355        rtems_interrupt_level level;
1356
1357        rtems_interrupt_disable(level);
1358        tmp = *p;
1359        *p = 0;
1360        rtems_interrupt_enable(level);
1361#endif
1362
1363        return (tmp);
1364}
1365
/*
 * Atomically load *p with acquire ordering: the load is ordered before all
 * subsequent memory accesses of the calling context.
 */
static inline long
atomic_load_acq_long(volatile long *p)
{
        long tmp;

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_long *q =
            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));

        tmp = q->load(std::memory_order_acquire);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);

        tmp = atomic_load_explicit(q, memory_order_acquire);
#else
        /*
         * Acquire semantics require the load to be ordered before subsequent
         * accesses, so the compiler barrier must follow the load.  The
         * original code issued the barrier before the load, which orders
         * prior accesses instead and provides no acquire guarantee.
         */
        tmp = *p;
        RTEMS_COMPILER_MEMORY_BARRIER();
#endif

        return (tmp);
}
1387
/*
 * Atomically store v into *p with release ordering: all memory accesses
 * issued before this store are ordered before it.
 */
static inline void
atomic_store_rel_long(volatile long *p, long v)
{
#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
        std::atomic_long *q =
            reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));

        q->store(v, std::memory_order_release);
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
        atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);

        atomic_store_explicit(q, v, memory_order_release);
#else
        /*
         * Release semantics require prior accesses to be ordered before the
         * store, so the compiler barrier must precede the store.  The
         * original code issued the barrier after the store, which does not
         * provide release ordering.
         */
        RTEMS_COMPILER_MEMORY_BARRIER();
        *p = v;
#endif
}
1405
1406#endif /* _RTEMS_BSD_MACHINE_ATOMIC_H_ */
Note: See TracBrowser for help on using the repository browser.