Add CMM memory model
[urcu.git] / include / urcu / uatomic / builtins-generic.h
CommitLineData
d1854484
OD
1/*
2 * urcu/uatomic/builtins-generic.h
3 *
4 * Copyright (c) 2023 Olivier Dion <odion@efficios.com>
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2.1 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 */
20
21#ifndef _URCU_UATOMIC_BUILTINS_GENERIC_H
22#define _URCU_UATOMIC_BUILTINS_GENERIC_H
23
24#include <urcu/compiler.h>
25#include <urcu/system.h>
26
/*
 * uatomic_store() - atomically store @v into *@addr with CMM memory order @mo.
 *
 * cmm_cast_volatile(), cmm_to_c11() and cmm_seq_cst_fence_after_atomic()
 * are provided by urcu/compiler.h / urcu/system.h; the latter presumably
 * emits a full fence after the access for the CMM_SEQ_CST_FENCE flavor --
 * semantics defined outside this file, confirm there.
 */
#define uatomic_store(addr, v, mo)					\
	do {								\
		__atomic_store_n(cmm_cast_volatile(addr), v,		\
				cmm_to_c11(mo));			\
		cmm_seq_cst_fence_after_atomic(mo);			\
	} while (0)

/* uatomic_set() - legacy API: relaxed atomic store of @v into *@addr. */
#define uatomic_set(addr, v)				\
	do {						\
		uatomic_store(addr, v, CMM_RELAXED);	\
	} while (0)
38
/*
 * uatomic_load() - atomically load from *@addr with CMM memory order @mo.
 *
 * Statement expression evaluating to the loaded value, typed as
 * __typeof__(*(addr)).
 */
#define uatomic_load(addr, mo)						\
	__extension__							\
	({								\
		__typeof__(*(addr)) _value =				\
			__atomic_load_n(cmm_cast_volatile(addr),	\
					cmm_to_c11(mo));		\
		cmm_seq_cst_fence_after_atomic(mo);			\
									\
		_value;							\
	})

/* uatomic_read() - legacy API: relaxed atomic load from *@addr. */
#define uatomic_read(addr)			\
	uatomic_load(addr, CMM_RELAXED)
52
/*
 * uatomic_cmpxchg_mo() - atomic compare-and-exchange.
 *
 * Atomically compares *@addr against @old and, on match, stores @new into
 * *@addr (strong variant: no spurious failure).  @mos is the memory order
 * applied on success, @mof on failure.  Evaluates to the value *@addr held
 * before the operation: the exchange took place iff that value equals @old.
 *
 * Fix: parenthesize the @old and @new macro arguments so that compound
 * expressions (e.g. `x + 1`) are cast/passed as a whole instead of the
 * cast binding only to their first operand.
 */
#define uatomic_cmpxchg_mo(addr, old, new, mos, mof)			\
	__extension__							\
	({								\
		__typeof__(*(addr)) _old = (__typeof__(*(addr))) (old);	\
									\
		if (__atomic_compare_exchange_n(cmm_cast_volatile(addr), \
						&_old, (new), 0,	\
						cmm_to_c11(mos),	\
						cmm_to_c11(mof))) {	\
			cmm_seq_cst_fence_after_atomic(mos);		\
		} else {						\
			cmm_seq_cst_fence_after_atomic(mof);		\
		}							\
		_old;							\
	})

/*
 * uatomic_cmpxchg() - legacy API: compare-and-exchange with the
 * SEQ_CST_FENCE flavor on success and relaxed ordering on failure.
 */
#define uatomic_cmpxchg(addr, old, new) \
	uatomic_cmpxchg_mo(addr, old, new, CMM_SEQ_CST_FENCE, CMM_RELAXED)
71
/*
 * uatomic_xchg_mo() - atomic exchange: store @v into *@addr with CMM
 * memory order @mo, evaluating to the previous value of *@addr.
 *
 * Fix: use __typeof__(*(addr)) instead of __typeof__((*addr)) so that a
 * compound @addr argument (e.g. `p + 1`) is dereferenced as a whole;
 * this also matches every sibling macro in this file.
 */
#define uatomic_xchg_mo(addr, v, mo)					\
	__extension__							\
	({								\
		__typeof__(*(addr)) _old =				\
			__atomic_exchange_n(cmm_cast_volatile(addr), v,	\
					cmm_to_c11(mo));		\
		cmm_seq_cst_fence_after_atomic(mo);			\
		_old;							\
	})

/* uatomic_xchg() - legacy API: exchange with the SEQ_CST_FENCE flavor. */
#define uatomic_xchg(addr, v)				\
	uatomic_xchg_mo(addr, v, CMM_SEQ_CST_FENCE)
84
/*
 * uatomic_add_return_mo() - atomically add @v to *@addr with CMM memory
 * order @mo, evaluating to the NEW (post-addition) value.
 *
 * Fix: __atomic_add_fetch() returns the value *after* the addition, so
 * name the local `_new` rather than the misleading `_old`.
 */
#define uatomic_add_return_mo(addr, v, mo)				\
	__extension__							\
	({								\
		__typeof__(*(addr)) _new =				\
			__atomic_add_fetch(cmm_cast_volatile(addr), v,	\
					cmm_to_c11(mo));		\
		cmm_seq_cst_fence_after_atomic(mo);			\
		_new;							\
	})

/* uatomic_add_return() - legacy API: add-and-return, SEQ_CST_FENCE flavor. */
#define uatomic_add_return(addr, v)				\
	uatomic_add_return_mo(addr, v, CMM_SEQ_CST_FENCE)
97
/*
 * uatomic_sub_return_mo() - atomically subtract @v from *@addr with CMM
 * memory order @mo, evaluating to the NEW (post-subtraction) value.
 *
 * Fix: __atomic_sub_fetch() returns the value *after* the subtraction,
 * so name the local `_new` rather than the misleading `_old`.
 */
#define uatomic_sub_return_mo(addr, v, mo)				\
	__extension__							\
	({								\
		__typeof__(*(addr)) _new =				\
			__atomic_sub_fetch(cmm_cast_volatile(addr), v,	\
					cmm_to_c11(mo));		\
		cmm_seq_cst_fence_after_atomic(mo);			\
		_new;							\
	})

/* uatomic_sub_return() - legacy API: sub-and-return, SEQ_CST_FENCE flavor. */
#define uatomic_sub_return(addr, v)				\
	uatomic_sub_return_mo(addr, v, CMM_SEQ_CST_FENCE)
110
/*
 * uatomic_and_mo() - atomically AND @mask into *@addr with CMM memory
 * order @mo; the resulting value is discarded.
 */
#define uatomic_and_mo(addr, mask, mo)					\
	do {								\
		(void) __atomic_and_fetch(cmm_cast_volatile(addr), mask, \
					cmm_to_c11(mo));		\
		cmm_seq_cst_fence_after_atomic(mo);			\
	} while (0)

/*
 * uatomic_and() - legacy API.
 *
 * NOTE(review): this uses CMM_SEQ_CST while the sibling legacy helpers
 * uatomic_or()/uatomic_add()/uatomic_sub() below use CMM_RELAXED, and a
 * matching cmm_smp_mb__before/after_uatomic_and() pair exists at the end
 * of this file -- confirm whether SEQ_CST is intentional here.
 */
#define uatomic_and(addr, mask) \
	uatomic_and_mo(addr, mask, CMM_SEQ_CST)
120
/*
 * uatomic_or_mo() - atomically OR @mask into *@addr with CMM memory
 * order @mo; the resulting value is discarded.
 */
#define uatomic_or_mo(addr, mask, mo)					\
	do {								\
		(void) __atomic_or_fetch(cmm_cast_volatile(addr), mask,	\
					cmm_to_c11(mo));		\
		cmm_seq_cst_fence_after_atomic(mo);			\
	} while (0)


/*
 * uatomic_or() - legacy API: relaxed atomic OR; callers needing ordering
 * use the cmm_smp_mb__before/after_uatomic_or() helpers below.
 */
#define uatomic_or(addr, mask) \
	uatomic_or_mo(addr, mask, CMM_RELAXED)
131
/*
 * uatomic_add_mo() / uatomic_sub_mo() - atomic add/subtract with CMM
 * memory order @mo, discarding the result (implemented on top of the
 * *_return_mo variants above).
 */
#define uatomic_add_mo(addr, v, mo) \
	(void) uatomic_add_return_mo(addr, v, mo)

/* uatomic_add() - legacy API: relaxed atomic add; result discarded. */
#define uatomic_add(addr, v) \
	uatomic_add_mo(addr, v, CMM_RELAXED)

#define uatomic_sub_mo(addr, v, mo) \
	(void) uatomic_sub_return_mo(addr, v, mo)

/* uatomic_sub() - legacy API: relaxed atomic subtract; result discarded. */
#define uatomic_sub(addr, v) \
	uatomic_sub_mo(addr, v, CMM_RELAXED)
143
/*
 * uatomic_inc_mo() / uatomic_dec_mo() - atomic increment/decrement by 1
 * with CMM memory order @mo; thin wrappers over uatomic_add_mo() and
 * uatomic_sub_mo().
 */
#define uatomic_inc_mo(addr, mo) \
	uatomic_add_mo(addr, 1, mo)

/* uatomic_inc() - legacy API: relaxed atomic increment. */
#define uatomic_inc(addr) \
	uatomic_inc_mo(addr, CMM_RELAXED)

#define uatomic_dec_mo(addr, mo) \
	uatomic_sub_mo(addr, 1, mo)

/* uatomic_dec() - legacy API: relaxed atomic decrement. */
#define uatomic_dec(addr) \
	uatomic_dec_mo(addr, CMM_RELAXED)
155
/*
 * Legacy ordering helpers: the relaxed legacy read-modify-write macros
 * above rely on callers placing explicit barriers around the operation.
 * With the __atomic builtins these all map to a full memory barrier
 * (cmm_smp_mb(), defined in urcu/system.h).
 */
#define cmm_smp_mb__before_uatomic_and() cmm_smp_mb()
#define cmm_smp_mb__after_uatomic_and() cmm_smp_mb()

#define cmm_smp_mb__before_uatomic_or() cmm_smp_mb()
#define cmm_smp_mb__after_uatomic_or() cmm_smp_mb()

#define cmm_smp_mb__before_uatomic_add() cmm_smp_mb()
#define cmm_smp_mb__after_uatomic_add() cmm_smp_mb()

#define cmm_smp_mb__before_uatomic_sub() cmm_smp_mb()
#define cmm_smp_mb__after_uatomic_sub() cmm_smp_mb()

#define cmm_smp_mb__before_uatomic_inc() cmm_smp_mb()
#define cmm_smp_mb__after_uatomic_inc() cmm_smp_mb()

#define cmm_smp_mb__before_uatomic_dec() cmm_smp_mb()
#define cmm_smp_mb__after_uatomic_dec() cmm_smp_mb()
173
#endif /* _URCU_UATOMIC_BUILTINS_GENERIC_H */
This page took 0.033052 seconds and 4 git commands to generate.