Fix: x86 and s390 uatomic: __hp() macro warning with gcc 11
[urcu.git] / include / urcu / uatomic / s390.h
1 #ifndef _URCU_UATOMIC_ARCH_S390_H
2 #define _URCU_UATOMIC_ARCH_S390_H
3
4 /*
5 * Atomic exchange operations for the S390 architecture. Based on information
6 * taken from the Principles of Operation Appendix A "Conditional Swapping
7 * Instructions (CS, CDS)".
8 *
9 * Copyright (c) 2009 Novell, Inc.
10 * Author: Jan Blunck <jblunck@suse.de>
11 * Copyright (c) 2009 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
12 *
13 * Permission is hereby granted, free of charge, to any person obtaining a copy
14 * of this software and associated documentation files (the "Software"), to
15 * deal in the Software without restriction, including without limitation the
16 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
17 * sell copies of the Software, and to permit persons to whom the Software is
18 * furnished to do so, subject to the following conditions:
19 *
20 * The above copyright notice and this permission notice shall be included in
21 * all copies or substantial portions of the Software.
22 *
23 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
26 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
28 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
29 * IN THE SOFTWARE.
30 */
31
32 #include <urcu/compiler.h>
33 #include <urcu/system.h>
34
35 #ifdef __cplusplus
36 extern "C" {
37 #endif
38
/*
 * Compilers newer than GCC 3.2 can pass the memory location itself as an
 * inline-asm operand ("Q": base + short displacement, per the s390 machine
 * constraints); older compilers must instead pass the address in an
 * address register ("a") and dereference it inside the template.
 */
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
#define COMPILER_HAVE_SHORT_MEM_OPERAND
#endif

/*
 * MEMOP assembler operand rules:
 * - op refer to MEMOP_IN operand
 * - MEMOP_IN can expand to more than a single operand. Use it at the end of
 *   operand list only.
 */

#ifdef COMPILER_HAVE_SHORT_MEM_OPERAND

/* The operand is the memory word itself; reference it directly by name. */
#define MEMOP_OUT(addr)	"=Q" (*(addr))
#define MEMOP_IN(addr)	"Q" (*(addr))
#define MEMOP_REF(op)	#op /* op refer to MEMOP_IN operand */

#else /* !COMPILER_HAVE_SHORT_MEM_OPERAND */

/*
 * Fallback: pass the address in a register ("a") plus an "m" operand so the
 * compiler knows the pointed-to memory is accessed; the template references
 * the location as 0(reg).  This is why MEMOP_IN may expand to two operands.
 */
#define MEMOP_OUT(addr)	"=m" (*(addr))
#define MEMOP_IN(addr)	"a" (addr), "m" (*(addr))
#define MEMOP_REF(op)	"0(" #op ")" /* op refer to MEMOP_IN operand */

#endif /* !COMPILER_HAVE_SHORT_MEM_OPERAND */
63
/*
 * The __hp() macro casts the void pointer "x" to a pointer to a structure
 * containing an array of char of the specified size. This allows passing the
 * @addr arguments of the following inline functions as "m" and "+m" operands
 * to the assembly.  Sizing the struct to the actual access width (rather
 * than a fixed-size cast) is what silences the warning gcc 11 emits here,
 * per the commit this file version comes from.
 */

#define __hp(size, x)	((struct { char v[size]; } *)(x))
72
73 /* xchg */
74
/*
 * Atomically store @val at @addr and return the previous value.
 *
 * @addr: location to exchange (must be naturally aligned for @len).
 * @val:  new value to store.
 * @len:  operand width in bytes: 4, or 8 when CAA_BITS_PER_LONG == 64.
 *
 * always_inline matters: callers pass sizeof(*(addr)), so the switch on
 * @len folds to a single case at compile time and __hp(len, addr) gets a
 * constant array size.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(volatile void *addr, unsigned long val, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int old_val;

		/*
		 * cs (32-bit compare and swap) compares %0 with the memory
		 * operand and stores %2 on match; "brc 4,0b" loops back while
		 * the compare failed, with %0 reloaded from memory by cs, so
		 * the sequence retries until it achieves an unconditional
		 * exchange.
		 */
		__asm__ __volatile__(
			"0:	cs %0,%2," MEMOP_REF(%3) "\n"
			"	brc 4,0b\n"
			: "=&r" (old_val), MEMOP_OUT (__hp(len, addr))
			: "r" (val), MEMOP_IN (__hp(len, addr))
			: "memory", "cc");
		return old_val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old_val;

		/* csg: 64-bit variant of the retry loop above. */
		__asm__ __volatile__(
			"0:	csg %0,%2," MEMOP_REF(%3) "\n"
			"	brc 4,0b\n"
			: "=&r" (old_val), MEMOP_OUT (__hp(len, addr))
			: "r" (val), MEMOP_IN (__hp(len, addr))
			: "memory", "cc");
		return old_val;
	}
#endif
	default:
		/*
		 * Unsupported width: emit a bogus instruction word so any
		 * reachable misuse faults at runtime.  With always_inline
		 * and a supported sizeof(), this branch is dead code.
		 */
		__asm__ __volatile__(".long	0xd00d00");
	}

	return 0;
}
111
/*
 * uatomic_xchg - atomically store @v at @addr, returning the previous value
 * cast back to the type of *(addr).  Width is taken from sizeof(*(addr)).
 *
 * The whole expansion is parenthesized (macro hygiene, CERT PRE02-C) so the
 * cast cannot interact with operators at the expansion site.
 */
#define uatomic_xchg(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),	    \
					caa_cast_long_keep_sign(v), \
					sizeof(*(addr))))
116
117 /* cmpxchg */
118
/*
 * Atomically compare the value at @addr with @old and, if equal, store
 * @_new.  Returns the value read from memory: equal to @old on success,
 * the conflicting current value on failure.
 *
 * @len selects the operand width (4, or 8 on 64-bit builds); other widths
 * trap at runtime.  Like _uatomic_exchange, this relies on always_inline
 * plus a constant sizeof() argument to fold the switch.
 *
 * NOTE(review): @addr is not volatile-qualified here, unlike in
 * _uatomic_exchange — presumably harmless given the asm "memory" clobber,
 * but worth confirming for consistency.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			       unsigned long _new, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int old_val = (unsigned int)old;

		/*
		 * Single cs, no retry loop: on compare failure, cs leaves
		 * the current memory value in %0, which is exactly what the
		 * caller must receive.
		 */
		__asm__ __volatile__(
			"	cs %0,%2," MEMOP_REF(%3) "\n"
			: "+r" (old_val), MEMOP_OUT (__hp(len, addr))
			: "r" (_new), MEMOP_IN (__hp(len, addr))
			: "memory", "cc");
		return old_val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		/* csg: 64-bit variant; @old is used in place directly. */
		__asm__ __volatile__(
			"	csg %0,%2," MEMOP_REF(%3) "\n"
			: "+r" (old), MEMOP_OUT (__hp(len, addr))
			: "r" (_new), MEMOP_IN (__hp(len, addr))
			: "memory", "cc");
		return old;
	}
#endif
	default:
		/* Unsupported width: bogus instruction word, faults if reached. */
		__asm__ __volatile__(".long	0xd00d00");
	}

	return 0;
}
152
/*
 * uatomic_cmpxchg - atomic compare-and-swap on *(addr): if *(addr) == @old,
 * store @_new.  Returns the value read (== @old on success), cast back to
 * the type of *(addr).  Width is taken from sizeof(*(addr)).
 *
 * The whole expansion is parenthesized (macro hygiene, CERT PRE02-C) so the
 * cast cannot interact with operators at the expansion site.
 */
#define uatomic_cmpxchg(addr, old, _new)			       \
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr),		       \
					caa_cast_long_keep_sign(old),  \
					caa_cast_long_keep_sign(_new), \
					sizeof(*(addr))))
158
159 #ifdef __cplusplus
160 }
161 #endif
162
163 #include <urcu/uatomic/generic.h>
164
165 #endif /* _URCU_UATOMIC_ARCH_S390_H */
This page took 0.032039 seconds and 4 git commands to generate.