/* include/urcu/uatomic/s390.h — from urcu.git (userspace-rcu) */
1 #ifndef _URCU_UATOMIC_ARCH_S390_H
2 #define _URCU_UATOMIC_ARCH_S390_H
3
4 /*
5 * Atomic exchange operations for the S390 architecture. Based on information
6 * taken from the Principles of Operation Appendix A "Conditional Swapping
7 * Instructions (CS, CDS)".
8 *
9 * Copyright (c) 2009 Novell, Inc.
10 * Author: Jan Blunck <jblunck@suse.de>
11 * Copyright (c) 2009 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
12 *
13 * Permission is hereby granted, free of charge, to any person obtaining a copy
14 * of this software and associated documentation files (the "Software"), to
15 * deal in the Software without restriction, including without limitation the
16 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
17 * sell copies of the Software, and to permit persons to whom the Software is
18 * furnished to do so, subject to the following conditions:
19 *
20 * The above copyright notice and this permission notice shall be included in
21 * all copies or substantial portions of the Software.
22 *
23 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
26 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
28 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
29 * IN THE SOFTWARE.
30 */
31
32 #include <urcu/compiler.h>
33 #include <urcu/system.h>
34
35 #ifdef __cplusplus
36 extern "C" {
37 #endif
38
/*
 * GCC newer than 3.2 understands the short memory-operand constraint
 * ("Q", used below) on s390; older compilers fall back to the
 * register+offset addressing form.
 */
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
#define COMPILER_HAVE_SHORT_MEM_OPERAND
#endif
42
43 /*
44 * MEMOP assembler operand rules:
45 * - op refer to MEMOP_IN operand
46 * - MEMOP_IN can expand to more than a single operand. Use it at the end of
47 * operand list only.
48 */
49
#ifdef COMPILER_HAVE_SHORT_MEM_OPERAND

/* "Q" constraint: memory operand referenced directly by the instruction. */
#define MEMOP_OUT(addr)	"=Q" (*(addr))
#define MEMOP_IN(addr)	"Q" (*(addr))
#define MEMOP_REF(op)	#op		/* op refer to MEMOP_IN operand */

#else /* !COMPILER_HAVE_SHORT_MEM_OPERAND */

/*
 * Fallback: pass the address in an address register ("a") alongside an
 * "m" operand, and reference it as 0(reg) in the instruction text.
 * Note MEMOP_IN expands to two operands here (see rules above).
 */
#define MEMOP_OUT(addr)	"=m" (*(addr))
#define MEMOP_IN(addr)	"a" (addr), "m" (*(addr))
#define MEMOP_REF(op)	"0(" #op ")"	/* op refer to MEMOP_IN operand */

#endif /* !COMPILER_HAVE_SHORT_MEM_OPERAND */
63
64 /*
65 * The __hp() macro casts the void pointer @x to a pointer to a structure
66 * containing an array of char of the specified size. This allows passing the
67 * @addr arguments of the following inline functions as "m" and "+m" operands
68 * to the assembly. The @size parameter should be a constant to support
69 * compilers such as clang which do not support VLA. Create typedefs because
70 * C++ does not allow types be defined in casts.
71 */
72
/* Fixed-size wrapper types for the __hp() cast (see comment above). */
typedef struct { char v[4]; } __hp_4;	/* 4-byte memory operand */
typedef struct { char v[8]; } __hp_8;	/* 8-byte memory operand */

/* Cast @x to a pointer to a @size-byte aggregate, usable as an "m"/"Q"
 * asm operand. @size must be a literal 4 or 8 (selects __hp_4/__hp_8). */
#define __hp(size, x)	((__hp_##size *)(x))
77
78 /* xchg */
79
/*
 * Atomically store @val into *@addr and return the previous value.
 * @len selects the operand width: 4 bytes, or 8 bytes on 64-bit builds.
 *
 * Implemented as a compare-and-swap (cs/csg) retry loop: "brc 4,0b"
 * branches back while the compare failed, i.e. until the swap lands.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(volatile void *addr, unsigned long val, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int old_val;

		__asm__ __volatile__(
			"0:	cs %0,%2," MEMOP_REF(%3) "\n"
			"	brc 4,0b\n"
			: "=&r" (old_val), MEMOP_OUT (__hp(4, addr))
			: "r" (val), MEMOP_IN (__hp(4, addr))
			: "memory", "cc");
		return old_val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old_val;

		__asm__ __volatile__(
			"0:	csg %0,%2," MEMOP_REF(%3) "\n"
			"	brc 4,0b\n"
			: "=&r" (old_val), MEMOP_OUT (__hp(8, addr))
			: "r" (val), MEMOP_IN (__hp(8, addr))
			: "memory", "cc");
		return old_val;
	}
#endif
	default:
		/* Unsupported operand size: emit an invalid instruction
		 * word so misuse traps at runtime. */
		__asm__ __volatile__(".long	0xd00d00");
	}

	return 0;
}
116
/*
 * uatomic_xchg(addr, v): type-generic atomic exchange. Dispatches on
 * sizeof(*addr) and casts the result back to the pointed-to type.
 */
#define uatomic_xchg(addr, v)						    \
	(__typeof__(*(addr))) _uatomic_exchange((addr),			    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr)))
121
122 /* cmpxchg */
123
124 static inline __attribute__((always_inline))
125 unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
126 unsigned long _new, int len)
127 {
128 switch (len) {
129 case 4:
130 {
131 unsigned int old_val = (unsigned int)old;
132
133 __asm__ __volatile__(
134 " cs %0,%2," MEMOP_REF(%3) "\n"
135 : "+r" (old_val), MEMOP_OUT (__hp(4, addr))
136 : "r" (_new), MEMOP_IN (__hp(4, addr))
137 : "memory", "cc");
138 return old_val;
139 }
140 #if (CAA_BITS_PER_LONG == 64)
141 case 8:
142 {
143 __asm__ __volatile__(
144 " csg %0,%2," MEMOP_REF(%3) "\n"
145 : "+r" (old), MEMOP_OUT (__hp(8, addr))
146 : "r" (_new), MEMOP_IN (__hp(8, addr))
147 : "memory", "cc");
148 return old;
149 }
150 #endif
151 default:
152 __asm__ __volatile__(".long 0xd00d00");
153 }
154
155 return 0;
156 }
157
/*
 * uatomic_cmpxchg(addr, old, _new): type-generic compare-and-swap.
 * Returns the prior value of *addr, cast back to the pointed-to type;
 * the swap succeeded iff the return value equals @old.
 */
#define uatomic_cmpxchg(addr, old, _new)				     \
	(__typeof__(*(addr))) _uatomic_cmpxchg((addr),			     \
					       caa_cast_long_keep_sign(old), \
					       caa_cast_long_keep_sign(_new),\
					       sizeof(*(addr)))
163
164 #ifdef __cplusplus
165 }
166 #endif
167
168 #include <urcu/uatomic/generic.h>
169
170 #endif /* _URCU_UATOMIC_ARCH_S390_H */