1 #ifndef _URCU_UATOMIC_ARCH_S390_H
2 #define _URCU_UATOMIC_ARCH_S390_H
/*
 * Atomic exchange operations for the S390 architecture. Based on information
6 * taken from the Principles of Operation Appendix A "Conditional Swapping
7 * Instructions (CS, CDS)".
9 * Copyright (c) 2009 Novell, Inc.
10 * Author: Jan Blunck <jblunck@suse.de>
11 * Copyright (c) 2009 Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
13 * Permission is hereby granted, free of charge, to any person obtaining a copy
14 * of this software and associated documentation files (the "Software"), to
15 * deal in the Software without restriction, including without limitation the
16 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
17 * sell copies of the Software, and to permit persons to whom the Software is
18 * furnished to do so, subject to the following conditions:
20 * The above copyright notice and this permission notice shall be included in
21 * all copies or substantial portions of the Software.
23 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
26 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
32 #include <urcu/compiler.h>
33 #include <urcu/system.h>
/*
 * Fall back to a word-size guess when the compiler does not predefine
 * __SIZEOF_LONG__ (older GCC): 64-bit long on s390x, 32-bit on s390.
 */
#ifndef __SIZEOF_LONG__
#ifdef __s390x__
#define __SIZEOF_LONG__ 8
#else
#define __SIZEOF_LONG__ 4
#endif
#endif

#define BITS_PER_LONG (__SIZEOF_LONG__ * 8)
/*
 * GCC releases newer than 3.2 support the "Q" machine constraint
 * (memory operand with short displacement and no index register).
 */
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
#define COMPILER_HAVE_SHORT_MEM_OPERAND
#endif
/*
 * MEMOP assembler operand rules:
 * - op refer to MEMOP_IN operand
 * - MEMOP_IN can expand to more than a single operand. Use it at the end of
 *   operand list only.
 */
#ifdef COMPILER_HAVE_SHORT_MEM_OPERAND

/* "Q" constraint: let the compiler emit a short-displacement memory operand
 * directly; the asm template references it by plain operand number. */
#define MEMOP_OUT(addr) "=Q" (*(addr))
/* NOTE: MEMOP_IN captures `addr` from the call site (it takes no parameter). */
#define MEMOP_IN "Q" (*(addr))
#define MEMOP_REF(op) #op /* op refer to MEMOP_IN operand */

#else /* !COMPILER_HAVE_SHORT_MEM_OPERAND */

/* Older compilers: pass the address in an address register ("a") and build
 * a base+displacement reference "0(reg)" by hand in the asm template. */
#define MEMOP_OUT(addr) "=m" (*(addr))
#define MEMOP_IN "a" (addr), "m" (*(addr))
#define MEMOP_REF(op) "0(" #op ")" /* op refer to MEMOP_IN operand */

#endif /* !COMPILER_HAVE_SHORT_MEM_OPERAND */
/*
 * Oversized dummy type used to cast atomic targets for the memory
 * operand constraints: it makes the compiler treat the operand as a
 * wide object so both 4- and 8-byte accesses are covered by the same
 * constraint, without aliasing the actual pointed-to type.
 */
struct __uatomic_dummy {
	unsigned long v[10];
};
#define __hp(x) ((struct __uatomic_dummy *)(x))
/* Plain store/load through the STORE_SHARED/LOAD_SHARED primitives —
 * presumably volatile-access wrappers from urcu/system.h; verify there. */
#define uatomic_set(addr, v) STORE_SHARED(*(addr), (v))
#define uatomic_read(addr) LOAD_SHARED(*(addr))
82 static inline __attribute__((always_inline
))
83 unsigned long _uatomic_exchange(volatile void *addr
, unsigned long val
, int len
)
91 "0: cs %0,%2," MEMOP_REF(%3) "\n"
93 : "=&r" (old_val
), MEMOP_OUT (__hp(addr
))
94 : "r" (val
), MEMOP_IN (__hp(addr
))
98 #if (BITS_PER_LONG == 64)
101 unsigned long old_val
;
103 __asm__
__volatile__(
104 "0: csg %0,%2," MEMOP_REF(%3) "\n"
106 : "=&r" (old_val
), MEMOP_OUT (__hp(addr
))
107 : "r" (val
), MEMOP_IN (__hp(addr
))
113 __asm__
__volatile__(".long 0xd00d00");
/* Type-generic exchange: operand size is derived from *addr. */
#define uatomic_xchg(addr, v)						    \
	(__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						sizeof(*(addr)))
125 static inline __attribute__((always_inline
))
126 unsigned long _uatomic_cmpxchg(void *addr
, unsigned long old
,
127 unsigned long _new
, int len
)
132 unsigned int old_val
= (unsigned int)old
;
134 __asm__
__volatile__(
135 " cs %0,%2," MEMOP_REF(%3) "\n"
136 : "+r" (old_val
), MEMOP_OUT (__hp(addr
))
137 : "r" (_new
), MEMOP_IN (__hp(addr
))
141 #if (BITS_PER_LONG == 64)
144 __asm__
__volatile__(
145 " csg %0,%2," MEMOP_REF(%3) "\n"
146 : "+r" (old
), MEMOP_OUT (__hp(addr
))
147 : "r" (_new
), MEMOP_IN (__hp(addr
))
153 __asm__
__volatile__(".long 0xd00d00");
/* Type-generic compare-and-swap: operand size derived from *addr. */
#define uatomic_cmpxchg(addr, old, _new)			\
	(__typeof__(*(addr))) _uatomic_cmpxchg((addr),		\
					       (unsigned long)(old),	\
					       (unsigned long)(_new),	\
					       sizeof(*(addr)))
165 /* uatomic_add_return */
/*
 * Atomically add val to *addr and return the new (post-add) value.
 *
 * addr: target word.
 * val:  addend (pass the two's complement for subtraction).
 * len:  operand size in bytes; 4 or, on 64-bit, 8.
 *
 * Built as a read/cmpxchg retry loop on top of _uatomic_cmpxchg:
 * re-read the current value and attempt the swap until no concurrent
 * writer intervenes (cmpxchg returns the expected old value).
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 4);
		} while (oldt != old);

		return old + val;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 8);
		} while (oldt != old);

		return old + val;
	}
#endif
	default:
		/* Generate an illegal instruction: cannot be reached for
		 * the supported operand sizes. */
		__asm__ __volatile__(".long	0xd00d00");
	}

	return 0;
}
/* Type-generic add-and-return: operand size derived from *addr. */
#define uatomic_add_return(addr, v)					\
	((__typeof__(*(addr))) _uatomic_add_return((addr),		\
						   (unsigned long)(v),	\
						   sizeof(*(addr))))
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

/* All derived from uatomic_add_return; subtraction negates the addend. */
#define uatomic_sub_return(addr, v) uatomic_add_return((addr), -(v))

/* Void-returning variants for callers that ignore the resulting value. */
#define uatomic_add(addr, v) (void)uatomic_add_return((addr), (v))
#define uatomic_sub(addr, v) (void)uatomic_sub_return((addr), (v))

#define uatomic_inc(addr) uatomic_add((addr), 1)
#define uatomic_dec(addr) uatomic_add((addr), -1)

/* s390 CS/CSG is always available — no compat fallback path needed. */
#define compat_uatomic_cmpxchg(ptr, old, _new) uatomic_cmpxchg(ptr, old, _new)
219 #endif /* _URCU_UATOMIC_ARCH_S390_H */