[urcu.git] / arch_ppc.h
#ifndef _ARCH_PPC_H
#define _ARCH_PPC_H

/*
 * arch_ppc.h: Definitions for the PowerPC architecture, derived from Linux.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; but only version 2 of the License given
 * that this comes from the Linux kernel.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 *
 * Copyright (c) 2009 Paul E. McKenney, IBM Corporation.
 */

#include <compiler.h>

#define CONFIG_HAVE_FENCE 1
#define CONFIG_HAVE_MEM_COHERENCY

#define mb() asm volatile("sync":::"memory")
#define rmb() asm volatile("sync":::"memory")
#define wmb() asm volatile("sync":::"memory")

/*
 * Architectures without cache coherency need something like the following:
 *
 * #define mb() mc()
 * #define rmb() rmc()
 * #define wmb() wmc()
 * #define mc() arch_cache_flush()
 * #define rmc() arch_cache_flush_read()
 * #define wmc() arch_cache_flush_write()
 */

#define mc() barrier()
#define rmc() barrier()
#define wmc() barrier()

/* Assume SMP machine, given we don't have this information */
#define CONFIG_SMP 1

#ifdef CONFIG_SMP
#define smp_mb() mb()
#define smp_rmb() rmb()
#define smp_wmb() wmb()
#define smp_mc() mc()
#define smp_rmc() rmc()
#define smp_wmc() wmc()
#else
#define smp_mb() barrier()
#define smp_rmb() barrier()
#define smp_wmb() barrier()
#define smp_mc() barrier()
#define smp_rmc() barrier()
#define smp_wmc() barrier()
#endif

/* Nop everywhere except on alpha. */
#define smp_read_barrier_depends()

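/*
 * Illustrative sketch (not part of the original header): the classic
 * message-passing use of these barriers.  The variables 'data' and
 * 'ready' and the two functions are hypothetical names.
 *
 * static int data, ready;
 *
 * static void producer(void)
 * {
 *	data = 42;
 *	smp_wmb();	// order the store to data before the store to ready
 *	ready = 1;
 * }
 *
 * static void consumer(void)
 * {
 *	while (!ready)
 *		barrier();	// spin until the flag is observed
 *	smp_rmb();	// order the load of ready before the load of data
 *	assert(data == 42);
 * }
 */
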
static inline void cpu_relax(void)
{
	barrier();
}

/*
 * PPC405_ERR77 would expand to the workaround for PowerPC 405 erratum #77;
 * it is a no-op here.  LWSYNC_ON_SMP and ISYNC_ON_SMP are spliced into the
 * atomic sequences below: lwsync before the sequence gives release
 * ordering, isync after it gives acquire ordering on SMP.
 */
#define PPC405_ERR77(ra,rb)
#define LWSYNC_ON_SMP "\n\tlwsync\n"
#define ISYNC_ON_SMP "\n\tisync\n"

#ifndef _INCLUDE_API_H

static __inline__ void atomic_inc(int *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (*v)
	: "r" (v)
	: "cc", "xer");
}
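
/*
 * Illustrative sketch (not part of the original header): atomic_inc()
 * lets concurrent threads bump a shared counter without locking.  The
 * variable and function names here are hypothetical.
 *
 * static int nr_events;
 *
 * static void record_event(void)
 * {
 *	atomic_inc(&nr_events);	// safe against concurrent increments
 * }
 */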

#endif /* #ifndef _INCLUDE_API_H */

/*
 * __xg() casts a pointer to a dummy structure large enough that gcc
 * treats the asm as potentially accessing the whole object behind the
 * pointer, rather than just a single word.
 */
struct __xchg_dummy {
	unsigned long a[100];
};
#define __xg(x) ((struct __xchg_dummy *)(x))

#ifndef _INCLUDE_API_H

/*
 * Atomic exchange
 *
 * Changes the memory location '*p' to be val and returns
 * the previous value stored there.
 */
static __always_inline unsigned long
__xchg_u32(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%3,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid xchg().
 */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
__xchg(volatile void *ptr, unsigned long x, unsigned int size)
{
	switch (size) {
	case 4:
		return __xchg_u32(ptr, x);
#ifdef CONFIG_PPC64
	case 8:
		return __xchg_u64(ptr, x);
#endif
	}
	__xchg_called_with_bad_pointer();
	return x;
}

#define xchg(ptr,x)							\
	({								\
		__typeof__(*(ptr)) _x_ = (x);				\
		(__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \
	})

#endif /* #ifndef _INCLUDE_API_H */

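/*
 * Illustrative sketch (not part of the original header): xchg() is
 * enough to build a simple test-and-set spinlock.  The variable and
 * function names are hypothetical.
 *
 * static int lock;	// 0 == unlocked, 1 == locked
 *
 * static void spin_acquire(int *l)
 * {
 *	while (xchg(l, 1))	// atomically set; loop while it was held
 *		cpu_relax();
 * }
 *
 * static void spin_release(int *l)
 * {
 *	xchg(l, 0);	// lwsync in xchg() orders the critical section
 * }
 */
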
/* Read the lower and upper 32 bits of the PowerPC timebase register. */
#define mftbl() ({unsigned long rval;	\
		asm volatile("mftbl %0" : "=r" (rval)); rval;})
#define mftbu() ({unsigned long rval;	\
		asm volatile("mftbu %0" : "=r" (rval)); rval;})

typedef unsigned long long cycles_t;

static inline cycles_t get_cycles(void)
{
	unsigned long h, l;

	/* Retry if the upper half carries between the two mftbu() reads. */
	for (;;) {
		h = mftbu();
		smp_mb();
		l = mftbl();
		smp_mb();
		if (mftbu() == h)
			return (((cycles_t)h) << 32) + l;
	}
}
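
/*
 * Illustrative sketch (not part of the original header): timing a code
 * section with get_cycles().  'do_work' is a hypothetical function, and
 * printf() requires <stdio.h>.
 *
 * cycles_t start, end;
 *
 * start = get_cycles();
 * do_work();
 * end = get_cycles();
 * printf("took %llu timebase ticks\n", end - start);
 */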
#endif /* _ARCH_PPC_H */