/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: src/sys/amd64/include/atomic.h,v 1.32 2003/11/21 03:02:00 peter Exp $
 * $DragonFly: src/sys/amd64/include/Attic/atomic.h,v 1.1 2004/02/02 08:05:52 dillon Exp $
 */
#ifndef _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_

/*
 * Various simple arithmetic on memory which is atomic in the presence
 * of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)        (*(u_char*)(P) |= (V))
 * atomic_clear_char(P, V)      (*(u_char*)(P) &= ~(V))
 * atomic_add_char(P, V)        (*(u_char*)(P) += (V))
 * atomic_subtract_char(P, V)   (*(u_char*)(P) -= (V))
 *
 * atomic_set_short(P, V)       (*(u_short*)(P) |= (V))
 * atomic_clear_short(P, V)     (*(u_short*)(P) &= ~(V))
 * atomic_add_short(P, V)       (*(u_short*)(P) += (V))
 * atomic_subtract_short(P, V)  (*(u_short*)(P) -= (V))
 *
 * atomic_set_int(P, V)         (*(u_int*)(P) |= (V))
 * atomic_clear_int(P, V)       (*(u_int*)(P) &= ~(V))
 * atomic_add_int(P, V)         (*(u_int*)(P) += (V))
 * atomic_subtract_int(P, V)    (*(u_int*)(P) -= (V))
 * atomic_readandclear_int(P)   (return  *(u_int*)P; *(u_int*)P = 0;)
 *
 * atomic_set_long(P, V)        (*(u_long*)(P) |= (V))
 * atomic_clear_long(P, V)      (*(u_long*)(P) &= ~(V))
 * atomic_add_long(P, V)        (*(u_long*)(P) += (V))
 * atomic_subtract_long(P, V)   (*(u_long*)(P) -= (V))
 * atomic_readandclear_long(P)  (return  *(u_long*)P; *(u_long*)P = 0;)
 */
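
/*
 * Usage sketch (illustrative only, not part of this header): the int
 * forms are typically used on a shared flags word that is touched from
 * interrupt context and from process context at the same time.  The
 * variable and flag names below are hypothetical.
 *
 *	volatile u_int	sc_flags;
 *	#define SCF_BUSY	0x0001
 *
 *	atomic_set_int(&sc_flags, SCF_BUSY);	(atomically: sc_flags |= SCF_BUSY)
 *	atomic_clear_int(&sc_flags, SCF_BUSY);	(atomically: sc_flags &= ~SCF_BUSY)
 *
 * Each call compiles down to a single read-modify-write instruction
 * (lock-prefixed on SMP kernels), so updates are not lost even if
 * another cpu modifies sc_flags concurrently.
 */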

/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if defined(KLD_MODULE)
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V)                     \
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);
int atomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src);

#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP)                       \
u_##TYPE        atomic_load_acq_##TYPE(volatile u_##TYPE *p);   \
void            atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#else /* !KLD_MODULE */

#ifdef __GNUC__

/*
 * For userland, assume the SMP case and use lock prefixes so that
 * the binaries will run on both types of systems.
 */
#if defined(SMP) || !defined(_KERNEL)
#define MPLOCKED        lock ;
#else
#define MPLOCKED
#endif

/*
 * The assembly is volatilized to demark potential before-and-after side
 * effects if an interrupt or SMP collision were to occur.
 */
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V)             \
static __inline void                                    \
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{                                                       \
        __asm __volatile(__XSTRING(MPLOCKED) OP         \
                         : "+m" (*p)                    \
                         : CONS (V));                   \
}                                                       \
struct __hack
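
/*
 * For reference, ATOMIC_ASM(add, int, "addl %1,%0", "ir", v) expands to
 * roughly the following on an SMP kernel (where MPLOCKED is "lock ;"):
 *
 *	static __inline void
 *	atomic_add_int(volatile u_int *p, u_int v)
 *	{
 *		__asm __volatile("lock ;" "addl %1,%0"
 *				 : "+m" (*p)
 *				 : "ir" (v));
 *	}
 *
 * i.e. one locked read-modify-write instruction on *p.  On a UP kernel
 * the lock prefix is omitted and only the __volatile marker remains to
 * discourage the compiler from eliminating or reordering the access.
 */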

#else /* !__GNUC__ */

#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V)                             \
extern void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#endif /* __GNUC__ */

/*
 * Atomic compare and set, used by the mutex functions
 *
 * if (*dst == exp) *dst = src (32 and 64 bit versions)
 *
 * Returns 0 on failure, non-zero on success
 */
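
/*
 * Illustrative sketch (not part of this header): a minimal spin lock
 * built on atomic_cmpset_int.  The lock word convention (0 = free,
 * 1 = held) and the function names are hypothetical; the inner loop
 * spins read-only until the lock looks free before retrying the
 * locked operation, and the release uses atomic_store_rel_int from
 * further down in this file.
 *
 *	static __inline void
 *	simple_spin_enter(volatile u_int *lockp)
 *	{
 *		while (atomic_cmpset_int(lockp, 0, 1) == 0) {
 *			while (*lockp != 0)
 *				;
 *		}
 *	}
 *
 *	static __inline void
 *	simple_spin_exit(volatile u_int *lockp)
 *	{
 *		atomic_store_rel_int(lockp, 0);
 *	}
 */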

#if defined(__GNUC__)

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
        int res = exp;

        __asm __volatile (
        "       " __XSTRING(MPLOCKED) " "
        "       cmpxchgl %1,%2 ;        "
        "       setz    %%al ;          "
        "       movzbl  %%al,%0 ;       "
        "1:                             "
        "# atomic_cmpset_int"
        : "+a" (res)                    /* 0 (result) */
        : "r" (src),                    /* 1 */
          "m" (*(dst))                  /* 2 */
        : "memory");

        return (res);
}

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src)
{
        long res = exp;

        __asm __volatile (
        "       " __XSTRING(MPLOCKED) " "
        "       cmpxchgq %1,%2 ;        "
        "       setz    %%al ;          "
        "       movzbq  %%al,%0 ;       "
        "1:                             "
        "# atomic_cmpset_long"
        : "+a" (res)                    /* 0 (result) */
        : "r" (src),                    /* 1 */
          "m" (*(dst))                  /* 2 */
        : "memory");

        return (res);
}
#endif /* defined(__GNUC__) */

#if defined(__GNUC__)

#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP)               \
static __inline u_##TYPE                                \
atomic_load_acq_##TYPE(volatile u_##TYPE *p)            \
{                                                       \
        u_##TYPE res;                                   \
                                                        \
        __asm __volatile(__XSTRING(MPLOCKED) LOP        \
        : "=a" (res),                   /* 0 (result) */\
          "+m" (*p)                     /* 1 */         \
        : : "memory");                                  \
                                                        \
        return (res);                                   \
}                                                       \
                                                        \
/*                                                      \
 * The XCHG instruction asserts LOCK automagically.     \
 */                                                     \
static __inline void                                    \
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{                                                       \
        __asm __volatile(SOP                            \
        : "+m" (*p),                    /* 0 */         \
          "+r" (v)                      /* 1 */         \
        : : "memory");                                  \
}                                                       \
struct __hack

#else /* !defined(__GNUC__) */

extern int atomic_cmpset_int(volatile u_int *, u_int, u_int);
extern int atomic_cmpset_long(volatile u_long *, u_long, u_long);

#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP)                               \
extern u_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p);           \
extern void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#endif /* defined(__GNUC__) */

#endif /* KLD_MODULE */

ATOMIC_ASM(set,      char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,      char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,      short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,      short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,      int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,      int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

ATOMIC_ASM(set,      long,  "orq %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    long,  "andq %1,%0",  "ir", ~v);
ATOMIC_ASM(add,      long,  "addq %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, long,  "subq %1,%0",  "ir",  v);

ATOMIC_STORE_LOAD(char, "cmpxchgb %b0,%1", "xchgb %b1,%0");
ATOMIC_STORE_LOAD(short,"cmpxchgw %w0,%1", "xchgw %w1,%0");
ATOMIC_STORE_LOAD(int,  "cmpxchgl %0,%1",  "xchgl %1,%0");
ATOMIC_STORE_LOAD(long, "cmpxchgq %0,%1",  "xchgq %1,%0");
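
/*
 * Illustrative sketch (not part of this header): the _acq/_rel pairs
 * are meant for publish/consume handshakes.  The producer fills in the
 * data and then sets a ready flag with a release store; the consumer
 * reads the flag with an acquire load before touching the data.  The
 * structure, variable, and consume() names are hypothetical.
 *
 *	static struct msg { int payload; } msgbuf;
 *	static volatile u_int msg_ready;
 *
 *	producer:
 *		msgbuf.payload = 42;
 *		atomic_store_rel_int(&msg_ready, 1);
 *
 *	consumer:
 *		if (atomic_load_acq_int(&msg_ready) != 0)
 *			consume(msgbuf.payload);
 *
 * The release store (a locked xchg) cannot pass the payload store, and
 * the acquire load cannot be passed by the payload load, so the
 * consumer never sees the flag set without the data it guards.
 */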

#undef ATOMIC_ASM
#undef ATOMIC_STORE_LOAD

#define atomic_set_acq_char             atomic_set_char
#define atomic_set_rel_char             atomic_set_char
#define atomic_clear_acq_char           atomic_clear_char
#define atomic_clear_rel_char           atomic_clear_char
#define atomic_add_acq_char             atomic_add_char
#define atomic_add_rel_char             atomic_add_char
#define atomic_subtract_acq_char        atomic_subtract_char
#define atomic_subtract_rel_char        atomic_subtract_char

#define atomic_set_acq_short            atomic_set_short
#define atomic_set_rel_short            atomic_set_short
#define atomic_clear_acq_short          atomic_clear_short
#define atomic_clear_rel_short          atomic_clear_short
#define atomic_add_acq_short            atomic_add_short
#define atomic_add_rel_short            atomic_add_short
#define atomic_subtract_acq_short       atomic_subtract_short
#define atomic_subtract_rel_short       atomic_subtract_short

#define atomic_set_acq_int              atomic_set_int
#define atomic_set_rel_int              atomic_set_int
#define atomic_clear_acq_int            atomic_clear_int
#define atomic_clear_rel_int            atomic_clear_int
#define atomic_add_acq_int              atomic_add_int
#define atomic_add_rel_int              atomic_add_int
#define atomic_subtract_acq_int         atomic_subtract_int
#define atomic_subtract_rel_int         atomic_subtract_int
#define atomic_cmpset_acq_int           atomic_cmpset_int
#define atomic_cmpset_rel_int           atomic_cmpset_int

#define atomic_set_acq_long             atomic_set_long
#define atomic_set_rel_long             atomic_set_long
#define atomic_clear_acq_long           atomic_clear_long
#define atomic_clear_rel_long           atomic_clear_long
#define atomic_add_acq_long             atomic_add_long
#define atomic_add_rel_long             atomic_add_long
#define atomic_subtract_acq_long        atomic_subtract_long
#define atomic_subtract_rel_long        atomic_subtract_long

#define atomic_cmpset_acq_ptr           atomic_cmpset_ptr
#define atomic_cmpset_rel_ptr           atomic_cmpset_ptr

#define atomic_set_8            atomic_set_char
#define atomic_set_acq_8        atomic_set_acq_char
#define atomic_set_rel_8        atomic_set_rel_char
#define atomic_clear_8          atomic_clear_char
#define atomic_clear_acq_8      atomic_clear_acq_char
#define atomic_clear_rel_8      atomic_clear_rel_char
#define atomic_add_8            atomic_add_char
#define atomic_add_acq_8        atomic_add_acq_char
#define atomic_add_rel_8        atomic_add_rel_char
#define atomic_subtract_8       atomic_subtract_char
#define atomic_subtract_acq_8   atomic_subtract_acq_char
#define atomic_subtract_rel_8   atomic_subtract_rel_char
#define atomic_load_acq_8       atomic_load_acq_char
#define atomic_store_rel_8      atomic_store_rel_char

#define atomic_set_16           atomic_set_short
#define atomic_set_acq_16       atomic_set_acq_short
#define atomic_set_rel_16       atomic_set_rel_short
#define atomic_clear_16         atomic_clear_short
#define atomic_clear_acq_16     atomic_clear_acq_short
#define atomic_clear_rel_16     atomic_clear_rel_short
#define atomic_add_16           atomic_add_short
#define atomic_add_acq_16       atomic_add_acq_short
#define atomic_add_rel_16       atomic_add_rel_short
#define atomic_subtract_16      atomic_subtract_short
#define atomic_subtract_acq_16  atomic_subtract_acq_short
#define atomic_subtract_rel_16  atomic_subtract_rel_short
#define atomic_load_acq_16      atomic_load_acq_short
#define atomic_store_rel_16     atomic_store_rel_short

#define atomic_set_32           atomic_set_int
#define atomic_set_acq_32       atomic_set_acq_int
#define atomic_set_rel_32       atomic_set_rel_int
#define atomic_clear_32         atomic_clear_int
#define atomic_clear_acq_32     atomic_clear_acq_int
#define atomic_clear_rel_32     atomic_clear_rel_int
#define atomic_add_32           atomic_add_int
#define atomic_add_acq_32       atomic_add_acq_int
#define atomic_add_rel_32       atomic_add_rel_int
#define atomic_subtract_32      atomic_subtract_int
#define atomic_subtract_acq_32  atomic_subtract_acq_int
#define atomic_subtract_rel_32  atomic_subtract_rel_int
#define atomic_load_acq_32      atomic_load_acq_int
#define atomic_store_rel_32     atomic_store_rel_int
#define atomic_cmpset_32        atomic_cmpset_int
#define atomic_cmpset_acq_32    atomic_cmpset_acq_int
#define atomic_cmpset_rel_32    atomic_cmpset_rel_int
#define atomic_readandclear_32  atomic_readandclear_int

#if !defined(WANT_FUNCTIONS)
static __inline int
atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
{

        return (atomic_cmpset_long((volatile u_long *)dst,
            (u_long)exp, (u_long)src));
}

static __inline void *
atomic_load_acq_ptr(volatile void *p)
{
        /*
         * The apparently-bogus cast to intptr_t in the following is to
         * avoid a warning from "gcc -Wbad-function-cast".
         */
        return ((void *)(intptr_t)atomic_load_acq_long((volatile u_long *)p));
}

static __inline void
atomic_store_rel_ptr(volatile void *p, void *v)
{
        atomic_store_rel_long((volatile u_long *)p, (u_long)v);
}
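
/*
 * Illustrative sketch (not part of this header): atomic_cmpset_ptr can
 * be used to push onto a singly linked list without a lock by retrying
 * until the head pointer is swapped in unchanged.  The node structure
 * and variable names are hypothetical.
 *
 *	struct node { struct node *next; };
 *	static struct node *volatile listhead;
 *
 *	static __inline void
 *	list_push(struct node *n)
 *	{
 *		struct node *old;
 *
 *		do {
 *			old = listhead;
 *			n->next = old;
 *		} while (atomic_cmpset_ptr(&listhead, old, n) == 0);
 *	}
 */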

#define ATOMIC_PTR(NAME)                                \
static __inline void                                    \
atomic_##NAME##_ptr(volatile void *p, uintptr_t v)      \
{                                                       \
        atomic_##NAME##_long((volatile u_long *)p, v);  \
}                                                       \
                                                        \
static __inline void                                    \
atomic_##NAME##_acq_ptr(volatile void *p, uintptr_t v)  \
{                                                       \
        atomic_##NAME##_acq_long((volatile u_long *)p, v);\
}                                                       \
                                                        \
static __inline void                                    \
atomic_##NAME##_rel_ptr(volatile void *p, uintptr_t v)  \
{                                                       \
        atomic_##NAME##_rel_long((volatile u_long *)p, v);\
}

ATOMIC_PTR(set)
ATOMIC_PTR(clear)
ATOMIC_PTR(add)
ATOMIC_PTR(subtract)

#undef ATOMIC_PTR

#if defined(__GNUC__)

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
        u_int result;

        __asm __volatile (
        "       xorl    %0,%0 ;         "
        "       xchgl   %1,%0 ;         "
        "# atomic_readandclear_int"
        : "=&r" (result)                /* 0 (result) */
        : "m" (*addr));                 /* 1 (addr) */

        return (result);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
        u_long result;

        __asm __volatile (
        "       xorq    %0,%0 ;         "
        "       xchgq   %1,%0 ;         "
        "# atomic_readandclear_long"
        : "=&r" (result)                /* 0 (result) */
        : "m" (*addr));                 /* 1 (addr) */

        return (result);
}

#else /* !defined(__GNUC__) */

extern u_long   atomic_readandclear_long(volatile u_long *);
extern u_int    atomic_readandclear_int(volatile u_int *);

#endif /* defined(__GNUC__) */
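
/*
 * Illustrative sketch (not part of this header): atomic_readandclear_int
 * is handy for draining a word of pending event bits in one shot, so a
 * bit set by another cpu between the read and the clear is never lost.
 * The variable and handler names are hypothetical.
 *
 *	static volatile u_int pending_events;
 *
 *	u_int events = atomic_readandclear_int(&pending_events);
 *
 *	while (events != 0) {
 *		int bit = ffs(events) - 1;
 *		events &= ~(1U << bit);
 *		handle_event(bit);
 *	}
 */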

#endif  /* !defined(WANT_FUNCTIONS) */
#endif /* ! _MACHINE_ATOMIC_H_ */