/*-
 * Copyright (c) 2010 Isilon Systems, Inc.
 * Copyright (c) 2010 iX Systems, Inc.
 * Copyright (c) 2010 Panasas, Inc.
 * Copyright (c) 2013-2015 François Tigeot
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef	_ASM_ATOMIC_H_
#define	_ASM_ATOMIC_H_

#include <sys/types.h>
#include <machine/atomic.h>
#include <linux/compiler.h>

/*
 * Counterparts of Linux's atomic_t and atomic64_t, implemented on top
 * of the native <machine/atomic.h> primitives.
 */
typedef struct {
	volatile u_int counter;
} atomic_t;

typedef struct {
	volatile u_long counter;
} atomic64_t;

#define	atomic_add(i, v)		atomic_add_return((i), (v))
#define	atomic_sub(i, v)		atomic_sub_return((i), (v))
#define	atomic_inc_return(v)		atomic_add_return(1, (v))
#define	atomic_add_negative(i, v)	(atomic_add_return((i), (v)) < 0)
#define	atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define	atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define	atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
#define	atomic_dec_return(v)		atomic_sub_return(1, (v))

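/*
 * Example (sketch; 'nrequests' and the wakeup are illustrative, not
 * part of this header): the value-returning forms let a caller test
 * the post-operation count without a separate, racy re-read.
 *
 *	static atomic_t nrequests;
 *
 *	atomic_set(&nrequests, 0);
 *	atomic_inc(&nrequests);		...one per request issued...
 *	if (atomic_dec_and_test(&nrequests))
 *		wakeup(&nrequests);	...last request retired...
 */
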
#define	atomic64_add(i, v)		atomic_add_return_long((i), (v))
#define	atomic64_sub(i, v)		atomic_sub_return_long((i), (v))

#define	atomic_xchg(p, v)		atomic_swap_int(&((p)->counter), v)
#define	atomic64_xchg(p, v)		atomic_swap_long(&((p)->counter), v)

#define	atomic_cmpset(p, o, n)		atomic_cmpset_32(&((p)->counter), o, n)

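/*
 * Example (sketch; sc->state and the STATE_* values are illustrative):
 * atomic_cmpset() follows the BSD atomic_cmpset_32() convention and
 * returns non-zero when the compare-and-set succeeded.
 *
 *	if (atomic_cmpset(&sc->state, STATE_IDLE, STATE_BUSY)) {
 *		...exclusive work...
 *		atomic_xchg(&sc->state, STATE_IDLE);
 *	}
 */
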
static inline int
atomic_add_return(int i, atomic_t *v)
{
	return i + atomic_fetchadd_int(&v->counter, i);
}

static inline int64_t
atomic_add_return_long(int64_t i, atomic64_t *v)
{
	return i + atomic_fetchadd_long(&v->counter, i);
}

static inline int
atomic_sub_return(int i, atomic_t *v)
{
	return atomic_fetchadd_int(&v->counter, -i) - i;
}

static inline int64_t
atomic_sub_return_long(int64_t i, atomic64_t *v)
{
	return atomic_fetchadd_long(&v->counter, -i) - i;
}

static inline void
atomic_set(atomic_t *v, int i)
{
	atomic_store_rel_int(&v->counter, i);
}

static inline void
atomic64_set(atomic64_t *v, long i)
{
	atomic_store_rel_long(&v->counter, i);
}

static inline int
atomic_read(atomic_t *v)
{
	return atomic_load_acq_int(&v->counter);
}

static inline int64_t
atomic64_read(atomic64_t *v)
{
	return atomic_load_acq_long(&v->counter);
}

static inline int
atomic_inc(atomic_t *v)
{
	return atomic_fetchadd_int(&v->counter, 1) + 1;
}

static inline int
atomic_dec(atomic_t *v)
{
	return atomic_fetchadd_int(&v->counter, -1) - 1;
}

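/*
 * Note: unlike Linux's void atomic_inc()/atomic_dec(), the versions
 * above return the post-operation value, so callers can write, e.g.
 * (the 'users' field and last_user_gone() are illustrative):
 *
 *	if (atomic_dec(&sc->users) == 0)
 *		last_user_gone(sc);
 */
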
/*
 * Add 'add' to v unless v currently holds 'unless'; return non-zero
 * if the addition was performed.
 */
static inline int atomic_add_unless(atomic_t *v, int add, int unless)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == unless))
			break;
		// old = atomic_cmpxchg((v), c, c + (a)); /*Linux*/
		old = atomic_cmpxchg_int(&v->counter, c, c + add);
		if (likely(old == c))
			break;
		c = old;
	}
	return (c != unless);
}

#define	atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)

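/*
 * Example (sketch; obj, cache_lookup() and the refs field are
 * illustrative): the lookup-then-reference pattern this helper exists
 * for. An object found in a shared cache may already have dropped to
 * zero references, so only take a new reference while the count is
 * still non-zero.
 *
 *	obj = cache_lookup(key);
 *	if (obj != NULL && !atomic_inc_not_zero(&obj->refs))
 *		obj = NULL;	...lost the race with the last put...
 */
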
/* atomic_clear_mask: atomically clear the bits set in mask in *addr */
#define atomic_clear_mask(mask, addr)		\
	/* atomic *addr &= ~mask; */		\
	__asm __volatile("lock andl %0, %1"	\
	:					\
	: "r" (~mask), "m" (*addr)		\
	: "memory");

/* atomic_set_mask: atomically set the bits set in mask in *addr */
#define atomic_set_mask(mask, addr)		\
	/* atomic *addr |= mask; */		\
	__asm __volatile("lock orl %0, %1"	\
	:					\
	: "r" (mask), "m" (*addr)		\
	: "memory");

#define	smp_mb__before_atomic()	cpu_ccfence()
#define	smp_mb__after_atomic()	cpu_ccfence()

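/*
 * cpu_ccfence() is DragonFly's compiler barrier; on x86 the
 * lock-prefixed atomics above already act as full memory barriers,
 * so a compiler fence should suffice for these Linux hooks.
 * Example (sketch; sc->ready and sc->pending are illustrative):
 *
 *	sc->ready = 1;
 *	smp_mb__before_atomic();
 *	atomic_inc(&sc->pending);
 */
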
#endif	/* _ASM_ATOMIC_H_ */