1 #ifndef _X86_64_ATOMIC_H
2 #define _X86_64_ATOMIC_H
4 /* atomic_t should be 32 bit signed type */
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
/*
 * atomic_t is a 32-bit signed counter.  The counter is volatile so the
 * compiler re-reads memory on every access rather than caching the
 * value in a register.
 */
typedef struct { volatile int counter; } atomic_t;

/* Static initializer, e.g.: atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i) { (i) }
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  A plain aligned 32-bit load is
 * atomic on x86, so no lock prefix is needed.
 */
#define atomic_read(v) ((v)->counter)
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  A plain aligned 32-bit store
 * is atomic on x86.
 */
#define atomic_set(v,i) (((v)->counter) = (i))
38 * atomic_add - add integer to atomic variable
39 * @i: integer value to add
40 * @v: pointer of type atomic_t
42 * Atomically adds @i to @v.
44 static __inline__ void atomic_add(int i, atomic_t *v)
49 :"ir" (i), "m" (v->counter));
53 * atomic_sub - subtract the atomic variable
54 * @i: integer value to subtract
55 * @v: pointer of type atomic_t
57 * Atomically subtracts @i from @v.
59 static __inline__ void atomic_sub(int i, atomic_t *v)
64 :"ir" (i), "m" (v->counter));
68 * atomic_sub_and_test - subtract value from variable and test result
69 * @i: integer value to subtract
70 * @v: pointer of type atomic_t
72 * Atomically subtracts @i from @v and returns
73 * true if the result is zero, or false for all
76 static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
81 "lock ; subl %2,%0; sete %1"
82 :"=m" (v->counter), "=qm" (c)
83 :"ir" (i), "m" (v->counter) : "memory");
88 * atomic_inc - increment atomic variable
89 * @v: pointer of type atomic_t
91 * Atomically increments @v by 1.
93 static __inline__ void atomic_inc(atomic_t *v)
102 * atomic_dec - decrement atomic variable
103 * @v: pointer of type atomic_t
105 * Atomically decrements @v by 1.
107 static __inline__ void atomic_dec(atomic_t *v)
109 __asm__ __volatile__(
116 * atomic_dec_and_test - decrement and test
117 * @v: pointer of type atomic_t
119 * Atomically decrements @v by 1 and
120 * returns true if the result is 0, or false for all other
123 static __inline__ int atomic_dec_and_test(atomic_t *v)
127 __asm__ __volatile__(
128 "lock ; decl %0; sete %1"
129 :"=m" (v->counter), "=qm" (c)
130 :"m" (v->counter) : "memory");
135 * atomic_inc_and_test - increment and test
136 * @v: pointer of type atomic_t
138 * Atomically increments @v by 1
139 * and returns true if the result is zero, or false for all
142 static __inline__ int atomic_inc_and_test(atomic_t *v)
146 __asm__ __volatile__(
147 "lock ; incl %0; sete %1"
148 :"=m" (v->counter), "=qm" (c)
149 :"m" (v->counter) : "memory");
154 * atomic_add_negative - add and test if negative
155 * @i: integer value to add
156 * @v: pointer of type atomic_t
158 * Atomically adds @i to @v and returns true
159 * if the result is negative, or false when
160 * result is greater than or equal to zero.
162 static __inline__ int atomic_add_negative(int i, atomic_t *v)
166 __asm__ __volatile__(
167 "lock ; addl %2,%0; sets %1"
168 :"=m" (v->counter), "=qm" (c)
169 :"ir" (i), "m" (v->counter) : "memory");
174 * atomic_add_return - add and return
175 * @i: integer value to add
176 * @v: pointer of type atomic_t
178 * Atomically adds @i to @v and returns @i + @v
180 static __inline__ int atomic_add_return(int i, atomic_t *v)
183 __asm__ __volatile__(
184 "lock ; xaddl %0, %1;"
186 :"m"(v->counter), "0"(i));
190 static __inline__ int atomic_sub_return(int i, atomic_t *v)
192 return atomic_add_return(-i,v);
/* Increment/decrement @v by 1 and return the new value. */
#define atomic_inc_return(v) (atomic_add_return(1,v))
#define atomic_dec_return(v) (atomic_sub_return(1,v))
/* A 64-bit atomic type; volatile forces a memory access on each read/write. */

typedef struct { volatile long counter; } atomic64_t;

/* Static initializer, e.g.: atomic64_t a = ATOMIC64_INIT(0); */
#define ATOMIC64_INIT(i) { (i) }
/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
#define atomic64_read(v) ((v)->counter)
/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i) (((v)->counter) = (i))
223 * atomic64_add - add integer to atomic64 variable
224 * @i: integer value to add
225 * @v: pointer to type atomic64_t
227 * Atomically adds @i to @v.
229 static __inline__ void atomic64_add(long i, atomic64_t *v)
231 __asm__ __volatile__(
234 :"ir" (i), "m" (v->counter));
238 * atomic64_sub - subtract the atomic64 variable
239 * @i: integer value to subtract
240 * @v: pointer to type atomic64_t
242 * Atomically subtracts @i from @v.
244 static __inline__ void atomic64_sub(long i, atomic64_t *v)
246 __asm__ __volatile__(
249 :"ir" (i), "m" (v->counter));
253 * atomic64_sub_and_test - subtract value from variable and test result
254 * @i: integer value to subtract
255 * @v: pointer to type atomic64_t
257 * Atomically subtracts @i from @v and returns
258 * true if the result is zero, or false for all
261 static __inline__ int atomic64_sub_and_test(long i, atomic64_t *v)
265 __asm__ __volatile__(
266 "lock ; subq %2,%0; sete %1"
267 :"=m" (v->counter), "=qm" (c)
268 :"ir" (i), "m" (v->counter) : "memory");
273 * atomic64_inc - increment atomic64 variable
274 * @v: pointer to type atomic64_t
276 * Atomically increments @v by 1.
278 static __inline__ void atomic64_inc(atomic64_t *v)
280 __asm__ __volatile__(
287 * atomic64_dec - decrement atomic64 variable
288 * @v: pointer to type atomic64_t
290 * Atomically decrements @v by 1.
292 static __inline__ void atomic64_dec(atomic64_t *v)
294 __asm__ __volatile__(
301 * atomic64_dec_and_test - decrement and test
302 * @v: pointer to type atomic64_t
304 * Atomically decrements @v by 1 and
305 * returns true if the result is 0, or false for all other
308 static __inline__ int atomic64_dec_and_test(atomic64_t *v)
312 __asm__ __volatile__(
313 "lock ; decq %0; sete %1"
314 :"=m" (v->counter), "=qm" (c)
315 :"m" (v->counter) : "memory");
320 * atomic64_inc_and_test - increment and test
321 * @v: pointer to type atomic64_t
323 * Atomically increments @v by 1
324 * and returns true if the result is zero, or false for all
327 static __inline__ int atomic64_inc_and_test(atomic64_t *v)
331 __asm__ __volatile__(
332 "lock ; incq %0; sete %1"
333 :"=m" (v->counter), "=qm" (c)
334 :"m" (v->counter) : "memory");
339 * atomic64_add_negative - add and test if negative
340 * @i: integer value to add
341 * @v: pointer to type atomic64_t
343 * Atomically adds @i to @v and returns true
344 * if the result is negative, or false when
345 * result is greater than or equal to zero.
347 static __inline__ int atomic64_add_negative(long i, atomic64_t *v)
351 __asm__ __volatile__(
352 "lock ; addq %2,%0; sets %1"
353 :"=m" (v->counter), "=qm" (c)
354 :"ir" (i), "m" (v->counter) : "memory");
359 * atomic64_add_return - add and return
360 * @i: integer value to add
361 * @v: pointer to type atomic64_t
363 * Atomically adds @i to @v and returns @i + @v
365 static __inline__ long atomic64_add_return(long i, atomic64_t *v)
368 __asm__ __volatile__(
369 "lock ; xaddq %0, %1;"
371 :"m"(v->counter), "0"(i));
375 static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
377 return atomic64_add_return(-i,v);
/* Increment/decrement @v by 1 and return the new value. */
#define atomic64_inc_return(v) (atomic64_add_return(1,v))
#define atomic64_dec_return(v) (atomic64_sub_return(1,v))
/*
 * Compare-and-swap / exchange on the 32-bit counter.
 * NOTE(review): cmpxchg()/xchg() are presumably supplied by the arch's
 * system header — not visible in this file; confirm.
 */
#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	for (;;) {						\
		if (unlikely(c == (u)))				\
			break;					\
		old = atomic_cmpxchg((v), c, c + (a));		\
		if (likely(old == c))				\
			break;					\
		c = old;	/* lost the race; retry with the fresh value */ \
	}							\
	c != (u);						\
})
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
/* These are x86-specific, used by some header files */

/* Atomically AND ~mask into the 32-bit word at *addr. */
#define atomic_clear_mask(mask, addr) \
__asm__ __volatile__("lock ; andl %0,%1" \
: : "r" (~(mask)),"m" (*addr) : "memory")

/* Atomically OR mask into the 32-bit word at *addr. */
#define atomic_set_mask(mask, addr) \
__asm__ __volatile__("lock ; orl %0,%1" \
: : "r" ((unsigned)mask),"m" (*(addr)) : "memory")
/* Atomic operations are already serializing on x86 */
/* Hence these need only be compiler barriers, not CPU fences. */
#define smp_mb__before_atomic_dec() barrier()
#define smp_mb__after_atomic_dec() barrier()
#define smp_mb__before_atomic_inc() barrier()
#define smp_mb__after_atomic_inc() barrier()
#include <asm-generic/atomic.h>