blob: 702ee539f87da5e0e397d091a9bd56218c4173d4 [file] [log] [blame]
Yoshinori Satod2a5f492015-05-11 02:20:06 +09001#ifndef __ARCH_H8300_ATOMIC__
2#define __ARCH_H8300_ATOMIC__
3
4#include <linux/types.h>
5#include <asm/cmpxchg.h>
6
7/*
8 * Atomic operations that C can't guarantee us. Useful for
9 * resource counting etc..
10 */
11
/* Static initializer for an atomic_t, e.g. static atomic_t a = ATOMIC_INIT(0). */
#define ATOMIC_INIT(i)	{ (i) }

/*
 * atomic_read() goes through ACCESS_ONCE so the compiler cannot cache or
 * refetch the value; atomic_set() is a plain store (atomicity here relies
 * on the irq-disabled critical sections used by the ops below).
 * Fix: parenthesize the atomic_set() argument, matching the macro hygiene
 * already used by atomic_add_negative() in this file.
 */
#define atomic_read(v)		ACCESS_ONCE((v)->counter)
#define atomic_set(v, i)	(((v)->counter) = (i))
16
17#include <linux/kernel.h>
18
/*
 * Generate atomic_<op>_return(): apply <c_op> to v->counter inside an
 * interrupts-disabled critical section and return the updated value.
 */
#define ATOMIC_OP_RETURN(op, c_op)				\
static inline int atomic_##op##_return(int i, atomic_t *v)	\
{								\
	h8300flags state;					\
	int result;						\
								\
	state = arch_local_irq_save();				\
	result = (v->counter c_op i);				\
	arch_local_irq_restore(state);				\
	return result;						\
}
30
/*
 * Generate atomic_<op>(): apply <c_op> to v->counter with interrupts
 * disabled, discarding the result.
 */
#define ATOMIC_OP(op, c_op)					\
static inline void atomic_##op(int i, atomic_t *v)		\
{								\
	h8300flags state = arch_local_irq_save();		\
								\
	v->counter c_op i;					\
	arch_local_irq_restore(state);				\
}
40
/* add/sub are needed in the value-returning form (void wrappers below). */
ATOMIC_OP_RETURN(add, +=)
ATOMIC_OP_RETURN(sub, -=)

/* The bitwise ops only need the void flavour. */
ATOMIC_OP(and, &=)
ATOMIC_OP(or, |=)
ATOMIC_OP(xor, ^=)

/* The generator macros are private to this header. */
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
50
/*
 * Derive the rest of the classic atomic API from the generated
 * atomic_add_return()/atomic_sub_return() primitives.
 * Fix: parenthesize every macro argument and wrap each expansion, so any
 * caller expression expands safely — consistent with the existing
 * atomic_add_negative() definition.
 */
#define atomic_add(i, v)		((void)atomic_add_return((i), (v)))
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

#define atomic_sub(i, v)		((void)atomic_sub_return((i), (v)))
#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)

#define atomic_inc_return(v)		atomic_add_return(1, (v))
#define atomic_dec_return(v)		atomic_sub_return(1, (v))

#define atomic_inc(v)			((void)atomic_inc_return(v))
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)

#define atomic_dec(v)			((void)atomic_dec_return(v))
#define atomic_dec_and_test(v)		(atomic_dec_return(v) == 0)
Yoshinori Satod2a5f492015-05-11 02:20:06 +090065
66static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
67{
68 int ret;
69 h8300flags flags;
70
71 flags = arch_local_irq_save();
72 ret = v->counter;
73 if (likely(ret == old))
74 v->counter = new;
75 arch_local_irq_restore(flags);
76 return ret;
77}
78
79static inline int __atomic_add_unless(atomic_t *v, int a, int u)
80{
81 int ret;
82 h8300flags flags;
83
84 flags = arch_local_irq_save();
85 ret = v->counter;
86 if (ret != u)
87 v->counter += a;
88 arch_local_irq_restore(flags);
89 return ret;
90}
91
#endif /* __ARCH_H8300_ATOMIC__ */