path: root/arch/h8300/include/asm/atomic.h
blob: 7ca73f8546cc89307b5c712ab8572b491e06e94a
#ifndef __ARCH_H8300_ATOMIC__
#define __ARCH_H8300_ATOMIC__

#include <linux/types.h>
#include <linux/kernel.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }

#define atomic_read(v)		ACCESS_ONCE((v)->counter)
#define atomic_set(v, i)	(((v)->counter) = (i))
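
/*
 * Usage sketch (editorial example, not part of the original file):
 * a counter is declared with ATOMIC_INIT() and then only ever
 * touched through the accessors, e.g.:
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_users, 1);
 *	if (atomic_read(&nr_users) == 0)
 *		...
 *
 * "nr_users" is an illustrative name, not something this file defines.
 */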

static inline int atomic_add_return(int i, atomic_t *v)
{
	h8300flags flags;
	int ret;

	flags = arch_local_irq_save();
	ret = v->counter += i;
	arch_local_irq_restore(flags);
	return ret;
}

#define atomic_add(i, v) atomic_add_return(i, v)
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
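
/*
 * Editorial note: this port is uniprocessor-only, so taking the
 * local IRQ-save/restore path around a plain C read-modify-write is
 * enough to make it atomic; an interrupt handler is the only other
 * context that could interleave with the update.
 */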

static inline int atomic_sub_return(int i, atomic_t *v)
{
	h8300flags flags;
	int ret;

	flags = arch_local_irq_save();
	ret = v->counter -= i;
	arch_local_irq_restore(flags);
	return ret;
}

#define atomic_sub(i, v) atomic_sub_return(i, v)
#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)

static inline int atomic_inc_return(atomic_t *v)
{
	h8300flags flags;
	int ret;

	flags = arch_local_irq_save();
	v->counter++;
	ret = v->counter;
	arch_local_irq_restore(flags);
	return ret;
}

#define atomic_inc(v) atomic_inc_return(v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static inline int atomic_dec_return(atomic_t *v)
{
	h8300flags flags;
	int ret;

	flags = arch_local_irq_save();
	--v->counter;
	ret = v->counter;
	arch_local_irq_restore(flags);
	return ret;
}

#define atomic_dec(v) atomic_dec_return(v)

static inline int atomic_dec_and_test(atomic_t *v)
{
	h8300flags flags;
	int ret;

	flags = arch_local_irq_save();
	--v->counter;
	ret = v->counter;
	arch_local_irq_restore(flags);
	return ret == 0;
}
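
/*
 * Usage sketch (editorial example): atomic_dec_and_test() is the
 * classic reference-count release idiom; exactly one caller sees the
 * count reach zero and frees the object:
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		kfree(obj);
 *
 * "obj" and "refcnt" are illustrative names.
 */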

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	h8300flags flags;

	flags = arch_local_irq_save();
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	arch_local_irq_restore(flags);
	return ret;
}
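
/*
 * Usage sketch (editorial example): atomic_cmpxchg() returns the
 * value it actually found, so lock-free updates are written as retry
 * loops; e.g. an increment built on cmpxchg alone:
 *
 *	int old = atomic_read(v);
 *	int seen;
 *
 *	while ((seen = atomic_cmpxchg(v, old, old + 1)) != old)
 *		old = seen;
 */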

static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	h8300flags flags;

	flags = arch_local_irq_save();
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	arch_local_irq_restore(flags);
	return ret;
}
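
/*
 * Editorial note: the generic layer in <linux/atomic.h> builds
 * atomic_add_unless() and atomic_inc_not_zero() on this helper,
 * roughly as:
 *
 *	atomic_add_unless(v, a, u)  =>  __atomic_add_unless(v, a, u) != u
 *	atomic_inc_not_zero(v)      =>  atomic_add_unless(v, 1, 0)
 */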

static inline void atomic_clear_mask(unsigned long mask, unsigned long *v)
{
	unsigned char ccr;
	unsigned long tmp;

	/*
	 * Save CCR, set the I bit to block interrupts, clear the mask
	 * bits in *v with a read-modify-write, then restore CCR.  The
	 * scratch operands are early-clobbered ("=&r") because they are
	 * written before the mask input is consumed, and the saved CCR
	 * is an output, not an (uninitialized) input.
	 */
	__asm__ __volatile__("stc ccr,%w2\n\t"
			     "orc #0x80,ccr\n\t"
			     "mov.l %0,%1\n\t"
			     "and.l %3,%1\n\t"
			     "mov.l %1,%0\n\t"
			     "ldc %w2,ccr"
			     : "+m"(*v), "=&r"(tmp), "=&r"(ccr)
			     : "g"(~(mask)));
}

static inline void atomic_set_mask(unsigned long mask, unsigned long *v)
{
	unsigned char ccr;
	unsigned long tmp;

	/*
	 * As above, but OR the mask bits into *v.  Note the input is
	 * the mask itself, not its complement.
	 */
	__asm__ __volatile__("stc ccr,%w2\n\t"
			     "orc #0x80,ccr\n\t"
			     "mov.l %0,%1\n\t"
			     "or.l %3,%1\n\t"
			     "mov.l %1,%0\n\t"
			     "ldc %w2,ccr"
			     : "+m"(*v), "=&r"(tmp), "=&r"(ccr)
			     : "g"(mask));
}
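
/*
 * Usage sketch (editorial example): the mask helpers operate on a
 * plain word rather than an atomic_t, typically a flag word shared
 * with interrupt context:
 *
 *	static unsigned long pending;
 *
 *	atomic_set_mask(1UL << 3, &pending);	sets bit 3
 *	atomic_clear_mask(1UL << 3, &pending);	clears it again
 *
 * "pending" is an illustrative name.
 */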

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#endif /* __ARCH_H8300_ATOMIC__ */