#ifndef __ASM_SH_ATOMIC_IRQ_H
#define __ASM_SH_ATOMIC_IRQ_H

/*
 * Interrupt-disabling implementation of the atomic operations: on a
 * uniprocessor, bracketing a plain read-modify-write of the counter with
 * local_irq_save()/local_irq_restore() is enough to make the whole
 * operation atomic with respect to interrupts and preemption.
 */
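
/*
 * A minimal sketch of the pattern every helper below follows (illustrative
 * only, not an additional definition):
 *
 *	local_irq_save(flags);
 *	... plain read-modify-write of v->counter ...
 *	local_irq_restore(flags);
 *
 * With interrupts off, nothing else on this (single) CPU can run between
 * the read and the write back, so the update cannot be torn or lost.
 */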
static inline void atomic_add(int i, atomic_t *v)
{
	unsigned long flags;

	local_irq_save(flags);
	v->counter += i;
	local_irq_restore(flags);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	unsigned long flags;

	local_irq_save(flags);
	v->counter -= i;
	local_irq_restore(flags);
}

static inline int atomic_add_return(int i, atomic_t *v)
{
	unsigned long temp, flags;

	local_irq_save(flags);
	temp = v->counter;
	temp += i;
	v->counter = temp;
	local_irq_restore(flags);

	return temp;
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long temp, flags;

	local_irq_save(flags);
	temp = v->counter;
	temp -= i;
	v->counter = temp;
	local_irq_restore(flags);

	return temp;
}

static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	unsigned long flags;

	local_irq_save(flags);
	v->counter &= ~mask;
	local_irq_restore(flags);
}

static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	unsigned long flags;

	local_irq_save(flags);
	v->counter |= mask;
	local_irq_restore(flags);
}
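
/*
 * Illustrative sketch only, not part of the original header: the same
 * interrupt-disabling pattern extends naturally to a compare-and-exchange
 * style operation.  The name below is hypothetical; the kernel's real
 * atomic_cmpxchg() is provided elsewhere.
 */
static inline int atomic_cmpxchg_irq_sketch(atomic_t *v, int old, int new)
{
	unsigned long flags;
	int ret;

	local_irq_save(flags);
	ret = v->counter;
	if (ret == old)
		v->counter = new;	/* only update on a match */
	local_irq_restore(flags);

	return ret;
}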

#endif /* __ASM_SH_ATOMIC_IRQ_H */