
#if __LINUX_ARM_ARCH__ >= 6 && defined(CONFIG_CPU_32v6K)
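/**
 * bitop - implement a xxx_bit operation (set/clear/change a single bit).
 * @instr: operational instruction (e.g. orr, bic, eor)
 *
 * r0 holds the bit number and r1 the base address of the bitmap; the
 * affected byte is updated with an exclusive load/store retry loop.
 */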
	.macro	bitop, instr
	mov	r2, #1
	and	r3, r0, #7		@ Get bit offset
	add	r1, r1, r0, lsr #3	@ Get byte offset
	mov	r3, r2, lsl r3
1:	ldrexb	r2, [r1]
	\instr	r2, r2, r3
	strexb	r0, r2, [r1]
	cmp	r0, #0
	bne	1b
	mov	pc, lr
	.endm

	.macro	testop, instr, store
	and	r3, r0, #7		@ Get bit offset
	mov	r2, #1
	add	r1, r1, r0, lsr #3	@ Get byte offset
	mov	r3, r2, lsl r3		@ create mask
1:	ldrexb	r2, [r1]
	ands	r0, r2, r3		@ save old value of bit
	\instr	r2, r2, r3		@ toggle bit
	strexb	ip, r2, [r1]
	cmp	ip, #0
	bne	1b
	cmp	r0, #0
	movne	r0, #1
2:	mov	pc, lr
	.endm
#else
	.macro	bitop, instr
	and	r2, r0, #7		@ Get bit offset
	mov	r3, #1
	mov	r3, r3, lsl r2		@ create mask
	save_and_disable_irqs ip
	ldrb	r2, [r1, r0, lsr #3]	@ load byte containing the bit
	\instr	r2, r2, r3
	strb	r2, [r1, r0, lsr #3]	@ store it back
	restore_irqs ip
	mov	pc, lr
	.endm

/**
 * testop - implement a test_and_xxx_bit operation.
 * @instr: operational instruction
 * @store: store instruction
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 */
	.macro	testop, instr, store
	add	r1, r1, r0, lsr #3	@ Get byte offset
	and	r3, r0, #7		@ Get bit offset
	mov	r0, #1
	save_and_disable_irqs ip
	ldrb	r2, [r1]
	tst	r2, r0, lsl r3		@ test old value of bit
	\instr	r2, r2, r0, lsl r3
	\store	r2, [r1]
	restore_irqs ip
	moveq	r0, #0			@ return 0 if the bit was clear
	mov	pc, lr
	.endm
#endif
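/*
 * Usage sketch (illustrative only; the entry-point names and macro
 * arguments below are assumptions about the callers that include this
 * header, not part of this file):
 *
 *	ENTRY(_set_bit_le)
 *		bitop	orr
 *
 *	ENTRY(_test_and_set_bit_le)
 *		testop	orreq, streqb
 *
 * In the non-v6K path, the conditional store (streqb) means the byte is
 * only written back when the bit actually changes, matching the note
 * above about not dirtying the data cache.
 */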