#if __LINUX_ARM_ARCH__ >= 6
/*
 * bitop - atomically set/clear/change a bit (no return value).
 * @instr: the ALU op applied to the byte (e.g. orr, bic, eor)
 *
 * In:   r0 = bit number, r1 = base address of the bit array
 * Out:  nothing meaningful (r0 holds the final strexb status, 0)
 * Clobbers: r0, r2, r3.  ARMv6+ only: relies on ldrexb/strexb.
 */
	.macro	bitop, instr
	mov	r2, #1
	and	r3, r0, #7		@ Get bit offset
	add	r1, r1, r0, lsr #3	@ Get byte offset
	mov	r3, r2, lsl r3		@ r3 = single-bit mask
1:	ldrexb	r2, [r1]		@ exclusive load of target byte
	\instr	r2, r2, r3		@ apply set/clear/change
	strexb	r0, r2, [r1]		@ r0 = 0 on success, 1 if reservation lost
	cmp	r0, #0			@ must be unconditional: flags on macro
					@ entry are unpredictable, so the old
					@ "cmpne" could skip the retry test and
					@ silently drop a failed store
	bne	1b
	mov	pc, lr
	.endm
| 14 | |
/*
 * testop - implement a test_and_xxx_bit operation (ARMv6 ldrex/strex form).
 * @instr: ALU op that computes the new byte value into ip
 * @store: accepted for signature parity with the pre-ARMv6 variant but
 *         unused here — strexb is always used for the store
 *
 * In:   r0 = bit number, r1 = base address of the bit array
 * Out:  r0 = 0 if the bit was previously clear, 1 if it was set
 * Clobbers: r2, r3, ip.
 *
 * NOTE(review): "ands" leaves flags that a conditional \instr form
 * (e.g. orreq, as the pre-v6 variant supports) would execute under;
 * if the condition fails, ip is left unwritten before strexb stores
 * it — confirm callers of this variant pass an unconditional op.
 */
	.macro	testop, instr, store
	and	r3, r0, #7		@ Get bit offset
	mov	r2, #1
	add	r1, r1, r0, lsr #3	@ Get byte offset
	mov	r3, r2, lsl r3		@ create mask
1:	ldrexb	r2, [r1]		@ exclusive load of target byte
	ands	r0, r2, r3		@ save old value of bit (sets Z/N too)
	\instr	ip, r2, r3		@ toggle bit (new byte value in ip)
	strexb	r2, ip, [r1]		@ r2 = 0 on success, 1 if reservation lost
	cmp	r2, #0
	bne	1b			@ lost the reservation — retry
	cmp	r0, #0			@ re-test old bit (flags were clobbered
					@ by the cmp r2 above)
	movne	r0, #1			@ normalise old value to 0/1
2:	mov	pc, lr			@ NOTE(review): label 2 appears unused —
					@ likely vestigial from an earlier version
	.endm
#else
/*
 * bitop - set/clear/change a bit, made atomic on UP by masking IRQs
 * around the read-modify-write (pre-ARMv6: no exclusive accesses).
 * @instr: the ALU op applied to the byte (e.g. orr, bic, eor)
 *
 * In:   r0 = bit number, r1 = base address of the bit array
 * Clobbers: r2, r3, ip (ip holds the saved CPSR across the critical
 * section).
 */
	.macro	bitop, instr
	add	r1, r1, r0, lsr #3	@ r1 -> byte containing the bit
	and	r3, r0, #7		@ bit position within that byte
	mov	r2, #1
	mov	r3, r2, lsl r3		@ r3 = single-bit mask
	save_and_disable_irqs ip, r2	@ enter critical section (r2 scratch)
	ldrb	r2, [r1]
	\instr	r2, r2, r3		@ apply set/clear/change
	strb	r2, [r1]
	restore_irqs ip			@ leave critical section
	mov	pc, lr
	.endm
| 42 | |
/**
 * testop - implement a test_and_xxx_bit operation.
 * @instr: operational instruction
 * @store: store instruction
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 */
/*
 * testop - implement a test_and_xxx_bit operation (pre-ARMv6 form).
 * @instr: operational instruction (may be conditional, e.g. orreq)
 * @store: store instruction (may be conditional, e.g. streqb, so a
 *         no-op modification never dirties the cache line)
 *
 * Atomicity is provided by disabling IRQs around the read-modify-write;
 * this is UP-safe only (no exclusive loads/stores before ARMv6).
 *
 * In:   r0 = bit number, r1 = base address of the bit array
 * Out:  r0 = 0 if the bit was previously clear, 1 if it was set
 * Clobbers: r2, r3, ip.
 *
 * NOTE(review): correctness relies on the tst flags surviving through
 * \instr, \store and restore_irqs to the final moveq — confirm
 * restore_irqs only writes the CPSR control field, not the flags.
 */
	.macro	testop, instr, store
	add	r1, r1, r0, lsr #3	@ r1 -> byte containing the bit
	and	r3, r0, #7		@ r3 = bit offset within the byte
	mov	r0, #1			@ r0 = 1; shifted below to form the mask
	save_and_disable_irqs ip, r2	@ ip = saved CPSR; r2 is scratch
	ldrb	r2, [r1]
	tst	r2, r0, lsl r3		@ Z set iff bit was clear; these flags
					@ also drive conditional \instr/\store
	\instr	r2, r2, r0, lsl r3	@ compute new byte value
	\store	r2, [r1]		@ write back (possibly conditionally)
	restore_irqs ip
	moveq	r0, #0			@ bit was clear -> return 0 (else r0
					@ still holds 1 from above)
	mov	pc, lr
	.endm
#endif