#ifndef _ASM_FUTEX_H
#define _ASM_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <asm/errno.h>
#include <asm/system.h>
#include <asm/uaccess.h>

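/*
 * __futex_atomic_op1() - futex operations that map onto a single
 * atomic instruction ('insn', e.g. xchgl or lock xaddl) applied to the
 * user word at 'uaddr'.  A fault on the access is routed through the
 * __ex_table fixup, which stores -EFAULT in 'ret'; the previous value
 * of *uaddr ends up in 'oldval'.
 */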
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg) \
  __asm__ __volatile (						\
"1:	" insn "\n"						\
"2:	.section .fixup,\"ax\"\n\
3:	mov	%3, %1\n\
	jmp	2b\n\
	.previous\n\
	.section __ex_table,\"a\"\n\
	.align	8\n\
	.quad	1b,3b\n\
	.previous"						\
	: "=r" (oldval), "=r" (ret), "=m" (*uaddr)		\
	: "i" (-EFAULT), "m" (*uaddr), "0" (oparg), "1" (0))

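/*
 * __futex_atomic_op2() - futex operations that need a full
 * read-modify-write: the old value is loaded, 'insn' computes the new
 * value in the temporary 'tem', and a lock cmpxchgl retry loop stores
 * it back to the user word at 'uaddr'.  Faults on either the load or
 * the cmpxchgl are routed through the __ex_table fixup, which stores
 * -EFAULT in 'ret'; the value seen before the update is returned in
 * 'oldval'.
 */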
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg) \
  __asm__ __volatile (						\
"1:	movl	%2, %0\n\
	movl	%0, %3\n"					\
	insn "\n"						\
"2:	" LOCK_PREFIX "cmpxchgl %3, %2\n\
	jnz	1b\n\
3:	.section .fixup,\"ax\"\n\
4:	mov	%5, %1\n\
	jmp	3b\n\
	.previous\n\
	.section __ex_table,\"a\"\n\
	.align	8\n\
	.quad	1b,4b,2b,4b\n\
	.previous"						\
	: "=&a" (oldval), "=&r" (ret), "=m" (*uaddr),		\
	  "=&r" (tem)						\
	: "r" (oparg), "i" (-EFAULT), "m" (*uaddr), "1" (0))

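/*
 * futex_atomic_op_inuser() - decode 'encoded_op', atomically apply the
 * operation to the user word at 'uaddr', then evaluate the requested
 * comparison against the old value.  The layout of 'encoded_op' (as
 * packed by FUTEX_OP() in <linux/futex.h>) follows from the decode
 * arithmetic below:
 *
 *	bit     31: FUTEX_OP_OPARG_SHIFT - use (1 << oparg) as the argument
 *	bits 28-30: operation (FUTEX_OP_SET, _ADD, _OR, _ANDN, _XOR)
 *	bits 24-27: comparison (FUTEX_OP_CMP_EQ, _NE, _LT, _GE, _LE, _GT)
 *	bits 12-23: operation argument (sign-extended)
 *	bits  0-11: comparison argument (sign-extended)
 *
 * Returns the boolean result of the comparison, or a negative errno
 * (-EFAULT, -ENOSYS) on failure.
 */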
static inline int
futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret, tem;
	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

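	/*
	 * Raising the preempt count keeps the fault handler from
	 * servicing a fault on the user access here; the access takes
	 * the __ex_table fixup path and returns -EFAULT instead, so
	 * the caller can fault the page in and retry.
	 */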
	inc_preempt_count();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
				   uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op2("orl %4, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op2("andl %4, %3", ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	dec_preempt_count();

	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}
	return ret;
}

#endif
#endif