| Ingo Molnar | 34f192c | 2006-03-27 01:16:24 -0800 | [diff] [blame] | 1 | /* | 
|  | 2 | * linux/kernel/futex_compat.c | 
|  | 3 | * | 
 * Futex compatibility routines.
|  | 5 | * | 
|  | 6 | * Copyright 2006, Red Hat, Inc., Ingo Molnar | 
|  | 7 | */ | 
|  | 8 |  | 
|  | 9 | #include <linux/linkage.h> | 
|  | 10 | #include <linux/compat.h> | 
|  | 11 | #include <linux/futex.h> | 
|  | 12 |  | 
|  | 13 | #include <asm/uaccess.h> | 
|  | 14 |  | 
| Ingo Molnar | e3f2dde | 2006-07-29 05:17:57 +0200 | [diff] [blame] | 15 |  | 
|  | 16 | /* | 
|  | 17 | * Fetch a robust-list pointer. Bit 0 signals PI futexes: | 
|  | 18 | */ | 
|  | 19 | static inline int | 
|  | 20 | fetch_robust_entry(compat_uptr_t *uentry, struct robust_list __user **entry, | 
| Al Viro | ba46df9 | 2006-10-10 22:46:07 +0100 | [diff] [blame] | 21 | compat_uptr_t __user *head, int *pi) | 
| Ingo Molnar | e3f2dde | 2006-07-29 05:17:57 +0200 | [diff] [blame] | 22 | { | 
|  | 23 | if (get_user(*uentry, head)) | 
|  | 24 | return -EFAULT; | 
|  | 25 |  | 
|  | 26 | *entry = compat_ptr((*uentry) & ~1); | 
|  | 27 | *pi = (unsigned int)(*uentry) & 1; | 
|  | 28 |  | 
|  | 29 | return 0; | 
|  | 30 | } | 
|  | 31 |  | 
/*
 * Walk curr->robust_list (very carefully, it's a userspace list!)
 * and mark any locks found there dead, and notify any waiters.
 *
 * We silently return on any sign of list-walking problem.
 *
 * NOTE(review): every pointer in this list lives in (32-bit compat)
 * userspace and may be corrupt or malicious; nothing here may trust
 * it beyond one guarded get_user() at a time.
 */
void compat_exit_robust_list(struct task_struct *curr)
{
	struct compat_robust_list_head __user *head = curr->compat_robust_list;
	struct robust_list __user *entry, *next_entry, *pending;
	unsigned int limit = ROBUST_LIST_LIMIT, pi, next_pi, pip;
	compat_uptr_t uentry, next_uentry, upending;
	compat_long_t futex_offset;
	int rc;

	/*
	 * Fetch the list head (which was registered earlier, via
	 * sys_set_robust_list()):
	 */
	if (fetch_robust_entry(&uentry, &entry, &head->list.next, &pi))
		return;
	/*
	 * Fetch the relative futex offset (offset from each list entry
	 * to the futex word it guards):
	 */
	if (get_user(futex_offset, &head->futex_offset))
		return;
	/*
	 * Fetch any possibly pending lock-add first, and handle it
	 * if it exists:
	 */
	if (fetch_robust_entry(&upending, &pending,
			       &head->list_op_pending, &pip))
		return;

	next_entry = NULL;	/* avoid warning with gcc */
	/* The list is circular: it terminates back at &head->list. */
	while (entry != (struct robust_list __user *) &head->list) {
		/*
		 * Fetch the next entry in the list before calling
		 * handle_futex_death: the death handling can wake
		 * waiters, and the entry must not be trusted afterwards.
		 */
		rc = fetch_robust_entry(&next_uentry, &next_entry,
			(compat_uptr_t __user *)&entry->next, &next_pi);
		/*
		 * A pending lock might already be on the list, so
		 * don't process it twice:
		 */
		if (entry != pending)
			if (handle_futex_death((void __user *)entry + futex_offset,
						curr, pi))
				return;

		/* rc is checked only after the current entry was handled. */
		if (rc)
			return;
		uentry = next_uentry;
		entry = next_entry;
		pi = next_pi;
		/*
		 * Avoid excessively long or circular lists:
		 */
		if (!--limit)
			break;

		/* Lists may legitimately be long; stay preemption-friendly. */
		cond_resched();
	}
	/* Finally handle the pending lock-add, if there was one. */
	if (pending)
		handle_futex_death((void __user *)pending + futex_offset,
				   curr, pip);
}
|  | 100 |  | 
|  | 101 | asmlinkage long | 
|  | 102 | compat_sys_set_robust_list(struct compat_robust_list_head __user *head, | 
|  | 103 | compat_size_t len) | 
|  | 104 | { | 
|  | 105 | if (unlikely(len != sizeof(*head))) | 
|  | 106 | return -EINVAL; | 
|  | 107 |  | 
|  | 108 | current->compat_robust_list = head; | 
|  | 109 |  | 
|  | 110 | return 0; | 
|  | 111 | } | 
|  | 112 |  | 
/*
 * Return the compat robust-list head registered for task 'pid'
 * (0 means the current task), writing its size to *len_ptr and the
 * 32-bit user pointer to *head_ptr.
 *
 * Returns 0 on success, -ESRCH if the pid does not exist, -EPERM if
 * the caller may not inspect the target, -EFAULT on bad user pointers.
 */
asmlinkage long
compat_sys_get_robust_list(int pid, compat_uptr_t __user *head_ptr,
			   compat_size_t __user *len_ptr)
{
	struct compat_robust_list_head __user *head;
	unsigned long ret;

	if (!pid)
		head = current->compat_robust_list;
	else {
		struct task_struct *p;

		ret = -ESRCH;
		/* tasklist_lock keeps 'p' from going away while we peek. */
		read_lock(&tasklist_lock);
		p = find_task_by_pid(pid);
		if (!p)
			goto err_unlock;
		ret = -EPERM;
		/*
		 * NOTE(review): ad-hoc euid/uid credential check; later
		 * kernels use ptrace_may_access() here — confirm against
		 * the tree's security-check conventions before changing.
		 */
		if ((current->euid != p->euid) && (current->euid != p->uid) &&
		    !capable(CAP_SYS_PTRACE))
			goto err_unlock;
		/* Snapshot the pointer under the lock, then drop it. */
		head = p->compat_robust_list;
		read_unlock(&tasklist_lock);
	}

	/* put_user() after unlocking: only plain values are copied out. */
	if (put_user(sizeof(*head), len_ptr))
		return -EFAULT;
	return put_user(ptr_to_compat(head), head_ptr);

err_unlock:
	read_unlock(&tasklist_lock);

	return ret;
}
|  | 147 |  | 
| Ingo Molnar | 8f17d3a | 2006-03-27 01:16:27 -0800 | [diff] [blame] | 148 | asmlinkage long compat_sys_futex(u32 __user *uaddr, int op, u32 val, | 
| Ingo Molnar | 34f192c | 2006-03-27 01:16:24 -0800 | [diff] [blame] | 149 | struct compat_timespec __user *utime, u32 __user *uaddr2, | 
| Ingo Molnar | 8f17d3a | 2006-03-27 01:16:27 -0800 | [diff] [blame] | 150 | u32 val3) | 
| Ingo Molnar | 34f192c | 2006-03-27 01:16:24 -0800 | [diff] [blame] | 151 | { | 
| Pierre Peiffer | c19384b | 2007-05-09 02:35:02 -0700 | [diff] [blame] | 152 | struct timespec ts; | 
|  | 153 | ktime_t t, *tp = NULL; | 
| Ingo Molnar | 34f192c | 2006-03-27 01:16:24 -0800 | [diff] [blame] | 154 | int val2 = 0; | 
| Ulrich Drepper | f0ede66 | 2007-06-01 00:46:41 -0700 | [diff] [blame] | 155 | int cmd = op & FUTEX_CMD_MASK; | 
| Ingo Molnar | 34f192c | 2006-03-27 01:16:24 -0800 | [diff] [blame] | 156 |  | 
| Ulrich Drepper | f0ede66 | 2007-06-01 00:46:41 -0700 | [diff] [blame] | 157 | if (utime && (cmd == FUTEX_WAIT || cmd == FUTEX_LOCK_PI)) { | 
| Pierre Peiffer | c19384b | 2007-05-09 02:35:02 -0700 | [diff] [blame] | 158 | if (get_compat_timespec(&ts, utime)) | 
| Ingo Molnar | 34f192c | 2006-03-27 01:16:24 -0800 | [diff] [blame] | 159 | return -EFAULT; | 
| Pierre Peiffer | c19384b | 2007-05-09 02:35:02 -0700 | [diff] [blame] | 160 | if (!timespec_valid(&ts)) | 
| Thomas Gleixner | 9741ef9 | 2006-03-31 02:31:32 -0800 | [diff] [blame] | 161 | return -EINVAL; | 
| Pierre Peiffer | c19384b | 2007-05-09 02:35:02 -0700 | [diff] [blame] | 162 |  | 
|  | 163 | t = timespec_to_ktime(ts); | 
| Ulrich Drepper | f0ede66 | 2007-06-01 00:46:41 -0700 | [diff] [blame] | 164 | if (cmd == FUTEX_WAIT) | 
| Pierre Peiffer | c19384b | 2007-05-09 02:35:02 -0700 | [diff] [blame] | 165 | t = ktime_add(ktime_get(), t); | 
|  | 166 | tp = &t; | 
| Ingo Molnar | 34f192c | 2006-03-27 01:16:24 -0800 | [diff] [blame] | 167 | } | 
| Thomas Gleixner | bd19723 | 2007-06-17 21:11:10 +0200 | [diff] [blame] | 168 | if (cmd == FUTEX_REQUEUE || cmd == FUTEX_CMP_REQUEUE) | 
| Ingo Molnar | 34f192c | 2006-03-27 01:16:24 -0800 | [diff] [blame] | 169 | val2 = (int) (unsigned long) utime; | 
|  | 170 |  | 
| Pierre Peiffer | c19384b | 2007-05-09 02:35:02 -0700 | [diff] [blame] | 171 | return do_futex(uaddr, op, val, tp, uaddr2, val2, val3); | 
| Ingo Molnar | 34f192c | 2006-03-27 01:16:24 -0800 | [diff] [blame] | 172 | } |