/*
 * linux/kernel/futex_compat.c
 *
 * Futex compatibility routines.
 *
 * Copyright 2006, Red Hat, Inc., Ingo Molnar
 */

#include <linux/linkage.h>
#include <linux/compat.h>
#include <linux/nsproxy.h>
#include <linux/futex.h>

#include <asm/uaccess.h>


/*
 * Fetch a robust-list pointer. Bit 0 signals PI futexes:
 */
static inline int
fetch_robust_entry(compat_uptr_t *uentry, struct robust_list __user **entry,
                   compat_uptr_t __user *head, unsigned int *pi)
{
        if (get_user(*uentry, head))
                return -EFAULT;

        *entry = compat_ptr((*uentry) & ~1);
        *pi = (unsigned int)(*uentry) & 1;

        return 0;
}

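/*
 * Compute the user-space address of the futex word belonging to a
 * robust-list entry: the entry's address plus the relative futex_offset
 * that the task registered in its robust_list_head.
 */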
static void __user *futex_uaddr(struct robust_list __user *entry,
                                compat_long_t futex_offset)
{
        compat_uptr_t base = ptr_to_compat(entry);
        void __user *uaddr = compat_ptr(base + futex_offset);

        return uaddr;
}

/*
 * Walk curr->robust_list (very carefully, it's a userspace list!)
 * and mark any locks found there dead, and notify any waiters.
 *
 * We silently return on any sign of list-walking problem.
 */
void compat_exit_robust_list(struct task_struct *curr)
{
        struct compat_robust_list_head __user *head = curr->compat_robust_list;
        struct robust_list __user *entry, *next_entry, *pending;
        unsigned int limit = ROBUST_LIST_LIMIT, pi, pip;
        unsigned int uninitialized_var(next_pi);
        compat_uptr_t uentry, next_uentry, upending;
        compat_long_t futex_offset;
        int rc;

        if (!futex_cmpxchg_enabled)
                return;

        /*
         * Fetch the list head (which was registered earlier, via
         * sys_set_robust_list()):
         */
        if (fetch_robust_entry(&uentry, &entry, &head->list.next, &pi))
                return;
        /*
         * Fetch the relative futex offset:
         */
        if (get_user(futex_offset, &head->futex_offset))
                return;
        /*
         * Fetch any possibly pending lock-add first, and handle it
         * if it exists:
         */
        if (fetch_robust_entry(&upending, &pending,
                               &head->list_op_pending, &pip))
                return;

        next_entry = NULL;      /* avoid warning with gcc */
        while (entry != (struct robust_list __user *) &head->list) {
                /*
                 * Fetch the next entry in the list before calling
                 * handle_futex_death:
                 */
                rc = fetch_robust_entry(&next_uentry, &next_entry,
                        (compat_uptr_t __user *)&entry->next, &next_pi);
                /*
                 * A pending lock might already be on the list, so
                 * don't process it twice:
                 */
                if (entry != pending) {
                        void __user *uaddr = futex_uaddr(entry, futex_offset);

                        if (handle_futex_death(uaddr, curr, pi))
                                return;
                }
                if (rc)
                        return;
                uentry = next_uentry;
                entry = next_entry;
                pi = next_pi;
                /*
                 * Avoid excessively long or circular lists:
                 */
                if (!--limit)
                        break;

                cond_resched();
        }
        if (pending) {
                void __user *uaddr = futex_uaddr(pending, futex_offset);

                handle_futex_death(uaddr, curr, pip);
        }
}

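/*
 * Register the head of this task's compat (32-bit) robust futex list.
 * compat_exit_robust_list() walks the list at task exit to mark any
 * held locks dead and notify their waiters.
 */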
asmlinkage long
compat_sys_set_robust_list(struct compat_robust_list_head __user *head,
                           compat_size_t len)
{
        if (!futex_cmpxchg_enabled)
                return -ENOSYS;

        if (unlikely(len != sizeof(*head)))
                return -EINVAL;

        current->compat_robust_list = head;

        return 0;
}

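/*
 * Fetch the registered compat robust-list head of a task: the current
 * task if pid is 0, otherwise the task with the given pid. The caller
 * must either match the target's credentials or have CAP_SYS_PTRACE.
 */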
asmlinkage long
compat_sys_get_robust_list(int pid, compat_uptr_t __user *head_ptr,
                           compat_size_t __user *len_ptr)
{
        struct compat_robust_list_head __user *head;
        unsigned long ret;
        const struct cred *cred = current_cred(), *pcred;

        if (!futex_cmpxchg_enabled)
                return -ENOSYS;

        if (!pid)
                head = current->compat_robust_list;
        else {
                struct task_struct *p;

                ret = -ESRCH;
                rcu_read_lock();
                p = find_task_by_vpid(pid);
                if (!p)
                        goto err_unlock;
                ret = -EPERM;
                pcred = __task_cred(p);
                if (cred->euid != pcred->euid &&
                    cred->euid != pcred->uid &&
                    !capable(CAP_SYS_PTRACE))
                        goto err_unlock;
                head = p->compat_robust_list;
                rcu_read_unlock();
        }

        if (put_user(sizeof(*head), len_ptr))
                return -EFAULT;
        return put_user(ptr_to_compat(head), head_ptr);

err_unlock:
        rcu_read_unlock();

        return ret;
}

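/*
 * Compat entry point for sys_futex(): convert the 32-bit compat_timespec
 * into a ktime_t (made absolute for FUTEX_WAIT, which passes a relative
 * timeout), reinterpret utime as the val2 count for the requeue and
 * wake-op commands, and hand off to do_futex().
 */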
asmlinkage long compat_sys_futex(u32 __user *uaddr, int op, u32 val,
                struct compat_timespec __user *utime, u32 __user *uaddr2,
                u32 val3)
{
        struct timespec ts;
        ktime_t t, *tp = NULL;
        int val2 = 0;
        int cmd = op & FUTEX_CMD_MASK;

        if (utime && (cmd == FUTEX_WAIT || cmd == FUTEX_LOCK_PI ||
                      cmd == FUTEX_WAIT_BITSET ||
                      cmd == FUTEX_WAIT_REQUEUE_PI)) {
                if (get_compat_timespec(&ts, utime))
                        return -EFAULT;
                if (!timespec_valid(&ts))
                        return -EINVAL;

                t = timespec_to_ktime(ts);
                if (cmd == FUTEX_WAIT)
                        t = ktime_add_safe(ktime_get(), t);
                tp = &t;
        }
        if (cmd == FUTEX_REQUEUE || cmd == FUTEX_CMP_REQUEUE ||
            cmd == FUTEX_CMP_REQUEUE_PI || cmd == FUTEX_WAKE_OP)
                val2 = (int) (unsigned long) utime;

        return do_futex(uaddr, op, val, tp, uaddr2, val2, val3);
}