/* align.c - handle alignment exceptions for the Power PC.
 *
 * Copyright (c) 1996 Paul Mackerras <paulus@cs.anu.edu.au>
 * Copyright (c) 1998-1999 TiVo, Inc.
 *   PowerPC 403GCX modifications.
 * Copyright (c) 1999 Grant Erickson <grant@lcse.umn.edu>
 *   PowerPC 403GCX/405GP modifications.
 * Copyright (c) 2001-2002 PPC64 team, IBM Corp
 *   64-bit and Power4 support
 * Copyright (c) 2005 Benjamin Herrenschmidt, IBM Corp
 *                    <benh@kernel.crashing.org>
 *   Merge ppc32 and ppc64 implementations
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */

#include <linux/kernel.h>
#include <linux/mm.h>
#include <asm/processor.h>
#include <asm/uaccess.h>
#include <asm/system.h>
#include <asm/cache.h>
#include <asm/cputable.h>
#include <asm/emulated_ops.h>

struct aligninfo {
	unsigned char len;
	unsigned char flags;
};

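/*
 * Decode helpers: the primary opcode sits in the top 6 bits of the
 * instruction word.  Opcode 31 is the X-form (indexed) load/store
 * group; opcodes 56 and above are treated as DS-form doubleword
 * loads/stores (ld/ldu/lwa, std/stdu and friends) here.
 */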
#define IS_XFORM(inst)	(((inst) >> 26) == 31)
#define IS_DSFORM(inst)	(((inst) >> 26) >= 56)

#define INVALID	{ 0, 0 }

/* Bits in the flags field */
#define LD	0	/* load */
#define ST	1	/* store */
#define SE	2	/* sign-extend value, or FP ld/st as word */
#define F	4	/* to/from fp regs */
#define U	8	/* update index register */
#define M	0x10	/* multiple load/store */
#define SW	0x20	/* byte swap */
#define S	0x40	/* single-precision fp or... */
#define SX	0x40	/* ... byte count in XER */
#define HARD	0x80	/* string, stwcx. */
#define E4	0x40	/* SPE endianness is word */
#define E8	0x80	/* SPE endianness is double word */
#define SPLT	0x80	/* VSX SPLAT load */

/* DSISR bits reported for a DCBZ instruction: */
#define DCBZ	0x5f	/* 8xx/82xx dcbz faults when cache not enabled */

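/* SWAP() needs a scratch variable 't' declared in the enclosing scope. */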
#define SWAP(a, b)	(t = (a), (a) = (b), (b) = t)

/*
 * The PowerPC stores certain bits of the instruction that caused the
 * alignment exception in the DSISR register.  This array maps those
 * bits to information about the operand length and what the
 * instruction would do.
 */
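/* The comment on each entry shows its 7-bit index, as assembled in fix_alignment(). */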
static struct aligninfo aligninfo[128] = {
	{ 4, LD },		/* 00 0 0000: lwz / lwarx */
	INVALID,		/* 00 0 0001 */
	{ 4, ST },		/* 00 0 0010: stw */
	INVALID,		/* 00 0 0011 */
	{ 2, LD },		/* 00 0 0100: lhz */
	{ 2, LD+SE },		/* 00 0 0101: lha */
	{ 2, ST },		/* 00 0 0110: sth */
	{ 4, LD+M },		/* 00 0 0111: lmw */
	{ 4, LD+F+S },		/* 00 0 1000: lfs */
	{ 8, LD+F },		/* 00 0 1001: lfd */
	{ 4, ST+F+S },		/* 00 0 1010: stfs */
	{ 8, ST+F },		/* 00 0 1011: stfd */
	INVALID,		/* 00 0 1100 */
	{ 8, LD },		/* 00 0 1101: ld/ldu/lwa */
	INVALID,		/* 00 0 1110 */
	{ 8, ST },		/* 00 0 1111: std/stdu */
	{ 4, LD+U },		/* 00 1 0000: lwzu */
	INVALID,		/* 00 1 0001 */
	{ 4, ST+U },		/* 00 1 0010: stwu */
	INVALID,		/* 00 1 0011 */
	{ 2, LD+U },		/* 00 1 0100: lhzu */
	{ 2, LD+SE+U },		/* 00 1 0101: lhau */
	{ 2, ST+U },		/* 00 1 0110: sthu */
	{ 4, ST+M },		/* 00 1 0111: stmw */
	{ 4, LD+F+S+U },	/* 00 1 1000: lfsu */
	{ 8, LD+F+U },		/* 00 1 1001: lfdu */
	{ 4, ST+F+S+U },	/* 00 1 1010: stfsu */
	{ 8, ST+F+U },		/* 00 1 1011: stfdu */
	{ 16, LD+F },		/* 00 1 1100: lfdp */
	INVALID,		/* 00 1 1101 */
	{ 16, ST+F },		/* 00 1 1110: stfdp */
	INVALID,		/* 00 1 1111 */
	{ 8, LD },		/* 01 0 0000: ldx */
	INVALID,		/* 01 0 0001 */
	{ 8, ST },		/* 01 0 0010: stdx */
	INVALID,		/* 01 0 0011 */
	INVALID,		/* 01 0 0100 */
	{ 4, LD+SE },		/* 01 0 0101: lwax */
	INVALID,		/* 01 0 0110 */
	INVALID,		/* 01 0 0111 */
	{ 4, LD+M+HARD+SX },	/* 01 0 1000: lswx */
	{ 4, LD+M+HARD },	/* 01 0 1001: lswi */
	{ 4, ST+M+HARD+SX },	/* 01 0 1010: stswx */
	{ 4, ST+M+HARD },	/* 01 0 1011: stswi */
	INVALID,		/* 01 0 1100 */
	{ 8, LD+U },		/* 01 0 1101: ldu */
	INVALID,		/* 01 0 1110 */
	{ 8, ST+U },		/* 01 0 1111: stdu */
	{ 8, LD+U },		/* 01 1 0000: ldux */
	INVALID,		/* 01 1 0001 */
	{ 8, ST+U },		/* 01 1 0010: stdux */
	INVALID,		/* 01 1 0011 */
	INVALID,		/* 01 1 0100 */
	{ 4, LD+SE+U },		/* 01 1 0101: lwaux */
	INVALID,		/* 01 1 0110 */
	INVALID,		/* 01 1 0111 */
	INVALID,		/* 01 1 1000 */
	INVALID,		/* 01 1 1001 */
	INVALID,		/* 01 1 1010 */
	INVALID,		/* 01 1 1011 */
	INVALID,		/* 01 1 1100 */
	INVALID,		/* 01 1 1101 */
	INVALID,		/* 01 1 1110 */
	INVALID,		/* 01 1 1111 */
	INVALID,		/* 10 0 0000 */
	INVALID,		/* 10 0 0001 */
	INVALID,		/* 10 0 0010: stwcx. */
	INVALID,		/* 10 0 0011 */
	INVALID,		/* 10 0 0100 */
	INVALID,		/* 10 0 0101 */
	INVALID,		/* 10 0 0110 */
	INVALID,		/* 10 0 0111 */
	{ 4, LD+SW },		/* 10 0 1000: lwbrx */
	INVALID,		/* 10 0 1001 */
	{ 4, ST+SW },		/* 10 0 1010: stwbrx */
	INVALID,		/* 10 0 1011 */
	{ 2, LD+SW },		/* 10 0 1100: lhbrx */
	{ 4, LD+SE },		/* 10 0 1101: lwa */
	{ 2, ST+SW },		/* 10 0 1110: sthbrx */
	INVALID,		/* 10 0 1111 */
	INVALID,		/* 10 1 0000 */
	INVALID,		/* 10 1 0001 */
	INVALID,		/* 10 1 0010 */
	INVALID,		/* 10 1 0011 */
	INVALID,		/* 10 1 0100 */
	INVALID,		/* 10 1 0101 */
	INVALID,		/* 10 1 0110 */
	INVALID,		/* 10 1 0111 */
	INVALID,		/* 10 1 1000 */
	INVALID,		/* 10 1 1001 */
	INVALID,		/* 10 1 1010 */
	INVALID,		/* 10 1 1011 */
	INVALID,		/* 10 1 1100 */
	INVALID,		/* 10 1 1101 */
	INVALID,		/* 10 1 1110 */
	{ 0, ST+HARD },		/* 10 1 1111: dcbz */
	{ 4, LD },		/* 11 0 0000: lwzx */
	INVALID,		/* 11 0 0001 */
	{ 4, ST },		/* 11 0 0010: stwx */
	INVALID,		/* 11 0 0011 */
	{ 2, LD },		/* 11 0 0100: lhzx */
	{ 2, LD+SE },		/* 11 0 0101: lhax */
	{ 2, ST },		/* 11 0 0110: sthx */
	INVALID,		/* 11 0 0111 */
	{ 4, LD+F+S },		/* 11 0 1000: lfsx */
	{ 8, LD+F },		/* 11 0 1001: lfdx */
	{ 4, ST+F+S },		/* 11 0 1010: stfsx */
	{ 8, ST+F },		/* 11 0 1011: stfdx */
	{ 16, LD+F },		/* 11 0 1100: lfdpx */
	{ 4, LD+F+SE },		/* 11 0 1101: lfiwax */
	{ 16, ST+F },		/* 11 0 1110: stfdpx */
	{ 4, ST+F },		/* 11 0 1111: stfiwx */
	{ 4, LD+U },		/* 11 1 0000: lwzux */
	INVALID,		/* 11 1 0001 */
	{ 4, ST+U },		/* 11 1 0010: stwux */
	INVALID,		/* 11 1 0011 */
	{ 2, LD+U },		/* 11 1 0100: lhzux */
	{ 2, LD+SE+U },		/* 11 1 0101: lhaux */
	{ 2, ST+U },		/* 11 1 0110: sthux */
	INVALID,		/* 11 1 0111 */
	{ 4, LD+F+S+U },	/* 11 1 1000: lfsux */
	{ 8, LD+F+U },		/* 11 1 1001: lfdux */
	{ 4, ST+F+S+U },	/* 11 1 1010: stfsux */
	{ 8, ST+F+U },		/* 11 1 1011: stfdux */
	INVALID,		/* 11 1 1100 */
	{ 4, LD+F },		/* 11 1 1101: lfiwzx */
	INVALID,		/* 11 1 1110 */
	INVALID,		/* 11 1 1111 */
};

/*
 * Create a DSISR value from the instruction
 */
static inline unsigned make_dsisr(unsigned instr)
{
	unsigned dsisr;

	/* bits  6:15 --> 22:31 */
	dsisr = (instr & 0x03ff0000) >> 16;

	if (IS_XFORM(instr)) {
		/* bits 29:30 --> 15:16 */
		dsisr |= (instr & 0x00000006) << 14;
		/* bit     25 -->    17 */
		dsisr |= (instr & 0x00000040) << 8;
		/* bits 21:24 --> 18:21 */
		dsisr |= (instr & 0x00000780) << 3;
	} else {
		/* bit      5 -->    17 */
		dsisr |= (instr & 0x04000000) >> 12;
		/* bits  1: 4 --> 18:21 */
		dsisr |= (instr & 0x78000000) >> 17;
		/* bits 30:31 --> 12:13 */
		if (IS_DSFORM(instr))
			dsisr |= (instr & 0x00000003) << 18;
	}

	return dsisr;
}

/*
 * The dcbz (data cache block zero) instruction
 * gives an alignment fault if used on non-cacheable
 * memory.  We handle the fault mainly for the
 * case when we are running with the cache disabled
 * for debugging.
 */
static int emulate_dcbz(struct pt_regs *regs, unsigned char __user *addr)
{
	long __user *p;
	int i, size;

#ifdef __powerpc64__
	size = ppc64_caches.dline_size;
#else
	size = L1_CACHE_BYTES;
#endif
	p = (long __user *) (regs->dar & -size);
	if (user_mode(regs) && !access_ok(VERIFY_WRITE, p, size))
		return -EFAULT;
	for (i = 0; i < size / sizeof(long); ++i)
		if (__put_user_inatomic(0, p+i))
			return -EFAULT;
	return 1;
}

/*
 * Emulate load & store multiple instructions
 * On 64-bit machines, these instructions only affect/use the
 * bottom 4 bytes of each register, and the loads clear the
 * top 4 bytes of the affected register.
 */
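/*
 * REG_BYTE(rp, i) addresses byte i of the packed 32-bit register images
 * starting at rp; on 64-bit, the 32-bit value sits in the low half of
 * each 64-bit GPR on the big-endian layout this code assumes, hence the
 * extra offset of 4.
 */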
#ifdef CONFIG_PPC64
#define REG_BYTE(rp, i)		*((u8 *)((rp) + ((i) >> 2)) + ((i) & 3) + 4)
#else
#define REG_BYTE(rp, i)		*((u8 *)(rp) + (i))
#endif

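/* Apply the PPC "little-endian" address swizzle (see fix_alignment()) to a byte pointer. */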
#define SWIZ_PTR(p)		((unsigned char __user *)((p) ^ swiz))

static int emulate_multiple(struct pt_regs *regs, unsigned char __user *addr,
			    unsigned int reg, unsigned int nb,
			    unsigned int flags, unsigned int instr,
			    unsigned long swiz)
{
	unsigned long *rptr;
	unsigned int nb0, i, bswiz;
	unsigned long p;

	/*
	 * We do not try to emulate 8-byte multiples as they aren't really
	 * available in our operating environments, and we don't try to
	 * emulate multiple operations in kernel land as they should never
	 * be used/generated there, at least not on unaligned boundaries.
	 */
	if (unlikely((nb > 4) || !user_mode(regs)))
		return 0;

	/* lmw, stmw, lswi/x, stswi/x */
	nb0 = 0;
	if (flags & HARD) {
		if (flags & SX) {
			nb = regs->xer & 127;
			if (nb == 0)
				return 1;
		} else {
			unsigned long pc = regs->nip ^ (swiz & 4);

			if (__get_user_inatomic(instr,
						(unsigned int __user *)pc))
				return -EFAULT;
			if (swiz == 0 && (flags & SW))
				instr = cpu_to_le32(instr);
			nb = (instr >> 11) & 0x1f;
			if (nb == 0)
				nb = 32;
		}
		if (nb + reg * 4 > 128) {
			nb0 = nb + reg * 4 - 128;
			nb = 128 - reg * 4;
		}
	} else {
		/* lmw, stmw */
		nb = (32 - reg) * 4;
	}

	if (!access_ok((flags & ST ? VERIFY_WRITE: VERIFY_READ), addr, nb+nb0))
		return -EFAULT;	/* bad address */

	rptr = &regs->gpr[reg];
	p = (unsigned long) addr;
	bswiz = (flags & SW)? 3: 0;

	if (!(flags & ST)) {
		/*
		 * This zeroes the top 4 bytes of the affected registers
		 * in 64-bit mode, and also zeroes out any remaining
		 * bytes of the last register for lsw*.
		 */
		memset(rptr, 0, ((nb + 3) / 4) * sizeof(unsigned long));
		if (nb0 > 0)
			memset(&regs->gpr[0], 0,
			       ((nb0 + 3) / 4) * sizeof(unsigned long));

		for (i = 0; i < nb; ++i, ++p)
			if (__get_user_inatomic(REG_BYTE(rptr, i ^ bswiz),
						SWIZ_PTR(p)))
				return -EFAULT;
		if (nb0 > 0) {
			rptr = &regs->gpr[0];
			addr += nb;
			for (i = 0; i < nb0; ++i, ++p)
				if (__get_user_inatomic(REG_BYTE(rptr,
								 i ^ bswiz),
							SWIZ_PTR(p)))
					return -EFAULT;
		}

	} else {
		for (i = 0; i < nb; ++i, ++p)
			if (__put_user_inatomic(REG_BYTE(rptr, i ^ bswiz),
						SWIZ_PTR(p)))
				return -EFAULT;
		if (nb0 > 0) {
			rptr = &regs->gpr[0];
			addr += nb;
			for (i = 0; i < nb0; ++i, ++p)
				if (__put_user_inatomic(REG_BYTE(rptr,
								 i ^ bswiz),
							SWIZ_PTR(p)))
					return -EFAULT;
		}
	}
	return 1;
}

/*
 * Emulate floating-point pair loads and stores.
 * Only POWER6 has these instructions, and it does true little-endian,
 * so we don't need the address swizzling.
 */
static int emulate_fp_pair(unsigned char __user *addr, unsigned int reg,
			   unsigned int flags)
{
	char *ptr0 = (char *) &current->thread.TS_FPR(reg);
	char *ptr1 = (char *) &current->thread.TS_FPR(reg+1);
	int i, ret, sw = 0;

	if (!(flags & F))
		return 0;
	if (reg & 1)
		return 0;	/* invalid form: FRS/FRT must be even */
	if (flags & SW)
		sw = 7;
	ret = 0;
	for (i = 0; i < 8; ++i) {
		if (!(flags & ST)) {
			ret |= __get_user(ptr0[i^sw], addr + i);
			ret |= __get_user(ptr1[i^sw], addr + i + 8);
		} else {
			ret |= __put_user(ptr0[i^sw], addr + i);
			ret |= __put_user(ptr1[i^sw], addr + i + 8);
		}
	}
	if (ret)
		return -EFAULT;
	return 1;	/* exception handled and fixed up */
}

#ifdef CONFIG_SPE

static struct aligninfo spe_aligninfo[32] = {
	{ 8, LD+E8 },		/* 0 00 00: evldd[x] */
	{ 8, LD+E4 },		/* 0 00 01: evldw[x] */
	{ 8, LD },		/* 0 00 10: evldh[x] */
	INVALID,		/* 0 00 11 */
	{ 2, LD },		/* 0 01 00: evlhhesplat[x] */
	INVALID,		/* 0 01 01 */
	{ 2, LD },		/* 0 01 10: evlhhousplat[x] */
	{ 2, LD+SE },		/* 0 01 11: evlhhossplat[x] */
	{ 4, LD },		/* 0 10 00: evlwhe[x] */
	INVALID,		/* 0 10 01 */
	{ 4, LD },		/* 0 10 10: evlwhou[x] */
	{ 4, LD+SE },		/* 0 10 11: evlwhos[x] */
	{ 4, LD+E4 },		/* 0 11 00: evlwwsplat[x] */
	INVALID,		/* 0 11 01 */
	{ 4, LD },		/* 0 11 10: evlwhsplat[x] */
	INVALID,		/* 0 11 11 */

	{ 8, ST+E8 },		/* 1 00 00: evstdd[x] */
	{ 8, ST+E4 },		/* 1 00 01: evstdw[x] */
	{ 8, ST },		/* 1 00 10: evstdh[x] */
	INVALID,		/* 1 00 11 */
	INVALID,		/* 1 01 00 */
	INVALID,		/* 1 01 01 */
	INVALID,		/* 1 01 10 */
	INVALID,		/* 1 01 11 */
	{ 4, ST },		/* 1 10 00: evstwhe[x] */
	INVALID,		/* 1 10 01 */
	{ 4, ST },		/* 1 10 10: evstwho[x] */
	INVALID,		/* 1 10 11 */
	{ 4, ST+E4 },		/* 1 11 00: evstwwe[x] */
	INVALID,		/* 1 11 01 */
	{ 4, ST+E4 },		/* 1 11 10: evstwwo[x] */
	INVALID,		/* 1 11 11 */
};

#define	EVLDD		0x00
#define	EVLDW		0x01
#define	EVLDH		0x02
#define	EVLHHESPLAT	0x04
#define	EVLHHOUSPLAT	0x06
#define	EVLHHOSSPLAT	0x07
#define	EVLWHE		0x08
#define	EVLWHOU		0x0A
#define	EVLWHOS		0x0B
#define	EVLWWSPLAT	0x0C
#define	EVLWHSPLAT	0x0E
#define	EVSTDD		0x10
#define	EVSTDW		0x11
#define	EVSTDH		0x12
#define	EVSTWHE		0x18
#define	EVSTWHO		0x1A
#define	EVSTWWE		0x1C
#define	EVSTWWO		0x1E

/*
 * Emulate SPE loads and stores.
 * Only Book-E has these instructions, and it does true little-endian,
 * so we don't need the address swizzling.
 */
static int emulate_spe(struct pt_regs *regs, unsigned int reg,
		       unsigned int instr)
{
	int t, ret;
	union {
		u64 ll;
		u32 w[2];
		u16 h[4];
		u8 v[8];
	} data, temp;
	unsigned char __user *p, *addr;
	unsigned long *evr = &current->thread.evr[reg];
	unsigned int nb, flags;

	instr = (instr >> 1) & 0x1f;

	/* DAR has the operand effective address */
	addr = (unsigned char __user *)regs->dar;

	nb = spe_aligninfo[instr].len;
	flags = spe_aligninfo[instr].flags;

	/* Verify the address of the operand */
	if (unlikely(user_mode(regs) &&
		     !access_ok((flags & ST ? VERIFY_WRITE : VERIFY_READ),
				addr, nb)))
		return -EFAULT;

	/* userland only */
	if (unlikely(!user_mode(regs)))
		return 0;

	flush_spe_to_thread(current);

	/* If we are loading, get the data from user space, else
	 * get it from register values
	 */
	if (flags & ST) {
		data.ll = 0;
		switch (instr) {
		case EVSTDD:
		case EVSTDW:
		case EVSTDH:
			data.w[0] = *evr;
			data.w[1] = regs->gpr[reg];
			break;
		case EVSTWHE:
			data.h[2] = *evr >> 16;
			data.h[3] = regs->gpr[reg] >> 16;
			break;
		case EVSTWHO:
			data.h[2] = *evr & 0xffff;
			data.h[3] = regs->gpr[reg] & 0xffff;
			break;
		case EVSTWWE:
			data.w[1] = *evr;
			break;
		case EVSTWWO:
			data.w[1] = regs->gpr[reg];
			break;
		default:
			return -EINVAL;
		}
	} else {
		temp.ll = data.ll = 0;
		ret = 0;
		p = addr;

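		/*
		 * The switch cases below fall through on purpose: an
		 * 8-byte access also picks up the 4- and 2-byte tails.
		 */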
		switch (nb) {
		case 8:
			ret |= __get_user_inatomic(temp.v[0], p++);
			ret |= __get_user_inatomic(temp.v[1], p++);
			ret |= __get_user_inatomic(temp.v[2], p++);
			ret |= __get_user_inatomic(temp.v[3], p++);
		case 4:
			ret |= __get_user_inatomic(temp.v[4], p++);
			ret |= __get_user_inatomic(temp.v[5], p++);
		case 2:
			ret |= __get_user_inatomic(temp.v[6], p++);
			ret |= __get_user_inatomic(temp.v[7], p++);
			if (unlikely(ret))
				return -EFAULT;
		}

		switch (instr) {
		case EVLDD:
		case EVLDW:
		case EVLDH:
			data.ll = temp.ll;
			break;
		case EVLHHESPLAT:
			data.h[0] = temp.h[3];
			data.h[2] = temp.h[3];
			break;
		case EVLHHOUSPLAT:
		case EVLHHOSSPLAT:
			data.h[1] = temp.h[3];
			data.h[3] = temp.h[3];
			break;
		case EVLWHE:
			data.h[0] = temp.h[2];
			data.h[2] = temp.h[3];
			break;
		case EVLWHOU:
		case EVLWHOS:
			data.h[1] = temp.h[2];
			data.h[3] = temp.h[3];
			break;
		case EVLWWSPLAT:
			data.w[0] = temp.w[1];
			data.w[1] = temp.w[1];
			break;
		case EVLWHSPLAT:
			data.h[0] = temp.h[2];
			data.h[1] = temp.h[2];
			data.h[2] = temp.h[3];
			data.h[3] = temp.h[3];
			break;
		default:
			return -EINVAL;
		}
	}

	if (flags & SW) {
		switch (flags & 0xf0) {
		case E8:
			SWAP(data.v[0], data.v[7]);
			SWAP(data.v[1], data.v[6]);
			SWAP(data.v[2], data.v[5]);
			SWAP(data.v[3], data.v[4]);
			break;
		case E4:
			SWAP(data.v[0], data.v[3]);
			SWAP(data.v[1], data.v[2]);
			SWAP(data.v[4], data.v[7]);
			SWAP(data.v[5], data.v[6]);
			break;
		/* It's half-word endian */
		default:
			SWAP(data.v[0], data.v[1]);
			SWAP(data.v[2], data.v[3]);
			SWAP(data.v[4], data.v[5]);
			SWAP(data.v[6], data.v[7]);
			break;
		}
	}

	if (flags & SE) {
		data.w[0] = (s16)data.h[1];
		data.w[1] = (s16)data.h[3];
	}

	/* Store result to memory or update registers */
	if (flags & ST) {
		ret = 0;
		p = addr;
		switch (nb) {
		case 8:
			ret |= __put_user_inatomic(data.v[0], p++);
			ret |= __put_user_inatomic(data.v[1], p++);
			ret |= __put_user_inatomic(data.v[2], p++);
			ret |= __put_user_inatomic(data.v[3], p++);
		case 4:
			ret |= __put_user_inatomic(data.v[4], p++);
			ret |= __put_user_inatomic(data.v[5], p++);
		case 2:
			ret |= __put_user_inatomic(data.v[6], p++);
			ret |= __put_user_inatomic(data.v[7], p++);
		}
		if (unlikely(ret))
			return -EFAULT;
	} else {
		*evr = data.w[0];
		regs->gpr[reg] = data.w[1];
	}

	return 1;
}
#endif /* CONFIG_SPE */

#ifdef CONFIG_VSX
/*
 * Emulate VSX instructions...
 */
static int emulate_vsx(unsigned char __user *addr, unsigned int reg,
		       unsigned int areg, struct pt_regs *regs,
		       unsigned int flags, unsigned int length,
		       unsigned int elsize)
{
	char *ptr;
	unsigned long *lptr;
	int ret = 0;
	int sw = 0;
	int i, j;

	flush_vsx_to_thread(current);

	if (reg < 32)
		ptr = (char *) &current->thread.TS_FPR(reg);
	else
		ptr = (char *) &current->thread.vr[reg - 32];

	lptr = (unsigned long *) ptr;

	if (flags & SW)
		sw = elsize-1;

	for (j = 0; j < length; j += elsize) {
		for (i = 0; i < elsize; ++i) {
			if (flags & ST)
				ret |= __put_user(ptr[i^sw], addr + i);
			else
				ret |= __get_user(ptr[i^sw], addr + i);
		}
		ptr  += elsize;
		addr += elsize;
	}

	if (!ret) {
		if (flags & U)
			regs->gpr[areg] = regs->dar;

		/* Splat load copies the same data to top and bottom 8 bytes */
		if (flags & SPLT)
			lptr[1] = lptr[0];
		/* For 8 byte loads, zero the top 8 bytes */
		else if (!(flags & ST) && (8 == length))
			lptr[1] = 0;
	} else
		return -EFAULT;

	return 1;
}
#endif

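/* Called from the alignment interrupt handler (see alignment_exception() in traps.c). */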
/*
 * Called on alignment exception. Attempts to fixup
 *
 * Return 1 on success
 * Return 0 if unable to handle the interrupt
 * Return -EFAULT if data address is bad
 */

int fix_alignment(struct pt_regs *regs)
{
	unsigned int instr, nb, flags, instruction = 0;
	unsigned int reg, areg;
	unsigned int dsisr;
	unsigned char __user *addr;
	unsigned long p, swiz;
	int ret, t;
	union {
		u64 ll;
		double dd;
		unsigned char v[8];
		struct {
			unsigned hi32;
			int	 low32;
		} x32;
		struct {
			unsigned char hi48[6];
			short	      low16;
		} x16;
	} data;

	/*
	 * We require a complete register set; if not, then our assembly
	 * is broken
	 */
	CHECK_FULL_REGS(regs);

	dsisr = regs->dsisr;

	/* Some processors don't provide us with a DSISR we can use here,
	 * so let's make one up from the instruction
	 */
	if (cpu_has_feature(CPU_FTR_NODSISRALIGN)) {
		unsigned long pc = regs->nip;

		if (cpu_has_feature(CPU_FTR_PPC_LE) && (regs->msr & MSR_LE))
			pc ^= 4;
		if (unlikely(__get_user_inatomic(instr,
						 (unsigned int __user *)pc)))
			return -EFAULT;
		if (cpu_has_feature(CPU_FTR_REAL_LE) && (regs->msr & MSR_LE))
			instr = cpu_to_le32(instr);
		dsisr = make_dsisr(instr);
		instruction = instr;
	}

	/* extract the operation and registers from the dsisr */
	reg = (dsisr >> 5) & 0x1f;	/* source/dest register */
	areg = dsisr & 0x1f;		/* register to update */

#ifdef CONFIG_SPE
	if ((instr >> 26) == 0x4) {
		PPC_WARN_ALIGNMENT(spe, regs);
		return emulate_spe(regs, reg, instr);
	}
#endif

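	/* Assemble the 7-bit aligninfo[] index from the DSISR-reported instruction bits */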
	instr = (dsisr >> 10) & 0x7f;
	instr |= (dsisr >> 13) & 0x60;

	/* Lookup the operation in our table */
	nb = aligninfo[instr].len;
	flags = aligninfo[instr].flags;

	/* Byteswap little endian loads and stores */
	swiz = 0;
	if (regs->msr & MSR_LE) {
		flags ^= SW;
		/*
		 * So-called "PowerPC little endian" mode works by
		 * swizzling addresses rather than by actually doing
		 * any byte-swapping.  To emulate this, we XOR each
		 * byte address with 7.  We also byte-swap, because
		 * the processor's address swizzling depends on the
		 * operand size (it xors the address with 7 for bytes,
		 * 6 for halfwords, 4 for words, 0 for doublewords) but
		 * we will xor with 7 and load/store each byte separately.
		 */
		if (cpu_has_feature(CPU_FTR_PPC_LE))
			swiz = 7;
	}

	/* DAR has the operand effective address */
	addr = (unsigned char __user *)regs->dar;

#ifdef CONFIG_VSX
	if ((instruction & 0xfc00003e) == 0x7c000018) {
		unsigned int elsize;

		/* Additional register addressing bit (64 VSX vs 32 FPR/GPR) */
		reg |= (instruction & 0x1) << 5;
		/* Simple inline decoder instead of a table */
		/* VSX has only 8 and 16 byte memory accesses */
		nb = 8;
		if (instruction & 0x200)
			nb = 16;

		/* Vector stores in little-endian mode swap individual
		   elements, so process them separately */
		elsize = 4;
		if (instruction & 0x80)
			elsize = 8;

		flags = 0;
		if (regs->msr & MSR_LE)
			flags |= SW;
		if (instruction & 0x100)
			flags |= ST;
		if (instruction & 0x040)
			flags |= U;
		/* splat load needs a special decoder */
		if ((instruction & 0x400) == 0) {
			flags |= SPLT;
			nb = 8;
		}
		PPC_WARN_ALIGNMENT(vsx, regs);
		return emulate_vsx(addr, reg, areg, regs, flags, nb, elsize);
	}
#endif
	/* A size of 0 indicates an instruction we don't support, with
	 * the exception of DCBZ which is handled as a special case here
	 */
	if (instr == DCBZ) {
		PPC_WARN_ALIGNMENT(dcbz, regs);
		return emulate_dcbz(regs, addr);
	}
	if (unlikely(nb == 0))
		return 0;

	/* Load/Store Multiple instructions are handled in their own
	 * function
	 */
	if (flags & M) {
		PPC_WARN_ALIGNMENT(multiple, regs);
		return emulate_multiple(regs, addr, reg, nb,
					flags, instr, swiz);
	}

	/* Verify the address of the operand */
	if (unlikely(user_mode(regs) &&
		     !access_ok((flags & ST ? VERIFY_WRITE : VERIFY_READ),
				addr, nb)))
		return -EFAULT;

	/* Force the fprs into the save area so we can reference them */
	if (flags & F) {
		/* userland only */
		if (unlikely(!user_mode(regs)))
			return 0;
		flush_fp_to_thread(current);
	}

	/* Special case for 16-byte FP loads and stores */
	if (nb == 16) {
		PPC_WARN_ALIGNMENT(fp_pair, regs);
		return emulate_fp_pair(addr, reg, flags);
	}

	PPC_WARN_ALIGNMENT(unaligned, regs);

	/* If we are loading, get the data from user space, else
	 * get it from register values
	 */
	if (!(flags & ST)) {
		data.ll = 0;
		ret = 0;
		p = (unsigned long) addr;
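		/* Deliberate fall-through: larger sizes also read the smaller tails */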
		switch (nb) {
		case 8:
			ret |= __get_user_inatomic(data.v[0], SWIZ_PTR(p++));
			ret |= __get_user_inatomic(data.v[1], SWIZ_PTR(p++));
			ret |= __get_user_inatomic(data.v[2], SWIZ_PTR(p++));
			ret |= __get_user_inatomic(data.v[3], SWIZ_PTR(p++));
		case 4:
			ret |= __get_user_inatomic(data.v[4], SWIZ_PTR(p++));
			ret |= __get_user_inatomic(data.v[5], SWIZ_PTR(p++));
		case 2:
			ret |= __get_user_inatomic(data.v[6], SWIZ_PTR(p++));
			ret |= __get_user_inatomic(data.v[7], SWIZ_PTR(p++));
			if (unlikely(ret))
				return -EFAULT;
		}
	} else if (flags & F) {
		data.dd = current->thread.TS_FPR(reg);
		if (flags & S) {
			/* Single-precision FP store requires conversion... */
#ifdef CONFIG_PPC_FPU
			preempt_disable();
			enable_kernel_fp();
			cvt_df(&data.dd, (float *)&data.v[4], &current->thread);
			preempt_enable();
#else
			return 0;
#endif
		}
	} else
		data.ll = regs->gpr[reg];

	if (flags & SW) {
		switch (nb) {
		case 8:
			SWAP(data.v[0], data.v[7]);
			SWAP(data.v[1], data.v[6]);
			SWAP(data.v[2], data.v[5]);
			SWAP(data.v[3], data.v[4]);
			break;
		case 4:
			SWAP(data.v[4], data.v[7]);
			SWAP(data.v[5], data.v[6]);
			break;
		case 2:
			SWAP(data.v[6], data.v[7]);
			break;
		}
	}

	/* Perform other misc operations like sign extension
	 * or floating point single precision conversion
	 */
	switch (flags & ~(U|SW)) {
	case LD+SE:	/* sign extending integer loads */
	case LD+F+SE:	/* sign extend for lfiwax */
		if ( nb == 2 )
			data.ll = data.x16.low16;
		else	/* nb must be 4 */
			data.ll = data.x32.low32;
		break;

	/* Single-precision FP load requires conversion... */
	case LD+F+S:
#ifdef CONFIG_PPC_FPU
		preempt_disable();
		enable_kernel_fp();
		cvt_fd((float *)&data.v[4], &data.dd, &current->thread);
		preempt_enable();
#else
		return 0;
#endif
		break;
	}

	/* Store result to memory or update registers */
	if (flags & ST) {
		ret = 0;
		p = (unsigned long) addr;
		switch (nb) {
		case 8:
			ret |= __put_user_inatomic(data.v[0], SWIZ_PTR(p++));
			ret |= __put_user_inatomic(data.v[1], SWIZ_PTR(p++));
			ret |= __put_user_inatomic(data.v[2], SWIZ_PTR(p++));
			ret |= __put_user_inatomic(data.v[3], SWIZ_PTR(p++));
		case 4:
			ret |= __put_user_inatomic(data.v[4], SWIZ_PTR(p++));
			ret |= __put_user_inatomic(data.v[5], SWIZ_PTR(p++));
		case 2:
			ret |= __put_user_inatomic(data.v[6], SWIZ_PTR(p++));
			ret |= __put_user_inatomic(data.v[7], SWIZ_PTR(p++));
		}
		if (unlikely(ret))
			return -EFAULT;
	} else if (flags & F)
		current->thread.TS_FPR(reg) = data.dd;
	else
		regs->gpr[reg] = data.ll;

	/* Update RA as needed */
	if (flags & U)
		regs->gpr[areg] = regs->dar;

	return 1;
}