/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 95, 96, 99, 2001 Ralf Baechle
 * Copyright (C) 1994, 1995, 1996 Paul M. Antoine.
 * Copyright (C) 1999 Silicon Graphics, Inc.
 */
#ifndef _ASM_STACKFRAME_H
#define _ASM_STACKFRAME_H

#include <linux/config.h>
#include <linux/threads.h>

#include <asm/asm.h>
#include <asm/mipsregs.h>
#include <asm/offset.h>

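/*
 * SAVE_AT saves the assembler temporary $1 (at) into the trap frame;
 * .set noat keeps the assembler from silently using $1 itself here.
 */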
		.macro	SAVE_AT
		.set	push
		.set	noat
		LONG_S	$1, PT_R1(sp)
		.set	pop
		.endm

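/*
 * SAVE_TEMP saves the remaining caller-saved temporaries together with
 * the hi/lo multiply/divide results; the stores are interleaved with
 * mfhi/mflo to hide their latency.  $8 and $9 are saved here on 32-bit
 * kernels but in SAVE_SOME on 64-bit ones, where the 64-bit ABI uses
 * them as argument registers.
 */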
		.macro	SAVE_TEMP
		mfhi	v1
#ifdef CONFIG_MIPS32
		LONG_S	$8, PT_R8(sp)
		LONG_S	$9, PT_R9(sp)
#endif
		LONG_S	v1, PT_HI(sp)
		mflo	v1
		LONG_S	$10, PT_R10(sp)
		LONG_S	$11, PT_R11(sp)
		LONG_S	v1,  PT_LO(sp)
		LONG_S	$12, PT_R12(sp)
		LONG_S	$13, PT_R13(sp)
		LONG_S	$14, PT_R14(sp)
		LONG_S	$15, PT_R15(sp)
		LONG_S	$24, PT_R24(sp)
		.endm

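/*
 * SAVE_STATIC saves the callee-saved registers $16-$23 (s0..s7) and $30
 * (s8/fp).  C code preserves these anyway, so they only need to be
 * spilled where the complete register set must end up in pt_regs.
 */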
		.macro	SAVE_STATIC
		LONG_S	$16, PT_R16(sp)
		LONG_S	$17, PT_R17(sp)
		LONG_S	$18, PT_R18(sp)
		LONG_S	$19, PT_R19(sp)
		LONG_S	$20, PT_R20(sp)
		LONG_S	$21, PT_R21(sp)
		LONG_S	$22, PT_R22(sp)
		LONG_S	$23, PT_R23(sp)
		LONG_S	$30, PT_R30(sp)
		.endm

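/*
 * get_saved_sp loads the kernel stack pointer of the current CPU into k1;
 * set_saved_sp records it.  On SMP the per-CPU slot in kernelsp[] is
 * indexed by the CPU number, recovered from c0_context or from TI_CPU in
 * thread_info depending on the configuration; on UP a single kernelsp
 * variable is enough.
 */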
#ifdef CONFIG_SMP
		.macro	get_saved_sp	/* SMP variation */
#ifdef CONFIG_MIPS32
		mfc0	k0, CP0_CONTEXT
		lui	k1, %hi(kernelsp)
		srl	k0, k0, 23
		sll	k0, k0, 2
		addu	k1, k0
		LONG_L	k1, %lo(kernelsp)(k1)
#endif
#if defined(CONFIG_MIPS64) && !defined(CONFIG_BUILD_ELF64)
		MFC0	k1, CP0_CONTEXT
		dsra	k1, 23
		lui	k0, %hi(pgd_current)
		addiu	k0, %lo(pgd_current)
		dsubu	k1, k0
		lui	k0, %hi(kernelsp)
		daddu	k1, k0
		LONG_L	k1, %lo(kernelsp)(k1)
#endif
#if defined(CONFIG_MIPS64) && defined(CONFIG_BUILD_ELF64)
		MFC0	k1, CP0_CONTEXT
		dsrl	k1, 23
		dsll	k1, k1, 3
		LONG_L	k1, kernelsp(k1)
#endif
		.endm

		.macro	set_saved_sp stackp temp temp2
#ifdef CONFIG_MIPS32
		mfc0	\temp, CP0_CONTEXT
		srl	\temp, 23
		sll	\temp, 2
		LONG_S	\stackp, kernelsp(\temp)
#endif
#if defined(CONFIG_MIPS64) && !defined(CONFIG_BUILD_ELF64)
		lw	\temp, TI_CPU(gp)
		dsll	\temp, 3
		lui	\temp2, %hi(kernelsp)
		daddu	\temp, \temp2
		LONG_S	\stackp, %lo(kernelsp)(\temp)
#endif
#if defined(CONFIG_MIPS64) && defined(CONFIG_BUILD_ELF64)
		lw	\temp, TI_CPU(gp)
		dsll	\temp, 3
		LONG_S	\stackp, kernelsp(\temp)
#endif
		.endm
#else
		.macro	get_saved_sp	/* Uniprocessor variation */
		lui	k1, %hi(kernelsp)
		LONG_L	k1, %lo(kernelsp)(k1)
		.endm

		.macro	set_saved_sp stackp temp temp2
		LONG_S	\stackp, kernelsp
		.endm
#endif

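/*
 * SAVE_SOME saves what every exception path needs: if c0_status.CU0 is
 * clear (we were not yet on the kernel stack) it switches to the kernel
 * stack via get_saved_sp, allocates a struct pt_regs, then saves the old
 * sp, $0, v0/v1, the argument registers, c0_status, c0_cause, c0_epc,
 * $25, gp and ra, and finally points gp at the current thread_info.
 */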
		.macro	SAVE_SOME
		.set	push
		.set	noat
		.set	reorder
		mfc0	k0, CP0_STATUS
		sll	k0, 3		/* extract cu0 bit */
		.set	noreorder
		bltz	k0, 8f
		 move	k1, sp
		.set	reorder
		/* Called from user mode, new stack. */
		get_saved_sp
8:		move	k0, sp
		PTR_SUBU sp, k1, PT_SIZE
		LONG_S	k0, PT_R29(sp)
		LONG_S	$3, PT_R3(sp)
		LONG_S	$0, PT_R0(sp)
		mfc0	v1, CP0_STATUS
		LONG_S	$2, PT_R2(sp)
		LONG_S	v1, PT_STATUS(sp)
		LONG_S	$4, PT_R4(sp)
		mfc0	v1, CP0_CAUSE
		LONG_S	$5, PT_R5(sp)
		LONG_S	v1, PT_CAUSE(sp)
		LONG_S	$6, PT_R6(sp)
		MFC0	v1, CP0_EPC
		LONG_S	$7, PT_R7(sp)
#ifdef CONFIG_MIPS64
		LONG_S	$8, PT_R8(sp)
		LONG_S	$9, PT_R9(sp)
#endif
		LONG_S	v1, PT_EPC(sp)
		LONG_S	$25, PT_R25(sp)
		LONG_S	$28, PT_R28(sp)
		LONG_S	$31, PT_R31(sp)
		ori	$28, sp, _THREAD_MASK
		xori	$28, _THREAD_MASK
		.set	pop
		.endm

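/*
 * SAVE_ALL builds a complete struct pt_regs on the kernel stack.
 */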
		.macro	SAVE_ALL
		SAVE_SOME
		SAVE_AT
		SAVE_TEMP
		SAVE_STATIC
		.endm

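/*
 * The RESTORE_* macros below undo the corresponding SAVE_* macros.  A
 * typical exception handler uses this file roughly as follows; handle_foo
 * and do_foo are placeholder names, the real handlers are generated in
 * arch/mips/kernel/:
 *
 *	NESTED(handle_foo, PT_SIZE, sp)
 *	SAVE_ALL
 *	CLI
 *	move	a0, sp
 *	jal	do_foo
 *	j	ret_from_exception
 *	END(handle_foo)
 */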
		.macro	RESTORE_AT
		.set	push
		.set	noat
		LONG_L	$1,  PT_R1(sp)
		.set	pop
		.endm

		.macro	RESTORE_TEMP
		LONG_L	$24, PT_LO(sp)
#ifdef CONFIG_MIPS32
		LONG_L	$8, PT_R8(sp)
		LONG_L	$9, PT_R9(sp)
#endif
		mtlo	$24
		LONG_L	$24, PT_HI(sp)
		LONG_L	$10, PT_R10(sp)
		LONG_L	$11, PT_R11(sp)
		mthi	$24
		LONG_L	$12, PT_R12(sp)
		LONG_L	$13, PT_R13(sp)
		LONG_L	$14, PT_R14(sp)
		LONG_L	$15, PT_R15(sp)
		LONG_L	$24, PT_R24(sp)
		.endm

		.macro	RESTORE_STATIC
		LONG_L	$16, PT_R16(sp)
		LONG_L	$17, PT_R17(sp)
		LONG_L	$18, PT_R18(sp)
		LONG_L	$19, PT_R19(sp)
		LONG_L	$20, PT_R20(sp)
		LONG_L	$21, PT_R21(sp)
		LONG_L	$22, PT_R22(sp)
		LONG_L	$23, PT_R23(sp)
		LONG_L	$30, PT_R30(sp)
		.endm

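/*
 * RESTORE_SOME and RESTORE_SP_AND_RET come in two flavours: R3000-class
 * CPUs (and TX39xx) return from the exception with jr/rfe, while newer
 * CPUs restore c0_epc and return with eret.
 */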
#if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)

		.macro	RESTORE_SOME
		.set	push
		.set	reorder
		.set	noat
		mfc0	a0, CP0_STATUS
		ori	a0, 0x1f
		xori	a0, 0x1f
		mtc0	a0, CP0_STATUS
		li	v1, 0xff00
		and	a0, v1
		LONG_L	v0, PT_STATUS(sp)
		nor	v1, $0, v1
		and	v0, v1
		or	v0, a0
		mtc0	v0, CP0_STATUS
		LONG_L	$31, PT_R31(sp)
		LONG_L	$28, PT_R28(sp)
		LONG_L	$25, PT_R25(sp)
#ifdef CONFIG_MIPS64
		LONG_L	$8, PT_R8(sp)
		LONG_L	$9, PT_R9(sp)
#endif
		LONG_L	$7,  PT_R7(sp)
		LONG_L	$6,  PT_R6(sp)
		LONG_L	$5,  PT_R5(sp)
		LONG_L	$4,  PT_R4(sp)
		LONG_L	$3,  PT_R3(sp)
		LONG_L	$2,  PT_R2(sp)
		.set	pop
		.endm

		.macro	RESTORE_SP_AND_RET
		.set	push
		.set	noreorder
		LONG_L	k0, PT_EPC(sp)
		LONG_L	sp, PT_R29(sp)
		jr	k0
		 rfe
		.set	pop
		.endm

#else

		.macro	RESTORE_SOME
		.set	push
		.set	reorder
		.set	noat
		mfc0	a0, CP0_STATUS
		ori	a0, 0x1f
		xori	a0, 0x1f
		mtc0	a0, CP0_STATUS
		li	v1, 0xff00
		and	a0, v1
		LONG_L	v0, PT_STATUS(sp)
		nor	v1, $0, v1
		and	v0, v1
		or	v0, a0
		mtc0	v0, CP0_STATUS
		LONG_L	v1, PT_EPC(sp)
		MTC0	v1, CP0_EPC
		LONG_L	$31, PT_R31(sp)
		LONG_L	$28, PT_R28(sp)
		LONG_L	$25, PT_R25(sp)
#ifdef CONFIG_MIPS64
		LONG_L	$8, PT_R8(sp)
		LONG_L	$9, PT_R9(sp)
#endif
		LONG_L	$7,  PT_R7(sp)
		LONG_L	$6,  PT_R6(sp)
		LONG_L	$5,  PT_R5(sp)
		LONG_L	$4,  PT_R4(sp)
		LONG_L	$3,  PT_R3(sp)
		LONG_L	$2,  PT_R2(sp)
		.set	pop
		.endm

		.macro	RESTORE_SP_AND_RET
		LONG_L	sp, PT_R29(sp)
		.set	mips3
		eret
		.set	mips0
		.endm

#endif

		.macro	RESTORE_SP
		LONG_L	sp, PT_R29(sp)
		.endm

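/*
 * sp still points at the saved pt_regs while the other RESTORE_* macros
 * run, so restoring sp (or returning) always has to be the final step.
 */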
		.macro	RESTORE_ALL
		RESTORE_TEMP
		RESTORE_STATIC
		RESTORE_AT
		RESTORE_SOME
		RESTORE_SP
		.endm

		.macro	RESTORE_ALL_AND_RET
		RESTORE_TEMP
		RESTORE_STATIC
		RESTORE_AT
		RESTORE_SOME
		RESTORE_SP_AND_RET
		.endm

/*
 * Move to kernel mode and disable interrupts.
 * Set the cp0 enable bit as a sign that we're running on the kernel stack.
 * (ori/xori with 0x1f clears KSU, ERL, EXL and IE.)
 */
		.macro	CLI
		mfc0	t0, CP0_STATUS
		li	t1, ST0_CU0 | 0x1f
		or	t0, t1
		xori	t0, 0x1f
		mtc0	t0, CP0_STATUS
		irq_disable_hazard
		.endm

/*
 * Move to kernel mode and enable interrupts.
 * Set the cp0 enable bit as a sign that we're running on the kernel stack.
 * (xori with 0x1e clears KSU, ERL and EXL but leaves IE set.)
 */
		.macro	STI
		mfc0	t0, CP0_STATUS
		li	t1, ST0_CU0 | 0x1f
		or	t0, t1
		xori	t0, 0x1e
		mtc0	t0, CP0_STATUS
		irq_enable_hazard
		.endm

/*
 * Just move to kernel mode and leave interrupts as they are.
 * Set the cp0 enable bit as a sign that we're running on the kernel stack.
 */
		.macro	KMODE
		mfc0	t0, CP0_STATUS
		li	t1, ST0_CU0 | 0x1e
		or	t0, t1
		xori	t0, 0x1e
		mtc0	t0, CP0_STATUS
		irq_disable_hazard
		.endm

#endif /* _ASM_STACKFRAME_H */