| Marc Zyngier | 348b2b0 | 2013-01-21 19:36:15 -0500 | [diff] [blame] | 1 | #include <linux/irqchip/arm-gic.h> | 
|  | 2 |  | 
/* Byte offset of guest usr-mode GPR _reg_nr within the VCPU struct */
#define VCPU_USR_REG(_reg_nr)	(VCPU_USR_REGS + (_reg_nr * 4))
#define VCPU_USR_SP		(VCPU_USR_REG(13))	/* usr SP is r13 */
#define VCPU_USR_LR		(VCPU_USR_REG(14))	/* usr LR is r14 */
/* Byte offset of shadow cp15 register _cp15_reg_idx within the VCPU struct */
#define CP15_OFFSET(_cp15_reg_idx) (VCPU_CP15 + (_cp15_reg_idx * 4))
|  | 7 |  | 
/*
 * Many of these macros need to access the VCPU structure, which is always
 * held in r0. These macros should never clobber r1, as it is used to hold the
 * exception code on the return path (except of course the macro that switches
 * all the registers before the final jump to the VM).
 */
vcpu	.req	r0		@ vcpu pointer always in r0
|  | 15 |  | 
/*
 * Save the VFP/FPU state to the area pointed to by \vfp_base.
 * FPEXC, FPSCR, FPINST and FPINST2 are stored after the register file.
 * Clobbers {r2-r6}
 */
.macro store_vfp_state vfp_base
	@ The VFPFMRX and VFPFMXR macros are the VMRS and VMSR instructions
	VFPFMRX	r2, FPEXC
	@ Make sure VFP is enabled so we can touch the registers.
	orr	r6, r2, #FPEXC_EN
	VFPFMXR	FPEXC, r6

	VFPFMRX	r3, FPSCR
	tst	r2, #FPEXC_EX		@ Check for VFP Subarchitecture
	beq	1f
	@ If FPEXC_EX is 0, then FPINST/FPINST2 reads are unpredictable, so
	@ we only need to save them if FPEXC_EX is set.
	VFPFMRX r4, FPINST
	tst	r2, #FPEXC_FP2V		@ FPINST2 is only valid when FP2V is set
	VFPFMRX r5, FPINST2, ne		@ vmrsne
	bic	r6, r2, #FPEXC_EX	@ FPEXC_EX disable
	VFPFMXR	FPEXC, r6
1:
	VFPFSTMIA \vfp_base, r6		@ Save VFP registers
	stm	\vfp_base, {r2-r5}	@ Save FPEXC, FPSCR, FPINST, FPINST2
.endm
|  | 38 |  | 
/*
 * Restore the VFP/FPU state from the area pointed to by \vfp_base
 * (the layout written by store_vfp_state).
 * Assume FPEXC_EN is on and FPEXC_EX is off, clobbers {r2-r6}
 */
.macro restore_vfp_state vfp_base
	VFPFLDMIA \vfp_base, r6		@ Load VFP registers
	ldm	\vfp_base, {r2-r5}	@ Load FPEXC, FPSCR, FPINST, FPINST2

	VFPFMXR FPSCR, r3
	tst	r2, #FPEXC_EX		@ Check for VFP Subarchitecture
	beq	1f
	@ FPINST/FPINST2 were only saved when FPEXC_EX was set
	VFPFMXR FPINST, r4
	tst	r2, #FPEXC_FP2V
	VFPFMXR FPINST2, r5, ne
1:
	VFPFMXR FPEXC, r2	@ FPEXC	(last, in case !EN)
.endm
|  | 53 |  | 
/*
 * Symbolic mode names used as macro arguments so that banked-register
 * accessors like SP_\mode expand correctly.
 * These are simply for the macros to work - the values don't have meaning.
 */
.equ usr, 0
.equ svc, 1
.equ abt, 2
.equ und, 3
.equ irq, 4
.equ fiq, 5
|  | 61 |  | 
/* Push SP, LR and SPSR of the given banked mode onto the Hyp stack.
 * Clobbers r2, r3, r4. */
.macro push_host_regs_mode mode
	mrs	r2, SP_\mode
	mrs	r3, LR_\mode
	mrs	r4, SPSR_\mode
	push	{r2, r3, r4}
.endm
|  | 68 |  | 
/*
 * Store all host persistent registers on the stack.
 * Clobbers all registers, in all modes, except r0 and r1.
 *
 * Push order (must be the exact mirror of restore_host_regs):
 * ELR_hyp, {r4-r12}, SP_usr/lr, svc, abt, und, irq banked regs, fiq regs.
 */
.macro save_host_regs
	/* Hyp regs. Only ELR_hyp (SPSR_hyp already saved) */
	mrs	r2, ELR_hyp
	push	{r2}

	/* usr regs */
	push	{r4-r12}	@ r0-r3 are always clobbered
	mrs	r2, SP_usr
	mov	r3, lr		@ lr here is the Hyp-mode lr
	push	{r2, r3}

	push_host_regs_mode svc
	push_host_regs_mode abt
	push_host_regs_mode und
	push_host_regs_mode irq

	/* fiq regs: r8-r12 are banked in fiq mode, plus SP/LR/SPSR */
	mrs	r2, r8_fiq
	mrs	r3, r9_fiq
	mrs	r4, r10_fiq
	mrs	r5, r11_fiq
	mrs	r6, r12_fiq
	mrs	r7, SP_fiq
	mrs	r8, LR_fiq
	mrs	r9, SPSR_fiq
	push	{r2-r9}
.endm
|  | 100 |  | 
/* Pop SP, LR and SPSR of the given banked mode from the Hyp stack
 * (inverse of push_host_regs_mode). Clobbers r2, r3, r4. */
.macro pop_host_regs_mode mode
	pop	{r2, r3, r4}
	msr	SP_\mode, r2
	msr	LR_\mode, r3
	msr	SPSR_\mode, r4
.endm
|  | 107 |  | 
/*
 * Restore all host registers from the stack.
 * Clobbers all registers, in all modes, except r0 and r1.
 *
 * Pops in the exact reverse order of save_host_regs.
 */
.macro restore_host_regs
	pop	{r2-r9}
	msr	r8_fiq, r2
	msr	r9_fiq, r3
	msr	r10_fiq, r4
	msr	r11_fiq, r5
	msr	r12_fiq, r6
	msr	SP_fiq, r7
	msr	LR_fiq, r8
	msr	SPSR_fiq, r9

	pop_host_regs_mode irq
	pop_host_regs_mode und
	pop_host_regs_mode abt
	pop_host_regs_mode svc

	pop	{r2, r3}
	msr	SP_usr, r2
	mov	lr, r3		@ restore Hyp-mode lr

	pop	{r4-r12}

	pop	{r2}
	msr	ELR_hyp, r2
.endm
|  | 136 |  | 
/*
 * Restore SP, LR and SPSR for a given mode. offset is the offset of
 * this mode's registers from the VCPU base.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r1, r2, r3, r4.
 */
.macro restore_guest_regs_mode mode, offset
	add	r1, vcpu, \offset
	ldm	r1, {r2, r3, r4}	@ loads SP, LR, SPSR in struct order
	msr	SP_\mode, r2
	msr	LR_\mode, r3
	msr	SPSR_\mode, r4
.endm
|  | 152 |  | 
/*
 * Restore all guest registers from the vcpu struct.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers *all* registers, including the vcpu pointer itself (r0 is
 * reloaded with the guest's r0 at the very end).
 */
.macro restore_guest_regs
	restore_guest_regs_mode svc, #VCPU_SVC_REGS
	restore_guest_regs_mode abt, #VCPU_ABT_REGS
	restore_guest_regs_mode und, #VCPU_UND_REGS
	restore_guest_regs_mode irq, #VCPU_IRQ_REGS

	@ fiq has its own banked copies of r8-r12 as well as SP/LR/SPSR
	add	r1, vcpu, #VCPU_FIQ_REGS
	ldm	r1, {r2-r9}
	msr	r8_fiq, r2
	msr	r9_fiq, r3
	msr	r10_fiq, r4
	msr	r11_fiq, r5
	msr	r12_fiq, r6
	msr	SP_fiq, r7
	msr	LR_fiq, r8
	msr	SPSR_fiq, r9

	@ Load return state: ELR_hyp/SPSR become the guest PC/CPSR on eret
	ldr	r2, [vcpu, #VCPU_PC]
	ldr	r3, [vcpu, #VCPU_CPSR]
	msr	ELR_hyp, r2
	msr	SPSR_cxsf, r3

	@ Load user registers
	ldr	r2, [vcpu, #VCPU_USR_SP]
	ldr	r3, [vcpu, #VCPU_USR_LR]
	msr	SP_usr, r2
	mov	lr, r3
	@ Last step clobbers r0 (the vcpu pointer) with the guest's r0
	add	vcpu, vcpu, #(VCPU_USR_REGS)
	ldm	vcpu, {r0-r12}
.endm
|  | 191 |  | 
/*
 * Save SP, LR and SPSR for a given mode. offset is the offset of
 * this mode's registers from the VCPU base.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2, r3, r4, r5.
 */
.macro save_guest_regs_mode mode, offset
	add	r2, vcpu, \offset
	mrs	r3, SP_\mode
	mrs	r4, LR_\mode
	mrs	r5, SPSR_\mode
	stm	r2, {r3, r4, r5}	@ stores SP, LR, SPSR in struct order
.endm
|  | 207 |  | 
/*
 * Save all guest registers to the vcpu struct
 * Expects guest's r0, r1, r2 on the stack.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2, r3, r4, r5.
 */
.macro save_guest_regs
	@ Store usr registers r3-r12 directly (still unclobbered at this point)
	add	r2, vcpu, #VCPU_USR_REG(3)
	stm	r2, {r3-r12}
	@ Guest r0-r2 were stashed on the stack by the exception entry path
	add	r2, vcpu, #VCPU_USR_REG(0)
	pop	{r3, r4, r5}		@ r0, r1, r2
	stm	r2, {r3, r4, r5}
	mrs	r2, SP_usr
	mov	r3, lr			@ guest lr is shared with Hyp-mode lr
	str	r2, [vcpu, #VCPU_USR_SP]
	str	r3, [vcpu, #VCPU_USR_LR]

	@ Store return state: ELR_hyp/SPSR hold the guest PC/CPSR at the trap
	mrs	r2, ELR_hyp
	mrs	r3, spsr
	str	r2, [vcpu, #VCPU_PC]
	str	r3, [vcpu, #VCPU_CPSR]

	@ Store other guest registers
	save_guest_regs_mode svc, #VCPU_SVC_REGS
	save_guest_regs_mode abt, #VCPU_ABT_REGS
	save_guest_regs_mode und, #VCPU_UND_REGS
	save_guest_regs_mode irq, #VCPU_IRQ_REGS
.endm
|  | 240 |  | 
/* Reads cp15 registers from hardware and stores them in memory
 * @store_to_vcpu: If 0, registers are written in-order to the stack,
 * 		   otherwise to the VCPU struct pointed to by vcpup
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2 - r12
 *
 * The stack push order must match the pop order in write_cp15_state.
 */
.macro read_cp15_state store_to_vcpu
	@ First batch: MMU/translation related registers
	mrc	p15, 0, r2, c1, c0, 0	@ SCTLR
	mrc	p15, 0, r3, c1, c0, 2	@ CPACR
	mrc	p15, 0, r4, c2, c0, 2	@ TTBCR
	mrc	p15, 0, r5, c3, c0, 0	@ DACR
	mrrc	p15, 0, r6, r7, c2	@ TTBR 0 (64-bit, needs a reg pair)
	mrrc	p15, 1, r8, r9, c2	@ TTBR 1 (64-bit, needs a reg pair)
	mrc	p15, 0, r10, c10, c2, 0	@ PRRR
	mrc	p15, 0, r11, c10, c2, 1	@ NMRR
	mrc	p15, 2, r12, c0, c0, 0	@ CSSELR

	.if \store_to_vcpu == 0
	push	{r2-r12}		@ Push CP15 registers
	.else
	str	r2, [vcpu, #CP15_OFFSET(c1_SCTLR)]
	str	r3, [vcpu, #CP15_OFFSET(c1_CPACR)]
	str	r4, [vcpu, #CP15_OFFSET(c2_TTBCR)]
	str	r5, [vcpu, #CP15_OFFSET(c3_DACR)]
	add	r2, vcpu, #CP15_OFFSET(c2_TTBR0)
	strd	r6, r7, [r2]
	add	r2, vcpu, #CP15_OFFSET(c2_TTBR1)
	strd	r8, r9, [r2]
	str	r10, [vcpu, #CP15_OFFSET(c10_PRRR)]
	str	r11, [vcpu, #CP15_OFFSET(c10_NMRR)]
	str	r12, [vcpu, #CP15_OFFSET(c0_CSSELR)]
	.endif

	@ Second batch: thread ID and fault status/address registers
	mrc	p15, 0, r2, c13, c0, 1	@ CID
	mrc	p15, 0, r3, c13, c0, 2	@ TID_URW
	mrc	p15, 0, r4, c13, c0, 3	@ TID_URO
	mrc	p15, 0, r5, c13, c0, 4	@ TID_PRIV
	mrc	p15, 0, r6, c5, c0, 0	@ DFSR
	mrc	p15, 0, r7, c5, c0, 1	@ IFSR
	mrc	p15, 0, r8, c5, c1, 0	@ ADFSR
	mrc	p15, 0, r9, c5, c1, 1	@ AIFSR
	mrc	p15, 0, r10, c6, c0, 0	@ DFAR
	mrc	p15, 0, r11, c6, c0, 2	@ IFAR
	mrc	p15, 0, r12, c12, c0, 0	@ VBAR

	.if \store_to_vcpu == 0
	push	{r2-r12}		@ Push CP15 registers
	.else
	str	r2, [vcpu, #CP15_OFFSET(c13_CID)]
	str	r3, [vcpu, #CP15_OFFSET(c13_TID_URW)]
	str	r4, [vcpu, #CP15_OFFSET(c13_TID_URO)]
	str	r5, [vcpu, #CP15_OFFSET(c13_TID_PRIV)]
	str	r6, [vcpu, #CP15_OFFSET(c5_DFSR)]
	str	r7, [vcpu, #CP15_OFFSET(c5_IFSR)]
	str	r8, [vcpu, #CP15_OFFSET(c5_ADFSR)]
	str	r9, [vcpu, #CP15_OFFSET(c5_AIFSR)]
	str	r10, [vcpu, #CP15_OFFSET(c6_DFAR)]
	str	r11, [vcpu, #CP15_OFFSET(c6_IFAR)]
	str	r12, [vcpu, #CP15_OFFSET(c12_VBAR)]
	.endif

	@ Third batch: timer kernel control and the 64-bit PAR
	mrc	p15, 0, r2, c14, c1, 0	@ CNTKCTL
	mrrc	p15, 0, r4, r5, c7	@ PAR

	.if \store_to_vcpu == 0
	push	{r2,r4-r5}
	.else
	str	r2, [vcpu, #CP15_OFFSET(c14_CNTKCTL)]
	add	r12, vcpu, #CP15_OFFSET(c7_PAR)
	strd	r4, r5, [r12]
	.endif
.endm
|  | 315 |  | 
/*
 * Reads cp15 registers from memory and writes them to hardware
 * @read_from_vcpu: If 0, registers are read in-order from the stack,
 *		    otherwise from the VCPU struct pointed to by vcpup
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Pops in the exact reverse order of the pushes in read_cp15_state.
 */
.macro write_cp15_state read_from_vcpu
	@ First batch (pushed last): CNTKCTL and the 64-bit PAR
	.if \read_from_vcpu == 0
	pop	{r2,r4-r5}
	.else
	ldr	r2, [vcpu, #CP15_OFFSET(c14_CNTKCTL)]
	add	r12, vcpu, #CP15_OFFSET(c7_PAR)
	ldrd	r4, r5, [r12]
	.endif

	mcr	p15, 0, r2, c14, c1, 0	@ CNTKCTL
	mcrr	p15, 0, r4, r5, c7	@ PAR

	@ Second batch: thread ID and fault status/address registers
	.if \read_from_vcpu == 0
	pop	{r2-r12}
	.else
	ldr	r2, [vcpu, #CP15_OFFSET(c13_CID)]
	ldr	r3, [vcpu, #CP15_OFFSET(c13_TID_URW)]
	ldr	r4, [vcpu, #CP15_OFFSET(c13_TID_URO)]
	ldr	r5, [vcpu, #CP15_OFFSET(c13_TID_PRIV)]
	ldr	r6, [vcpu, #CP15_OFFSET(c5_DFSR)]
	ldr	r7, [vcpu, #CP15_OFFSET(c5_IFSR)]
	ldr	r8, [vcpu, #CP15_OFFSET(c5_ADFSR)]
	ldr	r9, [vcpu, #CP15_OFFSET(c5_AIFSR)]
	ldr	r10, [vcpu, #CP15_OFFSET(c6_DFAR)]
	ldr	r11, [vcpu, #CP15_OFFSET(c6_IFAR)]
	ldr	r12, [vcpu, #CP15_OFFSET(c12_VBAR)]
	.endif

	mcr	p15, 0, r2, c13, c0, 1	@ CID
	mcr	p15, 0, r3, c13, c0, 2	@ TID_URW
	mcr	p15, 0, r4, c13, c0, 3	@ TID_URO
	mcr	p15, 0, r5, c13, c0, 4	@ TID_PRIV
	mcr	p15, 0, r6, c5, c0, 0	@ DFSR
	mcr	p15, 0, r7, c5, c0, 1	@ IFSR
	mcr	p15, 0, r8, c5, c1, 0	@ ADFSR
	mcr	p15, 0, r9, c5, c1, 1	@ AIFSR
	mcr	p15, 0, r10, c6, c0, 0	@ DFAR
	mcr	p15, 0, r11, c6, c0, 2	@ IFAR
	mcr	p15, 0, r12, c12, c0, 0	@ VBAR

	@ Third batch: MMU/translation related registers
	.if \read_from_vcpu == 0
	pop	{r2-r12}
	.else
	ldr	r2, [vcpu, #CP15_OFFSET(c1_SCTLR)]
	ldr	r3, [vcpu, #CP15_OFFSET(c1_CPACR)]
	ldr	r4, [vcpu, #CP15_OFFSET(c2_TTBCR)]
	ldr	r5, [vcpu, #CP15_OFFSET(c3_DACR)]
	add	r12, vcpu, #CP15_OFFSET(c2_TTBR0)
	ldrd	r6, r7, [r12]
	add	r12, vcpu, #CP15_OFFSET(c2_TTBR1)
	ldrd	r8, r9, [r12]
	ldr	r10, [vcpu, #CP15_OFFSET(c10_PRRR)]
	ldr	r11, [vcpu, #CP15_OFFSET(c10_NMRR)]
	ldr	r12, [vcpu, #CP15_OFFSET(c0_CSSELR)]
	.endif

	mcr	p15, 0, r2, c1, c0, 0	@ SCTLR
	mcr	p15, 0, r3, c1, c0, 2	@ CPACR
	mcr	p15, 0, r4, c2, c0, 2	@ TTBCR
	mcr	p15, 0, r5, c3, c0, 0	@ DACR
	mcrr	p15, 0, r6, r7, c2	@ TTBR 0
	mcrr	p15, 1, r8, r9, c2	@ TTBR 1
	mcr	p15, 0, r10, c10, c2, 0	@ PRRR
	mcr	p15, 0, r11, c10, c2, 1	@ NMRR
	mcr	p15, 2, r12, c0, c0, 0	@ CSSELR
.endm
|  | 389 |  | 
/*
 * Save the VGIC CPU state into memory
 *
 * Assumes vcpu pointer in vcpu reg
 * Clobbers r2-r11 (r11 holds the vgic_cpu base across the loop)
 */
.macro save_vgic_state
#ifdef CONFIG_KVM_ARM_VGIC
	/* Get VGIC VCTRL base into r2 */
	ldr	r2, [vcpu, #VCPU_KVM]
	ldr	r2, [r2, #KVM_VGIC_VCTRL]
	cmp	r2, #0
	beq	2f			@ no virtual interface mapped: skip

	/* Compute the address of struct vgic_cpu */
	add	r11, vcpu, #VCPU_VGIC_CPU

	/* Save all interesting registers */
	ldr	r3, [r2, #GICH_HCR]
	ldr	r4, [r2, #GICH_VMCR]
	ldr	r5, [r2, #GICH_MISR]
	ldr	r6, [r2, #GICH_EISR0]
	ldr	r7, [r2, #GICH_EISR1]
	ldr	r8, [r2, #GICH_ELRSR0]
	ldr	r9, [r2, #GICH_ELRSR1]
	ldr	r10, [r2, #GICH_APR]

	str	r3, [r11, #VGIC_CPU_HCR]
	str	r4, [r11, #VGIC_CPU_VMCR]
	str	r5, [r11, #VGIC_CPU_MISR]
	str	r6, [r11, #VGIC_CPU_EISR]
	str	r7, [r11, #(VGIC_CPU_EISR + 4)]	@ EISR1 is the high word
	str	r8, [r11, #VGIC_CPU_ELRSR]
	str	r9, [r11, #(VGIC_CPU_ELRSR + 4)]	@ ELRSR1 is the high word
	str	r10, [r11, #VGIC_CPU_APR]

	/* Clear GICH_HCR: disables the virtual interface while in the host */
	mov	r5, #0
	str	r5, [r2, #GICH_HCR]

	/* Save list registers (count read from the vgic_cpu struct) */
	add	r2, r2, #GICH_LR0
	add	r3, r11, #VGIC_CPU_LR
	ldr	r4, [r11, #VGIC_CPU_NR_LR]
1:	ldr	r6, [r2], #4
	str	r6, [r3], #4
	subs	r4, r4, #1
	bne	1b
2:
#endif
.endm
|  | 440 |  | 
/*
 * Restore the VGIC CPU state from memory
 *
 * Assumes vcpu pointer in vcpu reg
 * Clobbers r2, r3, r4, r6, r8, r11
 */
.macro restore_vgic_state
#ifdef CONFIG_KVM_ARM_VGIC
	/* Get VGIC VCTRL base into r2 */
	ldr	r2, [vcpu, #VCPU_KVM]
	ldr	r2, [r2, #KVM_VGIC_VCTRL]
	cmp	r2, #0
	beq	2f			@ no virtual interface mapped: skip

	/* Compute the address of struct vgic_cpu */
	add	r11, vcpu, #VCPU_VGIC_CPU

	/* We only restore a minimal set of registers: the status
	 * registers (MISR/EISR/ELRSR) are read-only hardware state */
	ldr	r3, [r11, #VGIC_CPU_HCR]
	ldr	r4, [r11, #VGIC_CPU_VMCR]
	ldr	r8, [r11, #VGIC_CPU_APR]

	str	r3, [r2, #GICH_HCR]
	str	r4, [r2, #GICH_VMCR]
	str	r8, [r2, #GICH_APR]

	/* Restore list registers (count read from the vgic_cpu struct) */
	add	r2, r2, #GICH_LR0
	add	r3, r11, #VGIC_CPU_LR
	ldr	r4, [r11, #VGIC_CPU_NR_LR]
1:	ldr	r6, [r3], #4
	str	r6, [r2], #4
	subs	r4, r4, #1
	bne	1b
2:
#endif
.endm
|  | 477 |  | 
/* CNTHCTL bits: PL1 physical counter / physical timer access enables */
#define CNTHCTL_PL1PCTEN	(1 << 0)
#define CNTHCTL_PL1PCEN		(1 << 1)
|  | 480 |  | 
/*
 * Save the timer state onto the VCPU and allow physical timer/counter access
 * for the host.
 *
 * Assumes vcpu pointer in vcpu reg
 * Clobbers r2-r5
 */
.macro save_timer_state
#ifdef CONFIG_KVM_ARM_TIMER
	ldr	r4, [vcpu, #VCPU_KVM]
	ldr	r2, [r4, #KVM_TIMER_ENABLED]
	cmp	r2, #0
	beq	1f			@ virtual timer not in use: skip save

	mrc	p15, 0, r2, c14, c3, 1	@ CNTV_CTL
	str	r2, [vcpu, #VCPU_TIMER_CNTV_CTL]
	bic	r2, #1			@ Clear ENABLE
	mcr	p15, 0, r2, c14, c3, 1	@ CNTV_CTL
	isb				@ ensure the timer is off before reading CVAL

	mrrc	p15, 3, r2, r3, c14	@ CNTV_CVAL (64-bit)
	@ Offset may not fit in an immediate: materialize it via a register
	ldr	r4, =VCPU_TIMER_CNTV_CVAL
	add	r5, vcpu, r4
	strd	r2, r3, [r5]

1:
#endif
	@ Allow physical timer/counter access for the host
	mrc	p15, 4, r2, c14, c1, 0	@ CNTHCTL
	orr	r2, r2, #(CNTHCTL_PL1PCEN | CNTHCTL_PL1PCTEN)
	mcr	p15, 4, r2, c14, c1, 0	@ CNTHCTL
.endm
|  | 513 |  | 
/*
 * Load the timer state from the VCPU and deny physical timer/counter access
 * for the host.
 *
 * Assumes vcpu pointer in vcpu reg
 * Clobbers r2-r5
 */
.macro restore_timer_state
	@ Disallow physical timer access for the guest
	@ Physical counter access is allowed
	mrc	p15, 4, r2, c14, c1, 0	@ CNTHCTL
	orr	r2, r2, #CNTHCTL_PL1PCTEN
	bic	r2, r2, #CNTHCTL_PL1PCEN
	mcr	p15, 4, r2, c14, c1, 0	@ CNTHCTL

#ifdef CONFIG_KVM_ARM_TIMER
	ldr	r4, [vcpu, #VCPU_KVM]
	ldr	r2, [r4, #KVM_TIMER_ENABLED]
	cmp	r2, #0
	beq	1f			@ virtual timer not in use: skip restore

	@ Program the per-VM virtual counter offset (64-bit)
	ldr	r2, [r4, #KVM_TIMER_CNTVOFF]
	ldr	r3, [r4, #(KVM_TIMER_CNTVOFF + 4)]
	mcrr	p15, 4, r2, r3, c14	@ CNTVOFF

	@ Offset may not fit in an immediate: materialize it via a register
	ldr	r4, =VCPU_TIMER_CNTV_CVAL
	add	r5, vcpu, r4
	ldrd	r2, r3, [r5]
	mcrr	p15, 3, r2, r3, c14	@ CNTV_CVAL
	isb				@ CVAL must be set before re-enabling

	ldr	r2, [vcpu, #VCPU_TIMER_CNTV_CTL]
	and	r2, r2, #3		@ keep only ENABLE and IMASK bits
	mcr	p15, 0, r2, c14, c3, 1	@ CNTV_CTL
1:
#endif
.endm
|  | 551 |  | 
/* Direction arguments for the set_* / configure_* macros below */
.equ vmentry,	0
.equ vmexit,	1
|  | 554 |  | 
/* Configures the HSTR (Hyp System Trap Register) on entry/return
 * (hardware reset value is 0). Clobbers r2, r3. */
.macro set_hstr operation
	mrc	p15, 4, r2, c1, c1, 3
	ldr	r3, =HSTR_T(15)
	.if \operation == vmentry
	orr	r2, r2, r3		@ Trap CR{15}
	.else
	bic	r2, r2, r3		@ Don't trap any CRx accesses
	.endif
	mcr	p15, 4, r2, c1, c1, 3
.endm
|  | 567 |  | 
/* Configures the HCPTR (Hyp Coprocessor Trap Register) on entry/return
 * (hardware reset value is 0). Keep previous value in r2 so the caller
 * can inspect which traps were already active. Clobbers r3. */
.macro set_hcptr operation, mask
	mrc	p15, 4, r2, c1, c1, 2
	ldr	r3, =\mask
	.if \operation == vmentry
	orr	r3, r2, r3		@ Trap coproc-accesses defined in mask
	.else
	bic	r3, r2, r3		@ Don't trap defined coproc-accesses
	.endif
	mcr	p15, 4, r3, c1, c1, 2
.endm
|  | 580 |  | 
/* Configures the HDCR (Hyp Debug Configuration Register) on entry/return
 * (hardware reset value is 0). Clobbers r2, r3. */
.macro set_hdcr operation
	mrc	p15, 4, r2, c1, c1, 1
	ldr	r3, =(HDCR_TPM|HDCR_TPMCR)
	.if \operation == vmentry
	orr	r2, r2, r3		@ Trap some perfmon accesses
	.else
	bic	r2, r2, r3		@ Don't trap any perfmon accesses
	.endif
	mcr	p15, 4, r2, c1, c1, 1
.endm
|  | 593 |  | 
/* Enable/Disable: stage-2 trans., trap interrupts, trap wfi, trap smc
 * via HCR. On vmentry, also inject any pending virtual IRQ/FIQ lines
 * recorded in the VCPU. Clobbers r2, r3. */
.macro configure_hyp_role operation
	mrc	p15, 4, r2, c1, c1, 0	@ HCR
	bic	r2, r2, #HCR_VIRT_EXCP_MASK	@ drop stale virtual-exception bits
	ldr	r3, =HCR_GUEST_MASK
	.if \operation == vmentry
	orr	r2, r2, r3
	ldr	r3, [vcpu, #VCPU_IRQ_LINES]	@ pending VI/VF bits from the VCPU
	orr	r2, r2, r3
	.else
	bic	r2, r2, r3
	.endif
	mcr	p15, 4, r2, c1, c1, 0
.endm
|  | 608 |  | 
/* Reload the vcpu pointer (r0) from HTPIDR, where the world-switch
 * code stashed it before entering the guest. */
.macro load_vcpu
	mrc	p15, 4, vcpu, c13, c0, 2	@ HTPIDR
.endm