/*
 * Idle processing for ARMv7-based Qualcomm SoCs.
 *
 * Copyright (C) 2007 Google, Inc.
 * Copyright (c) 2007-2009, 2011-2012 Code Aurora Forum. All rights reserved.
 *
 * This software is licensed under the terms of the GNU General Public
 * License version 2, as published by the Free Software Foundation, and
 * may be copied, distributed, and modified under those terms.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 */

#include <linux/linkage.h>
#include <linux/threads.h>
#include <asm/assembler.h>

#include "idle.h"

#ifdef CONFIG_ARCH_MSM_KRAIT
#define SCM_SVC_BOOT 0x1
#define SCM_CMD_TERMINATE_PC 0x2
#endif

/*
 * Toggle the ACTLR SMP bit to switch the CPU between SMP and AMP
 * coherency configurations; a no-op unless the target is MSM8625.
 */
.macro SET_SMP_COHERENCY, on = 0
	ldr	r0, =target_type
	ldr	r0, [r0]
	mov	r1, #TARGET_IS_8625
	cmp	r0, r1
	bne	skip\@
	mrc	p15, 0, r0, c1, c0, 1	/* read ACTLR register */
	.if	\on
	orr	r0, r0, #(1 << 6)	/* set the SMP bit in ACTLR */
	.else
	bic	r0, r0, #(1 << 6)	/* clear the SMP bit */
	.endif
	mcr	p15, 0, r0, c1, c0, 1	/* write ACTLR register */
	isb
skip\@:
.endm

/* Add NOPs for 8x25 target */
.macro DELAY_8x25, rept
#ifdef CONFIG_ARCH_MSM8625
	.rept	\rept
	nop
	.endr
#endif
.endm

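/*
 * msm_arch_idle: simple idle. Wait for an interrupt; on 8x60, also read
 * the ETM PDSR and DBG PRSR afterwards to clear their sticky
 * power-down bits.
 */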
ENTRY(msm_arch_idle)
	wfi
#ifdef CONFIG_ARCH_MSM8X60
	mrc	p14, 1, r1, c1, c5, 4	/* read ETM PDSR to clear sticky bit */
	mrc	p14, 0, r1, c1, c5, 4	/* read DBG PRSR to clear sticky bit */
	isb
#endif
	bx	lr

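/*
 * msm_pm_collapse: save this CPU's context into its slot of
 * msm_saved_state and attempt power collapse. Returns 0 if the attempt
 * fell through without collapsing; a successful collapse resumes
 * through msm_pm_collapse_exit instead.
 */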
ENTRY(msm_pm_collapse)
#if defined(CONFIG_MSM_FIQ_SUPPORT)
	cpsid	f
#endif

	ldr	r0, =msm_saved_state	/* address of msm_saved_state ptr */
	ldr	r0, [r0]		/* load ptr */
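	/* Index into the per-CPU save area: r0 += cpu_id * CPU_SAVED_STATE_SIZE */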
#if (NR_CPUS >= 2)
	mrc	p15, 0, r1, c0, c0, 5	/* MPIDR */
	ands	r1, r1, #15		/* What CPU am I */
	mov	r2, #CPU_SAVED_STATE_SIZE
	mul	r1, r1, r2
	add	r0, r0, r1
#endif

	stmia	r0!, {r4-r14}
	mrc	p15, 0, r1, c1, c0, 0	/* MMU control */
	mrc	p15, 0, r2, c2, c0, 0	/* TTBR0 */
	mrc	p15, 0, r3, c3, c0, 0	/* DACR */
#ifdef CONFIG_ARCH_MSM_SCORPION
	/* This instruction is not valid for non-Scorpion processors */
	mrc	p15, 3, r4, c15, c0, 3	/* L2CR1 is the L2 cache control reg 1 */
#endif
	mrc	p15, 0, r5, c10, c2, 0	/* PRRR */
	mrc	p15, 0, r6, c10, c2, 1	/* NMRR */
	mrc	p15, 0, r7, c1, c0, 1	/* ACTLR */
	mrc	p15, 0, r8, c2, c0, 1	/* TTBR1 */
	mrc	p15, 0, r9, c13, c0, 3	/* TPIDRURO */
	mrc	p15, 0, ip, c13, c0, 1	/* context ID */
	stmia	r0!, {r1-r9, ip}
#ifdef CONFIG_MSM_CPU_AVS
	mrc	p15, 7, r1, c15, c1, 7	/* AVSCSR: Adaptive Voltage Scaling
					 * Control and Status Register */
	mrc	p15, 7, r2, c15, c0, 6	/* AVSDSCR: Adaptive Voltage Scaling
					 * Delay Synthesizer Control Register */
#ifndef CONFIG_ARCH_MSM_KRAIT
	mrc	p15, 7, r3, c15, c1, 0	/* TSCSR: Temperature Status and
					 * Control Register */
#endif

	stmia	r0!, {r1-r3}
#endif

#ifdef CONFIG_MSM_JTAG
	bl	msm_jtag_save_state
#endif

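	/*
	 * Decide whether to flush the data cache: OR the software L2
	 * flush flag with CCSIDR[30] (write-back) of the level 1 data
	 * cache selected via CSSELR, and call v7_flush_dcache_all only
	 * when the result is exactly 1.
	 */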
	ldr	r0, =msm_pm_flush_l2_flag
	ldr	r0, [r0]
	mov	r1, #0
	mcr	p15, 2, r1, c0, c0, 0	/* CSSELR */
	isb
	mrc	p15, 1, r1, c0, c0, 0	/* CCSIDR */
	mov	r2, #1
	and	r1, r2, r1, ASR #30	/* check if the cache is write-back */
	orr	r1, r0, r1
	cmp	r1, #1
	bne	skip
	bl	v7_flush_dcache_all
skip:
#ifdef CONFIG_ARCH_MSM_KRAIT
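	/*
	 * Krait enters power collapse through the secure monitor:
	 * scm_call_atomic1(SCM_SVC_BOOT, SCM_CMD_TERMINATE_PC, flush flag).
	 */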
	ldr	r0, =SCM_SVC_BOOT
	ldr	r1, =SCM_CMD_TERMINATE_PC
	ldr	r2, =msm_pm_flush_l2_flag
	ldr	r2, [r2]
	bl	scm_call_atomic1
#else
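	/*
	 * Non-Krait path: turn the D/I caches off and drop out of SMP
	 * coherency before waiting for power collapse in WFI; if the WFI
	 * falls through, the code below undoes both and reports failure.
	 */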
	mrc	p15, 0, r4, c1, c0, 0	/* read current CR */
	bic	r0, r4, #(1 << 2)	/* clear dcache bit */
	bic	r0, r0, #(1 << 12)	/* clear icache bit */
	mcr	p15, 0, r0, c1, c0, 0	/* disable d/i cache */
	dsb

	SET_SMP_COHERENCY OFF
	wfi
	DELAY_8x25 300

	mcr	p15, 0, r4, c1, c0, 0	/* restore d/i cache */
	isb
#endif
	SET_SMP_COHERENCY ON
#if defined(CONFIG_MSM_FIQ_SUPPORT)
	cpsie	f
#endif
#ifdef CONFIG_MSM_JTAG
	bl	msm_jtag_restore_state
#endif
	ldr	r0, =msm_saved_state	/* address of msm_saved_state ptr */
	ldr	r0, [r0]		/* load ptr */
#if (NR_CPUS >= 2)
	mrc	p15, 0, r1, c0, c0, 5	/* MPIDR */
	ands	r1, r1, #15		/* What CPU am I */
	mov	r2, #CPU_SAVED_STATE_SIZE
	mul	r2, r2, r1
	add	r0, r0, r2
#endif
	ldmfd	r0, {r4-r14}		/* restore registers */
	mov	r0, #0			/* return power collapse failed */
	bx	lr

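/*
 * msm_pm_collapse_exit: warm-boot resume path after a successful power
 * collapse. Entered from the boot vector with the MMU off, running at
 * physical addresses; restores the context saved by msm_pm_collapse and
 * returns 1 through the restored lr.
 */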
ENTRY(msm_pm_collapse_exit)
#if 0 /* serial debug */
	mov	r0, #0x80000016
	mcr	p15, 0, r0, c15, c2, 4
	mov	r0, #0xA9000000
	add	r0, r0, #0x00A00000	/* UART1 */
	/* add	r0, r0, #0x00C00000 */	/* UART3 */
	mov	r1, #'A'
	str	r1, [r0, #0x00C]
#endif
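	/*
	 * We are executing from physical addresses, so linker-generated
	 * (virtual) addresses cannot be dereferenced directly. Convert
	 * &msm_saved_state_phys to a physical address by applying the
	 * delta between the linked and the actual address of
	 * msm_pm_collapse_exit.
	 */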
	ldr	r1, =msm_saved_state_phys
	ldr	r2, =msm_pm_collapse_exit
	adr	r3, msm_pm_collapse_exit
	add	r1, r1, r3
	sub	r1, r1, r2
	ldr	r1, [r1]
	add	r1, r1, #CPU_SAVED_STATE_SIZE
#if (NR_CPUS >= 2)
	mrc	p15, 0, r2, c0, c0, 5	/* MPIDR */
	ands	r2, r2, #15		/* What CPU am I */
	mov	r3, #CPU_SAVED_STATE_SIZE
	mul	r2, r2, r3
	add	r1, r1, r2
#endif

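	/*
	 * r1 now points at the end of this CPU's save area; unwind it in
	 * reverse order: AVS state (if saved), then CP15 state, then the
	 * core registers.
	 */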
#ifdef CONFIG_MSM_CPU_AVS
	ldmdb	r1!, {r2-r4}
#ifndef CONFIG_ARCH_MSM_KRAIT
	mcr	p15, 7, r4, c15, c1, 0	/* TSCSR */
#endif
	mcr	p15, 7, r3, c15, c0, 6	/* AVSDSCR */
	mcr	p15, 7, r2, c15, c1, 7	/* AVSCSR */
#endif
	ldmdb	r1!, {r2-r11}
	mcr	p15, 0, r4, c3, c0, 0	/* DACR */
	mcr	p15, 0, r3, c2, c0, 0	/* TTBR0 */
#ifdef CONFIG_ARCH_MSM_SCORPION
	/* This instruction is not valid for non-Scorpion processors */
	mcr	p15, 3, r5, c15, c0, 3	/* L2CR1 */
#endif
	mcr	p15, 0, r6, c10, c2, 0	/* PRRR */
	mcr	p15, 0, r7, c10, c2, 1	/* NMRR */
	mcr	p15, 0, r8, c1, c0, 1	/* ACTLR */
	mcr	p15, 0, r9, c2, c0, 1	/* TTBR1 */
	mcr	p15, 0, r10, c13, c0, 3	/* TPIDRURO */
	mcr	p15, 0, r11, c13, c0, 1	/* context ID */
	isb
	ldmdb	r1!, {r4-r14}
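	/*
	 * Switch TTBR0 to msm_pm_pc_pgd, a page table that must also map
	 * this code at its physical address (see msm_pm_mapped_pa below),
	 * so that execution can continue across the MMU being enabled.
	 */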
	ldr	r0, =msm_pm_pc_pgd
	ldr	r1, =msm_pm_collapse_exit
	adr	r3, msm_pm_collapse_exit
	add	r0, r0, r3
	sub	r0, r0, r1
	ldr	r0, [r0]
	mrc	p15, 0, r1, c2, c0, 0	/* save current TTBR0 */
	and	r3, r1, #0x7f		/* mask to get TTB flags */
	orr	r0, r0, r3		/* add TTB flags to switch TTBR value */
	mcr	p15, 0, r0, c2, c0, 0	/* temporarily switch TTBR0 */
	isb
	mcr	p15, 0, r2, c1, c0, 0	/* MMU control */
	isb
msm_pm_mapped_pa:
	/* Switch to virtual */
	ldr	r0, =msm_pm_pa_to_va
	mov	pc, r0
msm_pm_pa_to_va:
	mcr	p15, 0, r1, c2, c0, 0	/* restore TTBR0 */
	isb
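	/*
	 * Discard translations and branch predictions picked up while the
	 * temporary page table was live.
	 */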
	mcr	p15, 0, r3, c8, c7, 0	/* UTLBIALL */
	mcr	p15, 0, r3, c7, c5, 6	/* BPIALL */
	dsb
	isb

	SET_SMP_COHERENCY ON
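	/*
	 * On Krait parts whose MIDR matches 0x510004xx, clear bit 10 of
	 * the implementation-defined register at p15, 7, c15, c0, 2.
	 */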
#ifdef CONFIG_ARCH_MSM_KRAIT
	mrc	p15, 0, r1, c0, c0, 0	/* MIDR */
	ldr	r3, =0xff00fc00
	and	r3, r1, r3
	ldr	r1, =0x51000400
	cmp	r3, r1
	mrceq	p15, 7, r3, c15, c0, 2
	biceq	r3, r3, #0x400
	mcreq	p15, 7, r3, c15, c0, 2
#endif
#ifdef CONFIG_MSM_JTAG
	stmfd	sp!, {lr}
	bl	msm_jtag_restore_state
	ldmfd	sp!, {lr}
#endif
	mov	r0, #1			/* return power collapse succeeded */
	bx	lr
	nop
	nop
	nop
	nop
	nop
1:	b	1b

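/*
 * msm_pm_boot_entry: warm-boot entry point for a CPU coming out of
 * power collapse. Runs at physical addresses; jumps through this CPU's
 * slot in msm_pm_boot_vector.
 */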
ENTRY(msm_pm_boot_entry)
	mrc	p15, 0, r0, c0, c0, 5	/* MPIDR */
	and	r0, r0, #15		/* what CPU am I */

	ldr	r1, =msm_pm_boot_vector
	ldr	r2, =msm_pm_boot_entry
	adr	r3, msm_pm_boot_entry
	add	r1, r1, r3		/* translate virt to phys addr */
	sub	r1, r1, r2

	add	r1, r1, r0, LSL #2	/* locate boot vector for our cpu */
	ldr	pc, [r1]		/* jump */

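/*
 * msm_pm_set_l2_flush_flag: C-callable setter for msm_pm_flush_l2_flag,
 * which selects whether power collapse flushes the caches (see the
 * comment on the flag below).
 */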
ENTRY(msm_pm_set_l2_flush_flag)
	ldr	r1, =msm_pm_flush_l2_flag
	str	r0, [r1]
	bx	lr

	.data

	.globl msm_pm_pc_pgd
msm_pm_pc_pgd:
	.long	0x0

	.globl msm_saved_state
msm_saved_state:
	.long	0x0

	.globl msm_saved_state_phys
msm_saved_state_phys:
	.long	0x0

	.globl msm_pm_boot_vector
msm_pm_boot_vector:
	.space	4 * NR_CPUS

	.globl target_type
target_type:
	.long	0x0

/*
 * Default the L2 flush flag to 1 so that the caches are flushed during
 * power collapse, unless the L2 driver decides to flush them only
 * during L2 power collapse.
 */
msm_pm_flush_l2_flag:
	.long	0x1