/*
 * Idle processing for ARMv7-based Qualcomm SoCs.
 *
 * Copyright (C) 2007 Google, Inc.
 * Copyright (c) 2007-2009, 2011-2013 The Linux Foundation. All rights reserved.
 *
 * This software is licensed under the terms of the GNU General Public
 * License version 2, as published by the Free Software Foundation, and
 * may be copied, distributed, and modified under those terms.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 */

#include <linux/linkage.h>
#include <linux/threads.h>
#include <asm/assembler.h>

#include "idle.h"
#include "idle-macros.S"

#ifdef CONFIG_ARCH_MSM_KRAIT
#define SCM_SVC_BOOT 0x1
#define SCM_CMD_TERMINATE_PC 0x2
#endif

ENTRY(msm_arch_idle)
#ifdef CONFIG_ARCH_MSM_KRAIT
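	/*
	 * Krait-specific idle path: parts whose MIDR (with the revision
	 * field masked off) matches 0x511F0400 or 0x510F0600 mask
	 * interrupts and clear bit 17 of the implementation-defined
	 * register p15, 7, c15, c0, 5 (assumed to control low-power
	 * retention) before WFI; the register and CPSR are restored on
	 * wakeup.  All other parts fall through to a plain WFI.
	 */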
	mrc p15, 0, r0, c0, c0, 0
	bic r1, r0, #0xff
	movw r2, #0x0400
	movt r2, #0x511F
	movw r3, #0x0600
	movt r3, #0x510F
	cmp r2, r1
	cmpne r3, r1
	bne go_wfi

	mrs r0, cpsr
	cpsid if

	mrc p15, 7, r1, c15, c0, 5
	bic r2, r1, #0x20000
	mcr p15, 7, r2, c15, c0, 5
	isb

go_wfi:
	wfi
	bne wfi_done /* condition flags still hold the MIDR compare result */
	mcr p15, 7, r1, c15, c0, 5
	isb
	msr cpsr_c, r0

wfi_done:
	bx lr
#else
	wfi
#ifdef CONFIG_ARCH_MSM8X60
	mrc p14, 1, r1, c1, c5, 4 /* read ETM PDSR to clear sticky bit */
	mrc p14, 0, r1, c1, c5, 4 /* read DBG PRSR to clear sticky bit */
	isb
#endif
	bx lr
#endif

ENTRY(msm_pm_collapse)
#if defined(CONFIG_MSM_FIQ_SUPPORT)
	cpsid f
#endif

	ldr r0, =msm_saved_state /* address of msm_saved_state ptr */
	ldr r0, [r0] /* load ptr */
#if (NR_CPUS >= 2)
	mrc p15, 0, r1, c0, c0, 5 /* MPIDR */
	ands r1, r1, #15 /* What CPU am I */
	mov r2, #CPU_SAVED_STATE_SIZE
	mul r1, r1, r2
	add r0, r0, r1
#endif

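	/*
	 * Save the callee-saved context: r4-r14 followed by the CP15
	 * registers below.  The layout must match CPU_SAVED_STATE_SIZE and
	 * the restore sequences at the end of msm_pm_collapse and in
	 * msm_pm_collapse_exit.
	 */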
	stmia r0!, {r4-r14}
	mrc p15, 0, r1, c1, c0, 0 /* MMU control */
	mrc p15, 0, r2, c2, c0, 0 /* TTBR0 */
	mrc p15, 0, r3, c3, c0, 0 /* dacr */
#ifdef CONFIG_ARCH_MSM_SCORPION
	/* This instruction is not valid on non-Scorpion processors */
	mrc p15, 3, r4, c15, c0, 3 /* L2CR1 is the L2 cache control reg 1 */
#endif
	mrc p15, 0, r5, c10, c2, 0 /* PRRR */
	mrc p15, 0, r6, c10, c2, 1 /* NMRR */
	mrc p15, 0, r7, c1, c0, 1 /* ACTLR */
	mrc p15, 0, r8, c2, c0, 1 /* TTBR1 */
	mrc p15, 0, r9, c13, c0, 3 /* TPIDRURO */
	mrc p15, 0, ip, c13, c0, 1 /* context ID */
	stmia r0!, {r1-r9, ip}

#if defined(CONFIG_MSM_JTAG) || defined(CONFIG_MSM_JTAG_MM)
	bl msm_jtag_save_state
#endif

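	/*
	 * Flush the D-cache if either the L2 flush flag is set or the
	 * level 1 data cache (selected via CSSELR) reports write-back in
	 * CCSIDR bit 30.
	 */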
	ldr r0, =msm_pm_flush_l2_flag
	ldr r0, [r0]
	mov r1, #0
	mcr p15, 2, r1, c0, c0, 0 /* CSSELR */
	isb
	mrc p15, 1, r1, c0, c0, 0 /* CCSIDR */
	mov r2, #1
	and r1, r2, r1, ASR #30 /* check if the cache is write-back */
	orr r1, r0, r1
	cmp r1, #1
	bne skip
	bl v7_flush_dcache_all
skip:
	mrc p15, 0, r0, c0, c0, 5 /* MPIDR */
	and r0, r0, #15 /* what CPU am I */

	ldr r1, =msm_pc_debug_counters /* load the IMEM debug location */
	ldr r1, [r1]
	cmp r1, #0
	beq skip_pc_debug1
	add r1, r1, r0, LSL #4 /* debug location for this CPU */
	ldr r2, [r1]
	add r2, #1
	str r2, [r1]
skip_pc_debug1:

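	/*
	 * On Krait the power collapse is completed through a secure
	 * monitor call; per the AAPCS, scm_call_atomic1() receives the
	 * service ID in r0, the command ID in r1 and a single argument
	 * (the L2 flush flag) in r2.
	 */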
#ifdef CONFIG_ARCH_MSM_KRAIT
	ldr r0, =SCM_SVC_BOOT
	ldr r1, =SCM_CMD_TERMINATE_PC
	ldr r2, =msm_pm_flush_l2_flag
	ldr r2, [r2]
	bl scm_call_atomic1
#else
	mrc p15, 0, r4, c1, c0, 0 /* read current CR */
	bic r0, r4, #(1 << 2) /* clear dcache bit */
	bic r0, r0, #(1 << 12) /* clear icache bit */
	mcr p15, 0, r0, c1, c0, 0 /* disable d/i cache */
	isb

	SUSPEND_8x25_L2
	SET_SMP_COHERENCY OFF
	wfi
	DELAY_8x25 300

	mcr p15, 0, r4, c1, c0, 0 /* restore d/i cache */
	isb
	ENABLE_8x25_L2 /* enable only the L2; no need to restore the reg back */
	SET_SMP_COHERENCY ON
#endif

#if defined(CONFIG_MSM_FIQ_SUPPORT)
	cpsie f
#endif
	mrc p15, 0, r0, c0, c0, 5 /* MPIDR */
	and r0, r0, #15 /* what CPU am I */

	ldr r1, =msm_pc_debug_counters /* load the IMEM debug location */
	ldr r1, [r1]
	cmp r1, #0
	beq skip_pc_debug2
	add r1, r1, r0, LSL #4 /* debug location for this CPU */
	add r1, #8
	ldr r2, [r1]
	add r2, #1
	str r2, [r1]

skip_pc_debug2:

#if defined(CONFIG_MSM_JTAG) || defined(CONFIG_MSM_JTAG_MM)
	bl msm_jtag_restore_state
#endif
	ldr r0, =msm_saved_state /* address of msm_saved_state ptr */
	ldr r0, [r0] /* load ptr */
#if (NR_CPUS >= 2)
	mrc p15, 0, r1, c0, c0, 5 /* MPIDR */
	ands r1, r1, #15 /* What CPU am I */
	mov r2, #CPU_SAVED_STATE_SIZE
	mul r2, r2, r1
	add r0, r0, r2
#endif
	ldmfd r0, {r4-r14} /* restore registers */
	mov r0, #0 /* return power collapse failed */
	bx lr

ENTRY(msm_pm_collapse_exit)
#if 0 /* serial debug */
	mov r0, #0x80000016
	mcr p15, 0, r0, c15, c2, 4
	mov r0, #0xA9000000
	add r0, r0, #0x00A00000 /* UART1 */
	/*add r0, r0, #0x00C00000*/ /* UART3 */
	mov r1, #'A'
	str r1, [r0, #0x00C]
#endif
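	/*
	 * Entered from warm boot with the MMU still off: convert the
	 * link-time address of msm_saved_state_phys to its run-time
	 * address using the delta between the adr (current) and ldr=
	 * (link-time) addresses of msm_pm_collapse_exit.
	 */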
	ldr r1, =msm_saved_state_phys
	ldr r2, =msm_pm_collapse_exit
	adr r3, msm_pm_collapse_exit
	add r1, r1, r3
	sub r1, r1, r2
	ldr r1, [r1]
	add r1, r1, #CPU_SAVED_STATE_SIZE
#if (NR_CPUS >= 2)
	mrc p15, 0, r2, c0, c0, 5 /* MPIDR */
	ands r2, r2, #15 /* What CPU am I */
	mov r3, #CPU_SAVED_STATE_SIZE
	mul r2, r2, r3
	add r1, r1, r2
#endif

	ldmdb r1!, {r2-r11}
	mcr p15, 0, r4, c3, c0, 0 /* dacr */
	mcr p15, 0, r3, c2, c0, 0 /* TTBR0 */
#ifdef CONFIG_ARCH_MSM_SCORPION
	/* This instruction is not valid on non-Scorpion processors */
	mcr p15, 3, r5, c15, c0, 3 /* L2CR1 */
#endif
	mcr p15, 0, r6, c10, c2, 0 /* PRRR */
	mcr p15, 0, r7, c10, c2, 1 /* NMRR */
	mcr p15, 0, r8, c1, c0, 1 /* ACTLR */
	mcr p15, 0, r9, c2, c0, 1 /* TTBR1 */
	mcr p15, 0, r10, c13, c0, 3 /* TPIDRURO */
	mcr p15, 0, r11, c13, c0, 1 /* context ID */
	isb
	ldmdb r1!, {r4-r14}
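	/*
	 * Turn the MMU back on: temporarily point TTBR0 at the page table
	 * published in msm_pm_pc_pgd (set up by C code and assumed to map
	 * this region one-to-one), enable the MMU, branch to the kernel
	 * virtual alias, then restore the previous TTBR0 and invalidate
	 * the TLB and branch predictor.
	 */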
	ldr r0, =msm_pm_pc_pgd
	ldr r1, =msm_pm_collapse_exit
	adr r3, msm_pm_collapse_exit
	add r0, r0, r3
	sub r0, r0, r1
	ldr r0, [r0]
	mrc p15, 0, r1, c2, c0, 0 /* save current TTBR0 */
	and r3, r1, #0x7f /* mask to get TTB flags */
	orr r0, r0, r3 /* add TTB flags to switch TTBR value */
	mcr p15, 0, r0, c2, c0, 0 /* temporarily switch TTBR0 */
	isb
	mcr p15, 0, r2, c1, c0, 0 /* MMU control */
	isb
msm_pm_mapped_pa:
	/* Switch to virtual */
	ldr r0, =msm_pm_pa_to_va
	mov pc, r0
msm_pm_pa_to_va:
	mcr p15, 0, r1, c2, c0, 0 /* restore TTBR0 */
	isb
	mcr p15, 0, r3, c8, c7, 0 /* UTLBIALL */
	mcr p15, 0, r3, c7, c5, 6 /* BPIALL */
	dsb
	isb

#ifdef CONFIG_ARCH_MSM_KRAIT
	mrc p15, 0, r1, c0, c0, 0
	ldr r3, =0xff00fc00
	and r3, r1, r3
	ldr r1, =0x51000400
	cmp r3, r1
	mrceq p15, 7, r3, c15, c0, 2
	biceq r3, r3, #0x400
	mcreq p15, 7, r3, c15, c0, 2
#else
	RESUME_8x25_L2
	SET_SMP_COHERENCY ON
#endif

#if defined(CONFIG_MSM_JTAG) || defined(CONFIG_MSM_JTAG_MM)
	stmfd sp!, {lr}
	bl msm_jtag_restore_state
	ldmfd sp!, {lr}
#endif
	mov r0, #1
	bx lr
	nop
	nop
	nop
	nop
	nop
1:	b 1b

ENTRY(msm_pm_boot_entry)
	mrc p15, 0, r0, c0, c0, 5 /* MPIDR */
	and r0, r0, #15 /* what CPU am I */

	ldr r1, =msm_pc_debug_counters_phys /* phys addr for IMEM reg */
	ldr r2, =msm_pm_boot_entry
	adr r3, msm_pm_boot_entry
	add r1, r1, r3 /* translate virt to phys addr */
	sub r1, r1, r2
	ldr r1, [r1]

	cmp r1, #0
	beq skip_pc_debug3
	add r1, r1, r0, LSL #4 /* debug location for this CPU */
	add r1, #4 /* warmboot entry counter */
	ldr r2, [r1]
	add r2, #1
	str r2, [r1]

skip_pc_debug3:
	ldr r1, =msm_pm_boot_vector
	ldr r2, =msm_pm_boot_entry
	adr r3, msm_pm_boot_entry
	add r1, r1, r3 /* translate virt to phys addr */
	sub r1, r1, r2

	add r1, r1, r0, LSL #2 /* locate boot vector for our cpu */
	ldr pc, [r1] /* jump */

ENTRY(msm_pm_set_l2_flush_flag)
	ldr r1, =msm_pm_flush_l2_flag
	str r0, [r1]
	bx lr

ENTRY(msm_pm_get_l2_flush_flag)
	ldr r1, =msm_pm_flush_l2_flag
	ldr r0, [r1]
	bx lr

	.data

	.globl msm_pm_pc_pgd
msm_pm_pc_pgd:
	.long 0x0

	.globl msm_saved_state
msm_saved_state:
	.long 0x0

	.globl msm_saved_state_phys
msm_saved_state_phys:
	.long 0x0

	.globl msm_pm_boot_vector
msm_pm_boot_vector:
	.space 4 * NR_CPUS

	.globl target_type
target_type:
	.long 0x0

	.globl apps_power_collapse
apps_power_collapse:
	.long 0x0

	.globl l2x0_base_addr
l2x0_base_addr:
	.long 0x0

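/*
 * msm_pc_debug_counters points to a per-CPU debug area in IMEM; each CPU
 * uses a 16-byte slot indexed by its CPU number.  As incremented above,
 * offset 0x0 counts power collapse attempts, offset 0x4 counts warm-boot
 * entries (msm_pm_boot_entry) and offset 0x8 counts returns through
 * msm_pm_collapse.
 */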
	.globl msm_pc_debug_counters_phys
msm_pc_debug_counters_phys:
	.long 0x0

	.globl msm_pc_debug_counters
msm_pc_debug_counters:
	.long 0x0

/*
 * Default the L2 flush flag to 1 so that caches are flushed during power
 * collapse unless the L2 driver decides to flush them only during L2
 * power collapse.
 */
msm_pm_flush_l2_flag:
	.long 0x1
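
/*
 * The flag can be changed at run time from C through the accessors
 * above: msm_pm_set_l2_flush_flag() stores a new value and
 * msm_pm_get_l2_flush_flag() returns the current one.
 */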

/*
 * Save and restore the l2x0 registers while the system is entering and
 * resuming from power collapse:
 * 1. aux_ctrl_save (0x0)
 * 2. data_latency_ctrl (0x4)
 * 3. prefetch control (0x8)
 */
l2x0_saved_ctrl_reg_val:
	.space 4 * 3