2 * linux/arch/arm/mach-omap2/sleep.S
6 * Karthik Dasu <karthik-dp@ti.com>
9 * Texas Instruments, <www.ti.com>
10 * Richard Woodruff <r-woodruff2@ti.com>
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License as
14 * published by the Free Software Foundation; either version 2 of
15 * the License, or (at your option) any later version.
17 * This program is distributed in the hope that it will be useful,
18 * but WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 * GNU General Public License for more details.
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, write to the Free Software
24 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
27 #include <linux/linkage.h>
28 #include <asm/assembler.h>
30 #include <plat/control.h>
35 #define PM_PREPWSTST_CORE_V OMAP34XX_PRM_REGADDR(CORE_MOD, \
36 OMAP3430_PM_PREPWSTST)
37 #define PM_PREPWSTST_CORE_P 0x48306AE8
38 #define PM_PREPWSTST_MPU_V OMAP34XX_PRM_REGADDR(MPU_MOD, \
39 OMAP3430_PM_PREPWSTST)
40 #define PM_PWSTCTRL_MPU_P OMAP3430_PRM_BASE + MPU_MOD + PM_PWSTCTRL
41 #define SRAM_BASE_P 0x40200000
42 #define CONTROL_STAT 0x480022F0
43 #define SCRATCHPAD_MEM_OFFS 0x310 /* Move this as correct place is
45 #define SCRATCHPAD_BASE_P (OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\
46 + SCRATCHPAD_MEM_OFFS)
47 #define SDRC_POWER_V OMAP34XX_SDRC_REGADDR(SDRC_POWER)
48 #define SDRC_SYSCONFIG_P (OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
49 #define SDRC_MR_0_P (OMAP343X_SDRC_BASE + SDRC_MR_0)
50 #define SDRC_EMR2_0_P (OMAP343X_SDRC_BASE + SDRC_EMR2_0)
51 #define SDRC_MANUAL_0_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
52 #define SDRC_MR_1_P (OMAP343X_SDRC_BASE + SDRC_MR_1)
53 #define SDRC_EMR2_1_P (OMAP343X_SDRC_BASE + SDRC_EMR2_1)
54 #define SDRC_MANUAL_1_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
57 /* Function call to get the restore pointer for resume from OFF */
/*
 * get_restore_pointer: returns (in r0) the location of the resume-from-OFF
 * entry point, so C code can stash it in the scratchpad before sleeping.
 * NOTE(review): the instruction that loads r0 with the restore address
 * (original line 60) is elided in this chunk — confirm against full source.
 */
58 ENTRY(get_restore_pointer)
59 stmfd sp!, {lr} @ save registers on stack
61 ldmfd sp!, {pc} @ restore regs and return
62 ENTRY(get_restore_pointer_sz)
/* byte size of the stub above, consumed by the C side when copying it */
63 .word . - get_restore_pointer
66 /* Function call to get the restore pointer for ES3 to resume from OFF */
67 ENTRY(get_es3_restore_pointer)
68 stmfd sp!, {lr} @ save registers on stack
/* NOTE(review): the instruction loading r0 with the ES3 restore address
 * (original line 69) is elided in this chunk — verify against full source. */
70 ldmfd sp!, {pc} @ restore regs and return
71 ENTRY(get_es3_restore_pointer_sz)
/* byte size of the stub above, consumed by the C side when copying it */
72 .word . - get_es3_restore_pointer
/*
 * es3_sdrc_fix: post-wakeup SDRC fixup for OMAP3 ES3.x silicon.
 * Unblocks SDRC register access if blocked, rewrites the MR/EMR2 mode
 * registers for both the _0 and _1 register sets, and issues an
 * autorefresh command (0x2 to the MANUAL registers) for each.
 * This routine is word-copied into SRAM by the restore path below.
 * NOTE(review): the ENTRY(es3_sdrc_fix) label (original line 74) and
 * several literal-pool labels are elided in this chunk.
 */
75 ldr r4, sdrc_syscfg @ get config addr
76 ldr r5, [r4] @ get value
77 tst r5, #0x100 @ is part access blocked
79 biceq r5, r5, #0x100 @ clear bit if set
80 str r5, [r4] @ write back change
81 ldr r4, sdrc_mr_0 @ get config addr
82 ldr r5, [r4] @ get value
83 str r5, [r4] @ write back change
84 ldr r4, sdrc_emr2_0 @ get config addr
85 ldr r5, [r4] @ get value
86 str r5, [r4] @ write back change
87 ldr r4, sdrc_manual_0 @ get config addr
88 mov r5, #0x2 @ autorefresh command
89 str r5, [r4] @ kick off refreshes
90 ldr r4, sdrc_mr_1 @ get config addr
91 ldr r5, [r4] @ get value
92 str r5, [r4] @ write back change
93 ldr r4, sdrc_emr2_1 @ get config addr
94 ldr r5, [r4] @ get value
95 str r5, [r4] @ write back change
96 ldr r4, sdrc_manual_1 @ get config addr
97 mov r5, #0x2 @ autorefresh command
98 str r5, [r4] @ kick off refreshes
/* Literal pool: physical SDRC register addresses (some labels elided) */
101 .word SDRC_SYSCONFIG_P
107 .word SDRC_MANUAL_0_P
113 .word SDRC_MANUAL_1_P
114 ENTRY(es3_sdrc_fix_sz)
/* byte size of es3_sdrc_fix, used when copying it into SRAM */
115 .word . - es3_sdrc_fix
117 /* Function to call rom code to save secure ram context */
118 ENTRY(save_secure_ram_context)
119 stmfd sp!, {r1-r12, lr} @ save registers on stack
120 save_secure_ram_debug:
121 /* b save_secure_ram_debug */ @ enable to debug save code
122 adr r3, api_params @ r3 points to parameters
123 str r0, [r3,#0x4] @ r0 has sdram address
/* NOTE(review): the surrounding instructions that convert r3 to a
 * physical address using sram_phy_addr_mask (original lines 124-127)
 * are only partially visible in this chunk. */
126 ldr r12, sram_phy_addr_mask
128 mov r0, #25 @ set service ID for PPA
129 mov r12, r0 @ copy secure service ID in r12
130 mov r1, #0 @ set task id for ROM code in r1
131 mov r2, #4 @ set some flags in r2, r6
133 mcr p15, 0, r0, c7, c10, 4 @ data write barrier
134 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
/* SMI is emitted as a raw opcode word (assembler may lack the mnemonic) */
135 .word 0xE1600071 @ call SMI monitor (smi #1)
140 ldmfd sp!, {r1-r12, pc}
/* api_params parameter table consumed by the ROM code — its label
 * (original lines 141-145) is elided in this chunk; slot 1 is patched
 * with the SDRAM address at line 123 above. */
146 .word 0x4, 0x0, 0x0, 0x1, 0x1
147 ENTRY(save_secure_ram_context_sz)
/* byte size of this routine, consumed when copying it to SRAM */
148 .word . - save_secure_ram_context
151 * Forces OMAP into idle state
153 * omap34xx_suspend() - This bit of code just executes the WFI
156 * Note: This code gets copied to internal SRAM at boot. When the OMAP
157 * wakes up it continues execution at the point it went to sleep.
159 ENTRY(omap34xx_cpu_suspend)
160 stmfd sp!, {r0-r12, lr} @ save registers on stack
162 /*b loop*/ @Enable to debug by stepping through code
163 /* r0 contains restore pointer in sdram */
164 /* r1 contains information about saving context */
165 ldr r4, sdrc_power @ read the SDRC_POWER register
166 ldr r5, [r4] @ read the contents of SDRC_POWER
167 orr r5, r5, #0x40 @ enable self refresh on idle req
168 str r5, [r4] @ write back to SDRC_POWER register
171 /* If context save is required, do that and execute wfi */
/* NOTE(review): the test of r1 and the branch to the context-save path
 * (original lines 169-172) are elided in this chunk. */
173 /* Data memory barrier and Data sync barrier */
175 mcr p15, 0, r1, c7, c10, 4 @ CP15 data sync barrier
176 mcr p15, 0, r1, c7, c10, 5 @ CP15 data memory barrier
178 wfi @ wait for interrupt
/* NOTE(review): original lines 179-191 (between wfi and the return) are
 * not visible in this chunk. */
192 ldmfd sp!, {r0-r12, pc} @ restore regs and return
/* ES3 restore path: if CORE lost context while sleeping, copy the
 * es3_sdrc_fix workaround into SRAM with the word-copy loop below,
 * then classify the MPU context loss into r9 for the restore code. */
194 /*b restore_es3*/ @ Enable to debug restore code
195 ldr r5, pm_prepwstst_core_p
/* NOTE(review): the load/mask that produce r4 from [r5] (original lines
 * 196-197) are elided in this chunk. */
198 cmp r4, #0x0 @ Check if previous power state of CORE is OFF
202 ldr r2, es3_sdrc_fix_sz
/* word-copy loop: r0 = src (es3_sdrc_fix), r1 = dst (SRAM), r2 = count */
205 ldmia r0!, {r3} @ val = *src
206 stmia r1!, {r3} @ *dst = val
207 subs r2, r2, #0x1 @ num_words--
/* NOTE(review): the conditional branch closing this copy loop (original
 * line 208) is elided in this chunk. */
212 /* b restore*/ @ Enable to debug restore code
213 /* Check what was the reason for mpu reset and store the reason in r9*/
214 /* 1 - Only L1 and logic lost */
215 /* 2 - Only L2 lost - In this case, we wont be here */
216 /* 3 - Both L1 and L2 lost */
217 ldr r1, pm_pwstctrl_mpu
/* NOTE(review): the load/mask producing r2 from [r1] (original lines
 * 218-219) are elided in this chunk. */
220 cmp r2, #0x0 @ Check if target power state was OFF or RET
221 moveq r9, #0x3 @ MPU OFF => L1 and L2 lost
222 movne r9, #0x1 @ Only L1 and logic lost => avoid L2 invalidation
/* Secure (HS) device restore: call ROM/PPA services via SMI.
 * Service 40 invalidates L2 (l2_inv_api_params); service 42 writes the
 * Aux Control Register (write_aux_control_params). */
229 mov r0, #40 @ set service ID for PPA
230 mov r12, r0 @ copy secure Service ID in r12
231 mov r1, #0 @ set task id for ROM code in r1
232 mov r2, #4 @ set some flags in r2, r6
234 adr r3, l2_inv_api_params @ r3 points to dummy parameters
235 mcr p15, 0, r0, c7, c10, 4 @ data write barrier
236 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
237 .word 0xE1600071 @ call SMI monitor (smi #1)
238 /* Write to Aux control register to set some bits */
239 mov r0, #42 @ set service ID for PPA
240 mov r12, r0 @ copy secure Service ID in r12
241 mov r1, #0 @ set task id for ROM code in r1
242 mov r2, #4 @ set some flags in r2, r6
244 adr r3, write_aux_control_params @ r3 points to parameters
245 mcr p15, 0, r0, c7, c10, 4 @ data write barrier
246 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
247 .word 0xE1600071 @ call SMI monitor (smi #1)
/* NOTE(review): the branch over the parameter tables and the
 * l2_inv_api_params data words (original lines 248-251) are elided. */
252 write_aux_control_params:
/* NOTE(review): this label's parameter data words (original lines
 * 253-254) are elided in this chunk. */
255 /* Execute smi to invalidate L2 cache */
256 mov r12, #0x1 @ set up to invalidate L2
257 smi: .word 0xE1600070 @ Call SMI monitor (smieq)
258 /* Write to Aux control register to set some bits */
/* NOTE(review): the setup between here and the second SMI (original
 * lines 259-260) is elided in this chunk. */
261 .word 0xE1600070 @ Call SMI monitor (smieq)
/* Restore the CP15 state previously saved to the scratchpad.
 * NOTE(review): the ldmia instructions that load each group of saved
 * values from the scratchpad into r4-r9 (between the mcr groups below)
 * are elided in this chunk. */
264 /* Invalidate all instruction caches to PoU
265 * and flush branch target cache */
266 mcr p15, 0, r1, c7, c5, 0
268 ldr r4, scratchpad_base
276 /* Coprocessor access Control Register */
277 mcr p15, 0, r4, c1, c0, 2
/* Translation table base register 0 (TTBR0) */
280 MCR p15, 0, r5, c2, c0, 0
/* Translation table base register 1 (TTBR1) */
282 MCR p15, 0, r6, c2, c0, 1
283 /* Translation table base control register */
284 MCR p15, 0, r7, c2, c0, 2
285 /*domain access Control Register */
286 MCR p15, 0, r8, c3, c0, 0
287 /* data fault status Register */
288 MCR p15, 0, r9, c5, c0, 0
291 /* instruction fault status Register */
292 MCR p15, 0, r4, c5, c0, 1
293 /*Data Auxiliary Fault Status Register */
294 MCR p15, 0, r5, c5, c1, 0
295 /*Instruction Auxiliary Fault Status Register*/
296 MCR p15, 0, r6, c5, c1, 1
297 /*Data Fault Address Register */
298 MCR p15, 0, r7, c6, c0, 0
299 /*Instruction Fault Address Register*/
300 MCR p15, 0, r8, c6, c0, 2
303 /* user r/w thread and process ID */
304 MCR p15, 0, r4, c13, c0, 2
305 /* user ro thread and process ID */
306 MCR p15, 0, r5, c13, c0, 3
307 /*Privileged only thread and process ID */
308 MCR p15, 0, r6, c13, c0, 4
309 /* cache size selection */
310 MCR p15, 2, r7, c0, c0, 0
312 /* Data TLB lockdown registers */
313 MCR p15, 0, r4, c10, c0, 0
314 /* Instruction TLB lockdown registers */
315 MCR p15, 0, r5, c10, c0, 1
316 /* Secure or Nonsecure Vector Base Address */
317 MCR p15, 0, r6, c12, c0, 0
/* FCSE PID */
319 MCR p15, 0, r7, c13, c0, 0
/* Context ID register */
321 MCR p15, 0, r8, c13, c0, 1
324 /* primary memory remap register */
325 MCR p15, 0, r4, c10, c2, 0
326 /*normal memory remap register */
327 MCR p15, 0, r5, c10, c2, 1
/* Restore the saved program status word last */
330 ldmia r3!,{r4} /*load CPSR from SDRAM*/
331 msr cpsr, r4 /*store cpsr */
333 /* Enabling MMU here */
334 mrc p15, 0, r7, c2, c0, 2 /* Read TTBRControl */
335 /* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1*/
340 /* More work needs to be done to support N[0:2] value other than 0
341 * So looping here so that the error can be detected
/* NOTE(review): the test of N and the error loop itself (original lines
 * 336-344), plus several instructions through line 368, are elided in
 * this chunk. */
345 mrc p15, 0, r2, c2, c0, 0 /* TTBR0 - page table base */
/* Build a temporary identity-ish section mapping for the current PC so
 * execution survives the moment the MMU comes on. */
349 ldr r5, table_index_mask
350 and r4, r5 /* r4 = 31 to 20 bits of pc */
351 /* Extract the value to be written to table entry */
353 add r1, r1, r4 /* r1 has value to be written to table entry*/
354 /* Getting the address of table entry to modify */
356 add r2, r4 /* r2 has the location which needs to be modified */
357 /* Storing previous entry of location being modified */
358 ldr r5, scratchpad_base
361 /* Modify the table entry */
363 /* Storing address of entry being modified
364 * - will be restored after enabling MMU */
365 ldr r5, scratchpad_base
369 mcr p15, 0, r0, c7, c5, 4 @ Flush prefetch buffer
370 mcr p15, 0, r0, c7, c5, 6 @ Invalidate branch predictor array
371 mcr p15, 0, r0, c8, c5, 0 @ Invalidate instruction TLB
372 mcr p15, 0, r0, c8, c6, 0 @ Invalidate data TLB
373 /* Restore control register but dont enable caches here*/
374 /* Caches will be enabled after restoring MMU table entry */
375 /* NOTE(review): this comment intentionally left as-is */
376 /* Store previous value of control register in scratchpad */
378 ldr r2, cache_pred_disable_mask
/* NOTE(review): the bic/str applying the mask and saving the previous
 * control-register value (original lines 377, 379) are elided here. */
380 mcr p15, 0, r4, c1, c0, 0 @ write control reg (MMU on, caches masked)
382 ldmfd sp!, {r0-r12, pc} @ restore regs and return
/* save_context_wfi: save ARM/CP15 context to SDRAM (address in r0) so a
 * wake-from-OFF can restore it; r9 records whether L2 must be cleaned. */
384 /*b save_context_wfi*/ @ enable to debug save code
385 mov r8, r0 /* Store SDRAM address in r8 */
386 /* Check what that target sleep state is:stored in r1*/
387 /* 1 - Only L1 and logic lost */
388 /* 2 - Only L2 lost */
389 /* 3 - Both L1 and L2 lost */
390 cmp r1, #0x2 /* Only L2 lost */
/* NOTE(review): the branch taken for the r1 == 2 case (original line
 * 391) is elided in this chunk. */
392 cmp r1, #0x1 /* L2 retained */
393 /* r9 stores whether to clean L2 or not*/
394 moveq r9, #0x0 /* Dont Clean L2 */
395 movne r9, #0x1 /* Clean L2 */
397 /* Store sp and spsr to SDRAM */
/* NOTE(review): the actual stores of sp/spsr and of the general-purpose
 * registers (original lines 398-401) are elided in this chunk, as are
 * the stmia instructions writing each mrc group below out to SDRAM. */
402 /* Save all ARM registers */
403 /* Coprocessor access control register */
404 mrc p15, 0, r6, c1, c0, 2
406 /* TTBR0, TTBR1 and Translation table base control */
407 mrc p15, 0, r4, c2, c0, 0
408 mrc p15, 0, r5, c2, c0, 1
409 mrc p15, 0, r6, c2, c0, 2
411 /* Domain access control register, data fault status register,
412 and instruction fault status register */
413 mrc p15, 0, r4, c3, c0, 0
414 mrc p15, 0, r5, c5, c0, 0
415 mrc p15, 0, r6, c5, c0, 1
417 /* Data aux fault status register, instruction aux fault status,
418 data fault address register and instruction fault address register*/
419 mrc p15, 0, r4, c5, c1, 0
420 mrc p15, 0, r5, c5, c1, 1
421 mrc p15, 0, r6, c6, c0, 0
422 mrc p15, 0, r7, c6, c0, 2
424 /* user r/w thread and process ID, user r/o thread and process ID,
425 priv only thread and process ID, cache size selection */
426 mrc p15, 0, r4, c13, c0, 2
427 mrc p15, 0, r5, c13, c0, 3
428 mrc p15, 0, r6, c13, c0, 4
429 mrc p15, 2, r7, c0, c0, 0
431 /* Data TLB lockdown, instruction TLB lockdown registers */
432 mrc p15, 0, r5, c10, c0, 0
433 mrc p15, 0, r6, c10, c0, 1
435 /* Secure or non secure vector base address, FCSE PID, Context PID*/
436 mrc p15, 0, r4, c12, c0, 0
437 mrc p15, 0, r5, c13, c0, 0
438 mrc p15, 0, r6, c13, c0, 1
440 /* Primary remap, normal remap registers */
441 mrc p15, 0, r4, c10, c2, 0
442 mrc p15, 0, r5, c10, c2, 1
445 /* Store current cpsr*/
449 mrc p15, 0, r4, c1, c0, 0
450 /* save control register */
453 /* Clean Data or unified cache to POU*/
454 /* How to invalidate only L1 cache???? - #FIX_ME# */
455 /* mcr p15, 0, r11, c7, c11, 1 */
456 cmp r9, #1 /* Check whether L2 inval is required or not*/
/* ARMv7 set/way cache maintenance walk driven by CLIDR/CCSIDR.
 * NOTE(review): several shift/branch instructions of the standard loop
 * (e.g. original lines 457-459, 464, 466, 468-469, 473, 475, 477, 479,
 * 483, 487-490, 494-495, 498-501, 509-519, 521, 524-525) are elided in
 * this chunk — the comments below describe the missing steps. */
460 mrc p15, 1, r0, c0, c0, 1
461 /* extract loc from clidr */
462 ands r3, r0, #0x7000000
463 /* left align loc bit field */
465 /* if loc is 0, then no need to clean */
467 /* start clean at cache level 0 */
470 /* work out 3x current cache level */
471 add r2, r10, r10, lsr #1
472 /* extract cache type bits from clidr*/
474 /* mask of the bits for current cache only */
476 /* see what cache we have at this level */
478 /* skip if no cache, or just i-cache */
480 /* select current cache level in cssr */
481 mcr p15, 2, r10, c0, c0, 0
482 /* isb to sync the new cssr&csidr */
484 /* read the new csidr */
485 mrc p15, 1, r1, c0, c0, 0
486 /* extract the length of the cache lines */
488 /* add 4 (line length offset) */
491 /* find maximum number on the way size */
492 ands r4, r4, r1, lsr #3
493 /* find bit position of way size increment */
496 /* extract max number of the index size*/
497 ands r7, r7, r1, lsr #13
500 /* create working copy of max way size*/
502 /* factor way and cache number into r11 */
503 orr r11, r10, r9, lsl r5
504 /* factor index number into r11 */
505 orr r11, r11, r7, lsl r2
506 /*clean & invalidate by set/way */
507 mcr p15, 0, r11, c7, c10, 2
508 /* decrement the way*/
511 /*decrement the index */
516 /* increment cache number */
520 /*switch back to cache level 0 */
522 /* select current cache level in cssr */
523 mcr p15, 2, r10, c0, c0, 0
526 /* Data memory barrier and Data sync barrier */
528 mcr p15, 0, r1, c7, c10, 4
529 mcr p15, 0, r1, c7, c10, 5
/* Enter low power; on a wake without full context loss execution
 * resumes here and falls through to the register restore below. */
531 wfi @ wait for interrupt
/* NOTE(review): original lines 532-542 (between wfi and the return) are
 * not visible in this chunk. */
543 /* restore regs and return */
544 ldmfd sp!, {r0-r12, pc}
/* Clock-stabilization delay (loop body elided in this chunk), followed
 * by the literal pool used by omap34xx_cpu_suspend. */
547 ldr r4, clk_stabilize_delay
/* NOTE(review): most literal-pool labels (sdrc_power, pm_prepwstst_core,
 * scratchpad_base, etc. — original lines 548-586) are elided below; only
 * the .word values for some entries remain visible. */
558 .word PM_PREPWSTST_CORE_V
560 .word PM_PREPWSTST_CORE_P
562 .word PM_PREPWSTST_MPU_V
564 .word PM_PWSTCTRL_MPU_P
566 .word SCRATCHPAD_BASE_P
568 .word SRAM_BASE_P + 0x8000
583 cache_pred_disable_mask:
/* NOTE(review): the mask value word for cache_pred_disable_mask
 * (original line 584) is not visible in this chunk. */
587 ENTRY(omap34xx_cpu_suspend_sz)
/* byte size of omap34xx_cpu_suspend, used when copying it to SRAM */
588 .word . - omap34xx_cpu_suspend