/*
 * linux/arch/arm/mach-omap2/sleep.S
 *
 * (C) Copyright 2007
 * Texas Instruments
 * Karthik Dasu <karthik-dp@ti.com>
 *
 * (C) Copyright 2004
 * Texas Instruments, <www.ti.com>
 * Richard Woodruff <r-woodruff2@ti.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <mach/io.h>
#include <plat/control.h>

#include "cm.h"
#include "prm.h"
#include "sdrc.h"

#define PM_PREPWSTST_CORE_V	OMAP34XX_PRM_REGADDR(CORE_MOD, \
				OMAP3430_PM_PREPWSTST)
#define PM_PREPWSTST_CORE_P	0x48306AE8
#define PM_PREPWSTST_MPU_V	OMAP34XX_PRM_REGADDR(MPU_MOD, \
				OMAP3430_PM_PREPWSTST)
#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + PM_PWSTCTRL
#define CM_IDLEST1_CORE_V	OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
#define SRAM_BASE_P		0x40200000
#define CONTROL_STAT		0x480022F0
#define SCRATCHPAD_MEM_OFFS	0x310 /* Move this as correct place is
				       * available */
#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\
				+ SCRATCHPAD_MEM_OFFS)
#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
#define SDRC_SYSCONFIG_P	(OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
#define SDRC_MR_0_P		(OMAP343X_SDRC_BASE + SDRC_MR_0)
#define SDRC_EMR2_0_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_0)
#define SDRC_MANUAL_0_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
#define SDRC_MR_1_P		(OMAP343X_SDRC_BASE + SDRC_MR_1)
#define SDRC_EMR2_1_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_1)
#define SDRC_MANUAL_1_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
#define SDRC_DLLA_STATUS_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
#define SDRC_DLLA_CTRL_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)
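
/*
 * Note: the _V suffixed macros above are virtual (ioremapped) register
 * addresses, while the _P suffixed ones are physical addresses; the restore
 * path below runs with the MMU still off and therefore uses the physical
 * variants.
 */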

	.text
/* Function call to get the restore pointer for resume from OFF */
ENTRY(get_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(get_restore_pointer_sz)
	.word	. - get_restore_pointer
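
/*
 * The *_sz ENTRYs here and below are data words holding the size of the
 * routine they follow, so that the PM code can copy the routine elsewhere
 * (for example into SRAM) when it needs to run it from there.
 */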

	.text
/* Function call to get the restore pointer for ES3 to resume from OFF */
ENTRY(get_es3_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore_es3
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(get_es3_restore_pointer_sz)
	.word	. - get_es3_restore_pointer

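/*
 * es3_sdrc_fix: re-initialize the SDRC when resuming from off-mode on ES3.x
 * parts. It clears the "part access blocked" bit in SDRC_SYSCONFIG if it is
 * set, rewrites the MR/EMR2 registers of both chip-selects and issues a
 * manual autorefresh command on each. Only physical register addresses are
 * used, since restore_es3 below copies this routine (together with its
 * literal pool) into SRAM and runs it there before the MMU is enabled.
 */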
ENTRY(es3_sdrc_fix)
	ldr	r4, sdrc_syscfg		@ get config addr
	ldr	r5, [r4]		@ get value
	tst	r5, #0x100		@ is part access blocked
	it	eq
	biceq	r5, r5, #0x100		@ clear bit if set
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_mr_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_0	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	ldr	r4, sdrc_mr_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_1	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	bx	lr
sdrc_syscfg:
	.word	SDRC_SYSCONFIG_P
sdrc_mr_0:
	.word	SDRC_MR_0_P
sdrc_emr2_0:
	.word	SDRC_EMR2_0_P
sdrc_manual_0:
	.word	SDRC_MANUAL_0_P
sdrc_mr_1:
	.word	SDRC_MR_1_P
sdrc_emr2_1:
	.word	SDRC_EMR2_1_P
sdrc_manual_1:
	.word	SDRC_MANUAL_1_P
ENTRY(es3_sdrc_fix_sz)
	.word	. - es3_sdrc_fix

/* Function to call rom code to save secure ram context */
ENTRY(save_secure_ram_context)
	stmfd	sp!, {r1-r12, lr}	@ save registers on stack
save_secure_ram_debug:
	/* b save_secure_ram_debug */	@ enable to debug save code
	adr	r3, api_params		@ r3 points to parameters
	str	r0, [r3,#0x4]		@ r0 has sdram address
	ldr	r12, high_mask
	and	r3, r3, r12
	ldr	r12, sram_phy_addr_mask
	orr	r3, r3, r12
	mov	r0, #25			@ set service ID for PPA
	mov	r12, r0			@ copy secure service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
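	/*
	 * The .word below is a hand-encoded SMC (SMI) #1 instruction
	 * (0xE1600071), presumably emitted as raw data because the assembler
	 * being used does not recognise the smc mnemonic.
	 */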
	.word	0xE1600071		@ call SMI monitor (smi #1)
	nop
	nop
	nop
	nop
	ldmfd	sp!, {r1-r12, pc}
sram_phy_addr_mask:
	.word	SRAM_BASE_P
high_mask:
	.word	0xffff
api_params:
	.word	0x4, 0x0, 0x0, 0x1, 0x1
ENTRY(save_secure_ram_context_sz)
	.word	. - save_secure_ram_context

/*
 * Forces OMAP into idle state
 *
 * omap34xx_suspend() - This bit of code just executes the WFI
 * for normal idles.
 *
 * Note: This code gets copied to internal SRAM at boot. When the OMAP
 * wakes up it continues execution at the point it went to sleep.
 */
ENTRY(omap34xx_cpu_suspend)
	stmfd	sp!, {r0-r12, lr}	@ save registers on stack
loop:
	/*b	loop*/	@ Enable to debug by stepping through code
	/* r0 contains restore pointer in sdram */
	/* r1 contains information about saving context */
	ldr	r4, sdrc_power		@ read the SDRC_POWER register
	ldr	r5, [r4]		@ read the contents of SDRC_POWER
	orr	r5, r5, #0x40		@ enable self refresh on idle req
	str	r5, [r4]		@ write back to SDRC_POWER register

	cmp	r1, #0x0
	/* If context save is required, do that and execute wfi */
	bne	save_context_wfi
	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5

	wfi				@ wait for interrupt

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	wait_sdrc_ok

	ldmfd	sp!, {r0-r12, pc}	@ restore regs and return
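
/*
 * restore_es3: restore entry point used on ES3.x parts. If the previous
 * power state of the CORE domain was OFF, es3_sdrc_fix (above) is first
 * copied word by word into SRAM and executed from there, after which
 * execution falls through to the common restore path below.
 */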
restore_es3:
	/*b restore_es3*/		@ Enable to debug restore code
	ldr	r5, pm_prepwstst_core_p
	ldr	r4, [r5]
	and	r4, r4, #0x3
	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
	bne	restore
	adr	r0, es3_sdrc_fix
	ldr	r1, sram_base
	ldr	r2, es3_sdrc_fix_sz
	mov	r2, r2, ror #2
copy_to_sram:
	ldmia	r0!, {r3}	@ val = *src
	stmia	r1!, {r3}	@ *dst = val
	subs	r2, r2, #0x1	@ num_words--
	bne	copy_to_sram
	ldr	r1, sram_base
	blx	r1
restore:
	/* b restore*/			@ Enable to debug restore code
	/* Check what was the reason for mpu reset and store the reason in r9*/
	/* 1 - Only L1 and logic lost */
	/* 2 - Only L2 lost - In this case, we won't be here */
	/* 3 - Both L1 and L2 lost */
	ldr	r1, pm_pwstctrl_mpu
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if target power state was OFF or RET
	moveq	r9, #0x3	@ MPU OFF => L1 and L2 lost
	movne	r9, #0x1	@ Only L1 and logic lost => avoid L2 invalidation
	bne	logic_l1_restore
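	/*
	 * Bits [10:8] of CONTROL_STATUS encode the device type; 0x300 is a
	 * general purpose (GP) device, which can invalidate L2 directly via
	 * the SMI at l2_inv_gp. Other (secure) device types instead go
	 * through the PPA services called below.
	 */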
	ldr	r0, control_stat
	ldr	r1, [r0]
	and	r1, #0x700
	cmp	r1, #0x300
	beq	l2_inv_gp
	mov	r0, #40			@ set service ID for PPA
	mov	r12, r0			@ copy secure Service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	adr	r3, l2_inv_api_params	@ r3 points to dummy parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
	/* Write to Aux control register to set some bits */
	mov	r0, #42			@ set service ID for PPA
	mov	r12, r0			@ copy secure Service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]		@ r3 points to parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)

	b	logic_l1_restore
l2_inv_api_params:
	.word	0x1, 0x00
l2_inv_gp:
	/* Execute smi to invalidate L2 cache */
	mov	r12, #0x1		@ set up to invalidate L2
smi:	.word	0xE1600070		@ Call SMI monitor (smieq)
	/* Write to Aux control register to set some bits */
	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	ldr	r0, [r3,#4]
	mov	r12, #0x3
	.word	0xE1600070		@ Call SMI monitor (smieq)
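	/*
	 * logic_l1_restore: reload the CP15 state that save_context_wfi
	 * stored to SDRAM. The scratchpad word at offset 0xBC points to that
	 * save area; the first two words (the parameter count and saved
	 * auxiliary control value used by the calls above) are skipped, and
	 * the remaining registers are restored in the same order in which
	 * they were saved.
	 */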
logic_l1_restore:
	mov	r1, #0
	/* Invalidate all instruction caches to PoU
	 * and flush branch target cache */
	mcr	p15, 0, r1, c7, c5, 0

	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	adds	r3, r3, #8
	ldmia	r3!, {r4-r6}
	mov	sp, r4
	msr	spsr_cxsf, r5
	mov	lr, r6

	ldmia	r3!, {r4-r9}
	/* Coprocessor access Control Register */
	mcr	p15, 0, r4, c1, c0, 2

	/* TTBR0 */
	MCR	p15, 0, r5, c2, c0, 0
	/* TTBR1 */
	MCR	p15, 0, r6, c2, c0, 1
	/* Translation table base control register */
	MCR	p15, 0, r7, c2, c0, 2
	/* domain access Control Register */
	MCR	p15, 0, r8, c3, c0, 0
	/* data fault status Register */
	MCR	p15, 0, r9, c5, c0, 0

	ldmia	r3!,{r4-r8}
	/* instruction fault status Register */
	MCR	p15, 0, r4, c5, c0, 1
	/* Data Auxiliary Fault Status Register */
	MCR	p15, 0, r5, c5, c1, 0
	/* Instruction Auxiliary Fault Status Register */
	MCR	p15, 0, r6, c5, c1, 1
	/* Data Fault Address Register */
	MCR	p15, 0, r7, c6, c0, 0
	/* Instruction Fault Address Register */
	MCR	p15, 0, r8, c6, c0, 2
	ldmia	r3!,{r4-r7}

	/* user r/w thread and process ID */
	MCR	p15, 0, r4, c13, c0, 2
	/* user ro thread and process ID */
	MCR	p15, 0, r5, c13, c0, 3
	/* Privileged only thread and process ID */
	MCR	p15, 0, r6, c13, c0, 4
	/* cache size selection */
	MCR	p15, 2, r7, c0, c0, 0
	ldmia	r3!,{r4-r8}
	/* Data TLB lockdown registers */
	MCR	p15, 0, r4, c10, c0, 0
	/* Instruction TLB lockdown registers */
	MCR	p15, 0, r5, c10, c0, 1
	/* Secure or Nonsecure Vector Base Address */
	MCR	p15, 0, r6, c12, c0, 0
	/* FCSE PID */
	MCR	p15, 0, r7, c13, c0, 0
	/* Context PID */
	MCR	p15, 0, r8, c13, c0, 1

	ldmia	r3!,{r4-r5}
	/* primary memory remap register */
	MCR	p15, 0, r4, c10, c2, 0
	/* normal memory remap register */
	MCR	p15, 0, r5, c10, c2, 1

	/* Restore cpsr */
	ldmia	r3!,{r4}	/* load CPSR from SDRAM */
	msr	cpsr, r4	/* store cpsr */

	/* Enabling MMU here */
	mrc	p15, 0, r7, c2, c0, 2	/* Read TTBRControl */
	/* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1 */
	and	r7, #0x7
	cmp	r7, #0x0
	beq	usettbr0
ttbr_error:
	/* More work needs to be done to support N[0:2] value other than 0
	 * So looping here so that the error can be detected
	 */
	b	ttbr_error
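	/*
	 * usettbr0: the MMU is about to be turned on while this code is
	 * still running from physical addresses, so a temporary 1MB section
	 * entry flat-mapping the region that contains the current pc is
	 * written into the translation table. The original entry and its
	 * address are stashed in the scratchpad (offsets 0xC0 and 0xC4) so
	 * they can be restored once the MMU is back on.
	 */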
usettbr0:
	mrc	p15, 0, r2, c2, c0, 0
	ldr	r5, ttbrbit_mask
	and	r2, r5
	mov	r4, pc
	ldr	r5, table_index_mask
	and	r4, r5		/* r4 = 31 to 20 bits of pc */
	/* Extract the value to be written to table entry */
	ldr	r1, table_entry
	add	r1, r1, r4	/* r1 has value to be written to table entry */
	/* Getting the address of table entry to modify */
	lsr	r4, #18
	add	r2, r4		/* r2 has the location which needs to be modified */
	/* Storing previous entry of location being modified */
	ldr	r5, scratchpad_base
	ldr	r4, [r2]
	str	r4, [r5, #0xC0]
	/* Modify the table entry */
	str	r1, [r2]
	/* Storing address of entry being modified
	 * - will be restored after enabling MMU */
	ldr	r5, scratchpad_base
	str	r2, [r5, #0xC4]

	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 4	@ Flush prefetch buffer
	mcr	p15, 0, r0, c7, c5, 6	@ Invalidate branch predictor array
	mcr	p15, 0, r0, c8, c5, 0	@ Invalidate instruction TLB
	mcr	p15, 0, r0, c8, c6, 0	@ Invalidate data TLB
	/* Restore control register but don't enable caches here */
	/* Caches will be enabled after restoring MMU table entry */
	ldmia	r3!, {r4}
	/* Store previous value of control register in scratchpad */
	str	r4, [r5, #0xC8]
	ldr	r2, cache_pred_disable_mask
	and	r4, r2
	mcr	p15, 0, r4, c1, c0, 0

	ldmfd	sp!, {r0-r12, pc}	@ restore regs and return
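
/*
 * save_context_wfi: taken when r1 indicates that context must be saved.
 * The ARM/CP15 state is written out to the SDRAM buffer passed in r0 (in
 * the order expected by the restore path above), the caches are cleaned as
 * required for the target state, and then WFI is executed.
 */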
save_context_wfi:
	/*b	save_context_wfi*/	@ enable to debug save code
	mov	r8, r0		/* Store SDRAM address in r8 */
	mrc	p15, 0, r5, c1, c0, 1	@ Read Auxiliary Control Register
	mov	r4, #0x1	@ Number of parameters for restore call
	stmia	r8!, {r4-r5}
	/* Check what the target sleep state is: stored in r1 */
	/* 1 - Only L1 and logic lost */
	/* 2 - Only L2 lost */
	/* 3 - Both L1 and L2 lost */
	cmp	r1, #0x2	/* Only L2 lost */
	beq	clean_l2
	cmp	r1, #0x1	/* L2 retained */
	/* r9 stores whether to clean L2 or not */
	moveq	r9, #0x0	/* Don't clean L2 */
	movne	r9, #0x1	/* Clean L2 */
l1_logic_lost:
	/* Store sp and spsr to SDRAM */
	mov	r4, sp
	mrs	r5, spsr
	mov	r6, lr
	stmia	r8!, {r4-r6}
	/* Save all ARM registers */
	/* Coprocessor access control register */
	mrc	p15, 0, r6, c1, c0, 2
	stmia	r8!, {r6}
	/* TTBR0, TTBR1 and Translation table base control */
	mrc	p15, 0, r4, c2, c0, 0
	mrc	p15, 0, r5, c2, c0, 1
	mrc	p15, 0, r6, c2, c0, 2
	stmia	r8!, {r4-r6}
	/* Domain access control register, data fault status register,
	and instruction fault status register */
	mrc	p15, 0, r4, c3, c0, 0
	mrc	p15, 0, r5, c5, c0, 0
	mrc	p15, 0, r6, c5, c0, 1
	stmia	r8!, {r4-r6}
	/* Data aux fault status register, instruction aux fault status,
	data fault address register and instruction fault address register */
	mrc	p15, 0, r4, c5, c1, 0
	mrc	p15, 0, r5, c5, c1, 1
	mrc	p15, 0, r6, c6, c0, 0
	mrc	p15, 0, r7, c6, c0, 2
	stmia	r8!, {r4-r7}
	/* user r/w thread and process ID, user r/o thread and process ID,
	priv only thread and process ID, cache size selection */
	mrc	p15, 0, r4, c13, c0, 2
	mrc	p15, 0, r5, c13, c0, 3
	mrc	p15, 0, r6, c13, c0, 4
	mrc	p15, 2, r7, c0, c0, 0
	stmia	r8!, {r4-r7}
	/* Data TLB lockdown, instruction TLB lockdown registers */
	mrc	p15, 0, r5, c10, c0, 0
	mrc	p15, 0, r6, c10, c0, 1
	stmia	r8!, {r5-r6}
	/* Secure or non secure vector base address, FCSE PID, Context PID*/
	mrc	p15, 0, r4, c12, c0, 0
	mrc	p15, 0, r5, c13, c0, 0
	mrc	p15, 0, r6, c13, c0, 1
	stmia	r8!, {r4-r6}
	/* Primary remap, normal remap registers */
	mrc	p15, 0, r4, c10, c2, 0
	mrc	p15, 0, r5, c10, c2, 1
	stmia	r8!,{r4-r5}

	/* Store current cpsr*/
	mrs	r2, cpsr
	stmia	r8!, {r2}

	mrc	p15, 0, r4, c1, c0, 0
	/* save control register */
	stmia	r8!, {r4}
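	/*
	 * What follows is the standard ARMv7 set/way maintenance walk:
	 * iterate over the cache levels reported by CLIDR up to the Level of
	 * Coherency, read each level's geometry from CCSIDR and issue a
	 * clean by set/way for every set and way of that level. r9, set up
	 * above from the target sleep state, decides whether this is needed
	 * at all.
	 */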
clean_caches:
	/* Clean Data or unified cache to POU */
	/* How to invalidate only L1 cache???? - #FIX_ME# */
	/* mcr	p15, 0, r11, c7, c11, 1 */
	cmp	r9, #1		/* Check whether L2 inval is required or not*/
	bne	skip_l2_inval
clean_l2:
	/* read clidr */
	mrc	p15, 1, r0, c0, c0, 1
	/* extract loc from clidr */
	ands	r3, r0, #0x7000000
	/* left align loc bit field */
	mov	r3, r3, lsr #23
	/* if loc is 0, then no need to clean */
	beq	finished
	/* start clean at cache level 0 */
	mov	r10, #0
loop1:
	/* work out 3x current cache level */
	add	r2, r10, r10, lsr #1
	/* extract cache type bits from clidr */
	mov	r1, r0, lsr r2
	/* mask of the bits for current cache only */
	and	r1, r1, #7
	/* see what cache we have at this level */
	cmp	r1, #2
	/* skip if no cache, or just i-cache */
	blt	skip
	/* select current cache level in cssr */
	mcr	p15, 2, r10, c0, c0, 0
	/* isb to sync the new cssr&csidr */
	isb
	/* read the new csidr */
	mrc	p15, 1, r1, c0, c0, 0
	/* extract the length of the cache lines */
	and	r2, r1, #7
	/* add 4 (line length offset) */
	add	r2, r2, #4
	ldr	r4, assoc_mask
	/* find maximum number on the way size */
	ands	r4, r4, r1, lsr #3
	/* find bit position of way size increment */
	clz	r5, r4
	ldr	r7, numset_mask
	/* extract max number of the index size */
	ands	r7, r7, r1, lsr #13
loop2:
	mov	r9, r4
	/* create working copy of max way size */
loop3:
	/* factor way and cache number into r11 */
	orr	r11, r10, r9, lsl r5
	/* factor index number into r11 */
	orr	r11, r11, r7, lsl r2
	/* clean by set/way */
	mcr	p15, 0, r11, c7, c10, 2
	/* decrement the way */
	subs	r9, r9, #1
	bge	loop3
	/* decrement the index */
	subs	r7, r7, #1
	bge	loop2
skip:
	add	r10, r10, #2
	/* increment cache number */
	cmp	r3, r10
	bgt	loop1
finished:
	/* switch back to cache level 0 */
	mov	r10, #0
	/* select current cache level in cssr */
	mcr	p15, 2, r10, c0, c0, 0
	isb
skip_l2_inval:
	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5

	wfi				@ wait for interrupt
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	wait_sdrc_ok

	/* restore regs and return */
	ldmfd	sp!, {r0-r12, pc}

/* Make sure SDRC accesses are ok */
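/*
 * wait_sdrc_ok spins on CM_IDLEST1_CORE until the SDRC is reported as
 * accessible again, clears the self-refresh-on-idle bit that was set in
 * SDRC_POWER before WFI, and then waits for the SDRC DLL to report lock
 * before returning to the caller.
 */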
wait_sdrc_ok:
	ldr	r4, cm_idlest1_core
	ldr	r5, [r4]
	and	r5, r5, #0x2
	cmp	r5, #0
	bne	wait_sdrc_ok
	ldr	r4, sdrc_power
	ldr	r5, [r4]
	bic	r5, r5, #0x40
	str	r5, [r4]
wait_dll_lock:
	/* Is dll in lock mode? */
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	tst	r5, #0x4
	bxne	lr
	/* wait till dll locks */
	ldr	r4, sdrc_dlla_status
	ldr	r5, [r4]
	and	r5, r5, #0x4
	cmp	r5, #0x4
	bne	wait_dll_lock
	bx	lr

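/*
 * Literal pool: register addresses and bit masks referenced by the code
 * above via pc-relative loads.
 */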
cm_idlest1_core:
	.word	CM_IDLEST1_CORE_V
sdrc_dlla_status:
	.word	SDRC_DLLA_STATUS_V
sdrc_dlla_ctrl:
	.word	SDRC_DLLA_CTRL_V
pm_prepwstst_core:
	.word	PM_PREPWSTST_CORE_V
pm_prepwstst_core_p:
	.word	PM_PREPWSTST_CORE_P
pm_prepwstst_mpu:
	.word	PM_PREPWSTST_MPU_V
pm_pwstctrl_mpu:
	.word	PM_PWSTCTRL_MPU_P
scratchpad_base:
	.word	SCRATCHPAD_BASE_P
sram_base:
	.word	SRAM_BASE_P + 0x8000
sdrc_power:
	.word	SDRC_POWER_V
clk_stabilize_delay:
	.word	0x000001FF
assoc_mask:
	.word	0x3ff
numset_mask:
	.word	0x7fff
ttbrbit_mask:
	.word	0xFFFFC000
table_index_mask:
	.word	0xFFF00000
table_entry:
	.word	0x00000C02
cache_pred_disable_mask:
	.word	0xFFFFE7FB
control_stat:
	.word	CONTROL_STAT
ENTRY(omap34xx_cpu_suspend_sz)
	.word	. - omap34xx_cpu_suspend