/*
 * Copyright 2018-2021 NXP
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>
#include <cortex_a53.h>
#include <dcfg_lsch2.h>
#include <plat_gic.h>
#include <scfg.h>

#include <bl31_data.h>
#include <plat_psci.h>
#include <platform_def.h>

/* the BASE address for these offsets is AUX_01_DATA in the
 * bootcore's psci data region
 */
#define DEVDISR2_MASK_OFFSET    0x0     /* references AUX_01_DATA */
#define DEVDISR5_MASK_OFFSET    0x8     /* references AUX_02_DATA */
#define CPUACTLR_DATA_OFFSET    0x10    /* references AUX_03_DATA */

/* the BASE address for these offsets is AUX_04_DATA in the
 * bootcore's psci data region
 */
#define GICD_BASE_ADDR_OFFSET   0x0     /* references AUX_04_DATA */
#define GICC_BASE_ADDR_OFFSET   0x8     /* references AUX_05_DATA */

#define DAIF_DATA               AUX_06_DATA  /* references AUX_06_DATA */

#define IPSTPACK_RETRY_CNT      0x10000
#define DDR_SLEEP_RETRY_CNT     0x10000
#define CPUACTLR_EL1            S3_1_C15_C2_0
#define DDR_SDRAM_CFG_2_FRCSR   0x80000000
#define DDR_SDRAM_CFG_2_OFFSET  0x114
#define DDR_TIMING_CFG_4_OFFSET 0x160
#define DDR_CNTRL_BASE_ADDR     0x01080000
#define DLL_LOCK_MASK           0x3
#define DLL_LOCK_VALUE          0x2

#define ERROR_DDR_SLEEP         -1
#define ERROR_DDR_WAKE          -2
#define ERROR_NO_QUIESCE        -3

#define CORE_RESTARTABLE        0
#define CORE_NOT_RESTARTABLE    1

#define RESET_RETRY_CNT         800

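/*
 * Note: the CCSR register blocks touched in this file (DCFG, SCFG, RCPM,
 * RCPM2, DDR) are big-endian on this SoC while the core runs little-endian,
 * which is why every 32-bit register access below byte-swaps the value
 * with 'rev' (see the "swap for BE" helper functions near the end of
 * this file)
 */
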
.global soc_init_lowlevel
.global soc_init_percpu

.global _soc_core_release
.global _soc_core_restart
.global _soc_ck_disabled
.global _soc_sys_reset
.global _soc_sys_off
.global _getGICD_BaseAddr
.global _getGICC_BaseAddr
.global _soc_set_start_addr

.global _soc_core_prep_off
.global _soc_core_entr_off
.global _soc_core_exit_off

.global _soc_core_prep_stdby
.global _soc_core_entr_stdby
.global _soc_core_exit_stdby

.global _soc_core_prep_pwrdn
.global _soc_core_entr_pwrdn
.global _soc_core_exit_pwrdn

.global _soc_clstr_prep_stdby
.global _soc_clstr_exit_stdby
.global _soc_clstr_prep_pwrdn
.global _soc_clstr_exit_pwrdn

.global _soc_sys_prep_stdby
.global _soc_sys_exit_stdby
.global _soc_sys_prep_pwrdn
.global _soc_sys_pwrdn_wfi
.global _soc_sys_exit_pwrdn

/*
 * This function initializes the SoC.
 * in:  void
 * out: void
 */
func soc_init_lowlevel
    ret
endfunc soc_init_lowlevel

/*
 * void soc_init_percpu(void)
 * this function performs any soc-specific initialization that is needed on
 * a per-core basis
 * in:  none
 * out: none
 * uses x0, x1, x2, x3
 */
func soc_init_percpu
    mov  x3, x30
    bl   plat_my_core_mask
    mov  x2, x0

    /* see if this core is marked for prefetch disable */
    mov  x0, #PREFETCH_DIS_OFFSET
    bl   _get_global_data  /* 0-1 */
    tst  x0, x2
    b.eq 1f
    bl   _disable_ldstr_pfetch_A53  /* 0 */
1:
    mov  x30, x3
    ret
endfunc soc_init_percpu

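/*
 * Note: _get_global_data returns the PREFETCH_DIS field as a bitmask with
 * one bit per core, so the 'tst' against this core's mask above decides
 * whether load-store prefetch gets disabled on this particular core
 */
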
/*
 * part of CPU_ON
 * this function releases a secondary core from reset
 * in:  x0 = core_mask_lsb
 * out: none
 * uses: x0, x1, x2, x3
 */
_soc_core_release:
#if (TEST_BL31)
    mov  w2, w0
    CoreMaskMsb w2, w3
    /* x2 = core mask msb */
#else
    mov  x2, x0
#endif
    /* write COREBCR */
    ldr  x1, =NXP_SCFG_ADDR
    rev  w3, w2
    str  w3, [x1, #SCFG_COREBCR_OFFSET]
    isb

    /* read-modify-write BRR */
    mov  x1, #NXP_DCFG_ADDR
    ldr  w2, [x1, #DCFG_BRR_OFFSET]
    rev  w3, w2
    orr  w3, w3, w0
    rev  w2, w3
    str  w2, [x1, #DCFG_BRR_OFFSET]
    isb

    /* send event */
    sev
    isb
    ret

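/*
 * Note: setting the core's bit in BRR takes it out of reset; the 'sev'
 * assumes the released core is parked in a wfe-based holding loop, and
 * wakes it so it can branch to the entry point previously programmed
 * into BOOTLOCPTRL/H via _soc_set_start_addr
 */
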
/*
 * part of CPU_ON
 * this function restarts a core that was shut down via _soc_core_entr_off
 * in:  x0 = core mask lsb (of the target cpu)
 * out: x0 == 0, on success
 *      x0 != 0, on failure
 * uses x0 ~ x5
 */
_soc_core_restart:
    mov  x5, x30
    mov  x3, x0
    /* x3 = core mask lsb */

    bl   _getGICD_BaseAddr
    mov  x4, x0
    /* x4 = GICD_BASE_ADDR */

    /* enable forwarding of group 0 interrupts by setting GICD_CTLR[0] = 1 */
    ldr  w1, [x4, #GICD_CTLR_OFFSET]
    orr  w1, w1, #GICD_CTLR_EN_GRP0
    str  w1, [x4, #GICD_CTLR_OFFSET]
    dsb  sy
    isb

    /*
     * fire SGI by writing to GICD_SGIR the following values:
     * [25:24] = 0x0 (forward interrupt to the CPU interfaces specified in CPUTargetList field)
     * [23:16] = core mask lsb[7:0] (forward interrupt to target cpu)
     * [15]    = 0 (forward SGI only if it is configured as group 0 interrupt)
     * [3:0]   = 0xF (interrupt ID = 15)
     */
    lsl  w1, w3, #16
    orr  w1, w1, #0xF
    str  w1, [x4, #GICD_SGIR_OFFSET]
    dsb  sy
    isb

    /* load '0' on success */
    mov  x0, xzr

    mov  x30, x5
    ret

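/*
 * Note: the SGI 15 raised here is the wake-up event that a core sitting
 * in _soc_core_entr_off polls for via GICD_SPENDSGIR3; the target core
 * clears the pending SGI and resumes through the PSCI wake-up path
 */
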
/*
 * this function determines if a core is disabled via COREDISR
 * in:  w0 = core_mask_lsb
 * out: w0 = 0, core not disabled
 *      w0 != 0, core disabled
 * uses x0, x1, x2
 */
_soc_ck_disabled:
    /* get base addr of dcfg block */
    ldr  x1, =NXP_DCFG_ADDR

    /* read COREDISR */
    ldr  w1, [x1, #DCFG_COREDISR_OFFSET]
    rev  w2, w1

    /* test core bit */
    and  w0, w2, w0
    ret

/*
 * this function resets the system via SoC-specific methods
 * in:  none
 * out: none
 * uses x0, x1, x2, x3
 */
_soc_sys_reset:
    ldr  x2, =NXP_DCFG_ADDR

    /* make sure the mask is cleared in the reset request mask register */
    mov  w1, wzr
    str  w1, [x2, #DCFG_RSTRQMR1_OFFSET]
    /* x2 = NXP_DCFG_ADDR */

    /* set the reset request */
    ldr  w1, =RSTCR_RESET_REQ
    ldr  x3, =DCFG_RSTCR_OFFSET
    rev  w0, w1
    str  w0, [x2, x3]
    /* x2 = NXP_DCFG_ADDR */
    /* x3 = DCFG_RSTCR_OFFSET */

    /* just in case this address range is mapped as cacheable,
     * flush the write out of the dcaches
     */
    add  x3, x2, x3
    dc   cvac, x3
    dsb  st
    isb

    /* Note: this function does not return */
1:
    wfi
    b    1b

/*
 * part of SYSTEM_OFF
 * this function turns off the SoC clocks
 * Note: this function is not intended to return, and the only allowable
 *       recovery is POR
 * in:  none
 * out: none
 * uses x0 ~ x8
 */
_soc_sys_off:
    /* mask interrupts at the core */
    mrs  x1, DAIF
    mov  x0, #DAIF_SET_MASK
    orr  x0, x1, x0
    msr  DAIF, x0

    /* disable icache, dcache, mmu @ EL1 */
    mov  x1, #SCTLR_I_C_M_MASK
    mrs  x0, sctlr_el1
    bic  x0, x0, x1
    msr  sctlr_el1, x0

    /* disable dcache for EL3 */
    mrs  x1, SCTLR_EL3
    bic  x1, x1, #SCTLR_C_MASK
    /* make sure icache is enabled */
    orr  x1, x1, #SCTLR_I_MASK
    msr  SCTLR_EL3, x1
    isb

    /* set WFIL2_EN in SCFG_COREPMCR */
    ldr  x0, =SCFG_COREPMCR_OFFSET
    ldr  x1, =COREPMCR_WFIL2
    bl   write_reg_scfg

    /* set OVRD_EN in RCPM2_POWMGTDCR */
    ldr  x0, =RCPM2_POWMGTDCR_OFFSET
    ldr  x1, =POWMGTDCR_OVRD_EN
    bl   write_reg_rcpm2

    /* read IPPDEXPCR0 @ RCPM_IPPDEXPCR0 */
    ldr  x0, =RCPM_IPPDEXPCR0_OFFSET
    bl   read_reg_rcpm
    mov  x7, x0

    /* build an override mask for IPSTPCR4/IPSTPACK4/DEVDISR5 */
    mov  x5, xzr
    ldr  x6, =IPPDEXPCR_MASK2
    and  x6, x6, x7
    cbz  x6, 1f

    /* x5 = override mask
     * x6 = IPPDEXPCR bits for DEVDISR5
     * x7 = IPPDEXPCR
     */

    /* get the overrides */
    orr  x4, x5, #DEVDISR5_I2C_1
    tst  x6, #IPPDEXPCR_I2C1
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR5_LPUART1
    tst  x6, #IPPDEXPCR_LPUART1
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR5_FLX_TMR
    tst  x6, #IPPDEXPCR_FLX_TMR1
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR5_OCRAM1
    tst  x6, #IPPDEXPCR_OCRAM1
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR5_GPIO
    tst  x6, #IPPDEXPCR_GPIO1
    csel x5, x5, x4, EQ
1:
    /* store the DEVDISR5 override mask */
    ldr  x2, =BC_PSCI_BASE
    add  x2, x2, #AUX_01_DATA
    str  w5, [x2, #DEVDISR5_MASK_OFFSET]

    /* build an override mask for IPSTPCR1/IPSTPACK1/DEVDISR2 */
    mov  x5, xzr
    ldr  x6, =IPPDEXPCR_MASK1
    and  x6, x6, x7
    cbz  x6, 2f

    /* x5 = override mask */
    /* x6 = IPPDEXPCR bits for DEVDISR2 */

    /* get the overrides */
    orr  x4, x5, #DEVDISR2_FMAN1_MAC1
    tst  x6, #IPPDEXPCR_MAC1_1
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR2_FMAN1_MAC2
    tst  x6, #IPPDEXPCR_MAC1_2
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR2_FMAN1_MAC3
    tst  x6, #IPPDEXPCR_MAC1_3
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR2_FMAN1_MAC4
    tst  x6, #IPPDEXPCR_MAC1_4
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR2_FMAN1_MAC5
    tst  x6, #IPPDEXPCR_MAC1_5
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR2_FMAN1_MAC6
    tst  x6, #IPPDEXPCR_MAC1_6
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR2_FMAN1_MAC9
    tst  x6, #IPPDEXPCR_MAC1_9
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR2_FMAN1
    tst  x6, #IPPDEXPCR_FM1
    csel x5, x5, x4, EQ
2:
    /* store the DEVDISR2 override mask */
    ldr  x2, =BC_PSCI_BASE
    add  x2, x2, #AUX_01_DATA
    str  w5, [x2, #DEVDISR2_MASK_OFFSET]

    /* x5 = DEVDISR2 override mask */

    /* write IPSTPCR0 - no overrides */
    ldr  x0, =RCPM2_IPSTPCR0_OFFSET
    ldr  x1, =IPSTPCR0_VALUE
    bl   write_reg_rcpm2

    /* x5 = DEVDISR2 override mask */

    /* write IPSTPCR1 - overrides possible */
    ldr  x0, =RCPM2_IPSTPCR1_OFFSET
    ldr  x1, =IPSTPCR1_VALUE
    bic  x1, x1, x5
    bl   write_reg_rcpm2

    /* write IPSTPCR2 - no overrides */
    ldr  x0, =RCPM2_IPSTPCR2_OFFSET
    ldr  x1, =IPSTPCR2_VALUE
    bl   write_reg_rcpm2

    /* write IPSTPCR3 - no overrides */
    ldr  x0, =RCPM2_IPSTPCR3_OFFSET
    ldr  x1, =IPSTPCR3_VALUE
    bl   write_reg_rcpm2

    /* write IPSTPCR4 - overrides possible */
    ldr  x2, =BC_PSCI_BASE
    add  x2, x2, #AUX_01_DATA
    ldr  w6, [x2, #DEVDISR5_MASK_OFFSET]
    ldr  x0, =RCPM2_IPSTPCR4_OFFSET
    ldr  x1, =IPSTPCR4_VALUE
    bic  x1, x1, x6
    bl   write_reg_rcpm2

    /* x5 = DEVDISR2 override mask */
    /* x6 = DEVDISR5 override mask */

    /* poll on IPSTPACK0 */
    ldr  x3, =RCPM2_IPSTPACKR0_OFFSET
    ldr  x4, =IPSTPCR0_VALUE
    ldr  x7, =IPSTPACK_RETRY_CNT
3:
    mov  x0, x3
    bl   read_reg_rcpm2
    cmp  x0, x4
    b.eq 14f
    sub  x7, x7, #1
    cbnz x7, 3b
14:
    /* poll on IPSTPACK1 */
    ldr  x3, =IPSTPCR1_VALUE
    ldr  x7, =IPSTPACK_RETRY_CNT
    bic  x4, x3, x5
    ldr  x3, =RCPM2_IPSTPACKR1_OFFSET
4:
    mov  x0, x3
    bl   read_reg_rcpm2
    cmp  x0, x4
    b.eq 15f
    sub  x7, x7, #1
    cbnz x7, 4b
15:
    /* poll on IPSTPACK2 */
    ldr  x3, =RCPM2_IPSTPACKR2_OFFSET
    ldr  x4, =IPSTPCR2_VALUE
    ldr  x7, =IPSTPACK_RETRY_CNT
5:
    mov  x0, x3
    bl   read_reg_rcpm2
    cmp  x0, x4
    b.eq 16f
    sub  x7, x7, #1
    cbnz x7, 5b
16:
    /* poll on IPSTPACK3 */
    ldr  x3, =RCPM2_IPSTPACKR3_OFFSET
    ldr  x4, =IPSTPCR3_VALUE
    ldr  x7, =IPSTPACK_RETRY_CNT
6:
    mov  x0, x3
    bl   read_reg_rcpm2
    cmp  x0, x4
    b.eq 17f
    sub  x7, x7, #1
    cbnz x7, 6b
17:
    /* poll on IPSTPACK4 */
    ldr  x3, =IPSTPCR4_VALUE
    ldr  x7, =IPSTPACK_RETRY_CNT
    bic  x4, x3, x6
    ldr  x3, =RCPM2_IPSTPACKR4_OFFSET
7:
    mov  x0, x3
    bl   read_reg_rcpm2
    cmp  x0, x4
    b.eq 18f
    sub  x7, x7, #1
    cbnz x7, 7b
18:
    ldr  x7, =BC_PSCI_BASE
    add  x7, x7, #AUX_01_DATA

    /* x5 = DEVDISR2 override mask
     * x6 = DEVDISR5 override mask
     * x7 = [soc_data_area]
     */

    /* DEVDISR1 - load new value */
    mov  x0, #DCFG_DEVDISR1_OFFSET
    bl   read_reg_dcfg
    mov  x0, #DCFG_DEVDISR1_OFFSET
    ldr  x1, =DEVDISR1_VALUE
    bl   write_reg_dcfg

    /* DEVDISR2 - load new value */
    mov  x0, #DCFG_DEVDISR2_OFFSET
    bl   read_reg_dcfg
    mov  x0, #DCFG_DEVDISR2_OFFSET
    ldr  x1, =DEVDISR2_VALUE
    bic  x1, x1, x5
    bl   write_reg_dcfg

    /* x6 = DEVDISR5 override mask */
    /* x7 = [soc_data_area] */

    /* DEVDISR3 - load new value */
    mov  x0, #DCFG_DEVDISR3_OFFSET
    bl   read_reg_dcfg
    mov  x0, #DCFG_DEVDISR3_OFFSET
    ldr  x1, =DEVDISR3_VALUE
    bl   write_reg_dcfg

    /* DEVDISR4 - load new value */
    mov  x0, #DCFG_DEVDISR4_OFFSET
    bl   read_reg_dcfg
    mov  x0, #DCFG_DEVDISR4_OFFSET
    ldr  x1, =DEVDISR4_VALUE
    bl   write_reg_dcfg

    /* DEVDISR5 - load new value */
    mov  x0, #DCFG_DEVDISR5_OFFSET
    bl   read_reg_dcfg
    mov  x0, #DCFG_DEVDISR5_OFFSET
    ldr  x1, =DEVDISR5_VALUE
    bic  x1, x1, x6
    bl   write_reg_dcfg

    /* x7 = [soc_data_area] */

    /* disable data prefetch */
    mrs  x0, CPUACTLR_EL1
    bic  x0, x0, #CPUACTLR_L1PCTL_MASK
    msr  CPUACTLR_EL1, x0

    /* x6 = DEVDISR5 override mask */

    /* setup registers for cache-only execution */
    ldr  x5, =IPSTPCR4_VALUE
    bic  x5, x5, x6
    mov  x6, #DDR_CNTRL_BASE_ADDR
    mov  x7, #DCSR_RCPM2_BASE
    mov  x8, #NXP_DCFG_ADDR
    dsb  sy
    isb

    /* set the DLL_LOCK cycle count */
    ldr  w1, [x6, #DDR_TIMING_CFG_4_OFFSET]
    rev  w2, w1
    bic  w2, w2, #DLL_LOCK_MASK
    orr  w2, w2, #DLL_LOCK_VALUE
    rev  w1, w2
    str  w1, [x6, #DDR_TIMING_CFG_4_OFFSET]

    /* x5 = ipstpcr4 (IPSTPCR4_VALUE bic DEVDISR5_MASK)
     * x6 = DDR_CNTRL_BASE_ADDR
     * x7 = DCSR_RCPM2_BASE
     * x8 = NXP_DCFG_ADDR
     */

    /* enter the cache-only sequence - there is no return */
    b    final_shutdown

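/*
 * Note: final_shutdown (at the end of this file) runs entirely from the
 * icache, so it can put DDR into self refresh and stop the DDR controller
 * clocks without needing any further instruction fetches from DDR
 */
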
/*
 * part of CPU_OFF
 * this function programs SoC & GIC registers in preparation for shutting down
 * the core
 * in:  x0 = core mask lsb
 * out: none
 * uses x0 ~ x7
 */
_soc_core_prep_off:
    mov  x7, x30
    mov  x6, x0

    /* make sure the smpen bit is set */
    mrs  x2, CORTEX_A53_ECTLR_EL1
    orr  x2, x2, #CPUECTLR_SMPEN_MASK
    msr  CORTEX_A53_ECTLR_EL1, x2
    isb

    /* configure the cpu interface */

    /* disable signaling of ints */
    bl   _getGICC_BaseAddr  // 0-1
    mov  x4, x0

    ldr  w3, [x4, #GICC_CTLR_OFFSET]
    bic  w3, w3, #GICC_CTLR_EN_GRP0
    bic  w3, w3, #GICC_CTLR_EN_GRP1
    str  w3, [x4, #GICC_CTLR_OFFSET]
    dsb  sy
    isb

    /*
     * x3 = GICC_CTLR
     * x4 = GICC_BASE_ADDR
     * x6 = core mask
     */

    /* set the priority filter */
    ldr  w2, [x4, #GICC_PMR_OFFSET]
    orr  w2, w2, #GICC_PMR_FILTER
    str  w2, [x4, #GICC_PMR_OFFSET]

    /* setup GICC_CTLR */
    bic  w3, w3, #GICC_CTLR_ACKCTL_MASK
    orr  w3, w3, #GICC_CTLR_FIQ_EN_MASK
    orr  w3, w3, #GICC_CTLR_EOImodeS_MASK
    orr  w3, w3, #GICC_CTLR_CBPR_MASK
    str  w3, [x4, #GICC_CTLR_OFFSET]

    /* x3 = GICC_CTLR */
    /* x4 = GICC_BASE_ADDR */

    /* setup the banked-per-core GICD registers */
    bl   _getGICD_BaseAddr

    /*
     * x0 = GICD_BASE_ADDR
     * x3 = GICC_CTLR
     * x4 = GICC_BASE_ADDR
     * x6 = core mask
     */

    /* define SGI15 as Grp0 */
    ldr  w2, [x0, #GICD_IGROUPR0_OFFSET]
    bic  w2, w2, #GICD_IGROUP0_SGI15
    str  w2, [x0, #GICD_IGROUPR0_OFFSET]

    /* set priority of SGI 15 to highest... */
    ldr  w2, [x0, #GICD_IPRIORITYR3_OFFSET]
    bic  w2, w2, #GICD_IPRIORITY_SGI15_MASK
    str  w2, [x0, #GICD_IPRIORITYR3_OFFSET]

    /* enable SGI 15 */
    ldr  w2, [x0, #GICD_ISENABLER0_OFFSET]
    orr  w2, w2, #GICD_ISENABLE0_SGI15
    str  w2, [x0, #GICD_ISENABLER0_OFFSET]

    /* enable the cpu interface */
    orr  w3, w3, #GICC_CTLR_EN_GRP0
    str  w3, [x4, #GICC_CTLR_OFFSET]

    /* x0 = GICD_BASE_ADDR
     * x6 = core mask
     */

    /* clear any pending SGIs */
    add  x0, x0, #GICD_CPENDSGIR3_OFFSET
    ldr  x2, =GICD_CPENDSGIR_CLR_MASK
    str  w2, [x0]

    dsb  sy
    isb
    mov  x30, x7
    ret

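/*
 * Note: GICC_CTLR_EOImodeS_MASK selects the split EOI model, in which a
 * write to GICC_EOIR only performs the priority drop and a separate write
 * to GICC_DIR deactivates the interrupt - this is why _soc_core_exit_off
 * below writes both registers when it retires the wake-up SGI
 */
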
/*
 * part of CPU_OFF
 * this function performs the final steps to shutdown the core
 * in:  x0 = core mask lsb
 * out: none
 * uses x0 ~ x5
 */
_soc_core_entr_off:
    mov  x5, x30
    mov  x4, x0
    bl   _getGICD_BaseAddr
    mov  x3, x0

    /* x3 = GICD_BASE_ADDR */
    /* x4 = core mask (lsb) */
3:
    /* enter low-power state by executing wfi */
    wfi

    /* x3 = GICD_BASE_ADDR */
    /* x4 = core mask (lsb) */

    /* see if we got hit by SGI 15 */
    add  x0, x3, #GICD_SPENDSGIR3_OFFSET
    ldr  w2, [x0]
    and  w2, w2, #GICD_SPENDSGIR3_SGI15_MASK
    cbz  w2, 4f

    /* clear the pending SGI */
    ldr  x2, =GICD_CPENDSGIR_CLR_MASK
    add  x0, x3, #GICD_CPENDSGIR3_OFFSET
    str  w2, [x0]
4:
    /* check if core has been turned on */
    mov  x0, x4
    bl   _getCoreState

    /* x0 = core state */
    cmp  x0, #CORE_WAKEUP
    b.ne 3b

    /* if we get here, then we have exited the wfi */
    dsb  sy
    isb
    mov  x30, x5
    ret

/*
 * part of CPU_OFF
 * this function starts the process of starting a core back up
 * in:  x0 = core mask lsb
 * out: none
 * uses x0 ~ x5
 */
_soc_core_exit_off:
    mov  x5, x30
    mov  x4, x0
    /* x4 = core mask */

    bl   _getGICC_BaseAddr
    mov  x2, x0

    /* read GICC_IAR */
    ldr  w0, [x2, #GICC_IAR_OFFSET]

    /* write GICC_EOIR - signal end-of-interrupt */
    str  w0, [x2, #GICC_EOIR_OFFSET]

    /* write GICC_DIR - deactivate the interrupt */
    str  w0, [x2, #GICC_DIR_OFFSET]

    /* x2 = GICC_BASE_ADDR */

    /* disable signaling of grp0 ints */
    ldr  w1, [x2, #GICC_CTLR_OFFSET]
    bic  w1, w1, #GICC_CTLR_EN_GRP0
    str  w1, [x2, #GICC_CTLR_OFFSET]

    dsb  sy
    isb
    mov  x30, x5
    ret

/*
 * this function loads the core's 64-bit execution address into the SoC
 * registers BOOTLOCPTRL/H
 * in:  x0 = 64-bit address to write to BOOTLOCPTRL/H
 * uses x0, x1, x2, x3
 */
_soc_set_start_addr:
    /* get the 64-bit base address of the scfg block */
    ldr  x2, =NXP_SCFG_ADDR

    /* write the 32-bit BOOTLOCPTRL register (offset 0x604 in the scfg block) */
    mov  x1, x0
    rev  w3, w1
    str  w3, [x2, #SCFG_BOOTLOCPTRL_OFFSET]

    /* write the 32-bit BOOTLOCPTRH register (offset 0x600 in the scfg block) */
    lsr  x1, x0, #32
    rev  w3, w1
    str  w3, [x2, #SCFG_BOOTLOCPTRH_OFFSET]
    ret

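/*
 * Note: the 64-bit entry point is split across two 32-bit registers
 * because the SCFG block only implements 32-bit registers; each half is
 * byte-swapped with 'rev' before the store since the block is big-endian
 */
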
/*
 * part of CPU_SUSPEND
 * this function puts the calling core into standby state
 * in:  x0 = core mask lsb
 * out: none
 * uses x0
 */
_soc_core_entr_stdby:
    dsb  sy
    isb
    wfi
    ret

/*
 * part of CPU_SUSPEND
 * this function performs SoC-specific programming prior to standby
 * in:  x0 = core mask lsb
 * out: none
 * uses x0, x1
 */
_soc_core_prep_stdby:
    /* clear CORTEX_A53_ECTLR_EL1[2:0] */
    mrs  x1, CORTEX_A53_ECTLR_EL1
    bic  x1, x1, #CPUECTLR_TIMER_MASK
    msr  CORTEX_A53_ECTLR_EL1, x1
    ret

/*
 * part of CPU_SUSPEND
 * this function performs any SoC-specific cleanup after standby state
 * in:  x0 = core mask lsb
 * out: none
 * uses none
 */
_soc_core_exit_stdby:
    ret

/*
 * part of CPU_SUSPEND
 * this function performs SoC-specific programming prior to power-down
 * in:  x0 = core mask lsb
 * out: none
 * uses x0, x1
 */
_soc_core_prep_pwrdn:
    /* make sure the smp bit is set */
    mrs  x1, CORTEX_A53_ECTLR_EL1
    orr  x1, x1, #CPUECTLR_SMPEN_MASK
    msr  CORTEX_A53_ECTLR_EL1, x1
    isb
    ret

/*
 * part of CPU_SUSPEND
 * this function puts the calling core into a power-down state
 * in:  x0 = core mask lsb
 * out: none
 * uses x0
 */
_soc_core_entr_pwrdn:
    dsb  sy
    isb
    wfi
    ret

/*
 * part of CPU_SUSPEND
 * this function performs any SoC-specific cleanup after power-down
 * in:  x0 = core mask lsb
 * out: none
 * uses none
 */
_soc_core_exit_pwrdn:
    ret

/*
 * part of CPU_SUSPEND
 * this function performs SoC-specific programming prior to standby
 * in:  x0 = core mask lsb
 * out: none
 * uses x0, x1
 */
_soc_clstr_prep_stdby:
    /* clear CORTEX_A53_ECTLR_EL1[2:0] */
    mrs  x1, CORTEX_A53_ECTLR_EL1
    bic  x1, x1, #CPUECTLR_TIMER_MASK
    msr  CORTEX_A53_ECTLR_EL1, x1
    ret

/*
 * part of CPU_SUSPEND
 * this function performs any SoC-specific cleanup after standby state
 * in:  x0 = core mask lsb
 * out: none
 * uses none
 */
_soc_clstr_exit_stdby:
    ret

/*
 * part of CPU_SUSPEND
 * this function performs SoC-specific programming prior to power-down
 * in:  x0 = core mask lsb
 * out: none
 * uses x0, x1
 */
_soc_clstr_prep_pwrdn:
    /* make sure the smp bit is set */
    mrs  x1, CORTEX_A53_ECTLR_EL1
    orr  x1, x1, #CPUECTLR_SMPEN_MASK
    msr  CORTEX_A53_ECTLR_EL1, x1
    isb
    ret

/*
 * part of CPU_SUSPEND
 * this function performs any SoC-specific cleanup after power-down
 * in:  x0 = core mask lsb
 * out: none
 * uses none
 */
_soc_clstr_exit_pwrdn:
    ret

/*
 * part of CPU_SUSPEND
 * this function performs SoC-specific programming prior to standby
 * in:  x0 = core mask lsb
 * out: none
 * uses x0, x1
 */
_soc_sys_prep_stdby:
    /* clear CORTEX_A53_ECTLR_EL1[2:0] */
    mrs  x1, CORTEX_A53_ECTLR_EL1
    bic  x1, x1, #CPUECTLR_TIMER_MASK
    msr  CORTEX_A53_ECTLR_EL1, x1
    ret

/*
 * part of CPU_SUSPEND
 * this function performs any SoC-specific cleanup after standby state
 * in:  x0 = core mask lsb
 * out: none
 * uses none
 */
_soc_sys_exit_stdby:
    ret

/*
 * part of CPU_SUSPEND
 * this function performs SoC-specific programming prior to
 * suspend-to-power-down
 * in:  x0 = core mask lsb
 * out: none
 * uses x0, x1, x2, x3, x4
 */
_soc_sys_prep_pwrdn:
    mov  x4, x30

    /* make sure the smp bit is set */
    mrs  x1, CORTEX_A53_ECTLR_EL1
    orr  x1, x1, #CPUECTLR_SMPEN_MASK
    msr  CORTEX_A53_ECTLR_EL1, x1
    isb

    /* set WFIL2_EN in SCFG_COREPMCR */
    ldr  x0, =SCFG_COREPMCR_OFFSET
    ldr  x1, =COREPMCR_WFIL2
    bl   write_reg_scfg  // 0-3

    /* set OVRD_EN in RCPM2_POWMGTDCR */
    ldr  x0, =RCPM2_POWMGTDCR_OFFSET
    ldr  x1, =POWMGTDCR_OVRD_EN
    bl   write_reg_rcpm2  // 0-3

    mov  x30, x4
    ret

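/*
 * Note: WFIL2_EN and OVRD_EN appear to arm the power-management hardware
 * so that the wfi executed later in _soc_sys_pwrdn_wfi can be escalated
 * to the deeper system sleep state; _soc_sys_exit_pwrdn clears both bits
 * on resume (this description is inferred from the register names and
 * the exit path, not from documentation quoted in this file)
 */
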
/*
 * part of CPU_SUSPEND
 * this function puts the calling core, and potentially the soc, into a
 * low-power state
 * in:  x0 = core mask lsb
 * out: x0 = 0, success
 *      x0 < 0, failure
 * uses x0 ~ x9, x13 ~ x18
 */
_soc_sys_pwrdn_wfi:
    mov  x18, x30

    /* read IPPDEXPCR0 @ RCPM_IPPDEXPCR0 */
    ldr  x0, =RCPM_IPPDEXPCR0_OFFSET
    bl   read_reg_rcpm
    mov  x7, x0

    /* build an override mask for IPSTPCR4/IPSTPACK4/DEVDISR5 */
    mov  x5, xzr
    ldr  x6, =IPPDEXPCR_MASK2
    and  x6, x6, x7
    cbz  x6, 1f

    /* x5 = override mask
     * x6 = IPPDEXPCR bits for DEVDISR5
     * x7 = IPPDEXPCR
     */

    /* get the overrides */
    orr  x4, x5, #DEVDISR5_I2C_1
    tst  x6, #IPPDEXPCR_I2C1
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR5_LPUART1
    tst  x6, #IPPDEXPCR_LPUART1
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR5_FLX_TMR
    tst  x6, #IPPDEXPCR_FLX_TMR1
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR5_OCRAM1
    tst  x6, #IPPDEXPCR_OCRAM1
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR5_GPIO
    tst  x6, #IPPDEXPCR_GPIO1
    csel x5, x5, x4, EQ
1:
    /* store the DEVDISR5 override mask */
    ldr  x2, =BC_PSCI_BASE
    add  x2, x2, #AUX_01_DATA
    str  w5, [x2, #DEVDISR5_MASK_OFFSET]

    /* build an override mask for IPSTPCR1/IPSTPACK1/DEVDISR2 */
    mov  x5, xzr
    ldr  x6, =IPPDEXPCR_MASK1
    and  x6, x6, x7
    cbz  x6, 2f

    /* x5 = override mask */
    /* x6 = IPPDEXPCR bits for DEVDISR2 */

    /* get the overrides */
    orr  x4, x5, #DEVDISR2_FMAN1_MAC1
    tst  x6, #IPPDEXPCR_MAC1_1
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR2_FMAN1_MAC2
    tst  x6, #IPPDEXPCR_MAC1_2
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR2_FMAN1_MAC3
    tst  x6, #IPPDEXPCR_MAC1_3
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR2_FMAN1_MAC4
    tst  x6, #IPPDEXPCR_MAC1_4
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR2_FMAN1_MAC5
    tst  x6, #IPPDEXPCR_MAC1_5
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR2_FMAN1_MAC6
    tst  x6, #IPPDEXPCR_MAC1_6
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR2_FMAN1_MAC9
    tst  x6, #IPPDEXPCR_MAC1_9
    csel x5, x5, x4, EQ

    orr  x4, x5, #DEVDISR2_FMAN1
    tst  x6, #IPPDEXPCR_FM1
    csel x5, x5, x4, EQ
2:
    /* store the DEVDISR2 override mask */
    ldr  x2, =BC_PSCI_BASE
    add  x2, x2, #AUX_01_DATA
    str  w5, [x2, #DEVDISR2_MASK_OFFSET]

    /* x5 = DEVDISR2 override mask */

    /* write IPSTPCR0 - no overrides */
    ldr  x0, =RCPM2_IPSTPCR0_OFFSET
    ldr  x1, =IPSTPCR0_VALUE
    bl   write_reg_rcpm2

    /* x5 = DEVDISR2 override mask */

    /* write IPSTPCR1 - overrides possible */
    ldr  x0, =RCPM2_IPSTPCR1_OFFSET
    ldr  x1, =IPSTPCR1_VALUE
    bic  x1, x1, x5
    bl   write_reg_rcpm2

    /* write IPSTPCR2 - no overrides */
    ldr  x0, =RCPM2_IPSTPCR2_OFFSET
    ldr  x1, =IPSTPCR2_VALUE
    bl   write_reg_rcpm2

    /* write IPSTPCR3 - no overrides */
    ldr  x0, =RCPM2_IPSTPCR3_OFFSET
    ldr  x1, =IPSTPCR3_VALUE
    bl   write_reg_rcpm2

    /* write IPSTPCR4 - overrides possible */
    ldr  x2, =BC_PSCI_BASE
    add  x2, x2, #AUX_01_DATA
    ldr  w6, [x2, #DEVDISR5_MASK_OFFSET]
    ldr  x0, =RCPM2_IPSTPCR4_OFFSET
    ldr  x1, =IPSTPCR4_VALUE
    bic  x1, x1, x6
    bl   write_reg_rcpm2

    /* x5 = DEVDISR2 override mask */
    /* x6 = DEVDISR5 override mask */

    /* poll on IPSTPACK0 */
    ldr  x3, =RCPM2_IPSTPACKR0_OFFSET
    ldr  x4, =IPSTPCR0_VALUE
    ldr  x7, =IPSTPACK_RETRY_CNT
3:
    mov  x0, x3
    bl   read_reg_rcpm2
    cmp  x0, x4
    b.eq 14f
    sub  x7, x7, #1
    cbnz x7, 3b
14:
    /* poll on IPSTPACK1 */
    ldr  x3, =IPSTPCR1_VALUE
    ldr  x7, =IPSTPACK_RETRY_CNT
    bic  x4, x3, x5
    ldr  x3, =RCPM2_IPSTPACKR1_OFFSET
4:
    mov  x0, x3
    bl   read_reg_rcpm2
    cmp  x0, x4
    b.eq 15f
    sub  x7, x7, #1
    cbnz x7, 4b
15:
    /* poll on IPSTPACK2 */
    ldr  x3, =RCPM2_IPSTPACKR2_OFFSET
    ldr  x4, =IPSTPCR2_VALUE
    ldr  x7, =IPSTPACK_RETRY_CNT
5:
    mov  x0, x3
    bl   read_reg_rcpm2
    cmp  x0, x4
    b.eq 16f
    sub  x7, x7, #1
    cbnz x7, 5b
16:
    /* poll on IPSTPACK3 */
    ldr  x3, =RCPM2_IPSTPACKR3_OFFSET
    ldr  x4, =IPSTPCR3_VALUE
    ldr  x7, =IPSTPACK_RETRY_CNT
6:
    mov  x0, x3
    bl   read_reg_rcpm2
    cmp  x0, x4
    b.eq 17f
    sub  x7, x7, #1
    cbnz x7, 6b
17:
    /* poll on IPSTPACK4 */
    ldr  x3, =IPSTPCR4_VALUE
    ldr  x7, =IPSTPACK_RETRY_CNT
    bic  x4, x3, x6
    ldr  x3, =RCPM2_IPSTPACKR4_OFFSET
7:
    mov  x0, x3
    bl   read_reg_rcpm2
    cmp  x0, x4
    b.eq 18f
    sub  x7, x7, #1
    cbnz x7, 7b
18:
    ldr  x7, =BC_PSCI_BASE
    add  x7, x7, #AUX_01_DATA

    /* x5 = DEVDISR2 override mask
     * x6 = DEVDISR5 override mask
     * x7 = [soc_data_area]
     */

    /* save DEVDISR1 and load new value */
    mov  x0, #DCFG_DEVDISR1_OFFSET
    bl   read_reg_dcfg
    mov  w13, w0
    mov  x0, #DCFG_DEVDISR1_OFFSET
    ldr  x1, =DEVDISR1_VALUE
    bl   write_reg_dcfg

    /* save DEVDISR2 and load new value */
    mov  x0, #DCFG_DEVDISR2_OFFSET
    bl   read_reg_dcfg
    mov  w14, w0
    mov  x0, #DCFG_DEVDISR2_OFFSET
    ldr  x1, =DEVDISR2_VALUE
    bic  x1, x1, x5
    bl   write_reg_dcfg

    /* x6 = DEVDISR5 override mask */
    /* x7 = [soc_data_area] */

    /* save DEVDISR3 and load new value */
    mov  x0, #DCFG_DEVDISR3_OFFSET
    bl   read_reg_dcfg
    mov  w15, w0
    mov  x0, #DCFG_DEVDISR3_OFFSET
    ldr  x1, =DEVDISR3_VALUE
    bl   write_reg_dcfg

    /* save DEVDISR4 and load new value */
    mov  x0, #DCFG_DEVDISR4_OFFSET
    bl   read_reg_dcfg
    mov  w16, w0
    mov  x0, #DCFG_DEVDISR4_OFFSET
    ldr  x1, =DEVDISR4_VALUE
    bl   write_reg_dcfg

    /* save DEVDISR5 and load new value */
    mov  x0, #DCFG_DEVDISR5_OFFSET
    bl   read_reg_dcfg
    mov  w17, w0
    mov  x0, #DCFG_DEVDISR5_OFFSET
    ldr  x1, =DEVDISR5_VALUE
    bic  x1, x1, x6
    bl   write_reg_dcfg

    /* x7 = [soc_data_area] */

    /* save cpuactlr and disable data prefetch */
    mrs  x0, CPUACTLR_EL1
    str  w0, [x7, #CPUACTLR_DATA_OFFSET]
    bic  x0, x0, #CPUACTLR_L1PCTL_MASK
    msr  CPUACTLR_EL1, x0

    /* x6 = DEVDISR5 override mask */

    /* setup registers for cache-only execution */
    ldr  x5, =IPSTPCR4_VALUE
    bic  x5, x5, x6
    mov  x6, #DDR_CNTRL_BASE_ADDR
    mov  x7, #DCSR_RCPM2_BASE
    mov  x8, #NXP_DCFG_ADDR
    dsb  sy
    isb

    /* set the DLL_LOCK cycle count */
    ldr  w1, [x6, #DDR_TIMING_CFG_4_OFFSET]
    rev  w2, w1
    bic  w2, w2, #DLL_LOCK_MASK
    orr  w2, w2, #DLL_LOCK_VALUE
    rev  w1, w2
    str  w1, [x6, #DDR_TIMING_CFG_4_OFFSET]

    /*
     * x5  = ipstpcr4 (IPSTPCR4_VALUE bic DEVDISR5_MASK)
     * x6  = DDR_CNTRL_BASE_ADDR
     * x7  = DCSR_RCPM2_BASE
     * x8  = NXP_DCFG_ADDR
     * w13 = DEVDISR1 saved value
     * w14 = DEVDISR2 saved value
     * w15 = DEVDISR3 saved value
     * w16 = DEVDISR4 saved value
     * w17 = DEVDISR5 saved value
     */

    /* enter the cache-only sequence */
    mov  x9, #CORE_RESTARTABLE
    bl   final_pwrdown

    /* when we are here, the core has come out of wfi and the SoC is back up */
    mov  x30, x18
    ret

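/*
 * Note: the saved DEVDISR1-5 values are kept in w13 ~ w17 rather than in
 * memory because final_pwrdown executes with DDR in self refresh; the
 * restore path in final_pwrdown writes them back from those registers
 * once the DDR clocks are running again
 */
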
/*
 * part of CPU_SUSPEND
 * this function performs any SoC-specific cleanup after power-down
 * in:  x0 = core mask lsb
 * out: none
 * uses x0, x1
 */
_soc_sys_exit_pwrdn:
    /* clear POWMGTDCR */
    mov  x1, #DCSR_RCPM2_BASE
    str  wzr, [x1, #RCPM2_POWMGTDCR_OFFSET]

    /* clear WFIL2_EN in SCFG_COREPMCR */
    mov  x1, #NXP_SCFG_ADDR
    str  wzr, [x1, #SCFG_COREPMCR_OFFSET]
    ret

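/*
 * Note: zero is identical in either byte order, so these two stores can
 * safely skip the 'rev' byte swap used everywhere else in this file
 */
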
/*
 * write a register in the SCFG block
 * in: x0 = offset
 * in: w1 = value to write
 * uses x0, x1, x2, x3
 */
write_reg_scfg:
    ldr  x2, =NXP_SCFG_ADDR
    /* swap for BE */
    rev  w3, w1
    str  w3, [x2, x0]
    ret

/*
 * read a register in the SCFG block
 * in:  x0 = offset
 * out: w0 = value read
 * uses x0, x1, x2
 */
read_reg_scfg:
    ldr  x2, =NXP_SCFG_ADDR
    ldr  w1, [x2, x0]
    /* swap for BE */
    rev  w0, w1
    ret

/*
 * write a register in the DCFG block
 * in: x0 = offset
 * in: w1 = value to write
 * uses x0, x1, x2, x3
 */
write_reg_dcfg:
    ldr  x2, =NXP_DCFG_ADDR
    /* swap for BE */
    rev  w3, w1
    str  w3, [x2, x0]
    ret

/*
 * read a register in the DCFG block
 * in:  x0 = offset
 * out: w0 = value read
 * uses x0, x1, x2
 */
read_reg_dcfg:
    ldr  x2, =NXP_DCFG_ADDR
    ldr  w1, [x2, x0]
    /* swap for BE */
    rev  w0, w1
    ret

/*
 * write a register in the RCPM block
 * in: x0 = offset
 * in: w1 = value to write
 * uses x0, x1, x2, x3
 */
write_reg_rcpm:
    ldr  x2, =NXP_RCPM_ADDR
    /* swap for BE */
    rev  w3, w1
    str  w3, [x2, x0]
    ret

/*
 * read a register in the RCPM block
 * in:  x0 = offset
 * out: w0 = value read
 * uses x0, x1, x2
 */
read_reg_rcpm:
    ldr  x2, =NXP_RCPM_ADDR
    ldr  w1, [x2, x0]
    /* swap for BE */
    rev  w0, w1
    ret

/*
 * write a register in the DCSR-RCPM2 block
 * in: x0 = offset
 * in: w1 = value to write
 * uses x0, x1, x2, x3
 */
write_reg_rcpm2:
    ldr  x2, =DCSR_RCPM2_BASE
    /* swap for BE */
    rev  w3, w1
    str  w3, [x2, x0]
    ret

/*
 * read a register in the DCSR-RCPM2 block
 * in:  x0 = offset
 * out: w0 = value read
 * uses x0, x1, x2
 */
read_reg_rcpm2:
    ldr  x2, =DCSR_RCPM2_BASE
    ldr  w1, [x2, x0]
    /* swap for BE */
    rev  w0, w1
    ret

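/*
 * the read/write helpers above share one convention: the register offset
 * is passed in x0 and, for writes, the value in w1, in cpu (LE) byte
 * order; the helper performs the big-endian swap. An illustrative call,
 * mirroring the callers earlier in this file:
 *
 *     ldr  x0, =SCFG_COREPMCR_OFFSET    // offset within the block
 *     ldr  x1, =COREPMCR_WFIL2          // value in cpu byte order
 *     bl   write_reg_scfg               // helper swaps and stores
 */
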
/*
 * this function returns the base address of the gic distributor
 * in:  none
 * out: x0 = base address of gic distributor
 * uses x0, x1
 */
_getGICD_BaseAddr:
    /* read SVR and get the SoC version */
    mov  x0, #NXP_DCFG_ADDR
    ldr  w1, [x0, #DCFG_SVR_OFFSET]
    rev  w0, w1
    /* x0 = svr */

    and  w0, w0, #SVR_MIN_VER_MASK
    cmp  w0, #SVR_MINOR_VER_0
    b.ne 8f

    /* load the gic base addresses for rev 1.0 parts */
    ldr  x0, =NXP_GICD_4K_ADDR
    b    10f
8:
    /* for rev 1.1 and later parts, the GIC base addresses
     * can be at 4k or 64k offsets
     */

    /* read the scfg reg GIC400_ADDR_ALIGN */
    mov  x0, #NXP_SCFG_ADDR
    ldr  w1, [x0, #SCFG_GIC400_ADDR_ALIGN_OFFSET]
    rev  w0, w1

    /* x0 = GIC400_ADDR_ALIGN value */
    and  x0, x0, #SCFG_GIC400_ADDR_ALIGN_4KMODE_MASK
    mov  x1, #SCFG_GIC400_ADDR_ALIGN_4KMODE_EN
    cmp  x0, x1
    b.ne 9f

    /* load the base addresses for 4k offsets */
    ldr  x0, =NXP_GICD_4K_ADDR
    b    10f
9:
    /* load the base address for 64k offsets */
    ldr  x0, =NXP_GICD_64K_ADDR
10:
    ret

/*
 * this function returns the base address of the gic cpu interface
 * in:  none
 * out: x0 = base address of gic cpu interface
 * uses x0, x1
 */
_getGICC_BaseAddr:
    /* read SVR and get the SoC version */
    mov  x0, #NXP_DCFG_ADDR
    ldr  w1, [x0, #DCFG_SVR_OFFSET]
    rev  w0, w1
    /* x0 = svr */

    and  w0, w0, #SVR_MIN_VER_MASK
    cmp  w0, #SVR_MINOR_VER_0
    b.ne 8f

    /* load the gic base addresses for rev 1.0 parts */
    ldr  x0, =NXP_GICC_4K_ADDR
    b    10f
8:
    /* for rev 1.1 and later parts, the GIC base addresses
     * can be at 4k or 64k offsets
     */

    /* read the scfg reg GIC400_ADDR_ALIGN */
    mov  x0, #NXP_SCFG_ADDR
    ldr  w1, [x0, #SCFG_GIC400_ADDR_ALIGN_OFFSET]
    rev  w0, w1

    /* x0 = GIC400_ADDR_ALIGN value */
    and  x0, x0, #SCFG_GIC400_ADDR_ALIGN_4KMODE_MASK
    mov  x1, #SCFG_GIC400_ADDR_ALIGN_4KMODE_EN
    cmp  x0, x1
    b.ne 9f

    /* load the base addresses for 4k offsets */
    ldr  x0, =NXP_GICC_4K_ADDR
    b    10f
9:
    /* load the base address for 64k offsets */
    ldr  x0, =NXP_GICC_64K_ADDR
10:
    ret

/*
 * this function will pwrdown ddr and the final core - it will do this
 * by loading itself into the icache and then executing from there
 * in:  x5 = ipstpcr4 (IPSTPCR4_VALUE bic DEVDISR5_MASK)
 *      x6 = DDR_CNTRL_BASE_ADDR
 *      x7 = DCSR_RCPM2_BASE
 *      x8 = NXP_DCFG_ADDR
 *      x9 = 0, restartable
 *         = 1, non-restartable
 *      w13 = DEVDISR1 saved value
 *      w14 = DEVDISR2 saved value
 *      w15 = DEVDISR3 saved value
 *      w16 = DEVDISR4 saved value
 *      w17 = DEVDISR5 saved value
 * out: none
 * uses x0 ~ x9
 */
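/*
 * Note on the structure below: the routine is laid out in sections
 * (start_line_N/touch_line_N) sized to fit in icache lines. On the first
 * pass x0 = 0, so execution simply branches through the touch_line_N
 * labels, pulling every line of the routine into the icache; touch_line_3
 * then branches back to start_line_0, x0 becomes 1, and the sequence runs
 * for real - entirely from the icache, which is what lets it put DDR into
 * self refresh and stop the DDR controller clocks underneath itself
 */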
/* 4KB aligned */
.align 12
final_pwrdown:
    mov  x0, xzr
    b    touch_line_0
start_line_0:
    mov  x0, #1
    mov  x2, #DDR_SDRAM_CFG_2_FRCSR          /* put ddr in self refresh - start */
    ldr  w3, [x6, #DDR_SDRAM_CFG_2_OFFSET]
    rev  w4, w3
    orr  w4, w4, w2
    rev  w3, w4
    str  w3, [x6, #DDR_SDRAM_CFG_2_OFFSET]   /* put ddr in self refresh - end */
    orr  w3, w5, #DEVDISR5_MEM               /* quiesce ddr clocks - start */
    rev  w4, w3
    str  w4, [x7, #RCPM2_IPSTPCR4_OFFSET]    /* quiesce ddr clocks - end */
    mov  w3, #DEVDISR5_MEM
    rev  w3, w3                              /* polling mask */
    mov  x2, #DDR_SLEEP_RETRY_CNT            /* poll on ipstpack4 - start */
touch_line_0:
    cbz  x0, touch_line_1
start_line_1:
    ldr  w1, [x7, #RCPM2_IPSTPACKR4_OFFSET]
    tst  w1, w3
    b.ne 1f
    subs x2, x2, #1
    b.gt start_line_1                        /* poll on ipstpack4 - end */

    /* if we get here, we have a timeout err */
    rev  w4, w5
    str  w4, [x7, #RCPM2_IPSTPCR4_OFFSET]    /* re-enable ddr clks interface */
    mov  x0, #ERROR_DDR_SLEEP                /* load error code */
    b    2f
1:
    str  w4, [x8, #DCFG_DEVDISR5_OFFSET]     /* disable ddr cntrlr clk in devdisr5 */
5:
    wfi                                      /* stop the final core */
    cbnz x9, 5b                              /* if non-restartable, keep in wfi */
    rev  w4, w5
    str  w4, [x8, #DCFG_DEVDISR5_OFFSET]     /* re-enable ddr in devdisr5 */
    str  w4, [x7, #RCPM2_IPSTPCR4_OFFSET]    /* re-enable ddr clk in ipstpcr4 */
touch_line_1:
    cbz  x0, touch_line_2
start_line_2:
    ldr  w1, [x7, #RCPM2_IPSTPACKR4_OFFSET]  /* poll on ipstpack4 - start */
    tst  w1, w3
    b.eq 2f
    nop
    b    start_line_2                        /* poll on ipstpack4 - end */
2:
    mov  x2, #DDR_SDRAM_CFG_2_FRCSR          /* take ddr out-of self refresh - start */
    ldr  w3, [x6, #DDR_SDRAM_CFG_2_OFFSET]
    rev  w4, w3
    bic  w4, w4, w2
    rev  w3, w4
    mov  x1, #DDR_SLEEP_RETRY_CNT            /* wait for ddr cntrlr clock - start */
3:
    subs x1, x1, #1
    b.gt 3b                                  /* wait for ddr cntrlr clock - end */
    str  w3, [x6, #DDR_SDRAM_CFG_2_OFFSET]   /* take ddr out-of self refresh - end */
    rev  w1, w17
touch_line_2:
    cbz  x0, touch_line_3
start_line_3:
    str  w1, [x8, #DCFG_DEVDISR5_OFFSET]     /* reset devdisr5 */
    rev  w1, w16
    str  w1, [x8, #DCFG_DEVDISR4_OFFSET]     /* reset devdisr4 */
    rev  w1, w15
    str  w1, [x8, #DCFG_DEVDISR3_OFFSET]     /* reset devdisr3 */
    rev  w1, w14
    str  w1, [x8, #DCFG_DEVDISR2_OFFSET]     /* reset devdisr2 */
    rev  w1, w13
    str  w1, [x8, #DCFG_DEVDISR1_OFFSET]     /* reset devdisr1 */
    str  wzr, [x7, #RCPM2_IPSTPCR4_OFFSET]   /* reset ipstpcr4 */
    str  wzr, [x7, #RCPM2_IPSTPCR3_OFFSET]   /* reset ipstpcr3 */
    str  wzr, [x7, #RCPM2_IPSTPCR2_OFFSET]   /* reset ipstpcr2 */
    str  wzr, [x7, #RCPM2_IPSTPCR1_OFFSET]   /* reset ipstpcr1 */
    str  wzr, [x7, #RCPM2_IPSTPCR0_OFFSET]   /* reset ipstpcr0 */
    b    continue_restart
touch_line_3:
    cbz  x0, start_line_0

/* execute here after ddr is back up */
continue_restart:
    /*
     * if x0 = 1, all is well
     * if x0 < 1, we had an error
     */
    cmp  x0, #1
    b.ne 4f
    mov  x0, #0
4:
    ret

/*
 * Note: there is no return from this function
 * this function will shutdown ddr and the final core - it will do this
 * by loading itself into the icache and then executing from there
 * in:  x5 = ipstpcr4 (IPSTPCR4_VALUE bic DEVDISR5_MASK)
 *      x6 = DDR_CNTRL_BASE_ADDR
 *      x7 = DCSR_RCPM2_BASE
 *      x8 = NXP_DCFG_ADDR
 * out: none
 * uses x0 ~ x8
 */
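/*
 * Note: same two-pass, icache-resident structure as final_pwrdown above,
 * but with no restart path - once the DDR controller clocks are stopped
 * the final core parks in wfi until a power-on reset
 */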
/* 4KB aligned */
.align 12
final_shutdown:
    mov  x0, xzr
    b    touch_line0
start_line0:
    mov  x0, #1
    mov  x2, #DDR_SDRAM_CFG_2_FRCSR          /* put ddr in self refresh - start */
    ldr  w3, [x6, #DDR_SDRAM_CFG_2_OFFSET]
    rev  w4, w3
    orr  w4, w4, w2
    rev  w3, w4
    str  w3, [x6, #DDR_SDRAM_CFG_2_OFFSET]   /* put ddr in self refresh - end */
    orr  w3, w5, #DEVDISR5_MEM               /* quiesce ddr clocks - start */
    rev  w4, w3
    str  w4, [x7, #RCPM2_IPSTPCR4_OFFSET]    /* quiesce ddr clocks - end */
    mov  w3, #DEVDISR5_MEM
    rev  w3, w3                              /* polling mask */
    mov  x2, #DDR_SLEEP_RETRY_CNT            /* poll on ipstpack4 - start */
touch_line0:
    cbz  x0, touch_line1
start_line1:
    ldr  w1, [x7, #RCPM2_IPSTPACKR4_OFFSET]
    tst  w1, w3
    b.ne 1f
    subs x2, x2, #1
    b.gt start_line1                         /* poll on ipstpack4 - end */
    nop
    nop
    nop
    nop
1:
    str  w4, [x8, #DCFG_DEVDISR5_OFFSET]     /* disable ddr cntrlr clk in devdisr5 */
5:
    wfi                                      /* stop the final core */
    b    5b                                  /* stay here until POR */
    nop
    nop
    nop
touch_line1:
    cbz  x0, start_line0