stm32mp2_ddr.c 14 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479
  1. /*
  2. * Copyright (C) 2021-2024, STMicroelectronics - All Rights Reserved
  3. *
  4. * SPDX-License-Identifier: BSD-3-Clause
  5. */
  6. #include <errno.h>
  7. #include <common/debug.h>
  8. #include <ddrphy_phyinit.h>
  9. #include <drivers/delay_timer.h>
  10. #include <drivers/st/stm32mp2_ddr_helpers.h>
  11. #include <drivers/st/stm32mp2_ddr_regs.h>
  12. #include <drivers/st/stm32mp_ddr.h>
  13. #include <lib/mmio.h>
  14. #include <platform_def.h>
/* Bit in the DDRDBG status register reporting fractional PLL lock */
#define DDRDBG_FRAC_PLL_LOCK	U(0x10)

/*
 * Build one register descriptor:
 * - .offset: register position inside the controller map (struct stm32mp_ddrctl)
 * - .par_offset: matching field position inside the parameter struct 'y'
 *   filled from the device tree
 * - .qd: marks quasi-dynamic registers — presumably those needing the
 *   quasi-dynamic update protocol when written (see
 *   stm32mp_ddr_set_qd3_update_conditions() usage below) — TODO confirm
 */
#define DDRCTL_REG(x, y, z) \
	{ \
		.offset = offsetof(struct stm32mp_ddrctl, x), \
		.par_offset = offsetof(struct y, x), \
		.qd = z \
	}

/*
 * PARAMETERS: value get from device tree :
 *             size / order need to be aligned with binding
 *             modification NOT ALLOWED !!!
 */
#define DDRCTL_REG_REG_SIZE	48	/* st,ctl-reg */
#define DDRCTL_REG_TIMING_SIZE	20	/* st,ctl-timing */
#define DDRCTL_REG_MAP_SIZE	12	/* st,ctl-map */
#if STM32MP_DDR_DUAL_AXI_PORT
#define DDRCTL_REG_PERF_SIZE	21	/* st,ctl-perf */
#else /* !STM32MP_DDR_DUAL_AXI_PORT */
#define DDRCTL_REG_PERF_SIZE	14	/* st,ctl-perf */
#endif /* STM32MP_DDR_DUAL_AXI_PORT */
/* No PHY registers are table-driven on this platform (see empty tables below) */
#define DDRPHY_REG_REG_SIZE	0	/* st,phy-reg */
#define DDRPHY_REG_TIMING_SIZE	0	/* st,phy-timing */
#define DDRCTL_REG_REG(x, z)	DDRCTL_REG(x, stm32mp2_ddrctrl_reg, z)

/*
 * Descriptors for the "st,ctl-reg" device-tree property: static DDR
 * controller registers, in binding order (must match struct
 * stm32mp2_ddrctrl_reg). The boolean flags quasi-dynamic registers.
 */
static const struct stm32mp_ddr_reg_desc ddr_reg[DDRCTL_REG_REG_SIZE] = {
	DDRCTL_REG_REG(mstr, true),
	DDRCTL_REG_REG(mrctrl0, false),
	DDRCTL_REG_REG(mrctrl1, false),
	DDRCTL_REG_REG(mrctrl2, false),
	DDRCTL_REG_REG(derateen, true),
	DDRCTL_REG_REG(derateint, false),
	DDRCTL_REG_REG(deratectl, false),
	DDRCTL_REG_REG(pwrctl, false),
	DDRCTL_REG_REG(pwrtmg, true),
	DDRCTL_REG_REG(hwlpctl, true),
	DDRCTL_REG_REG(rfshctl0, false),
	DDRCTL_REG_REG(rfshctl1, false),
	DDRCTL_REG_REG(rfshctl3, true),
	DDRCTL_REG_REG(crcparctl0, false),
	DDRCTL_REG_REG(crcparctl1, false),
	DDRCTL_REG_REG(init0, true),
	DDRCTL_REG_REG(init1, false),
	DDRCTL_REG_REG(init2, false),
	DDRCTL_REG_REG(init3, true),
	DDRCTL_REG_REG(init4, true),
	DDRCTL_REG_REG(init5, false),
	DDRCTL_REG_REG(init6, true),
	DDRCTL_REG_REG(init7, true),
	DDRCTL_REG_REG(dimmctl, false),
	DDRCTL_REG_REG(rankctl, true),
	DDRCTL_REG_REG(rankctl1, true),
	DDRCTL_REG_REG(zqctl0, true),
	DDRCTL_REG_REG(zqctl1, false),
	DDRCTL_REG_REG(zqctl2, false),
	DDRCTL_REG_REG(dfitmg0, true),
	DDRCTL_REG_REG(dfitmg1, true),
	DDRCTL_REG_REG(dfilpcfg0, false),
	DDRCTL_REG_REG(dfilpcfg1, false),
	DDRCTL_REG_REG(dfiupd0, true),
	DDRCTL_REG_REG(dfiupd1, false),
	DDRCTL_REG_REG(dfiupd2, false),
	DDRCTL_REG_REG(dfimisc, true),
	DDRCTL_REG_REG(dfitmg2, true),
	DDRCTL_REG_REG(dfitmg3, false),
	DDRCTL_REG_REG(dbictl, true),
	DDRCTL_REG_REG(dfiphymstr, false),
	DDRCTL_REG_REG(dbg0, false),
	DDRCTL_REG_REG(dbg1, false),
	DDRCTL_REG_REG(dbgcmd, false),
	DDRCTL_REG_REG(swctl, false), /* forced qd value */
	DDRCTL_REG_REG(swctlstatic, false),
	DDRCTL_REG_REG(poisoncfg, false),
	DDRCTL_REG_REG(pccfg, false),
};
#define DDRCTL_REG_TIMING(x, z)	DDRCTL_REG(x, stm32mp2_ddrctrl_timing, z)

/*
 * Descriptors for the "st,ctl-timing" device-tree property: refresh, DRAM
 * timing and ODT registers, in binding order (must match struct
 * stm32mp2_ddrctrl_timing).
 */
static const struct stm32mp_ddr_reg_desc ddr_timing[DDRCTL_REG_TIMING_SIZE] = {
	DDRCTL_REG_TIMING(rfshtmg, false),
	DDRCTL_REG_TIMING(rfshtmg1, false),
	DDRCTL_REG_TIMING(dramtmg0, true),
	DDRCTL_REG_TIMING(dramtmg1, true),
	DDRCTL_REG_TIMING(dramtmg2, true),
	DDRCTL_REG_TIMING(dramtmg3, true),
	DDRCTL_REG_TIMING(dramtmg4, true),
	DDRCTL_REG_TIMING(dramtmg5, true),
	DDRCTL_REG_TIMING(dramtmg6, true),
	DDRCTL_REG_TIMING(dramtmg7, true),
	DDRCTL_REG_TIMING(dramtmg8, true),
	DDRCTL_REG_TIMING(dramtmg9, true),
	DDRCTL_REG_TIMING(dramtmg10, true),
	DDRCTL_REG_TIMING(dramtmg11, true),
	DDRCTL_REG_TIMING(dramtmg12, true),
	DDRCTL_REG_TIMING(dramtmg13, true),
	DDRCTL_REG_TIMING(dramtmg14, true),
	DDRCTL_REG_TIMING(dramtmg15, true),
	DDRCTL_REG_TIMING(odtcfg, true),
	DDRCTL_REG_TIMING(odtmap, false),
};
/* Address-map registers are never quasi-dynamic, hence the fixed 'false' */
#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp2_ddrctrl_map, false)

/*
 * Descriptors for the "st,ctl-map" device-tree property: AXI-to-DRAM address
 * mapping registers, in binding order (must match struct stm32mp2_ddrctrl_map).
 */
static const struct stm32mp_ddr_reg_desc ddr_map[DDRCTL_REG_MAP_SIZE] = {
	DDRCTL_REG_MAP(addrmap0),
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap7),
	DDRCTL_REG_MAP(addrmap8),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};
#define DDRCTL_REG_PERF(x, z)	DDRCTL_REG(x, stm32mp2_ddrctrl_perf, z)

/*
 * Descriptors for the "st,ctl-perf" device-tree property: scheduler/QoS and
 * AXI port registers, in binding order (must match struct
 * stm32mp2_ddrctrl_perf). Port-1 entries exist only on dual-AXI-port parts,
 * matching the conditional DDRCTL_REG_PERF_SIZE above.
 */
static const struct stm32mp_ddr_reg_desc ddr_perf[DDRCTL_REG_PERF_SIZE] = {
	DDRCTL_REG_PERF(sched, true),
	DDRCTL_REG_PERF(sched1, false),
	DDRCTL_REG_PERF(perfhpr1, true),
	DDRCTL_REG_PERF(perflpr1, true),
	DDRCTL_REG_PERF(perfwr1, true),
	DDRCTL_REG_PERF(sched3, false),
	DDRCTL_REG_PERF(sched4, false),
	DDRCTL_REG_PERF(pcfgr_0, false),
	DDRCTL_REG_PERF(pcfgw_0, false),
	DDRCTL_REG_PERF(pctrl_0, false),
	DDRCTL_REG_PERF(pcfgqos0_0, true),
	DDRCTL_REG_PERF(pcfgqos1_0, true),
	DDRCTL_REG_PERF(pcfgwqos0_0, true),
	DDRCTL_REG_PERF(pcfgwqos1_0, true),
#if STM32MP_DDR_DUAL_AXI_PORT
	DDRCTL_REG_PERF(pcfgr_1, false),
	DDRCTL_REG_PERF(pcfgw_1, false),
	DDRCTL_REG_PERF(pctrl_1, false),
	DDRCTL_REG_PERF(pcfgqos0_1, true),
	DDRCTL_REG_PERF(pcfgqos1_1, true),
	DDRCTL_REG_PERF(pcfgwqos0_1, true),
	DDRCTL_REG_PERF(pcfgwqos1_1, true),
#endif /* STM32MP_DDR_DUAL_AXI_PORT */
};
/*
 * No PHY register is table-driven on STM32MP2 (both sizes are 0); the PHY is
 * programmed through ddrphy_phyinit_sequence() instead. The empty tables keep
 * the generic REG_TYPE_NB array below fully populated.
 */
static const struct stm32mp_ddr_reg_desc ddrphy_reg[DDRPHY_REG_REG_SIZE] = {};
static const struct stm32mp_ddr_reg_desc ddrphy_timing[DDRPHY_REG_TIMING_SIZE] = {};
/*
 * REGISTERS ARRAY: used to parse device tree and interactive mode
 *
 * One entry per register category (indexed by enum reg_type): descriptor
 * table, entry count and MMIO base address. Marked __unused because some
 * build configurations do not reference it directly.
 */
static const struct stm32mp_ddr_reg_info ddr_registers[REG_TYPE_NB] __unused = {
	[REG_REG] = {
		.name = "static",
		.desc = ddr_reg,
		.size = DDRCTL_REG_REG_SIZE,
		.base = DDR_BASE
	},
	[REG_TIMING] = {
		.name = "timing",
		.desc = ddr_timing,
		.size = DDRCTL_REG_TIMING_SIZE,
		.base = DDR_BASE
	},
	[REG_PERF] = {
		.name = "perf",
		.desc = ddr_perf,
		.size = DDRCTL_REG_PERF_SIZE,
		.base = DDR_BASE
	},
	[REG_MAP] = {
		.name = "map",
		.desc = ddr_map,
		.size = DDRCTL_REG_MAP_SIZE,
		.base = DDR_BASE
	},
	[REGPHY_REG] = {
		.name = "static",
		.desc = ddrphy_reg,
		.size = DDRPHY_REG_REG_SIZE,
		.base = DDRPHY_BASE
	},
	[REGPHY_TIMING] = {
		.name = "timing",
		.desc = ddrphy_timing,
		.size = DDRPHY_REG_TIMING_SIZE,
		.base = DDRPHY_BASE
	},
};
/*
 * Cold-boot DDR subsystem reset: assert the PHY APB, controller APB and
 * configuration resets with their clocks enabled, wait, then release those
 * three resets. Note that RCC_DDRITFCFGR_DDRRST is set in both phases, i.e.
 * it is deliberately left asserted here: it is only released later, after
 * the controller registers have been programmed (see stm32mp2_ddr_init()).
 */
static void ddr_reset(struct stm32mp_ddr_priv *priv)
{
	udelay(DDR_DELAY_1US);

	/* Phase 1: clocks enabled (xxEN/xxLPEN) + resets (xxRST) asserted */
	mmio_setbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);
	mmio_write_32(priv->rcc + RCC_DDRPHYCAPBCFGR,
		      RCC_DDRPHYCAPBCFGR_DDRPHYCAPBEN | RCC_DDRPHYCAPBCFGR_DDRPHYCAPBLPEN |
		      RCC_DDRPHYCAPBCFGR_DDRPHYCAPBRST);
	mmio_write_32(priv->rcc + RCC_DDRCAPBCFGR,
		      RCC_DDRCAPBCFGR_DDRCAPBEN | RCC_DDRCAPBCFGR_DDRCAPBLPEN |
		      RCC_DDRCAPBCFGR_DDRCAPBRST);
	mmio_write_32(priv->rcc + RCC_DDRCFGR,
		      RCC_DDRCFGR_DDRCFGEN | RCC_DDRCFGR_DDRCFGLPEN | RCC_DDRCFGR_DDRCFGRST);

	udelay(DDR_DELAY_1US);

	/* Phase 2: release APB/configuration resets; DDRRST stays asserted */
	mmio_setbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);
	mmio_write_32(priv->rcc + RCC_DDRPHYCAPBCFGR,
		      RCC_DDRPHYCAPBCFGR_DDRPHYCAPBEN | RCC_DDRPHYCAPBCFGR_DDRPHYCAPBLPEN);
	mmio_write_32(priv->rcc + RCC_DDRCAPBCFGR,
		      RCC_DDRCAPBCFGR_DDRCAPBEN | RCC_DDRCAPBCFGR_DDRCAPBLPEN);
	mmio_write_32(priv->rcc + RCC_DDRCFGR, RCC_DDRCFGR_DDRCFGEN | RCC_DDRCFGR_DDRCFGLPEN);

	udelay(DDR_DELAY_1US);
}
/*
 * Reset sequence used on the self-refresh (standby) exit path: assert the
 * DDRCP, interface and APB resets with clocks enabled, clear the PHY
 * low-power flag (DDRPHYDLP — presumably PHY deep low-power, TODO confirm
 * against the RCC documentation) and enable the PHY clock. Unlike
 * ddr_reset(), the configuration reset (RCC_DDRCFGR_DDRCFGRST) is not
 * asserted, so DRAM content in self-refresh is preserved.
 */
static void ddr_standby_reset(struct stm32mp_ddr_priv *priv)
{
	udelay(DDR_DELAY_1US);

	mmio_write_32(priv->rcc + RCC_DDRCPCFGR,
		      RCC_DDRCPCFGR_DDRCPEN | RCC_DDRCPCFGR_DDRCPLPEN | RCC_DDRCPCFGR_DDRCPRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);
	mmio_write_32(priv->rcc + RCC_DDRPHYCAPBCFGR,
		      RCC_DDRPHYCAPBCFGR_DDRPHYCAPBEN | RCC_DDRPHYCAPBCFGR_DDRPHYCAPBLPEN |
		      RCC_DDRPHYCAPBCFGR_DDRPHYCAPBRST);
	mmio_write_32(priv->rcc + RCC_DDRCAPBCFGR,
		      RCC_DDRCAPBCFGR_DDRCAPBEN | RCC_DDRCAPBCFGR_DDRCAPBLPEN |
		      RCC_DDRCAPBCFGR_DDRCAPBRST);

	mmio_clrbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRPHYDLP);
	mmio_setbits_32(priv->rcc + RCC_DDRPHYCCFGR, RCC_DDRPHYCCFGR_DDRPHYCEN);

	udelay(DDR_DELAY_1US);
}
/*
 * Release the resets asserted by ddr_standby_reset(): de-assert DDRCP,
 * interface (DDRRST) and PHY APB resets, and re-enable the DDR
 * configuration clock. The controller APB reset (DDRCAPBRST) is released
 * separately in stm32mp2_ddr_init(), after retention has been disabled.
 */
static void ddr_standby_reset_release(struct stm32mp_ddr_priv *priv)
{
	udelay(DDR_DELAY_1US);

	mmio_write_32(priv->rcc + RCC_DDRCPCFGR, RCC_DDRCPCFGR_DDRCPEN | RCC_DDRCPCFGR_DDRCPLPEN);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRPHYCAPBCFGR, RCC_DDRPHYCAPBCFGR_DDRPHYCAPBRST);
	mmio_write_32(priv->rcc + RCC_DDRCFGR, RCC_DDRCFGR_DDRCFGEN | RCC_DDRCFGR_DDRCFGLPEN);

	udelay(DDR_DELAY_1US);
}
/*
 * Program the DDR debug/system configuration before PHY init: disable the
 * XPI and DDRC hardware low-power interfaces, propagate the PLL bypass
 * setting taken from the device tree (config->uib.pllbypass), enable the
 * PHY clock and assert the DDR interface reset.
 */
static void ddr_sysconf_configuration(struct stm32mp_ddr_priv *priv,
				      struct stm32mp_ddr_config *config)
{
	mmio_write_32(stm32_ddrdbg_get_base() + DDRDBG_LP_DISABLE,
		      DDRDBG_LP_DISABLE_LPI_XPI_DISABLE | DDRDBG_LP_DISABLE_LPI_DDRC_DISABLE);

	mmio_write_32(stm32_ddrdbg_get_base() + DDRDBG_BYPASS_PCLKEN,
		      (uint32_t)config->uib.pllbypass);

	mmio_write_32(priv->rcc + RCC_DDRPHYCCFGR, RCC_DDRPHYCCFGR_DDRPHYCEN);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);

	udelay(DDR_DELAY_1US);
}
  252. static void set_dfi_init_complete_en(struct stm32mp_ddrctl *ctl, bool phy_init_done)
  253. {
  254. /*
  255. * Manage quasi-dynamic registers modification
  256. * dfimisc.dfi_init_complete_en : Group 3
  257. */
  258. stm32mp_ddr_set_qd3_update_conditions(ctl);
  259. udelay(DDR_DELAY_1US);
  260. if (phy_init_done) {
  261. /* Indicates to controller that PHY has completed initialization */
  262. mmio_setbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
  263. } else {
  264. /* PHY not initialized yet, wait for completion */
  265. mmio_clrbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
  266. }
  267. udelay(DDR_DELAY_1US);
  268. stm32mp_ddr_unset_qd3_update_conditions(ctl);
  269. }
/*
 * Quiesce controller-initiated DRAM maintenance before (re)initializing the
 * PHY: disable auto-refresh (and wait for the controller to acknowledge the
 * refresh update), disable power-down and self-refresh entry, then clear
 * dfi_init_complete_en so the controller waits for PHY initialization.
 * Undone by restore_refresh().
 */
static void disable_refresh(struct stm32mp_ddrctl *ctl)
{
	mmio_setbits_32((uintptr_t)&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);

	stm32mp_ddr_wait_refresh_update_done_ack(ctl);

	udelay(DDR_DELAY_1US);

	mmio_clrbits_32((uintptr_t)&ctl->pwrctl,
			DDRCTRL_PWRCTL_POWERDOWN_EN | DDRCTRL_PWRCTL_SELFREF_EN);

	udelay(DDR_DELAY_1US);

	set_dfi_init_complete_en(ctl, false);
}
/*
 * Undo disable_refresh() once the PHY is up: re-apply the refresh and
 * low-power settings requested by the configuration.
 *
 * rfshctl3/pwrctl carry the device-tree values: auto-refresh is re-enabled
 * only if the configuration did not disable it, POWERDOWN_EN/SELFREF_EN are
 * restored only if configured, and SELFREF_SW is cleared when it was set
 * (it is forced on for LPDDR4 in stm32mp2_ddr_init()). Finally signal DFI
 * init completion to the controller.
 */
static void restore_refresh(struct stm32mp_ddrctl *ctl, uint32_t rfshctl3, uint32_t pwrctl)
{
	if ((rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH) == 0U) {
		/* Configuration wants auto-refresh: re-enable it and wait for ack */
		mmio_clrbits_32((uintptr_t)&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);

		stm32mp_ddr_wait_refresh_update_done_ack(ctl);

		udelay(DDR_DELAY_1US);
	}

	if ((pwrctl & DDRCTRL_PWRCTL_SELFREF_SW) != 0U) {
		/* Release the software self-refresh request */
		mmio_clrbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_SELFREF_SW);

		udelay(DDR_DELAY_1US);
	}

	if ((pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN) != 0U) {
		mmio_setbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);

		udelay(DDR_DELAY_1US);
	}

	if ((pwrctl & DDRCTRL_PWRCTL_SELFREF_EN) != 0U) {
		mmio_setbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_SELFREF_EN);

		udelay(DDR_DELAY_1US);
	}

	set_dfi_init_complete_en(ctl, true);
}
  301. void stm32mp2_ddr_init(struct stm32mp_ddr_priv *priv,
  302. struct stm32mp_ddr_config *config)
  303. {
  304. int ret = -EINVAL;
  305. uint32_t ddr_retdis;
  306. enum ddr_type ddr_type;
  307. if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
  308. ddr_type = STM32MP_DDR3;
  309. } else if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR4) != 0U) {
  310. ddr_type = STM32MP_DDR4;
  311. } else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR4) != 0U) {
  312. ddr_type = STM32MP_LPDDR4;
  313. } else {
  314. ERROR("DDR type not supported\n");
  315. panic();
  316. }
  317. VERBOSE("name = %s\n", config->info.name);
  318. VERBOSE("speed = %u kHz\n", config->info.speed);
  319. VERBOSE("size = 0x%zx\n", config->info.size);
  320. if (config->self_refresh) {
  321. VERBOSE("sel-refresh exit (zdata = 0x%x)\n", config->zdata);
  322. }
  323. /* Check DDR PHY pads retention */
  324. ddr_retdis = mmio_read_32(priv->pwr + PWR_CR11) & PWR_CR11_DDRRETDIS;
  325. if (config->self_refresh) {
  326. if (ddr_retdis == PWR_CR11_DDRRETDIS) {
  327. VERBOSE("self-refresh aborted: no retention\n");
  328. config->self_refresh = false;
  329. }
  330. }
  331. if (config->self_refresh) {
  332. ddr_standby_reset(priv);
  333. VERBOSE("disable DDR PHY retention\n");
  334. mmio_setbits_32(priv->pwr + PWR_CR11, PWR_CR11_DDRRETDIS);
  335. udelay(DDR_DELAY_1US);
  336. mmio_clrbits_32(priv->rcc + RCC_DDRCAPBCFGR, RCC_DDRCAPBCFGR_DDRCAPBRST);
  337. udelay(DDR_DELAY_1US);
  338. } else {
  339. if (stm32mp_board_ddr_power_init(ddr_type) != 0) {
  340. ERROR("DDR power init failed\n");
  341. panic();
  342. }
  343. VERBOSE("disable DDR PHY retention\n");
  344. mmio_setbits_32(priv->pwr + PWR_CR11, PWR_CR11_DDRRETDIS);
  345. ddr_reset(priv);
  346. ddr_sysconf_configuration(priv, config);
  347. }
  348. #if STM32MP_LPDDR4_TYPE
  349. /*
  350. * Enable PWRCTL.SELFREF_SW to ensure correct setting of PWRCTL.LPDDR4_SR_ALLOWED.
  351. * Later disabled in restore_refresh().
  352. */
  353. config->c_reg.pwrctl |= DDRCTRL_PWRCTL_SELFREF_SW;
  354. #endif /* STM32MP_LPDDR4_TYPE */
  355. stm32mp_ddr_set_reg(priv, REG_REG, &config->c_reg, ddr_registers);
  356. stm32mp_ddr_set_reg(priv, REG_TIMING, &config->c_timing, ddr_registers);
  357. stm32mp_ddr_set_reg(priv, REG_MAP, &config->c_map, ddr_registers);
  358. stm32mp_ddr_set_reg(priv, REG_PERF, &config->c_perf, ddr_registers);
  359. if (!config->self_refresh) {
  360. /* DDR core and PHY reset de-assert */
  361. mmio_clrbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);
  362. disable_refresh(priv->ctl);
  363. }
  364. if (config->self_refresh) {
  365. ddr_standby_reset_release(priv);
  366. /* Initialize DDR by skipping training and disabling result saving */
  367. ret = ddrphy_phyinit_sequence(config, true, false);
  368. if (ret == 0) {
  369. ret = ddrphy_phyinit_restore_sequence();
  370. }
  371. /* Poll on ddrphy_initeng0_phyinlpx.phyinlp3 = 0 */
  372. ddr_wait_lp3_mode(false);
  373. } else {
  374. /* Initialize DDR including training and result saving */
  375. ret = ddrphy_phyinit_sequence(config, false, true);
  376. }
  377. if (ret != 0) {
  378. ERROR("DDR PHY init: Error %d\n", ret);
  379. panic();
  380. }
  381. ddr_activate_controller(priv->ctl, false);
  382. if (config->self_refresh) {
  383. struct stm32mp_ddrctl *ctl = priv->ctl;
  384. /* SW self refresh exit prequested */
  385. mmio_clrbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_SELFREF_SW);
  386. if (ddr_sr_exit_loop() != 0) {
  387. ERROR("DDR Standby exit error\n");
  388. panic();
  389. }
  390. /* Re-enable DFI low-power interface */
  391. mmio_clrbits_32((uintptr_t)&ctl->dfilpcfg0, DDRCTRL_DFILPCFG0_DFI_LP_EN_SR);
  392. } else {
  393. restore_refresh(priv->ctl, config->c_reg.rfshctl3, config->c_reg.pwrctl);
  394. }
  395. stm32mp_ddr_enable_axi_port(priv->ctl);
  396. }