  1. /*
  2. * Copyright (C) 2018-2024, STMicroelectronics - All Rights Reserved
  3. *
  4. * SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
  5. */
  6. #include <errno.h>
  7. #include <stddef.h>
  8. #include <arch.h>
  9. #include <arch_helpers.h>
  10. #include <common/debug.h>
  11. #include <drivers/clk.h>
  12. #include <drivers/delay_timer.h>
  13. #include <drivers/st/stm32mp1_ddr.h>
  14. #include <drivers/st/stm32mp1_ddr_regs.h>
  15. #include <drivers/st/stm32mp1_pwr.h>
  16. #include <drivers/st/stm32mp1_ram.h>
  17. #include <drivers/st/stm32mp_ddr.h>
  18. #include <lib/mmio.h>
  19. #include <plat/common/platform.h>
  20. #include <platform_def.h>
/*
 * Build a register descriptor pairing a controller (resp. PHY) register
 * offset with the offset of the matching field in the device-tree
 * parameter structure 'y'.
 */
#define DDRCTL_REG(x, y) \
	{ \
		.offset = offsetof(struct stm32mp_ddrctl, x), \
		.par_offset = offsetof(struct y, x) \
	}

#define DDRPHY_REG(x, y) \
	{ \
		.offset = offsetof(struct stm32mp_ddrphy, x), \
		.par_offset = offsetof(struct y, x) \
	}
/*
 * PARAMETERS: values retrieved from the device tree.
 * Sizes and ordering must stay aligned with the DT binding:
 * modification NOT ALLOWED !!!
 */
#define DDRCTL_REG_REG_SIZE	25	/* st,ctl-reg */
#define DDRCTL_REG_TIMING_SIZE	12	/* st,ctl-timing */
#define DDRCTL_REG_MAP_SIZE	9	/* st,ctl-map */
#if STM32MP_DDR_DUAL_AXI_PORT
/* Second AXI port adds the pcfg*_1 entries */
#define DDRCTL_REG_PERF_SIZE	17	/* st,ctl-perf */
#else
#define DDRCTL_REG_PERF_SIZE	11	/* st,ctl-perf */
#endif
#if STM32MP_DDR_32BIT_INTERFACE
/* 32-bit interface adds the dx2/dx3 byte-lane registers */
#define DDRPHY_REG_REG_SIZE	11	/* st,phy-reg */
#else
#define DDRPHY_REG_REG_SIZE	9	/* st,phy-reg */
#endif
#define DDRPHY_REG_TIMING_SIZE	10	/* st,phy-timing */
#define DDRCTL_REG_REG(x) DDRCTL_REG(x, stm32mp1_ddrctrl_reg)

/* Controller "static" registers, filled from the st,ctl-reg DT property */
static const struct stm32mp_ddr_reg_desc ddr_reg[DDRCTL_REG_REG_SIZE] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};
#define DDRCTL_REG_TIMING(x) DDRCTL_REG(x, stm32mp1_ddrctrl_timing)

/* Controller timing registers, filled from the st,ctl-timing DT property */
static const struct stm32mp_ddr_reg_desc ddr_timing[DDRCTL_REG_TIMING_SIZE] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};
#define DDRCTL_REG_MAP(x) DDRCTL_REG(x, stm32mp1_ddrctrl_map)

/* Controller address-map registers, filled from the st,ctl-map DT property */
static const struct stm32mp_ddr_reg_desc ddr_map[DDRCTL_REG_MAP_SIZE] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};
#define DDRCTL_REG_PERF(x) DDRCTL_REG(x, stm32mp1_ddrctrl_perf)

/*
 * Controller performance/QoS registers, filled from the st,ctl-perf DT
 * property; port-1 entries exist only on dual-AXI-port configurations.
 */
static const struct stm32mp_ddr_reg_desc ddr_perf[DDRCTL_REG_PERF_SIZE] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
#if STM32MP_DDR_DUAL_AXI_PORT
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
#endif
};
#define DDRPHY_REG_REG(x) DDRPHY_REG(x, stm32mp1_ddrphy_reg)

/*
 * PHY "static" registers, filled from the st,phy-reg DT property;
 * dx2/dx3 byte lanes exist only on the 32-bit interface.
 */
static const struct stm32mp_ddr_reg_desc ddrphy_reg[DDRPHY_REG_REG_SIZE] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
#if STM32MP_DDR_32BIT_INTERFACE
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
#endif
};
#define DDRPHY_REG_TIMING(x) DDRPHY_REG(x, stm32mp1_ddrphy_timing)

/* PHY timing registers, filled from the st,phy-timing DT property */
static const struct stm32mp_ddr_reg_desc ddrphy_timing[DDRPHY_REG_TIMING_SIZE] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};
/*
 * REGISTERS ARRAY: used to parse the device tree and by interactive mode.
 * Indexed by the stm32mp_ddr reg_type enum; each entry names a register
 * group, points at its descriptor table and gives the peripheral base.
 */
static const struct stm32mp_ddr_reg_info ddr_registers[REG_TYPE_NB] = {
	[REG_REG] = {
		.name = "static",
		.desc = ddr_reg,
		.size = DDRCTL_REG_REG_SIZE,
		.base = DDR_BASE
	},
	[REG_TIMING] = {
		.name = "timing",
		.desc = ddr_timing,
		.size = DDRCTL_REG_TIMING_SIZE,
		.base = DDR_BASE
	},
	[REG_PERF] = {
		.name = "perf",
		.desc = ddr_perf,
		.size = DDRCTL_REG_PERF_SIZE,
		.base = DDR_BASE
	},
	[REG_MAP] = {
		.name = "map",
		.desc = ddr_map,
		.size = DDRCTL_REG_MAP_SIZE,
		.base = DDR_BASE
	},
	[REGPHY_REG] = {
		.name = "static",
		.desc = ddrphy_reg,
		.size = DDRPHY_REG_REG_SIZE,
		.base = DDRPHY_BASE
	},
	[REGPHY_TIMING] = {
		.name = "timing",
		.desc = ddrphy_timing,
		.size = DDRPHY_REG_TIMING_SIZE,
		.base = DDRPHY_BASE
	},
};
  197. static void stm32mp1_ddrphy_idone_wait(struct stm32mp_ddrphy *phy)
  198. {
  199. uint32_t pgsr;
  200. int error = 0;
  201. uint64_t timeout = timeout_init_us(DDR_TIMEOUT_US_1S);
  202. do {
  203. pgsr = mmio_read_32((uintptr_t)&phy->pgsr);
  204. VERBOSE(" > [0x%lx] pgsr = 0x%x &\n",
  205. (uintptr_t)&phy->pgsr, pgsr);
  206. if (timeout_elapsed(timeout)) {
  207. panic();
  208. }
  209. if ((pgsr & DDRPHYC_PGSR_DTERR) != 0U) {
  210. VERBOSE("DQS Gate Trainig Error\n");
  211. error++;
  212. }
  213. if ((pgsr & DDRPHYC_PGSR_DTIERR) != 0U) {
  214. VERBOSE("DQS Gate Trainig Intermittent Error\n");
  215. error++;
  216. }
  217. if ((pgsr & DDRPHYC_PGSR_DFTERR) != 0U) {
  218. VERBOSE("DQS Drift Error\n");
  219. error++;
  220. }
  221. if ((pgsr & DDRPHYC_PGSR_RVERR) != 0U) {
  222. VERBOSE("Read Valid Training Error\n");
  223. error++;
  224. }
  225. if ((pgsr & DDRPHYC_PGSR_RVEIRR) != 0U) {
  226. VERBOSE("Read Valid Training Intermittent Error\n");
  227. error++;
  228. }
  229. } while (((pgsr & DDRPHYC_PGSR_IDONE) == 0U) && (error == 0));
  230. VERBOSE("\n[0x%lx] pgsr = 0x%x\n",
  231. (uintptr_t)&phy->pgsr, pgsr);
  232. }
  233. static void stm32mp1_ddrphy_init(struct stm32mp_ddrphy *phy, uint32_t pir)
  234. {
  235. uint32_t pir_init = pir | DDRPHYC_PIR_INIT;
  236. mmio_write_32((uintptr_t)&phy->pir, pir_init);
  237. VERBOSE("[0x%lx] pir = 0x%x -> 0x%x\n",
  238. (uintptr_t)&phy->pir, pir_init,
  239. mmio_read_32((uintptr_t)&phy->pir));
  240. /* Need to wait 10 configuration clock before start polling */
  241. udelay(DDR_DELAY_10US);
  242. /* Wait DRAM initialization and Gate Training Evaluation complete */
  243. stm32mp1_ddrphy_idone_wait(phy);
  244. }
  245. /* Wait quasi dynamic register update */
  246. static void stm32mp1_wait_operating_mode(struct stm32mp_ddr_priv *priv, uint32_t mode)
  247. {
  248. uint64_t timeout;
  249. uint32_t stat;
  250. int break_loop = 0;
  251. timeout = timeout_init_us(DDR_TIMEOUT_US_1S);
  252. for ( ; ; ) {
  253. uint32_t operating_mode;
  254. uint32_t selref_type;
  255. stat = mmio_read_32((uintptr_t)&priv->ctl->stat);
  256. operating_mode = stat & DDRCTRL_STAT_OPERATING_MODE_MASK;
  257. selref_type = stat & DDRCTRL_STAT_SELFREF_TYPE_MASK;
  258. VERBOSE("[0x%lx] stat = 0x%x\n",
  259. (uintptr_t)&priv->ctl->stat, stat);
  260. if (timeout_elapsed(timeout)) {
  261. panic();
  262. }
  263. if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
  264. /*
  265. * Self-refresh due to software
  266. * => checking also STAT.selfref_type.
  267. */
  268. if ((operating_mode ==
  269. DDRCTRL_STAT_OPERATING_MODE_SR) &&
  270. (selref_type == DDRCTRL_STAT_SELFREF_TYPE_SR)) {
  271. break_loop = 1;
  272. }
  273. } else if (operating_mode == mode) {
  274. break_loop = 1;
  275. } else if ((mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) &&
  276. (operating_mode == DDRCTRL_STAT_OPERATING_MODE_SR) &&
  277. (selref_type == DDRCTRL_STAT_SELFREF_TYPE_ASR)) {
  278. /* Normal mode: handle also automatic self refresh */
  279. break_loop = 1;
  280. }
  281. if (break_loop == 1) {
  282. break;
  283. }
  284. }
  285. VERBOSE("[0x%lx] stat = 0x%x\n",
  286. (uintptr_t)&priv->ctl->stat, stat);
  287. }
  288. /* Mode Register Writes (MRW or MRS) */
  289. static void stm32mp1_mode_register_write(struct stm32mp_ddr_priv *priv, uint8_t addr,
  290. uint32_t data)
  291. {
  292. uint32_t mrctrl0;
  293. VERBOSE("MRS: %d = %x\n", addr, data);
  294. /*
  295. * 1. Poll MRSTAT.mr_wr_busy until it is '0'.
  296. * This checks that there is no outstanding MR transaction.
  297. * No write should be performed to MRCTRL0 and MRCTRL1
  298. * if MRSTAT.mr_wr_busy = 1.
  299. */
  300. while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
  301. DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
  302. ;
  303. }
  304. /*
  305. * 2. Write the MRCTRL0.mr_type, MRCTRL0.mr_addr, MRCTRL0.mr_rank
  306. * and (for MRWs) MRCTRL1.mr_data to define the MR transaction.
  307. */
  308. mrctrl0 = DDRCTRL_MRCTRL0_MR_TYPE_WRITE |
  309. DDRCTRL_MRCTRL0_MR_RANK_ALL |
  310. (((uint32_t)addr << DDRCTRL_MRCTRL0_MR_ADDR_SHIFT) &
  311. DDRCTRL_MRCTRL0_MR_ADDR_MASK);
  312. mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
  313. VERBOSE("[0x%lx] mrctrl0 = 0x%x (0x%x)\n",
  314. (uintptr_t)&priv->ctl->mrctrl0,
  315. mmio_read_32((uintptr_t)&priv->ctl->mrctrl0), mrctrl0);
  316. mmio_write_32((uintptr_t)&priv->ctl->mrctrl1, data);
  317. VERBOSE("[0x%lx] mrctrl1 = 0x%x\n",
  318. (uintptr_t)&priv->ctl->mrctrl1,
  319. mmio_read_32((uintptr_t)&priv->ctl->mrctrl1));
  320. /*
  321. * 3. In a separate APB transaction, write the MRCTRL0.mr_wr to 1. This
  322. * bit is self-clearing, and triggers the MR transaction.
  323. * The uMCTL2 then asserts the MRSTAT.mr_wr_busy while it performs
  324. * the MR transaction to SDRAM, and no further access can be
  325. * initiated until it is deasserted.
  326. */
  327. mrctrl0 |= DDRCTRL_MRCTRL0_MR_WR;
  328. mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
  329. while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
  330. DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
  331. ;
  332. }
  333. VERBOSE("[0x%lx] mrctrl0 = 0x%x\n",
  334. (uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
  335. }
/*
 * Switch DDR3 from DLL-on to DLL-off.
 * Follows the numbered uMCTL2 frequency-change sequence: quiesce the
 * HIF, reprogram the SDRAM mode registers via MRS, enter software
 * self-refresh, disable the PHY DLLs, then exit self-refresh and
 * re-enable the host interface.
 */
static void stm32mp1_ddr3_dll_off(struct stm32mp_ddr_priv *priv)
{
	/* Mode register values currently programmed in the PHY */
	uint32_t mr1 = mmio_read_32((uintptr_t)&priv->phy->mr1);
	uint32_t mr2 = mmio_read_32((uintptr_t)&priv->phy->mr2);
	uint32_t dbgcam;

	VERBOSE("mr1: 0x%x\n", mr1);
	VERBOSE("mr2: 0x%x\n", mr2);

	/*
	 * 1. Set the DBG1.dis_hif = 1.
	 * This prevents further reads/writes being received on the HIF.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
		(uintptr_t)&priv->ctl->dbg1,
		mmio_read_32((uintptr_t)&priv->ctl->dbg1));

	/*
	 * 2. Ensure all commands have been flushed from the uMCTL2 by polling
	 * DBGCAM.wr_data_pipeline_empty = 1,
	 * DBGCAM.rd_data_pipeline_empty = 1,
	 * DBGCAM.dbg_wr_q_depth = 0 ,
	 * DBGCAM.dbg_lpr_q_depth = 0, and
	 * DBGCAM.dbg_hpr_q_depth = 0.
	 *
	 * NOTE(review): the loop below continues while the pipelines read
	 * as empty and the queue depths as zero, which looks inverted with
	 * respect to the comment above — confirm against the uMCTL2 DBGCAM
	 * register definition.
	 */
	do {
		dbgcam = mmio_read_32((uintptr_t)&priv->ctl->dbgcam);
		VERBOSE("[0x%lx] dbgcam = 0x%x\n",
			(uintptr_t)&priv->ctl->dbgcam, dbgcam);
	} while ((((dbgcam & DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY) ==
		   DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY)) &&
		 ((dbgcam & DDRCTRL_DBGCAM_DBG_Q_DEPTH) == 0U));

	/*
	 * 3. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 * to disable RTT_NOM:
	 * a. DDR3: Write to MR1[9], MR1[6] and MR1[2]
	 * b. DDR4: Write to MR1[10:8]
	 */
	mr1 &= ~(BIT(9) | BIT(6) | BIT(2));
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 4. For DDR4 only: Perform an MRS command
	 * (using MRCTRL0 and MRCTRL1 registers) to write to MR5[8:6]
	 * to disable RTT_PARK
	 */

	/*
	 * 5. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 * to write to MR2[10:9], to disable RTT_WR
	 * (and therefore disable dynamic ODT).
	 * This applies for both DDR3 and DDR4.
	 */
	mr2 &= ~GENMASK(10, 9);
	stm32mp1_mode_register_write(priv, 2, mr2);

	/*
	 * 6. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 * to disable the DLL. The timing of this MRS is automatically
	 * handled by the uMCTL2.
	 * a. DDR3: Write to MR1[0]
	 * b. DDR4: Write to MR1[0]
	 */
	mr1 |= BIT(0);
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 7. Put the SDRAM into self-refresh mode by setting
	 * PWRCTL.selfref_sw = 1, and polling STAT.operating_mode to ensure
	 * the DDRC has entered self-refresh.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	VERBOSE("[0x%lx] pwrctl = 0x%x\n",
		(uintptr_t)&priv->ctl->pwrctl,
		mmio_read_32((uintptr_t)&priv->ctl->pwrctl));

	/*
	 * 8. Wait until STAT.operating_mode[1:0]==11 indicating that the
	 * DWC_ddr_umctl2 core is in self-refresh mode.
	 * Ensure transition to self-refresh was due to software
	 * by checking that STAT.selfref_type[1:0]=2.
	 */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_SR);

	/*
	 * 9. Set the MSTR.dll_off_mode = 1.
	 * warning: MSTR.dll_off_mode is a quasi-dynamic type 2 field
	 */
	stm32mp_ddr_start_sw_done(priv->ctl);
	mmio_setbits_32((uintptr_t)&priv->ctl->mstr, DDRCTRL_MSTR_DLL_OFF_MODE);
	VERBOSE("[0x%lx] mstr = 0x%x\n",
		(uintptr_t)&priv->ctl->mstr,
		mmio_read_32((uintptr_t)&priv->ctl->mstr));
	stm32mp_ddr_wait_sw_done_ack(priv->ctl);

	/* 10. Change the clock frequency to the desired value. */

	/*
	 * 11. Update any registers which may be required to change for the new
	 * frequency. This includes static and dynamic registers.
	 * This includes both uMCTL2 registers and PHY registers.
	 */

	/* Change Bypass Mode Frequency Range */
	if (clk_get_rate(DDRPHYC) < 100000000U) {
		mmio_clrbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	} else {
		mmio_setbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	}

	/* Disable the AC and byte-lane DLLs in the PHY */
	mmio_setbits_32((uintptr_t)&priv->phy->acdllcr, DDRPHYC_ACDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx0dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx1dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
#if STM32MP_DDR_32BIT_INTERFACE
	mmio_setbits_32((uintptr_t)&priv->phy->dx2dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx3dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
#endif

	/* 12. Exit the self-refresh state by setting PWRCTL.selfref_sw = 0. */
	stm32mp_ddr_sw_selfref_exit(priv->ctl);
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/*
	 * 13. If ZQCTL0.dis_srx_zqcl = 0, the uMCTL2 performs a ZQCL command
	 * at this point.
	 */

	/*
	 * 14. Perform MRS commands as required to re-program timing registers
	 * in the SDRAM for the new frequency
	 * (in particular, CL, CWL and WR may need to be changed).
	 */

	/* 15. Write DBG1.dis_hif = 0 to re-enable reads and writes. */
	stm32mp_ddr_enable_host_interface(priv->ctl);
}
  464. static void stm32mp1_refresh_disable(struct stm32mp_ddrctl *ctl)
  465. {
  466. stm32mp_ddr_start_sw_done(ctl);
  467. /* Quasi-dynamic register update*/
  468. mmio_setbits_32((uintptr_t)&ctl->rfshctl3,
  469. DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
  470. mmio_clrbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
  471. mmio_clrbits_32((uintptr_t)&ctl->dfimisc,
  472. DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
  473. stm32mp_ddr_wait_sw_done_ack(ctl);
  474. }
  475. static void stm32mp1_refresh_restore(struct stm32mp_ddrctl *ctl,
  476. uint32_t rfshctl3, uint32_t pwrctl)
  477. {
  478. stm32mp_ddr_start_sw_done(ctl);
  479. if ((rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH) == 0U) {
  480. mmio_clrbits_32((uintptr_t)&ctl->rfshctl3,
  481. DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
  482. }
  483. if ((pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN) != 0U) {
  484. mmio_setbits_32((uintptr_t)&ctl->pwrctl,
  485. DDRCTRL_PWRCTL_POWERDOWN_EN);
  486. }
  487. mmio_setbits_32((uintptr_t)&ctl->dfimisc,
  488. DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
  489. stm32mp_ddr_wait_sw_done_ack(ctl);
  490. }
/*
 * Full DDR initialization sequence: board power init for the detected
 * DDR type, controller (uMCTL2) and PHY (PUBL) register programming,
 * DRAM initialization, DQS training, and finally AXI port enable.
 * Panics on unsupported DDR type, clock enable failure or any timeout
 * in the called wait helpers.
 */
void stm32mp1_ddr_init(struct stm32mp_ddr_priv *priv,
		       struct stm32mp_ddr_config *config)
{
	uint32_t pir;
	int ret = -EINVAL;

	/* Select board-level power init from the DDR type bits in MSTR */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		ret = stm32mp_board_ddr_power_init(STM32MP_DDR3);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR2) != 0U) {
		ret = stm32mp_board_ddr_power_init(STM32MP_LPDDR2);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR3) != 0U) {
		ret = stm32mp_board_ddr_power_init(STM32MP_LPDDR3);
	} else {
		ERROR("DDR type not supported\n");
	}

	/* ret is still -EINVAL for an unsupported type, so this panics too */
	if (ret != 0) {
		panic();
	}

	VERBOSE("name = %s\n", config->info.name);
	VERBOSE("speed = %u kHz\n", config->info.speed);
	VERBOSE("size = 0x%x\n", config->info.size);

	/* DDR INIT SEQUENCE */

	/*
	 * 1. Program the DWC_ddr_umctl2 registers
	 * nota: check DFIMISC.dfi_init_complete = 0
	 */

	/* 1.1 RESETS: presetn, core_ddrc_rstn, aresetn */
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

	/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed) != 0) {
		panic();
	}

	/* 1.3. deassert reset */
	/* De-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST. */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/*
	 * De-assert presetn once the clocks are active
	 * and stable via DDRCAPBRST bit.
	 */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

	/* 1.4. wait 128 cycles to permit initialization of end logic */
	udelay(DDR_DELAY_2US);
	/* For PCLK = 133MHz => 1 us is enough, 2 to allow lower frequency */

	/* 1.5. initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	mmio_clrbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	stm32mp_ddr_set_reg(priv, REG_REG, &config->c_reg, ddr_registers);

	/* DDR3 = don't set DLLOFF for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mstr\n");
		mmio_clrbits_32((uintptr_t)&priv->ctl->mstr,
				DDRCTRL_MSTR_DLL_OFF_MODE);
		VERBOSE("[0x%lx] mstr = 0x%x\n",
			(uintptr_t)&priv->ctl->mstr,
			mmio_read_32((uintptr_t)&priv->ctl->mstr));
	}

	stm32mp_ddr_set_reg(priv, REG_TIMING, &config->c_timing, ddr_registers);
	stm32mp_ddr_set_reg(priv, REG_MAP, &config->c_map, ddr_registers);

	/* Skip CTRL init, SDRAM init is done by PHY PUBL */
	mmio_clrsetbits_32((uintptr_t)&priv->ctl->init0,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);
	VERBOSE("[0x%lx] init0 = 0x%x\n",
		(uintptr_t)&priv->ctl->init0,
		mmio_read_32((uintptr_t)&priv->ctl->init0));

	stm32mp_ddr_set_reg(priv, REG_PERF, &config->c_perf, ddr_registers);

	/* 2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

	/*
	 * 3. start PHY init by accessing relevant PUBL registers
	 * (DXGCR, DCR, PTR*, MR*, DTPR*)
	 */
	stm32mp_ddr_set_reg(priv, REGPHY_REG, &config->p_reg, ddr_registers);
	stm32mp_ddr_set_reg(priv, REGPHY_TIMING, &config->p_timing, ddr_registers);

	/* DDR3 = don't set DLLOFF for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mr1\n");
		mmio_clrbits_32((uintptr_t)&priv->phy->mr1, BIT(0));
		VERBOSE("[0x%lx] mr1 = 0x%x\n",
			(uintptr_t)&priv->phy->mr1,
			mmio_read_32((uintptr_t)&priv->phy->mr1));
	}

	/*
	 * 4. Monitor PHY init status by polling PUBL register PGSR.IDONE
	 * Perform DDR PHY DRAM initialization and Gate Training Evaluation
	 */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 5. Indicate to PUBL that controller performs SDRAM initialization
	 * by setting PIR.INIT and PIR CTLDINIT and poll PGSR.IDONE
	 * DRAM init is done by PHY, init0.skip_dram.init = 1
	 */
	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		pir |= DDRPHYC_PIR_DRAMRST; /* Only for DDR3 */
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/*
	 * 6. SET DFIMISC.dfi_init_complete_en to 1
	 * Enable quasi-dynamic register programming.
	 */
	stm32mp_ddr_start_sw_done(priv->ctl);
	mmio_setbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));
	stm32mp_ddr_wait_sw_done_ack(priv->ctl);

	/*
	 * 7. Wait for DWC_ddr_umctl2 to move to normal operation mode
	 * by monitoring STAT.operating_mode signal
	 */
	/* Wait uMCTL2 ready */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/* Switch to DLL OFF mode */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DLL_OFF_MODE) != 0U) {
		stm32mp1_ddr3_dll_off(priv);
	}

	VERBOSE("DDR DQS training : ");

	/*
	 * 8. Disable Auto refresh and power down by setting
	 * - RFSHCTL3.dis_au_refresh = 1
	 * - PWRCTL.powerdown_en = 0
	 * - DFIMISC.dfiinit_complete_en = 0
	 */
	stm32mp1_refresh_disable(priv->ctl);

	/*
	 * 9. Program PUBL PGCR to enable refresh during training
	 * and rank to train
	 * not done => keep the programmed value in PGCR
	 */

	/*
	 * 10. configure PUBL PIR register to specify which training step
	 * to run
	 * RVTRN is executed only on LPDDR2/LPDDR3
	 */
	pir = DDRPHYC_PIR_QSTRN;
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) == 0U) {
		pir |= DDRPHYC_PIR_RVTRN;
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/* 11. monitor PUB PGSR.IDONE to poll completion of training sequence */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 12. set back registers in step 8 to the original values if desired
	 */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	/* Re-open the AXI ports so masters can access the DDR */
	stm32mp_ddr_enable_axi_port(priv->ctl);
}