/* stm32mp2_ddr_helpers.c */
  1. /*
  2. * Copyright (c) 2024, STMicroelectronics - All Rights Reserved
  3. *
  4. * SPDX-License-Identifier: BSD-3-Clause
  5. */
  6. #include <errno.h>
  7. #include <arch_helpers.h>
  8. #include <common/debug.h>
  9. #include <drivers/delay_timer.h>
  10. #include <drivers/st/stm32mp2_ddr.h>
  11. #include <drivers/st/stm32mp2_ddr_helpers.h>
  12. #include <drivers/st/stm32mp2_ddr_regs.h>
  13. #include <drivers/st/stm32mp_ddr.h>
  14. #include <lib/mmio.h>
  15. #include <platform_def.h>
/* HW idle period (unit: Multiples of 32 DFI clock cycles) */
#define HW_IDLE_PERIOD 0x3U

/*
 * Last self-refresh mode applied by ddr_set_sr_mode() (or captured by
 * ddr_save_sr_mode()); selects the entry/exit path in ddr_sr_entry()/
 * ddr_sr_exit().
 */
static enum stm32mp2_ddr_sr_mode saved_ddr_sr_mode;
  19. #pragma weak stm32_ddrdbg_get_base
  20. uintptr_t stm32_ddrdbg_get_base(void)
  21. {
  22. return 0U;
  23. }
/*
 * Prepare the controller for quasi-dynamic register programming:
 * set dbg1.dis_dq (Group 1 condition) then apply the Group 3 update
 * conditions via the shared stm32mp_ddr helper. Must be balanced by
 * unset_qd1_qd3_update_conditions().
 */
static void set_qd1_qd3_update_conditions(struct stm32mp_ddrctl *ctl)
{
	/* Group 1: freeze DQ handling while dfimisc fields are changed */
	mmio_setbits_32((uintptr_t)&ctl->dbg1, DDRCTRL_DBG1_DIS_DQ);

	/* Group 3: delegated to the common stm32mp_ddr driver */
	stm32mp_ddr_set_qd3_update_conditions(ctl);
}
/*
 * Revert set_qd1_qd3_update_conditions(), in reverse order: release the
 * Group 3 conditions first, then clear dbg1.dis_dq (Group 1).
 */
static void unset_qd1_qd3_update_conditions(struct stm32mp_ddrctl *ctl)
{
	stm32mp_ddr_unset_qd3_update_conditions(ctl);
	mmio_clrbits_32((uintptr_t)&ctl->dbg1, DDRCTRL_DBG1_DIS_DQ);
}
/*
 * Busy-wait until dfistat.dfi_init_complete is set, i.e. the DFI
 * initialization/frequency-change handshake started through dfimisc has
 * finished. Panics if the flag is not observed within DDR_TIMEOUT_US_1S.
 */
static void wait_dfi_init_complete(struct stm32mp_ddrctl *ctl)
{
	uint64_t timeout;
	uint32_t dfistat;

	timeout = timeout_init_us(DDR_TIMEOUT_US_1S);
	do {
		dfistat = mmio_read_32((uintptr_t)&ctl->dfistat);
		VERBOSE("[0x%lx] dfistat = 0x%x ", (uintptr_t)&ctl->dfistat, dfistat);

		if (timeout_elapsed(timeout)) {
			panic();
		}
	} while ((dfistat & DDRCTRL_DFISTAT_DFI_INIT_COMPLETE) == 0U);

	VERBOSE("[0x%lx] dfistat = 0x%x\n", (uintptr_t)&ctl->dfistat, dfistat);
}
/*
 * Disable the DFI low-power interface for self-refresh
 * (dfilpcfg0.dfi_lp_en_sr), then wait until the LP handshake is released
 * (dfistat.dfi_lp_ack clear) AND the controller is no longer in
 * self-refresh operating mode. Panics after DDR_TIMEOUT_US_1S.
 */
static void disable_dfi_low_power_interface(struct stm32mp_ddrctl *ctl)
{
	uint64_t timeout;
	uint32_t dfistat;
	uint32_t stat;

	mmio_clrbits_32((uintptr_t)&ctl->dfilpcfg0, DDRCTRL_DFILPCFG0_DFI_LP_EN_SR);

	timeout = timeout_init_us(DDR_TIMEOUT_US_1S);
	do {
		dfistat = mmio_read_32((uintptr_t)&ctl->dfistat);
		stat = mmio_read_32((uintptr_t)&ctl->stat);
		VERBOSE("[0x%lx] dfistat = 0x%x ", (uintptr_t)&ctl->dfistat, dfistat);
		VERBOSE("[0x%lx] stat = 0x%x ", (uintptr_t)&ctl->stat, stat);

		if (timeout_elapsed(timeout)) {
			panic();
		}
	} while (((dfistat & DDRCTRL_DFISTAT_DFI_LP_ACK) != 0U) ||
		 ((stat & DDRCTRL_STAT_OPERATING_MODE_MASK) == DDRCTRL_STAT_OPERATING_MODE_SR));

	VERBOSE("[0x%lx] dfistat = 0x%x\n", (uintptr_t)&ctl->dfistat, dfistat);
	VERBOSE("[0x%lx] stat = 0x%x\n", (uintptr_t)&ctl->stat, stat);
}
/*
 * Run a DFI initialization/frequency-change handshake on the controller.
 *
 * @sr_entry: true when entering self-refresh (dfi_frequency field set,
 *            dfi_init_complete_en cleared afterwards), false when
 *            resuming mission mode (the opposite programming).
 *
 * The dfi_init_start pulse (set then clear) triggers the handshake;
 * completion is awaited in wait_dfi_init_complete().
 */
void ddr_activate_controller(struct stm32mp_ddrctl *ctl, bool sr_entry)
{
	/*
	 * Manage quasi-dynamic registers modification
	 * dfimisc.dfi_frequency : Group 1
	 * dfimisc.dfi_init_complete_en and dfimisc.dfi_init_start : Group 3
	 */
	set_qd1_qd3_update_conditions(ctl);

	if (sr_entry) {
		mmio_setbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_FREQUENCY);
	} else {
		mmio_clrbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_FREQUENCY);
	}

	/* Pulse dfi_init_start to launch the handshake */
	mmio_setbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_START);
	mmio_clrbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_START);

	wait_dfi_init_complete(ctl);

	udelay(DDR_DELAY_1US);

	if (sr_entry) {
		mmio_clrbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	} else {
		mmio_setbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	}

	udelay(DDR_DELAY_1US);

	unset_qd1_qd3_update_conditions(ctl);
}
#if STM32MP_LPDDR4_TYPE
/*
 * Disable the PHY DRAM drift compensation (DDC) on LPDDR4, used before
 * standby entry. APB access to the PHY internal CSRs is opened for the
 * write and closed again afterwards (HCLK left enabled).
 */
static void disable_phy_ddc(void)
{
	/* Enable APB access to internal CSR registers */
	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_APBONLY0_MICROCONTMUXSEL, 0U);
	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_DRTUB0_UCCLKHCLKENABLES,
		      DDRPHY_DRTUB0_UCCLKHCLKENABLES_UCCLKEN |
		      DDRPHY_DRTUB0_UCCLKHCLKENABLES_HCLKEN);

	/* Disable DRAM drift compensation */
	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_INITENG0_P0_SEQ0BDISABLEFLAG6, 0xFFFFU);

	/* Disable APB access to internal CSR registers */
	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_DRTUB0_UCCLKHCLKENABLES,
		      DDRPHY_DRTUB0_UCCLKHCLKENABLES_HCLKEN);
	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_APBONLY0_MICROCONTMUXSEL,
		      DDRPHY_APBONLY0_MICROCONTMUXSEL_MICROCONTMUXSEL);
}
#endif /* STM32MP_LPDDR4_TYPE */
  110. void ddr_wait_lp3_mode(bool sr_entry)
  111. {
  112. uint64_t timeout;
  113. bool repeat_loop = false;
  114. /* Enable APB access to internal CSR registers */
  115. mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_APBONLY0_MICROCONTMUXSEL, 0U);
  116. mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_DRTUB0_UCCLKHCLKENABLES,
  117. DDRPHY_DRTUB0_UCCLKHCLKENABLES_UCCLKEN |
  118. DDRPHY_DRTUB0_UCCLKHCLKENABLES_HCLKEN);
  119. timeout = timeout_init_us(DDR_TIMEOUT_US_1S);
  120. do {
  121. uint16_t phyinlpx = mmio_read_32(stm32mp_ddrphyc_base() +
  122. DDRPHY_INITENG0_P0_PHYINLPX);
  123. if (timeout_elapsed(timeout)) {
  124. panic();
  125. }
  126. if (sr_entry) {
  127. repeat_loop = (phyinlpx & DDRPHY_INITENG0_P0_PHYINLPX_PHYINLP3) == 0U;
  128. } else {
  129. repeat_loop = (phyinlpx & DDRPHY_INITENG0_P0_PHYINLPX_PHYINLP3) != 0U;
  130. }
  131. } while (repeat_loop);
  132. /* Disable APB access to internal CSR registers */
  133. #if STM32MP_DDR3_TYPE || STM32MP_DDR4_TYPE
  134. mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_DRTUB0_UCCLKHCLKENABLES, 0U);
  135. #else /* STM32MP_LPDDR4_TYPE */
  136. mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_DRTUB0_UCCLKHCLKENABLES,
  137. DDRPHY_DRTUB0_UCCLKHCLKENABLES_HCLKEN);
  138. #endif /* STM32MP_DDR3_TYPE || STM32MP_DDR4_TYPE */
  139. mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_APBONLY0_MICROCONTMUXSEL,
  140. DDRPHY_APBONLY0_MICROCONTMUXSEL_MICROCONTMUXSEL);
  141. }
/*
 * Poll DDRCTRL_STAT until the controller has entered (@is_entry true) or
 * left (@is_entry false) self-refresh. For LPDDR4, the selfref_state
 * field is also checked (SRPD reached on entry, fully idle on exit).
 *
 * Returns 0 on success, -ETIMEDOUT after DDR_TIMEOUT_US_1S.
 */
static int sr_loop(bool is_entry)
{
	uint32_t type;
	uint32_t state __maybe_unused;
	uint64_t timeout = timeout_init_us(DDR_TIMEOUT_US_1S);
	bool repeat_loop = false;

	/*
	 * Wait for DDRCTRL to be out of or back to "normal/mission mode".
	 * Consider also SRPD mode for LPDDR4 only.
	 */
	do {
		type = mmio_read_32(stm32mp_ddrctrl_base() + DDRCTRL_STAT) &
		       DDRCTRL_STAT_SELFREF_TYPE_MASK;
#if STM32MP_LPDDR4_TYPE
		state = mmio_read_32(stm32mp_ddrctrl_base() + DDRCTRL_STAT) &
			DDRCTRL_STAT_SELFREF_STATE_MASK;
#endif /* STM32MP_LPDDR4_TYPE */

		if (timeout_elapsed(timeout)) {
			return -ETIMEDOUT;
		}

		if (is_entry) {
#if STM32MP_LPDDR4_TYPE
			repeat_loop = (type == 0x0U) || (state != DDRCTRL_STAT_SELFREF_STATE_SRPD);
#else /* !STM32MP_LPDDR4_TYPE */
			repeat_loop = (type == 0x0U);
#endif /* STM32MP_LPDDR4_TYPE */
		} else {
#if STM32MP_LPDDR4_TYPE
			repeat_loop = (type != 0x0U) || (state != 0x0U);
#else /* !STM32MP_LPDDR4_TYPE */
			repeat_loop = (type != 0x0U);
#endif /* STM32MP_LPDDR4_TYPE */
		}
	} while (repeat_loop);

	return 0;
}
/* Wait for self-refresh entry completion; 0 on success, -ETIMEDOUT otherwise. */
static int sr_entry_loop(void)
{
	return sr_loop(true);
}
/* Wait for self-refresh exit completion; 0 on success, -ETIMEDOUT otherwise. */
int ddr_sr_exit_loop(void)
{
	return sr_loop(false);
}
/*
 * Configure the controller for SW self-refresh (SSR): disable the
 * automatic/hardware low-power features so that self-refresh is only
 * entered on explicit software request. Always returns 0.
 */
static int sr_ssr_set(void)
{
	uintptr_t ddrctrl_base = stm32mp_ddrctrl_base();

	/*
	 * Disable Clock disable with LP modes
	 * (used in RUN mode for LPDDR2 with specific timing).
	 */
	mmio_clrbits_32(ddrctrl_base + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE);

	/* Disable automatic Self-Refresh mode */
	mmio_clrbits_32(ddrctrl_base + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_SELFREF_EN);

	/* Disable the XPI and DDRC low-power interfaces in the debug block */
	mmio_write_32(stm32_ddrdbg_get_base() + DDRDBG_LP_DISABLE,
		      DDRDBG_LP_DISABLE_LPI_XPI_DISABLE | DDRDBG_LP_DISABLE_LPI_DDRC_DISABLE);

	return 0;
}
/*
 * Enter SW self-refresh: quiesce the AXI port, disable the DFI LP
 * interface, request SW self-refresh, run the DFI handshake and wait for
 * the PHY to reach LP3, then reconfigure the RCC DDR clock gating.
 *
 * @standby: true when entering platform standby; additionally disables
 *           PHY drift compensation (LPDDR4) and DDR IO retention control.
 *
 * Returns 0 on success, -1 if self-refresh entry times out. Panics if
 * the AXI port cannot be disabled.
 */
static int sr_ssr_entry(bool standby)
{
	uintptr_t ddrctrl_base = stm32mp_ddrctrl_base();
	uintptr_t rcc_base = stm32mp_rcc_base();

	if (stm32mp_ddr_disable_axi_port((struct stm32mp_ddrctl *)ddrctrl_base) != 0) {
		panic();
	}

#if STM32MP_LPDDR4_TYPE
	if (standby) {
		/* Disable DRAM drift compensation */
		disable_phy_ddc();
	}
#endif /* STM32MP_LPDDR4_TYPE */

	disable_dfi_low_power_interface((struct stm32mp_ddrctl *)ddrctrl_base);

	/* SW self refresh entry requested */
	mmio_setbits_32(ddrctrl_base + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_SELFREF_SW);
#if STM32MP_LPDDR4_TYPE
	mmio_clrbits_32(ddrctrl_base + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_STAY_IN_SELFREF);
#endif /* STM32MP_LPDDR4_TYPE */

	if (sr_entry_loop() != 0) {
		return -1;
	}

	ddr_activate_controller((struct stm32mp_ddrctl *)ddrctrl_base, true);

	/* Poll on ddrphy_initeng0_phyinlpx.phyinlp3 = 1 */
	ddr_wait_lp3_mode(true);

	if (standby) {
		/* NOTE(review): clearing DDRRETDIS presumably arms DDR IO retention — confirm */
		mmio_clrbits_32(stm32mp_pwr_base() + PWR_CR11, PWR_CR11_DDRRETDIS);
	}

	/* Controller: clear LP-enable bit, keep the clock enabled */
	mmio_clrsetbits_32(rcc_base + RCC_DDRCPCFGR, RCC_DDRCPCFGR_DDRCPLPEN,
			   RCC_DDRCPCFGR_DDRCPEN);
	mmio_setbits_32(rcc_base + RCC_DDRPHYCCFGR, RCC_DDRPHYCCFGR_DDRPHYCEN);
	/* Allow PHY deep low-power clocking */
	mmio_setbits_32(rcc_base + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRPHYDLP);

	return 0;
}
/*
 * Exit SW self-refresh: restore the RCC DDR clock configuration, run the
 * DFI handshake back to mission mode, wait for the PHY to leave LP3,
 * release the SW self-refresh request and re-enable the DFI LP interface
 * and the AXI port.
 *
 * Returns 0 on success, -1 if self-refresh exit times out.
 */
static int sr_ssr_exit(void)
{
	uintptr_t ddrctrl_base = stm32mp_ddrctrl_base();
	uintptr_t rcc_base = stm32mp_rcc_base();

	/* Re-enable controller clock (with LP enable) and PHY clock */
	mmio_setbits_32(rcc_base + RCC_DDRCPCFGR,
			RCC_DDRCPCFGR_DDRCPLPEN | RCC_DDRCPCFGR_DDRCPEN);
	mmio_clrbits_32(rcc_base + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRPHYDLP);
	mmio_setbits_32(rcc_base + RCC_DDRPHYCCFGR, RCC_DDRPHYCCFGR_DDRPHYCEN);

	udelay(DDR_DELAY_1US);

	ddr_activate_controller((struct stm32mp_ddrctl *)ddrctrl_base, false);

	/* Poll on ddrphy_initeng0_phyinlpx.phyinlp3 = 0 */
	ddr_wait_lp3_mode(false);

	/* SW self refresh exit requested */
	mmio_clrbits_32(ddrctrl_base + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_SELFREF_SW);

	if (ddr_sr_exit_loop() != 0) {
		return -1;
	}

	/* Re-enable DFI low-power interface */
	mmio_setbits_32(ddrctrl_base + DDRCTRL_DFILPCFG0, DDRCTRL_DFILPCFG0_DFI_LP_EN_SR);

	stm32mp_ddr_enable_axi_port((struct stm32mp_ddrctl *)ddrctrl_base);

	return 0;
}
/*
 * Configure hardware self-refresh (HSR): select the HSR DDR clock mode
 * in the RCC, then program hwlpctl (hardware low-power enable, exit on
 * idle, HW_IDLE_PERIOD) under the quasi-dynamic Group 2 protocol.
 * Always returns 0; panics if SW self-refresh entry fails.
 */
static int sr_hsr_set(void)
{
	uintptr_t ddrctrl_base = stm32mp_ddrctrl_base();

	mmio_clrsetbits_32(stm32mp_rcc_base() + RCC_DDRITFCFGR,
			   RCC_DDRITFCFGR_DDRCKMOD_MASK, RCC_DDRITFCFGR_DDRCKMOD_HSR);

	/*
	 * Manage quasi-dynamic registers modification
	 * hwlpctl.hw_lp_en : Group 2
	 */
	if (stm32mp_ddr_sw_selfref_entry((struct stm32mp_ddrctl *)ddrctrl_base) != 0) {
		panic();
	}
	stm32mp_ddr_start_sw_done((struct stm32mp_ddrctl *)ddrctrl_base);

	mmio_write_32(ddrctrl_base + DDRCTRL_HWLPCTL,
		      DDRCTRL_HWLPCTL_HW_LP_EN | DDRCTRL_HWLPCTL_HW_LP_EXIT_IDLE_EN |
		      (HW_IDLE_PERIOD << DDRCTRL_HWLPCTL_HW_LP_IDLE_X32_SHIFT));

	stm32mp_ddr_wait_sw_done_ack((struct stm32mp_ddrctl *)ddrctrl_base);
	stm32mp_ddr_sw_selfref_exit((struct stm32mp_ddrctl *)ddrctrl_base);

	return 0;
}
/*
 * Enter hardware self-refresh: keep only the controller LP clock enable
 * (DDRCPEN cleared by the plain write) and wait for SR entry.
 * Returns 0 on success, -ETIMEDOUT on timeout.
 */
static int sr_hsr_entry(void)
{
	mmio_write_32(stm32mp_rcc_base() + RCC_DDRCPCFGR, RCC_DDRCPCFGR_DDRCPLPEN);

	return sr_entry_loop(); /* read_data should be equal to 0x223 */
}
/*
 * Exit hardware self-refresh: re-enable the controller clock alongside
 * its LP enable. Always returns 0.
 */
static int sr_hsr_exit(void)
{
	mmio_write_32(stm32mp_rcc_base() + RCC_DDRCPCFGR,
		      RCC_DDRCPCFGR_DDRCPLPEN | RCC_DDRCPCFGR_DDRCPEN);

	/* TODO: check if ddr_sr_exit_loop() is needed here */

	return 0;
}
/*
 * Configure automatic self-refresh (ASR): re-enable the low-power
 * interfaces in the DDR debug block (clears the disables set by
 * sr_ssr_set()). Always returns 0.
 */
static int sr_asr_set(void)
{
	mmio_write_32(stm32_ddrdbg_get_base() + DDRDBG_LP_DISABLE, 0U);

	return 0;
}
/*
 * Wait for automatic self-refresh entry. No register programming is
 * needed here: the hardware enters self-refresh on its own after the
 * configured idle delay. Returns 0 on success, -ETIMEDOUT on timeout.
 */
static int sr_asr_entry(void)
{
	/*
	 * Automatically enter into self refresh when there is no ddr traffic
	 * for the delay programmed into SYSCONF_DDRC_AUTO_SR_DELAY register.
	 * Default value is 0x20 (unit: Multiples of 32 DFI clock cycles).
	 */
	return sr_entry_loop();
}
/* Wait for automatic self-refresh exit; 0 on success, -ETIMEDOUT otherwise. */
static int sr_asr_exit(void)
{
	return ddr_sr_exit_loop();
}
  306. uint32_t ddr_get_io_calibration_val(void)
  307. {
  308. /* TODO create related service */
  309. return 0U;
  310. }
  311. int ddr_sr_entry(bool standby)
  312. {
  313. int ret = -EINVAL;
  314. switch (saved_ddr_sr_mode) {
  315. case DDR_SSR_MODE:
  316. ret = sr_ssr_entry(standby);
  317. break;
  318. case DDR_HSR_MODE:
  319. ret = sr_hsr_entry();
  320. break;
  321. case DDR_ASR_MODE:
  322. ret = sr_asr_entry();
  323. break;
  324. default:
  325. break;
  326. }
  327. return ret;
  328. }
  329. int ddr_sr_exit(void)
  330. {
  331. int ret = -EINVAL;
  332. switch (saved_ddr_sr_mode) {
  333. case DDR_SSR_MODE:
  334. ret = sr_ssr_exit();
  335. break;
  336. case DDR_HSR_MODE:
  337. ret = sr_hsr_exit();
  338. break;
  339. case DDR_ASR_MODE:
  340. ret = sr_asr_exit();
  341. break;
  342. default:
  343. break;
  344. }
  345. return ret;
  346. }
  347. enum stm32mp2_ddr_sr_mode ddr_read_sr_mode(void)
  348. {
  349. uint32_t pwrctl = mmio_read_32(stm32mp_ddrctrl_base() + DDRCTRL_PWRCTL);
  350. enum stm32mp2_ddr_sr_mode mode = DDR_SR_MODE_INVALID;
  351. switch (pwrctl & (DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE |
  352. DDRCTRL_PWRCTL_SELFREF_EN)) {
  353. case 0U:
  354. mode = DDR_SSR_MODE;
  355. break;
  356. case DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE:
  357. mode = DDR_HSR_MODE;
  358. break;
  359. case DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE | DDRCTRL_PWRCTL_SELFREF_EN:
  360. mode = DDR_ASR_MODE;
  361. break;
  362. default:
  363. break;
  364. }
  365. return mode;
  366. }
/*
 * Select and configure a self-refresh mode, then record it in
 * saved_ddr_sr_mode. A no-op when @mode is already the saved mode.
 * Panics on an unknown mode.
 *
 * NOTE(review): the error message also covers any non-zero return from
 * a sr_*_set() routine, not only an unknown mode (today all setters
 * return 0, so only the default case can trigger it).
 */
void ddr_set_sr_mode(enum stm32mp2_ddr_sr_mode mode)
{
	int ret = -EINVAL;

	if (mode == saved_ddr_sr_mode) {
		return;
	}

	switch (mode) {
	case DDR_SSR_MODE:
		ret = sr_ssr_set();
		break;
	case DDR_HSR_MODE:
		ret = sr_hsr_set();
		break;
	case DDR_ASR_MODE:
		ret = sr_asr_set();
		break;
	default:
		break;
	}

	if (ret != 0) {
		ERROR("Unknown Self Refresh mode\n");
		panic();
	}

	saved_ddr_sr_mode = mode;
}
/* Capture the currently programmed self-refresh mode from the hardware. */
void ddr_save_sr_mode(void)
{
	saved_ddr_sr_mode = ddr_read_sr_mode();
}
/*
 * Re-apply the saved self-refresh mode. Note: ddr_set_sr_mode() returns
 * early when the requested mode equals the saved one, so this only has
 * an effect if the hardware state diverged from saved_ddr_sr_mode.
 */
void ddr_restore_sr_mode(void)
{
	ddr_set_sr_mode(saved_ddr_sr_mode);
}
/* Enable the DDR controller clock (with its low-power enable) in the RCC. */
void ddr_sub_system_clk_init(void)
{
	mmio_write_32(stm32mp_rcc_base() + RCC_DDRCPCFGR,
		      RCC_DDRCPCFGR_DDRCPEN | RCC_DDRCPCFGR_DDRCPLPEN);
}
/*
 * Power down the whole DDR sub-system: clear IO retention, assert the
 * resets of the controller, interface and both APB bridges, then gate
 * the PHY clock and stop PLL2. Order matters: resets before clock/PLL
 * removal.
 */
void ddr_sub_system_clk_off(void)
{
	uintptr_t rcc_base = stm32mp_rcc_base();

	/* Clear DDR IO retention */
	mmio_clrbits_32(stm32mp_pwr_base() + PWR_CR11, PWR_CR11_DDRRETDIS);

	/* Reset DDR sub system */
	mmio_write_32(rcc_base + RCC_DDRCPCFGR, RCC_DDRCPCFGR_DDRCPRST);
	mmio_write_32(rcc_base + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);
	mmio_write_32(rcc_base + RCC_DDRPHYCAPBCFGR, RCC_DDRPHYCAPBCFGR_DDRPHYCAPBRST);
	mmio_write_32(rcc_base + RCC_DDRCAPBCFGR, RCC_DDRCAPBCFGR_DDRCAPBRST);

	/* Deactivate clocks and PLL2 */
	mmio_clrbits_32(rcc_base + RCC_DDRPHYCCFGR, RCC_DDRPHYCCFGR_DDRPHYCEN);
	mmio_clrbits_32(rcc_base + RCC_PLL2CFGR1, RCC_PLL2CFGR1_PLLEN);
}