cortex_a76.S

/*
 * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>
#include "wa_cve_2022_23960_bhb.S"

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif

        .globl cortex_a76_reset_func
        .globl cortex_a76_core_pwr_dwn
        .globl cortex_a76_disable_wa_cve_2018_3639

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif
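
/*
 * ESR_EL3 syndrome values expected for an SMC #0 trapped from a lower EL:
 * EC = 0x17 (SMC from AArch64) or EC = 0x13 (SMC from AArch32), with IL = 1
 * and ISS = 0, per the Armv8-A ESR_EL3 encoding.
 */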
#define ESR_EL3_A64_SMC0        0x5e000000
#define ESR_EL3_A32_SMC0        0x4e000000

#if DYNAMIC_WORKAROUND_CVE_2018_3639
/*
 * This macro applies the mitigation for CVE-2018-3639.
 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
 * SMC calls from a lower EL running in AArch32 or AArch64
 * will go through the fast path and return early.
 *
 * The macro saves x2-x3 to the context. In the fast path
 * x0-x3 registers do not need to be restored as the calling
 * context will have saved them. The macro also saves
 * x29-x30 to the context in the sync_exception path.
 */
        .macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
        stp     x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
        .if \_is_sync_exception
        stp     x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
        mov_imm w2, \_esr_el3_val
        bl      apply_cve_2018_3639_sync_wa
        ldp     x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
        .endif
        /*
         * Always enable v4 mitigation during EL3 execution. This is not
         * required for the fast path above because it does not perform any
         * memory loads.
         */
        mrs     x2, CORTEX_A76_CPUACTLR2_EL1
        orr     x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
        msr     CORTEX_A76_CPUACTLR2_EL1, x2
        isb
        /*
         * The caller may have passed arguments to EL3 via x2-x3.
         * Restore these registers from the context before jumping to the
         * main runtime vector table entry.
         */
        ldp     x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
        .endm
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
vector_base cortex_a76_wa_cve_vbar

        /* ---------------------------------------------------------------------
         * Current EL with SP_EL0 : 0x0 - 0x200
         * ---------------------------------------------------------------------
         */
vector_entry cortex_a76_sync_exception_sp_el0
        b       sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
        b       irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
        b       fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
        b       serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

        /* ---------------------------------------------------------------------
         * Current EL with SP_ELx: 0x200 - 0x400
         * ---------------------------------------------------------------------
         */
vector_entry cortex_a76_sync_exception_sp_elx
        b       sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
        b       irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
        b       fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
        b       serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

        /* ---------------------------------------------------------------------
         * Lower EL using AArch64 : 0x400 - 0x600
         * ---------------------------------------------------------------------
         */
vector_entry cortex_a76_sync_exception_aarch64
#if WORKAROUND_CVE_2022_23960
        apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
        apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
        b       sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64
#if WORKAROUND_CVE_2022_23960
        apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
        apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
        b       irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64
#if WORKAROUND_CVE_2022_23960
        apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
        apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
        b       fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64
#if WORKAROUND_CVE_2022_23960
        apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
        apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
        b       serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

        /* ---------------------------------------------------------------------
         * Lower EL using AArch32 : 0x600 - 0x800
         * ---------------------------------------------------------------------
         */
vector_entry cortex_a76_sync_exception_aarch32
#if WORKAROUND_CVE_2022_23960
        apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
        apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
        b       sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32
#if WORKAROUND_CVE_2022_23960
        apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
        apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
        b       irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32
#if WORKAROUND_CVE_2022_23960
        apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
        apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
        b       fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32
#if WORKAROUND_CVE_2022_23960
        apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
        apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
        b       serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
/*
 * -----------------------------------------------------------------
 * This function applies the mitigation for CVE-2018-3639
 * specifically for sync exceptions. It implements a fast path
 * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
 * running in AArch64 will go through the fast path and return early.
 *
 * In the fast path x0-x3 registers do not need to be restored as the
 * calling context will have saved them.
 *
 * Caller must pass the value of esr_el3 to compare against via x2.
 * Save and restore these registers outside of this function from the
 * context before jumping to the main runtime vector table entry.
 *
 * Shall clobber: x0-x3, x30
 * -----------------------------------------------------------------
 */
func apply_cve_2018_3639_sync_wa
        /*
         * Ensure SMC is coming from A64/A32 state on #0
         * with W0 = SMCCC_ARCH_WORKAROUND_2
         *
         * This sequence evaluates as:
         * (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
         * allowing use of a single branch operation.
         * X2 is populated outside this function with the expected syndrome.
         */
        orr     w3, wzr, #SMCCC_ARCH_WORKAROUND_2
        cmp     x0, x3
        mrs     x3, esr_el3
        ccmp    w2, w3, #0, eq
        /*
         * Static predictor will predict a fall-through, optimizing
         * the `SMCCC_ARCH_WORKAROUND_2` fast path.
         */
        bne     1f
        /*
         * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
         * fast path.
         */
        cmp     x1, xzr /* enable/disable check */
        /*
         * When the calling context wants mitigation disabled,
         * we program the mitigation disable function in the
         * CPU context, which gets invoked on subsequent exits from
         * EL3 via the `el3_exit` function. Otherwise NULL is
         * programmed in the CPU context, which results in the caller
         * inheriting the EL3 mitigation state (enabled) on subsequent
         * `el3_exit`.
         */
        mov     x0, xzr
        adr     x1, cortex_a76_disable_wa_cve_2018_3639
        csel    x1, x1, x0, eq
        str     x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
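        /*
         * Select the new CPUACTLR2_EL1 value: when the caller passed x1 == 0
         * (mitigation disable requested) the DISABLE_LOAD_PASS_STORE bit is
         * cleared, otherwise it is set, mirroring the context hook chosen
         * above.
         */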
        mrs     x2, CORTEX_A76_CPUACTLR2_EL1
        orr     x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
        bic     x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
        csel    x3, x3, x1, eq
        msr     CORTEX_A76_CPUACTLR2_EL1, x3
        ldp     x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
        /*
         * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
         */
        exception_return /* exception_return contains ISB */
1:
        ret
endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

/* --------------------------------------------------
 * Errata Workaround for Cortex A76 Errata #1073348.
 * This applies only to revision <= r1p0 of Cortex A76.
 * Inputs:
 * x0: variant[4:7] and revision[0:3] of current cpu.
 * Shall clobber: x0-x17
 * --------------------------------------------------
 */
func errata_a76_1073348_wa
        /*
         * Compare x0 against revision r1p0
         */
        mov     x17, x30
        bl      check_errata_1073348
        cbz     x0, 1f
        mrs     x1, CORTEX_A76_CPUACTLR_EL1
        orr     x1, x1, #CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
        msr     CORTEX_A76_CPUACTLR_EL1, x1
        isb
1:
        ret     x17
endfunc errata_a76_1073348_wa

func check_errata_1073348
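        /*
         * cpu_rev_var_ls (cpu_helpers.S) is assumed to return ERRATA_APPLIES
         * when the revision-variant value passed in x0 is less than or equal
         * to the bound in x1, and ERRATA_NOT_APPLIES otherwise; the same
         * pattern is used by the other check_errata_* helpers below.
         */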
        mov     x1, #0x10
        b       cpu_rev_var_ls
endfunc check_errata_1073348

/* --------------------------------------------------
 * Errata Workaround for Cortex A76 Errata #1130799.
 * This applies only to revision <= r2p0 of Cortex A76.
 * Inputs:
 * x0: variant[4:7] and revision[0:3] of current cpu.
 * Shall clobber: x0-x17
 * --------------------------------------------------
 */
func errata_a76_1130799_wa
        /*
         * Compare x0 against revision r2p0
         */
        mov     x17, x30
        bl      check_errata_1130799
        cbz     x0, 1f
        mrs     x1, CORTEX_A76_CPUACTLR2_EL1
        orr     x1, x1, #(1 << 59)
        msr     CORTEX_A76_CPUACTLR2_EL1, x1
        isb
1:
        ret     x17
endfunc errata_a76_1130799_wa

func check_errata_1130799
        mov     x1, #0x20
        b       cpu_rev_var_ls
endfunc check_errata_1130799

/* --------------------------------------------------
 * Errata Workaround for Cortex A76 Errata #1220197.
 * This applies only to revision <= r2p0 of Cortex A76.
 * Inputs:
 * x0: variant[4:7] and revision[0:3] of current cpu.
 * Shall clobber: x0-x17
 * --------------------------------------------------
 */
func errata_a76_1220197_wa
        /*
         * Compare x0 against revision r2p0
         */
        mov     x17, x30
        bl      check_errata_1220197
        cbz     x0, 1f
        mrs     x1, CORTEX_A76_CPUECTLR_EL1
        orr     x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
        msr     CORTEX_A76_CPUECTLR_EL1, x1
        isb
1:
        ret     x17
endfunc errata_a76_1220197_wa

func check_errata_1220197
        mov     x1, #0x20
        b       cpu_rev_var_ls
endfunc check_errata_1220197

/* --------------------------------------------------
 * Errata Workaround for Cortex A76 Errata #1257314.
 * This applies only to revision <= r3p0 of Cortex A76.
 * Inputs:
 * x0: variant[4:7] and revision[0:3] of current cpu.
 * Shall clobber: x0-x17
 * --------------------------------------------------
 */
func errata_a76_1257314_wa
        /*
         * Compare x0 against revision r3p0
         */
        mov     x17, x30
        bl      check_errata_1257314
        cbz     x0, 1f
        mrs     x1, CORTEX_A76_CPUACTLR3_EL1
        orr     x1, x1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
        msr     CORTEX_A76_CPUACTLR3_EL1, x1
        isb
1:
        ret     x17
endfunc errata_a76_1257314_wa

func check_errata_1257314
        mov     x1, #0x30
        b       cpu_rev_var_ls
endfunc check_errata_1257314

/* --------------------------------------------------
 * Errata Workaround for Cortex A76 Errata #1262888.
 * This applies only to revision <= r3p0 of Cortex A76.
 * Inputs:
 * x0: variant[4:7] and revision[0:3] of current cpu.
 * Shall clobber: x0-x17
 * --------------------------------------------------
 */
func errata_a76_1262888_wa
        /*
         * Compare x0 against revision r3p0
         */
        mov     x17, x30
        bl      check_errata_1262888
        cbz     x0, 1f
        mrs     x1, CORTEX_A76_CPUECTLR_EL1
        orr     x1, x1, CORTEX_A76_CPUECTLR_EL1_BIT_51
        msr     CORTEX_A76_CPUECTLR_EL1, x1
        isb
1:
        ret     x17
endfunc errata_a76_1262888_wa

func check_errata_1262888
        mov     x1, #0x30
        b       cpu_rev_var_ls
endfunc check_errata_1262888

/* ---------------------------------------------------
 * Errata Workaround for Cortex A76 Errata #1286807.
 * This applies only to revision <= r3p0 of Cortex A76.
 * Due to the nature of the errata it is applied unconditionally
 * when built in; it is reported as applicable in that case.
 * ---------------------------------------------------
 */
func check_errata_1286807
#if ERRATA_A76_1286807
        mov     x0, #ERRATA_APPLIES
        ret
#else
        mov     x1, #0x30
        b       cpu_rev_var_ls
#endif
endfunc check_errata_1286807

/* --------------------------------------------------
 * Errata workaround for Cortex A76 Errata #1791580.
 * This applies to revisions <= r4p0 of Cortex A76.
 * Inputs:
 * x0: variant[4:7] and revision[0:3] of current cpu.
 * Shall clobber: x0-x17
 * --------------------------------------------------
 */
func errata_a76_1791580_wa
        /* Compare x0 against revision r4p0 */
        mov     x17, x30
        bl      check_errata_1791580
        cbz     x0, 1f
        mrs     x1, CORTEX_A76_CPUACTLR2_EL1
        orr     x1, x1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
        msr     CORTEX_A76_CPUACTLR2_EL1, x1
        isb
1:
        ret     x17
endfunc errata_a76_1791580_wa

func check_errata_1791580
        /* Applies to everything <= r4p0. */
        mov     x1, #0x40
        b       cpu_rev_var_ls
endfunc check_errata_1791580

/* --------------------------------------------------
 * Errata Workaround for Cortex A76 Errata #1262606,
 * #1275112, and #1868343. #1262606 and #1275112
 * apply to revisions <= r3p0 and #1868343 applies to
 * revisions <= r4p0.
 * Inputs:
 * x0: variant[4:7] and revision[0:3] of current cpu.
 * Shall clobber: x0-x17
 * --------------------------------------------------
 */
func errata_a76_1262606_1275112_1868343_wa
        mov     x17, x30

        /* Check for <= r3p0 cases and branch if check passes. */
#if ERRATA_A76_1262606 || ERRATA_A76_1275112
        bl      check_errata_1262606
        cbnz    x0, 1f
#endif

        /* Check for <= r4p0 cases and branch if check fails. */
#if ERRATA_A76_1868343
        bl      check_errata_1868343
        cbz     x0, 2f
#endif
1:
        mrs     x1, CORTEX_A76_CPUACTLR_EL1
        orr     x1, x1, #CORTEX_A76_CPUACTLR_EL1_BIT_13
        msr     CORTEX_A76_CPUACTLR_EL1, x1
        isb
2:
        ret     x17
endfunc errata_a76_1262606_1275112_1868343_wa

func check_errata_1262606
        mov     x1, #0x30
        b       cpu_rev_var_ls
endfunc check_errata_1262606

func check_errata_1275112
        mov     x1, #0x30
        b       cpu_rev_var_ls
endfunc check_errata_1275112

func check_errata_1868343
        mov     x1, #0x40
        b       cpu_rev_var_ls
endfunc check_errata_1868343

/* --------------------------------------------------
 * Errata Workaround for A76 Erratum 1946160.
 * This applies to revisions r3p0 - r4p1 of A76.
 * It also exists in r0p0 - r2p0 but there is no fix
 * in those revisions.
 * Inputs:
 * x0: variant[4:7] and revision[0:3] of current cpu.
 * Shall clobber: x0-x17
 * --------------------------------------------------
 */
func errata_a76_1946160_wa
        /* Compare x0 against revisions r3p0 - r4p1 */
        mov     x17, x30
        bl      check_errata_1946160
        cbz     x0, 1f
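        /*
         * The writes below program IMPLEMENTATION DEFINED registers
         * (S3_6_C15_C8_0..3) with the values Arm publishes for this erratum;
         * they are assumed to configure the core's instruction-patching
         * mechanism and have no architecturally defined meaning.
         */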
        mov     x0, #3
        msr     S3_6_C15_C8_0, x0
        ldr     x0, =0x10E3900002
        msr     S3_6_C15_C8_2, x0
        ldr     x0, =0x10FFF00083
        msr     S3_6_C15_C8_3, x0
        ldr     x0, =0x2001003FF
        msr     S3_6_C15_C8_1, x0

        mov     x0, #4
        msr     S3_6_C15_C8_0, x0
        ldr     x0, =0x10E3800082
        msr     S3_6_C15_C8_2, x0
        ldr     x0, =0x10FFF00083
        msr     S3_6_C15_C8_3, x0
        ldr     x0, =0x2001003FF
        msr     S3_6_C15_C8_1, x0

        mov     x0, #5
        msr     S3_6_C15_C8_0, x0
        ldr     x0, =0x10E3800200
        msr     S3_6_C15_C8_2, x0
        ldr     x0, =0x10FFF003E0
        msr     S3_6_C15_C8_3, x0
        ldr     x0, =0x2001003FF
        msr     S3_6_C15_C8_1, x0

        isb
1:
        ret     x17
endfunc errata_a76_1946160_wa

func check_errata_1946160
        /* Applies to revisions r3p0 - r4p1. */
        mov     x1, #0x30
        mov     x2, #0x41
        b       cpu_rev_var_range
endfunc check_errata_1946160

func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
        mov     x0, #ERRATA_APPLIES
#else
        mov     x0, #ERRATA_MISSING
#endif
        ret
endfunc check_errata_cve_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
        mrs     x0, CORTEX_A76_CPUACTLR2_EL1
        bic     x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
        msr     CORTEX_A76_CPUACTLR2_EL1, x0
        isb
        ret
endfunc cortex_a76_disable_wa_cve_2018_3639

/* --------------------------------------------------------------
 * Errata Workaround for Cortex A76 Errata #1165522.
 * This applies only to revisions <= r3p0 of Cortex A76.
 * Due to the nature of the errata it is applied unconditionally
 * when built in; it is reported as applicable in that case.
 * --------------------------------------------------------------
 */
func check_errata_1165522
#if ERRATA_A76_1165522
        mov     x0, #ERRATA_APPLIES
        ret
#else
        mov     x1, #0x30
        b       cpu_rev_var_ls
#endif
endfunc check_errata_1165522

func check_errata_cve_2022_23960
#if WORKAROUND_CVE_2022_23960
        mov     x0, #ERRATA_APPLIES
#else
        mov     x0, #ERRATA_MISSING
#endif /* WORKAROUND_CVE_2022_23960 */
        ret
endfunc check_errata_cve_2022_23960

/* -------------------------------------------------
 * The CPU Ops reset function for Cortex-A76.
 * Shall clobber: x0-x19
 * -------------------------------------------------
 */
func cortex_a76_reset_func
        mov     x19, x30
        bl      cpu_get_rev_var
        mov     x18, x0
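        /*
         * x18 preserves the revision-variant value across the errata
         * workaround calls below; each workaround receives it in x0.
         */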

#if ERRATA_A76_1073348
        mov     x0, x18
        bl      errata_a76_1073348_wa
#endif

#if ERRATA_A76_1130799
        mov     x0, x18
        bl      errata_a76_1130799_wa
#endif

#if ERRATA_A76_1220197
        mov     x0, x18
        bl      errata_a76_1220197_wa
#endif

#if ERRATA_A76_1257314
        mov     x0, x18
        bl      errata_a76_1257314_wa
#endif

#if ERRATA_A76_1262606 || ERRATA_A76_1275112 || ERRATA_A76_1868343
        mov     x0, x18
        bl      errata_a76_1262606_1275112_1868343_wa
#endif

#if ERRATA_A76_1262888
        mov     x0, x18
        bl      errata_a76_1262888_wa
#endif

#if ERRATA_A76_1791580
        mov     x0, x18
        bl      errata_a76_1791580_wa
#endif

#if ERRATA_A76_1946160
        mov     x0, x18
        bl      errata_a76_1946160_wa
#endif

#if WORKAROUND_CVE_2018_3639
        /* If the PE implements SSBS, we don't need the dynamic workaround */
        mrs     x0, id_aa64pfr1_el1
        lsr     x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
        and     x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
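        /*
         * When the dynamic workaround is compiled out, the build relies on
         * the PE implementing SSBS; assert that the SSBS field read above is
         * non-zero.
         */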
        cmp     x0, 0
        ASM_ASSERT(ne)
#endif

#if DYNAMIC_WORKAROUND_CVE_2018_3639
        cbnz    x0, 1f
        mrs     x0, CORTEX_A76_CPUACTLR2_EL1
        orr     x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
        msr     CORTEX_A76_CPUACTLR2_EL1, x0
        isb

#ifdef IMAGE_BL31
        /*
         * The Cortex-A76 generic vectors are overwritten to use the vectors
         * defined above. This is required in order to apply mitigation
         * against CVE-2018-3639 on exception entry from lower ELs.
         * If the below vector table is used, skip overriding it again for
         * CVE_2022_23960 as both use the same vbar.
         */
        adr     x0, cortex_a76_wa_cve_vbar
        msr     vbar_el3, x0
        isb
        b       2f
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
        /*
         * The Cortex-A76 generic vectors are overridden to apply errata
         * mitigation on exception entry from lower ELs. This will be bypassed
         * if DYNAMIC_WORKAROUND_CVE_2018_3639 has overridden the vectors.
         */
        adr     x0, cortex_a76_wa_cve_vbar
        msr     vbar_el3, x0
        isb
#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
2:

#if ERRATA_DSU_798953
        bl      errata_dsu_798953_wa
#endif

#if ERRATA_DSU_936184
        bl      errata_dsu_936184_wa
#endif

        ret     x19
endfunc cortex_a76_reset_func

/* ---------------------------------------------
 * HW will do the cache maintenance while powering down
 * ---------------------------------------------
 */
func cortex_a76_core_pwr_dwn
        /* ---------------------------------------------
         * Enable CPU power down bit in power control register
         * ---------------------------------------------
         */
        mrs     x0, CORTEX_A76_CPUPWRCTLR_EL1
        orr     x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
        msr     CORTEX_A76_CPUPWRCTLR_EL1, x0
        isb
        ret
endfunc cortex_a76_core_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
        stp     x8, x30, [sp, #-16]!

        bl      cpu_get_rev_var
        mov     x8, x0

        /*
         * Report all errata. The revision-variant information is passed to
         * checking functions of each errata.
         */
        report_errata ERRATA_A76_1073348, cortex_a76, 1073348
        report_errata ERRATA_A76_1130799, cortex_a76, 1130799
        report_errata ERRATA_A76_1220197, cortex_a76, 1220197
        report_errata ERRATA_A76_1257314, cortex_a76, 1257314
        report_errata ERRATA_A76_1262606, cortex_a76, 1262606
        report_errata ERRATA_A76_1262888, cortex_a76, 1262888
        report_errata ERRATA_A76_1275112, cortex_a76, 1275112
        report_errata ERRATA_A76_1286807, cortex_a76, 1286807
        report_errata ERRATA_A76_1791580, cortex_a76, 1791580
        report_errata ERRATA_A76_1165522, cortex_a76, 1165522
        report_errata ERRATA_A76_1868343, cortex_a76, 1868343
        report_errata ERRATA_A76_1946160, cortex_a76, 1946160
        report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
        report_errata ERRATA_DSU_798953, cortex_a76, dsu_798953
        report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184
        report_errata WORKAROUND_CVE_2022_23960, cortex_a76, cve_2022_23960

        ldp     x8, x30, [sp], #16
        ret
endfunc cortex_a76_errata_report
#endif

/* ---------------------------------------------
 * This function provides cortex_a76 specific
 * register information for crash reporting.
 * It needs to return with x6 pointing to
 * a list of register names in ascii and
 * x8 - x15 having values of registers to be
 * reported.
 * ---------------------------------------------
 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
        .asciz  "cpuectlr_el1", ""
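        /*
         * The trailing empty string is assumed to terminate the name list
         * consumed by the crash reporting framework.
         */
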
func cortex_a76_cpu_reg_dump
        adr     x6, cortex_a76_regs
        mrs     x8, CORTEX_A76_CPUECTLR_EL1
        ret
endfunc cortex_a76_cpu_reg_dump
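
/*
 * The extra-function slots of declare_cpu_ops_wa (cpu_macros.S) map to
 * per-CVE workaround hooks; here only the CVE-2018-3639 disable hook is
 * provided and the remaining slots are marked as having no handler.
 */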
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
        cortex_a76_reset_func, \
        CPU_NO_EXTRA1_FUNC, \
        cortex_a76_disable_wa_cve_2018_3639, \
        CPU_NO_EXTRA3_FUNC, \
        cortex_a76_core_pwr_dwn