/*
 * Copyright (c) 2017-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>
#include "wa_cve_2022_23960_bhb.S"

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif

        .globl cortex_a76_reset_func
        .globl cortex_a76_core_pwr_dwn
        .globl cortex_a76_disable_wa_cve_2018_3639

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif
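
/*
 * ESR_EL3 syndrome values used to recognise an SMC #0 trapped from a lower
 * EL: EC 0x17 (SMC executed in AArch64 state) and EC 0x13 (SMC executed in
 * AArch32 state) respectively, with the IL bit set and a zero ISS.
 */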
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

#if DYNAMIC_WORKAROUND_CVE_2018_3639
/*
 * This macro applies the mitigation for CVE-2018-3639.
 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
 * SMC calls from a lower EL running in AArch32 or AArch64
 * will go through the fast path and return early.
 *
 * The macro saves x2-x3 to the context. In the fast path
 * the x0-x3 registers do not need to be restored, as the calling
 * context will have saved them. The macro also saves
 * x29-x30 to the context in the sync_exception path.
 */
.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
        stp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
        .if \_is_sync_exception
        stp x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
        mov_imm w2, \_esr_el3_val
        bl apply_cve_2018_3639_sync_wa
        ldp x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
        .endif

        /*
         * Always enable the v4 mitigation during EL3 execution. This is not
         * required for the fast path above because it does not perform any
         * memory loads.
         */
        mrs x2, CORTEX_A76_CPUACTLR2_EL1
        orr x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
        msr CORTEX_A76_CPUACTLR2_EL1, x2
        isb

        /*
         * The caller may have passed arguments to EL3 via x2-x3.
         * Restore these registers from the context before jumping to the
         * main runtime vector table entry.
         */
        ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
.endm
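
/*
 * For reference (illustrative only, not part of this file): a lower EL would
 * typically hit the fast path above with an SMCCC call of the form below,
 * where a non-zero w1 requests that the mitigation be enabled and a zero w1
 * requests that it be disabled:
 *
 *	mov_imm	w0, SMCCC_ARCH_WORKAROUND_2
 *	mov	w1, #1
 *	smc	#0
 */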
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
vector_base cortex_a76_wa_cve_vbar

/* ---------------------------------------------------------------------
 * Current EL with SP_EL0 : 0x0 - 0x200
 * ---------------------------------------------------------------------
 */
vector_entry cortex_a76_sync_exception_sp_el0
        b sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
        b irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
        b fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
        b serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

/* ---------------------------------------------------------------------
 * Current EL with SP_ELx : 0x200 - 0x400
 * ---------------------------------------------------------------------
 */
vector_entry cortex_a76_sync_exception_sp_elx
        b sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
        b irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
        b fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
        b serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

/* ---------------------------------------------------------------------
 * Lower EL using AArch64 : 0x400 - 0x600
 * ---------------------------------------------------------------------
 */
vector_entry cortex_a76_sync_exception_aarch64
#if WORKAROUND_CVE_2022_23960
        apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
        apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
        b sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64
#if WORKAROUND_CVE_2022_23960
        apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
        apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
        b irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64
#if WORKAROUND_CVE_2022_23960
        apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
        apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
        b fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64
#if WORKAROUND_CVE_2022_23960
        apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
        apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
        b serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

/* ---------------------------------------------------------------------
 * Lower EL using AArch32 : 0x600 - 0x800
 * ---------------------------------------------------------------------
 */
vector_entry cortex_a76_sync_exception_aarch32
#if WORKAROUND_CVE_2022_23960
        apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
        apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
        b sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32
#if WORKAROUND_CVE_2022_23960
        apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
        apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
        b irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32
#if WORKAROUND_CVE_2022_23960
        apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
        apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
        b fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32
#if WORKAROUND_CVE_2022_23960
        apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
        apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
        b serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
/*
 * -----------------------------------------------------------------
 * This function applies the mitigation for CVE-2018-3639
 * specifically for sync exceptions. It implements a fast path
 * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
 * running in AArch64 will go through the fast path and return early.
 *
 * In the fast path the x0-x3 registers do not need to be restored, as
 * the calling context will have saved them.
 *
 * The caller must pass the expected esr_el3 value to compare against
 * in x2. Save and restore these registers outside of this function,
 * from the context, before jumping to the main runtime vector table
 * entry.
 *
 * Shall clobber: x0-x3, x30
 * -----------------------------------------------------------------
 */
func apply_cve_2018_3639_sync_wa
        /*
         * Ensure the SMC is coming from A64/A32 state on #0
         * with W0 = SMCCC_ARCH_WORKAROUND_2.
         *
         * This sequence evaluates as:
         *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
         * allowing use of a single branch operation.
         * X2 is populated outside this function with the expected
         * ESR_EL3 value.
         */
        orr w3, wzr, #SMCCC_ARCH_WORKAROUND_2
        cmp x0, x3
        mrs x3, esr_el3
        ccmp w2, w3, #0, eq
        /*
         * The static predictor will predict a fall-through, optimizing
         * the `SMCCC_ARCH_WORKAROUND_2` fast path.
         */
        bne 1f

        /*
         * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
         * fast path.
         */
        cmp x1, xzr /* enable/disable check */

        /*
         * When the calling context wants the mitigation disabled,
         * we program the mitigation disable function in the
         * CPU context, which gets invoked on subsequent exits from
         * EL3 via the `el3_exit` function. Otherwise NULL is
         * programmed in the CPU context, which results in the caller
         * inheriting the EL3 mitigation state (enabled) on subsequent
         * `el3_exit`.
         */
        mov x0, xzr
        adr x1, cortex_a76_disable_wa_cve_2018_3639
        csel x1, x1, x0, eq
        str x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

        mrs x2, CORTEX_A76_CPUACTLR2_EL1
        orr x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
        bic x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
        csel x3, x3, x1, eq
        msr CORTEX_A76_CPUACTLR2_EL1, x3
        ldp x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
        /*
         * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
         */
        exception_return /* exception_return contains ISB */
1:
        ret
endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
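
/*
 * The erratum workarounds below are built with the common errata framework
 * macros from cpu_macros.S: each workaround_reset_start/workaround_reset_end
 * pair emits a workaround that is applied from the reset function, and each
 * check_erratum_* invocation emits the matching revision/variant check used
 * for errata reporting.
 */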
workaround_reset_start cortex_a76, ERRATUM(1073348), ERRATA_A76_1073348
        sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
workaround_reset_end cortex_a76, ERRATUM(1073348)

check_erratum_ls cortex_a76, ERRATUM(1073348), CPU_REV(1, 0)

workaround_reset_start cortex_a76, ERRATUM(1130799), ERRATA_A76_1130799
        sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_59
        msr CORTEX_A76_CPUACTLR2_EL1, x1
workaround_reset_end cortex_a76, ERRATUM(1130799)

check_erratum_ls cortex_a76, ERRATUM(1130799), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1220197), ERRATA_A76_1220197
        sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
workaround_reset_end cortex_a76, ERRATUM(1220197)

check_erratum_ls cortex_a76, ERRATUM(1220197), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1257314), ERRATA_A76_1257314
        sysreg_bit_set CORTEX_A76_CPUACTLR3_EL1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
workaround_reset_end cortex_a76, ERRATUM(1257314)

check_erratum_ls cortex_a76, ERRATUM(1257314), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262606), ERRATA_A76_1262606
        sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1262606)

check_erratum_ls cortex_a76, ERRATUM(1262606), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262888), ERRATA_A76_1262888
        sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_BIT_51
workaround_reset_end cortex_a76, ERRATUM(1262888)

check_erratum_ls cortex_a76, ERRATUM(1262888), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1275112), ERRATA_A76_1275112
        sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1275112)

check_erratum_ls cortex_a76, ERRATUM(1275112), CPU_REV(3, 0)
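
/* --------------------------------------------------------------
 * Erratum check for Cortex-A76 erratum 1286807.
 * It applies only to revisions <= r3p0. Due to the nature of the
 * erratum, the workaround is applied unconditionally when built in,
 * so report it as applicable in that case.
 * --------------------------------------------------------------
 */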
check_erratum_custom_start cortex_a76, ERRATUM(1286807)
#if ERRATA_A76_1286807
        mov x0, #ERRATA_APPLIES
        ret
#else
        mov x1, #0x30
        b cpu_rev_var_ls
#endif
check_erratum_custom_end cortex_a76, ERRATUM(1286807)

workaround_reset_start cortex_a76, ERRATUM(1791580), ERRATA_A76_1791580
        sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
workaround_reset_end cortex_a76, ERRATUM(1791580)

check_erratum_ls cortex_a76, ERRATUM(1791580), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1868343), ERRATA_A76_1868343
        sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1868343)

check_erratum_ls cortex_a76, ERRATUM(1868343), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1946160), ERRATA_A76_1946160
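        /*
         * The sequence below programs IMPLEMENTATION DEFINED system
         * registers (S3_6_C15_C8_n) with the values prescribed by the
         * erratum 1946160 workaround.
         */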
        mov x0, #3
        msr S3_6_C15_C8_0, x0
        ldr x0, =0x10E3900002
        msr S3_6_C15_C8_2, x0
        ldr x0, =0x10FFF00083
        msr S3_6_C15_C8_3, x0
        ldr x0, =0x2001003FF
        msr S3_6_C15_C8_1, x0

        mov x0, #4
        msr S3_6_C15_C8_0, x0
        ldr x0, =0x10E3800082
        msr S3_6_C15_C8_2, x0
        ldr x0, =0x10FFF00083
        msr S3_6_C15_C8_3, x0
        ldr x0, =0x2001003FF
        msr S3_6_C15_C8_1, x0

        mov x0, #5
        msr S3_6_C15_C8_0, x0
        ldr x0, =0x10E3800200
        msr S3_6_C15_C8_2, x0
        ldr x0, =0x10FFF003E0
        msr S3_6_C15_C8_3, x0
        ldr x0, =0x2001003FF
        msr S3_6_C15_C8_1, x0
workaround_reset_end cortex_a76, ERRATUM(1946160)

check_erratum_range cortex_a76, ERRATUM(1946160), CPU_REV(3, 0), CPU_REV(4, 1)

workaround_runtime_start cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102
        /* dsb before the isb of the power down sequence */
        dsb sy
workaround_runtime_end cortex_a76, ERRATUM(2743102)

check_erratum_ls cortex_a76, ERRATUM(2743102), CPU_REV(4, 1)

check_erratum_chosen cortex_a76, CVE(2018, 3639), WORKAROUND_CVE_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
        sysreg_bit_clear CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
        isb
        ret
endfunc cortex_a76_disable_wa_cve_2018_3639
/* --------------------------------------------------------------
 * Erratum check for Cortex-A76 erratum 1165522.
 * It applies only to revisions <= r3p0 of Cortex-A76.
 * Due to the nature of the erratum, the workaround is applied
 * unconditionally when built in, so report it as applicable in
 * that case.
 * --------------------------------------------------------------
 */
check_erratum_custom_start cortex_a76, ERRATUM(1165522)
#if ERRATA_A76_1165522
        mov x0, #ERRATA_APPLIES
        ret
#else
        mov x1, #0x30
        b cpu_rev_var_ls
#endif
check_erratum_custom_end cortex_a76, ERRATUM(1165522)

check_erratum_chosen cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

/* CVE-2022-23960 has no workaround in the CPU itself; generic code must take care of it. */
add_erratum_entry cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960, NO_APPLY_AT_RESET
/* ERRATA_DSU_798953:
 * This erratum is defined in dsu_helpers.S but applies to Cortex-A76 as
 * well. Hence, create symbolic aliases for the existing workaround
 * functions so that they get registered with the errata framework.
 */
.equ check_erratum_cortex_a76_798953, check_errata_dsu_798953
.equ erratum_cortex_a76_798953_wa, errata_dsu_798953_wa
add_erratum_entry cortex_a76, ERRATUM(798953), ERRATA_DSU_798953, APPLY_AT_RESET

/* ERRATA_DSU_936184:
 * This erratum is defined in dsu_helpers.S but applies to Cortex-A76 as
 * well. Hence, create symbolic aliases for the existing workaround
 * functions so that they get registered with the errata framework.
 */
.equ check_erratum_cortex_a76_936184, check_errata_dsu_936184
.equ erratum_cortex_a76_936184_wa, errata_dsu_936184_wa
add_erratum_entry cortex_a76, ERRATUM(936184), ERRATA_DSU_936184, APPLY_AT_RESET
cpu_reset_func_start cortex_a76

#if WORKAROUND_CVE_2018_3639
        /* If the PE implements SSBS, we don't need the dynamic workaround */
        mrs x0, id_aa64pfr1_el1
        lsr x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
        and x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
        cmp x0, 0
        ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
        cbnz x0, 1f
        sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
        isb

#ifdef IMAGE_BL31
        /*
         * The Cortex-A76 generic vectors are overwritten to use the vectors
         * defined above. This is required in order to apply the mitigation
         * against CVE-2018-3639 on exception entry from lower ELs.
         * If the below vector table is used, skip overriding it again for
         * CVE_2022_23960 as both use the same vbar.
         */
        override_vector_table cortex_a76_wa_cve_vbar
        isb
        b 2f
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
        /*
         * The Cortex-A76 generic vectors are overridden to apply errata
         * mitigation on exception entry from lower ELs. This will be bypassed
         * if DYNAMIC_WORKAROUND_CVE_2018_3639 has overridden the vectors.
         */
        override_vector_table cortex_a76_wa_cve_vbar
        isb
#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */

2:
cpu_reset_func_end cortex_a76
/* ---------------------------------------------
 * HW will do the cache maintenance while powering down
 * ---------------------------------------------
 */
func cortex_a76_core_pwr_dwn
        /* ---------------------------------------------
         * Enable the CPU power down bit in the power control register
         * ---------------------------------------------
         */
        sysreg_bit_set CORTEX_A76_CPUPWRCTLR_EL1, CORTEX_A76_CORE_PWRDN_EN_MASK

        apply_erratum cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102

        isb
        ret
endfunc cortex_a76_core_pwr_dwn

/* ---------------------------------------------
 * This function provides Cortex-A76 specific
 * register information for crash reporting.
 * It needs to return with x6 pointing to
 * a list of register names in ASCII and
 * x8 - x15 having the values of the registers
 * to be reported.
 * ---------------------------------------------
 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ASCII list of register names to be reported */
        .asciz "cpuectlr_el1", ""

func cortex_a76_cpu_reg_dump
        adr x6, cortex_a76_regs
        mrs x8, CORTEX_A76_CPUECTLR_EL1
        ret
endfunc cortex_a76_cpu_reg_dump
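
/*
 * Illustrative sketch only (not part of this port): following the crash
 * reporting contract described above, an additional register could be
 * reported by appending its name to the .asciz list and loading its value
 * into the next free register in x8-x15, e.g.:
 *
 *	cortex_a76_regs:
 *		.asciz "cpuectlr_el1", "cpuactlr_el1", ""
 *
 *	func cortex_a76_cpu_reg_dump
 *		adr x6, cortex_a76_regs
 *		mrs x8, CORTEX_A76_CPUECTLR_EL1
 *		mrs x9, CORTEX_A76_CPUACTLR_EL1
 *		ret
 *	endfunc cortex_a76_cpu_reg_dump
 */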
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
        cortex_a76_reset_func, \
        CPU_NO_EXTRA1_FUNC, \
        cortex_a76_disable_wa_cve_2018_3639, \
        CPU_NO_EXTRA3_FUNC, \
        cortex_a76_core_pwr_dwn