context.S

/*
 * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>
#include <platform_def.h>

#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

#if CTX_INCLUDE_SVE_REGS
	.global	sve_context_save
	.global	sve_context_restore
#endif /* CTX_INCLUDE_SVE_REGS */

#if ERRATA_SPECULATIVE_AT
	.global	save_and_update_ptw_el1_sys_regs
#endif /* ERRATA_SPECULATIVE_AT */

	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	el3_exit

/*
 * The following macros are used if either CTX_INCLUDE_FPREGS or
 * CTX_INCLUDE_SVE_REGS is enabled.
 */
#if CTX_INCLUDE_FPREGS || CTX_INCLUDE_SVE_REGS
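/*
 * fpregs_state_save/restore capture the SIMD status and control
 * registers (FPSR, FPCR) into/from the context pointed to by \base,
 * using \hold as a scratch register. FPEXC32_EL2 is only handled when
 * CTX_INCLUDE_AARCH32_REGS is set, i.e. when a lower EL may execute
 * in AArch32 and that register is architecturally relevant.
 */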
.macro fpregs_state_save base:req hold:req
	mrs \hold, fpsr
	str \hold, [\base, #CTX_SIMD_FPSR]
	mrs \hold, fpcr
	str \hold, [\base, #CTX_SIMD_FPCR]
#if CTX_INCLUDE_AARCH32_REGS && CTX_INCLUDE_FPREGS
	mrs \hold, fpexc32_el2
	str \hold, [\base, #CTX_SIMD_FPEXC32]
#endif
.endm

.macro fpregs_state_restore base:req hold:req
	ldr \hold, [\base, #CTX_SIMD_FPSR]
	msr fpsr, \hold
	ldr \hold, [\base, #CTX_SIMD_FPCR]
	msr fpcr, \hold
#if CTX_INCLUDE_AARCH32_REGS && CTX_INCLUDE_FPREGS
	ldr \hold, [\base, #CTX_SIMD_FPEXC32]
	msr fpexc32_el2, \hold
#endif
.endm

#endif /* CTX_INCLUDE_FPREGS || CTX_INCLUDE_SVE_REGS */
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (AAPCS64),
 * using only x9-x17 (temporary, caller-saved registers) to save the
 * floating point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, and assume the bit is cleared.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
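	/*
	 * Each stp below stores one pair of 128-bit Q registers (32
	 * bytes) and post-increments x0, so the 16 pairs lay out
	 * q0-q31 contiguously (16 x 32 = 512 bytes) at the start of
	 * the 'fp_regs' area, ahead of the status/control registers.
	 */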
	stp q0, q1, [x0], #32
	stp q2, q3, [x0], #32
	stp q4, q5, [x0], #32
	stp q6, q7, [x0], #32
	stp q8, q9, [x0], #32
	stp q10, q11, [x0], #32
	stp q12, q13, [x0], #32
	stp q14, q15, [x0], #32
	stp q16, q17, [x0], #32
	stp q18, q19, [x0], #32
	stp q20, q21, [x0], #32
	stp q22, q23, [x0], #32
	stp q24, q25, [x0], #32
	stp q26, q27, [x0], #32
	stp q28, q29, [x0], #32
	stp q30, q31, [x0], #32

	fpregs_state_save x0, x9

	ret
endfunc fpregs_context_save
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (AAPCS64),
 * using only x9-x17 (temporary, caller-saved registers) to restore
 * the floating point register context. It assumes that 'x0' points
 * to a 'fp_regs' structure from which the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, and assume the bit is cleared.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp q0, q1, [x0], #32
	ldp q2, q3, [x0], #32
	ldp q4, q5, [x0], #32
	ldp q6, q7, [x0], #32
	ldp q8, q9, [x0], #32
	ldp q10, q11, [x0], #32
	ldp q12, q13, [x0], #32
	ldp q14, q15, [x0], #32
	ldp q16, q17, [x0], #32
	ldp q18, q19, [x0], #32
	ldp q20, q21, [x0], #32
	ldp q22, q23, [x0], #32
	ldp q24, q25, [x0], #32
	ldp q26, q27, [x0], #32
	ldp q28, q29, [x0], #32
	ldp q30, q31, [x0], #32

	fpregs_state_restore x0, x9

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
#if CTX_INCLUDE_SVE_REGS
/*
 * Helper macros for SVE predicate and vector register save/restore
 * operations.
 */
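/*
 * Note: the '#n, MUL VL' addressing form used below scales the
 * immediate by the current vector length: by VL/8 bytes for the
 * predicate (P register) forms and by VL bytes for the vector
 * (Z register) forms, so consecutive registers land at
 * vector-length-dependent strides from the base register.
 */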
.macro sve_predicate_op op:req reg:req
	\op p0, [\reg, #0, MUL VL]
	\op p1, [\reg, #1, MUL VL]
	\op p2, [\reg, #2, MUL VL]
	\op p3, [\reg, #3, MUL VL]
	\op p4, [\reg, #4, MUL VL]
	\op p5, [\reg, #5, MUL VL]
	\op p6, [\reg, #6, MUL VL]
	\op p7, [\reg, #7, MUL VL]
	\op p8, [\reg, #8, MUL VL]
	\op p9, [\reg, #9, MUL VL]
	\op p10, [\reg, #10, MUL VL]
	\op p11, [\reg, #11, MUL VL]
	\op p12, [\reg, #12, MUL VL]
	\op p13, [\reg, #13, MUL VL]
	\op p14, [\reg, #14, MUL VL]
	\op p15, [\reg, #15, MUL VL]
.endm

.macro sve_vectors_op op:req reg:req
	\op z0, [\reg, #0, MUL VL]
	\op z1, [\reg, #1, MUL VL]
	\op z2, [\reg, #2, MUL VL]
	\op z3, [\reg, #3, MUL VL]
	\op z4, [\reg, #4, MUL VL]
	\op z5, [\reg, #5, MUL VL]
	\op z6, [\reg, #6, MUL VL]
	\op z7, [\reg, #7, MUL VL]
	\op z8, [\reg, #8, MUL VL]
	\op z9, [\reg, #9, MUL VL]
	\op z10, [\reg, #10, MUL VL]
	\op z11, [\reg, #11, MUL VL]
	\op z12, [\reg, #12, MUL VL]
	\op z13, [\reg, #13, MUL VL]
	\op z14, [\reg, #14, MUL VL]
	\op z15, [\reg, #15, MUL VL]
	\op z16, [\reg, #16, MUL VL]
	\op z17, [\reg, #17, MUL VL]
	\op z18, [\reg, #18, MUL VL]
	\op z19, [\reg, #19, MUL VL]
	\op z20, [\reg, #20, MUL VL]
	\op z21, [\reg, #21, MUL VL]
	\op z22, [\reg, #22, MUL VL]
	\op z23, [\reg, #23, MUL VL]
	\op z24, [\reg, #24, MUL VL]
	\op z25, [\reg, #25, MUL VL]
	\op z26, [\reg, #26, MUL VL]
	\op z27, [\reg, #27, MUL VL]
	\op z28, [\reg, #28, MUL VL]
	\op z29, [\reg, #29, MUL VL]
	\op z30, [\reg, #30, MUL VL]
	\op z31, [\reg, #31, MUL VL]
.endm
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (AAPCS64),
 * using only x9-x17 (temporary, caller-saved registers) to save the
 * SVE register context. It assumes that 'x0' points to a
 * 'sve_regs_t' structure to which the register context will be
 * saved.
 * ------------------------------------------------------------------
 */
func sve_context_save
.arch_extension sve
	/* Temporarily enable SVE */
	mrs x10, cptr_el3
	orr x11, x10, #CPTR_EZ_BIT
	bic x11, x11, #TFP_BIT
	msr cptr_el3, x11
	isb
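	/*
	 * ZCR_EL3.LEN encodes the vector length in units of 128 bits,
	 * minus one: LEN = (SVE_VECTOR_LEN / 128) - 1, hence the
	 * (SVE_VECTOR_LEN >> 7) - 1 below. For example, a build with
	 * SVE_VECTOR_LEN = 512 requests LEN = 3. This temporarily
	 * programs EL3's vector length to the configured maximum so
	 * the save/restore covers the full SVE state.
	 */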
	/* zcr_el3 */
	mrs x12, S3_6_C1_C2_0
	mov x13, #((SVE_VECTOR_LEN >> 7) - 1)
	msr S3_6_C1_C2_0, x13
	isb

	/* Predicate registers */
	mov x13, #CTX_SIMD_PREDICATES
	add x9, x0, x13
	sve_predicate_op str, x9

	/* Save FFR after predicates */
	mov x13, #CTX_SIMD_FFR
	add x9, x0, x13
	rdffr p0.b
	str p0, [x9]

	/* Save vector registers */
	mov x13, #CTX_SIMD_VECTORS
	add x9, x0, x13
	sve_vectors_op str, x9

	/* Restore SVE enablement */
	msr S3_6_C1_C2_0, x12 /* zcr_el3 */
	msr cptr_el3, x10
	isb
.arch_extension nosve

	/* Save FPSR, FPCR and FPEXC32 */
	fpregs_state_save x0, x9

	ret
endfunc sve_context_save
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (AAPCS64),
 * using only x9-x17 (temporary, caller-saved registers) to restore
 * the SVE register context. It assumes that 'x0' points to a
 * 'sve_regs_t' structure from which the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func sve_context_restore
.arch_extension sve
	/* Temporarily enable SVE for EL3 */
	mrs x10, cptr_el3
	orr x11, x10, #CPTR_EZ_BIT
	bic x11, x11, #TFP_BIT
	msr cptr_el3, x11
	isb

	/* zcr_el3 */
	mrs x12, S3_6_C1_C2_0
	mov x13, #((SVE_VECTOR_LEN >> 7) - 1)
	msr S3_6_C1_C2_0, x13
	isb

	/* Restore FFR register before predicates */
	mov x13, #CTX_SIMD_FFR
	add x9, x0, x13
	ldr p0, [x9]
	wrffr p0.b

	/* Restore predicate registers */
	mov x13, #CTX_SIMD_PREDICATES
	add x9, x0, x13
	sve_predicate_op ldr, x9

	/* Restore vector registers */
	mov x13, #CTX_SIMD_VECTORS
	add x9, x0, x13
	sve_vectors_op ldr, x9

	/* Restore SVE enablement */
	msr S3_6_C1_C2_0, x12 /* zcr_el3 */
	msr cptr_el3, x10
	isb
.arch_extension nosve

	/* Restore FPSR, FPCR and FPEXC32 */
	fpregs_state_restore x0, x9

	ret
endfunc sve_context_restore
#endif /* CTX_INCLUDE_SVE_REGS */
/*
 * Set SCR_EL3.EA bit to enable SErrors at EL3
 */
.macro enable_serror_at_el3
	mrs x8, scr_el3
	orr x8, x8, #SCR_EA_BIT
	msr scr_el3, x8
.endm
/*
 * Set to a default value the PSTATE bits that are not set when an
 * exception is taken, as described in the AArch64.TakeException()
 * pseudocode function in Arm DDI 0487F.c, page J1-7635.
 */
.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is
	 * implemented, always enable DIT in EL3.
	 */
#if ENABLE_FEAT_DIT
#if ENABLE_FEAT_DIT >= 2
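	/*
	 * With ENABLE_FEAT_DIT >= 2 the feature is detected at
	 * runtime: x8 holds the ID_AA64PFR0_EL1.DIT field, and the
	 * write to PSTATE.DIT is skipped when that field reads as
	 * zero (FEAT_DIT not implemented).
	 */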
	mrs x8, id_aa64pfr0_el1
	and x8, x8, #(ID_AA64PFR0_DIT_MASK << ID_AA64PFR0_DIT_SHIFT)
	cbz x8, 1f
#endif
	mov x8, #DIT_BIT
	msr DIT, x8
1:
#endif /* ENABLE_FEAT_DIT */
.endm /* set_unset_pstate_bits */
/*-------------------------------------------------------------------------
 * This macro checks the ENABLE_FEAT_MPAM state, performs an ID register
 * check to see whether the platform supports the MPAM extension, and
 * restores the MPAM3_EL3 register value if the state is
 * FEAT_STATE_ENABLED/FEAT_STATE_CHECKED.
 *
 * This is more involved than for other extensions because MPAM support
 * cannot be determined from the status of a particular bit in the
 * MDCR_EL3 or CPTR_EL3 register.
 * ------------------------------------------------------------------------
 */
.macro restore_mpam3_el3
#if ENABLE_FEAT_MPAM
#if ENABLE_FEAT_MPAM >= 2
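	/*
	 * Runtime check: combine ID_AA64PFR0_EL1.MPAM (major version)
	 * with ID_AA64PFR1_EL1.MPAM_frac (minor version). If both
	 * fields are zero, MPAM is not implemented and the MPAM3_EL3
	 * restore is skipped.
	 */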
	mrs x8, id_aa64pfr0_el1
	lsr x8, x8, #(ID_AA64PFR0_MPAM_SHIFT)
	and x8, x8, #(ID_AA64PFR0_MPAM_MASK)
	mrs x7, id_aa64pfr1_el1
	lsr x7, x7, #(ID_AA64PFR1_MPAM_FRAC_SHIFT)
	and x7, x7, #(ID_AA64PFR1_MPAM_FRAC_MASK)
	orr x7, x7, x8
	cbz x7, no_mpam
#endif
	/* -----------------------------------------------------------
	 * Restore the MPAM3_EL3 register as per the context state.
	 * Currently we only enable MPAM for the NS world and trap to
	 * EL3 on MPAM accesses from lower ELs of the Secure and Realm
	 * worlds. x9 holds the address of the per_world context.
	 * -----------------------------------------------------------
	 */
	ldr x17, [x9, #CTX_MPAM3_EL3]
	msr S3_6_C10_C5_0, x17 /* mpam3_el3 */
no_mpam:
#endif
.endm /* restore_mpam3_el3 */
/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter (PMCCNTR_EL0)
 * is disabled in EL3/Secure (ARMv8.5-PMU), in which case PMCCNTR_EL0
 * need not be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we always save and restore these
 * registers on entry to and exit from EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
.macro save_gp_pmcr_pauth_regs
	stp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs x18, sp_el0
	str x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* PMUv3 is presumed to be always present */
	mrs x9, pmcr_el0
	str x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	isb
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs x20, APIAKeyLo_EL1 /* x21:x20 = APIAKey */
	mrs x21, APIAKeyHi_EL1
	mrs x22, APIBKeyLo_EL1 /* x23:x22 = APIBKey */
	mrs x23, APIBKeyHi_EL1
	mrs x24, APDAKeyLo_EL1 /* x25:x24 = APDAKey */
	mrs x25, APDAKeyHi_EL1
	mrs x26, APDBKeyLo_EL1 /* x27:x26 = APDBKey */
	mrs x27, APDBKeyHi_EL1
	mrs x28, APGAKeyLo_EL1 /* x29:x28 = APGAKey */
	mrs x29, APGAKeyHi_EL1

	stp x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
.endm /* save_gp_pmcr_pauth_regs */
/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * Save all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * Then set to a default value for EL3 any of the PSTATE bits that
 * are not set by hardware, according to the AArch64.TakeException
 * pseudocode in the Arm Architecture Reference Manual.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	setup_el3_execution_context
	ret
endfunc prepare_el3_entry

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp x0, x1, [x10, #CTX_PACIAKEY_LO] /* x1:x0 = APIAKey */
	ldp x2, x3, [x10, #CTX_PACIBKEY_LO] /* x3:x2 = APIBKey */
	ldp x4, x5, [x10, #CTX_PACDAKEY_LO] /* x5:x4 = APDAKey */
	ldp x6, x7, [x10, #CTX_PACDBKEY_LO] /* x7:x6 = APDBKey */
	ldp x8, x9, [x10, #CTX_PACGAKEY_LO] /* x9:x8 = APGAKey */

	msr APIAKeyLo_EL1, x0
	msr APIAKeyHi_EL1, x1
	msr APIBKeyLo_EL1, x2
	msr APIBKeyHi_EL1, x3
	msr APDAKeyLo_EL1, x4
	msr APDAKeyHi_EL1, x5
	msr APDBKeyLo_EL1, x6
	msr APDBKeyHi_EL1, x7
	msr APGAKeyLo_EL1, x8
	msr APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* PMUv3 is presumed to be always present */
	ldr x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr pmcr_el0, x0

	ldp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr sp_el0, x28
	ldp x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs
#if ERRATA_SPECULATIVE_AT
/* --------------------------------------------------------------------
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable stage 1 and
 * stage 2 page table walks.
 * --------------------------------------------------------------------
 */
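/*
 * Background (informational sketch): the ERRATA_SPECULATIVE_AT
 * workaround targets errata in which speculative address translation
 * performed while the EL1 translation registers are in a transient,
 * partially saved/restored state can allocate corrupt TLB entries.
 * Disabling page table walks for the lower ELs while EL3 handles
 * these registers closes that window; see the TF-A errata
 * documentation for the affected cores.
 */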
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only the sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs x29, sctlr_el1
	str x29, [sp, #(CTX_ERRATA_SPEC_AT_OFFSET + CTX_ERRATA_SPEC_AT_SCTLR_EL1)]
	mrs x29, tcr_el1
	str x29, [sp, #(CTX_ERRATA_SPEC_AT_OFFSET + CTX_ERRATA_SPEC_AT_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * page table walks for the lower ELs (EL1 and EL0). The first
	 * step disables stage 1 walks; the second ensures that the
	 * page table walker uses the TCR_EL1.EPDx bits for address
	 * translation. The ISB ensures the CPU performs these two
	 * steps in order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable stage 1 page
	 *    table walks.
	 * 2. Enable the MMU bit to avoid identity mapping via stage 2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
	orr x29, x29, #(TCR_EPD0_BIT)
	orr x29, x29, #(TCR_EPD1_BIT)
	msr tcr_el1, x29
	isb

	mrs x29, sctlr_el1
	orr x29, x29, #SCTLR_M_BIT
	msr sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs
#endif /* ERRATA_SPECULATIVE_AT */
/* -----------------------------------------------------------------
 * The macro below returns the address of the per_world context for
 * the security state retrieved through the "get_security_state"
 * macro. The per_world context address is returned in the register
 * argument.
 * Clobbers: x9, x10
 * ------------------------------------------------------------------
 */
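/*
 * Address computation (informational): the per-world entry size is
 * (CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3) bytes, so the returned
 * address is
 *
 *   per_world_context + security_state * entry_size
 *
 * where the security_state index is derived from the saved SCR_EL3
 * value (the NSE/NS bits) by get_security_state, assuming
 * per_world_context is laid out as an array indexed by security
 * state.
 */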
.macro get_per_world_context _reg:req
	ldr x10, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	get_security_state x9, x10
	mov_imm x10, (CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3)
	mul x9, x9, x10
	adrp x10, per_world_context
	add x10, x10, :lo12:per_world_context
	add x9, x9, x10
	mov \_reg, x9
.endm

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from which the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs x17, spsel
	cmp x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0, i.e. the EL3 runtime stack, which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov x17, sp
	msr spsel, #MODE_SP_ELX
	str x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */

	/* The address of the per_world context is stored in x9 */
	get_per_world_context x9

	ldp x19, x20, [x9, #CTX_CPTR_EL3]
	msr cptr_el3, x19

#if IMAGE_BL31
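	/*
	 * Accesses to ZCR_EL3 are trapped while CPTR_EL3.EZ is 0, so
	 * zcr_el3 is only written when the EZ bit is set in the
	 * restored CPTR_EL3 value, and only after an ISB has made the
	 * CPTR_EL3 write visible.
	 */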
	ands x19, x19, #CPTR_EZ_BIT
	beq sve_not_enabled

	isb
	msr S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:

	restore_mpam3_el3
#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz x17, 1f
	blr x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31
	synchronize_errors
#endif /* IMAGE_BL31 */
	/* --------------------------------------------------------------
	 * Restore MDCR_EL3, SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * --------------------------------------------------------------
	 */
	ldp x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	ldr x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldr x19, [sp, #CTX_EL3STATE_OFFSET + CTX_MDCR_EL3]
	msr spsr_el3, x16
	msr elr_el3, x17
	msr scr_el3, x18
	msr mdcr_el3, x19

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl restore_gp_pmcr_pauth_regs
	ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	/* Clear the EL3 flag as we are exiting EL3 */
	str xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
#endif /* IMAGE_BL31 */

	exception_return
endfunc el3_exit