arch_helpers.h 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497
  1. /*
  2. * Copyright (c) 2016-2024, ARM Limited and Contributors. All rights reserved.
  3. * Portions copyright (c) 2021-2022, ProvenRun S.A.S. All rights reserved.
  4. *
  5. * SPDX-License-Identifier: BSD-3-Clause
  6. */
  7. #ifndef ARCH_HELPERS_H
  8. #define ARCH_HELPERS_H
  9. #include <assert.h>
  10. #include <cdefs.h>
  11. #include <stdbool.h>
  12. #include <stdint.h>
  13. #include <string.h>
  14. #include <arch.h>
  15. /**********************************************************************
  16. * Macros which create inline functions to read or write CPU system
  17. * registers
  18. *********************************************************************/
  19. #define _DEFINE_COPROCR_WRITE_FUNC(_name, coproc, opc1, CRn, CRm, opc2) \
  20. static inline void write_## _name(u_register_t v) \
  21. { \
  22. __asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
  23. }
  24. #define _DEFINE_COPROCR_READ_FUNC(_name, coproc, opc1, CRn, CRm, opc2) \
  25. static inline u_register_t read_ ## _name(void) \
  26. { \
  27. u_register_t v; \
  28. __asm__ volatile ("mrc "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : "=r" (v));\
  29. return v; \
  30. }
  31. /*
  32. * The undocumented %Q and %R extended asm are used to implemented the below
  33. * 64 bit `mrrc` and `mcrr` instructions.
  34. */
  35. #define _DEFINE_COPROCR_WRITE_FUNC_64(_name, coproc, opc1, CRm) \
  36. static inline void write64_## _name(uint64_t v) \
  37. { \
  38. __asm__ volatile ("mcrr "#coproc","#opc1", %Q0, %R0,"#CRm : : "r" (v));\
  39. }
  40. #define _DEFINE_COPROCR_READ_FUNC_64(_name, coproc, opc1, CRm) \
  41. static inline uint64_t read64_## _name(void) \
  42. { uint64_t v; \
  43. __asm__ volatile ("mrrc "#coproc","#opc1", %Q0, %R0,"#CRm : "=r" (v));\
  44. return v; \
  45. }
  46. #define _DEFINE_SYSREG_READ_FUNC(_name, _reg_name) \
  47. static inline u_register_t read_ ## _name(void) \
  48. { \
  49. u_register_t v; \
  50. __asm__ volatile ("mrs %0, " #_reg_name : "=r" (v)); \
  51. return v; \
  52. }
  53. #define _DEFINE_SYSREG_WRITE_FUNC(_name, _reg_name) \
  54. static inline void write_ ## _name(u_register_t v) \
  55. { \
  56. __asm__ volatile ("msr " #_reg_name ", %0" : : "r" (v)); \
  57. }
  58. #define _DEFINE_SYSREG_WRITE_CONST_FUNC(_name, _reg_name) \
  59. static inline void write_ ## _name(const u_register_t v) \
  60. { \
  61. __asm__ volatile ("msr " #_reg_name ", %0" : : "i" (v)); \
  62. }
  63. /* Define read function for coproc register */
  64. #define DEFINE_COPROCR_READ_FUNC(_name, ...) \
  65. _DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)
  66. /* Define write function for coproc register */
  67. #define DEFINE_COPROCR_WRITE_FUNC(_name, ...) \
  68. _DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)
  69. /* Define read & write function for coproc register */
  70. #define DEFINE_COPROCR_RW_FUNCS(_name, ...) \
  71. _DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__) \
  72. _DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)
  73. /* Define 64 bit read function for coproc register */
  74. #define DEFINE_COPROCR_READ_FUNC_64(_name, ...) \
  75. _DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)
  76. /* Define 64 bit write function for coproc register */
  77. #define DEFINE_COPROCR_WRITE_FUNC_64(_name, ...) \
  78. _DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)
  79. /* Define 64 bit read & write function for coproc register */
  80. #define DEFINE_COPROCR_RW_FUNCS_64(_name, ...) \
  81. _DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__) \
  82. _DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)
  83. /* Define read & write function for system register */
  84. #define DEFINE_SYSREG_RW_FUNCS(_name) \
  85. _DEFINE_SYSREG_READ_FUNC(_name, _name) \
  86. _DEFINE_SYSREG_WRITE_FUNC(_name, _name)
  87. /**********************************************************************
  88. * Macros to create inline functions for tlbi operations
  89. *********************************************************************/
  90. #define _DEFINE_TLBIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
  91. static inline void tlbi##_op(void) \
  92. { \
  93. u_register_t v = 0; \
  94. __asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
  95. }
  96. #define _DEFINE_BPIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
  97. static inline void bpi##_op(void) \
  98. { \
  99. u_register_t v = 0; \
  100. __asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
  101. }
  102. #define _DEFINE_TLBIOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
  103. static inline void tlbi##_op(u_register_t v) \
  104. { \
  105. __asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
  106. }
  107. /* Define function for simple TLBI operation */
  108. #define DEFINE_TLBIOP_FUNC(_op, ...) \
  109. _DEFINE_TLBIOP_FUNC(_op, __VA_ARGS__)
  110. /* Define function for TLBI operation with register parameter */
  111. #define DEFINE_TLBIOP_PARAM_FUNC(_op, ...) \
  112. _DEFINE_TLBIOP_PARAM_FUNC(_op, __VA_ARGS__)
  113. /* Define function for simple BPI operation */
  114. #define DEFINE_BPIOP_FUNC(_op, ...) \
  115. _DEFINE_BPIOP_FUNC(_op, __VA_ARGS__)
  116. /**********************************************************************
  117. * Macros to create inline functions for DC operations
  118. *********************************************************************/
  119. #define _DEFINE_DCOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
  120. static inline void dc##_op(u_register_t v) \
  121. { \
  122. __asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
  123. }
  124. /* Define function for DC operation with register parameter */
  125. #define DEFINE_DCOP_PARAM_FUNC(_op, ...) \
  126. _DEFINE_DCOP_PARAM_FUNC(_op, __VA_ARGS__)
  127. /**********************************************************************
  128. * Macros to create inline functions for system instructions
  129. *********************************************************************/
  130. /* Define function for simple system instruction */
  131. #define DEFINE_SYSOP_FUNC(_op) \
  132. static inline void _op(void) \
  133. { \
  134. __asm__ (#_op); \
  135. }
  136. /* Define function for system instruction with type specifier */
  137. #define DEFINE_SYSOP_TYPE_FUNC(_op, _type) \
  138. static inline void _op ## _type(void) \
  139. { \
  140. __asm__ (#_op " " #_type : : : "memory"); \
  141. }
  142. /* Define function for system instruction with register parameter */
  143. #define DEFINE_SYSOP_TYPE_PARAM_FUNC(_op, _type) \
  144. static inline void _op ## _type(u_register_t v) \
  145. { \
  146. __asm__ (#_op " " #_type ", %0" : : "r" (v)); \
  147. }
  148. void flush_dcache_range(uintptr_t addr, size_t size);
  149. void clean_dcache_range(uintptr_t addr, size_t size);
  150. void inv_dcache_range(uintptr_t addr, size_t size);
  151. bool is_dcache_enabled(void);
  152. void dcsw_op_louis(u_register_t op_type);
  153. void dcsw_op_all(u_register_t op_type);
  154. void disable_mmu_secure(void);
  155. void disable_mmu_icache_secure(void);
  156. DEFINE_SYSOP_FUNC(wfi)
  157. DEFINE_SYSOP_FUNC(wfe)
  158. DEFINE_SYSOP_FUNC(sev)
  159. DEFINE_SYSOP_TYPE_FUNC(dsb, sy)
  160. DEFINE_SYSOP_TYPE_FUNC(dmb, sy)
  161. DEFINE_SYSOP_TYPE_FUNC(dmb, st)
  162. /* dmb ld is not valid for armv7/thumb machines */
  163. #if ARM_ARCH_MAJOR != 7
  164. DEFINE_SYSOP_TYPE_FUNC(dmb, ld)
  165. #endif
  166. DEFINE_SYSOP_TYPE_FUNC(dsb, ish)
  167. DEFINE_SYSOP_TYPE_FUNC(dsb, ishst)
  168. DEFINE_SYSOP_TYPE_FUNC(dmb, ish)
  169. DEFINE_SYSOP_TYPE_FUNC(dmb, ishst)
  170. DEFINE_SYSOP_FUNC(isb)
  171. void __dead2 smc(uint32_t r0, uint32_t r1, uint32_t r2, uint32_t r3,
  172. uint32_t r4, uint32_t r5, uint32_t r6, uint32_t r7);
  173. DEFINE_SYSREG_RW_FUNCS(spsr)
  174. DEFINE_SYSREG_RW_FUNCS(cpsr)
  175. /*******************************************************************************
  176. * System register accessor prototypes
  177. ******************************************************************************/
  178. DEFINE_COPROCR_READ_FUNC(mpidr, MPIDR)
  179. DEFINE_COPROCR_READ_FUNC(midr, MIDR)
  180. DEFINE_COPROCR_READ_FUNC(id_mmfr3, ID_MMFR3)
  181. DEFINE_COPROCR_READ_FUNC(id_mmfr4, ID_MMFR4)
  182. DEFINE_COPROCR_READ_FUNC(id_dfr0, ID_DFR0)
  183. DEFINE_COPROCR_READ_FUNC(id_dfr1, ID_DFR1)
  184. DEFINE_COPROCR_READ_FUNC(id_pfr0, ID_PFR0)
  185. DEFINE_COPROCR_READ_FUNC(id_pfr1, ID_PFR1)
  186. DEFINE_COPROCR_READ_FUNC(id_pfr2, ID_PFR2)
  187. DEFINE_COPROCR_READ_FUNC(isr, ISR)
  188. DEFINE_COPROCR_READ_FUNC(clidr, CLIDR)
  189. DEFINE_COPROCR_READ_FUNC_64(cntpct, CNTPCT_64)
  190. DEFINE_COPROCR_RW_FUNCS(scr, SCR)
  191. DEFINE_COPROCR_RW_FUNCS(ctr, CTR)
  192. DEFINE_COPROCR_RW_FUNCS(sctlr, SCTLR)
  193. DEFINE_COPROCR_RW_FUNCS(actlr, ACTLR)
  194. DEFINE_COPROCR_RW_FUNCS(hsctlr, HSCTLR)
  195. DEFINE_COPROCR_RW_FUNCS(hcr, HCR)
  196. DEFINE_COPROCR_RW_FUNCS(hcptr, HCPTR)
  197. DEFINE_COPROCR_RW_FUNCS(cntfrq, CNTFRQ)
  198. DEFINE_COPROCR_RW_FUNCS(cnthctl, CNTHCTL)
  199. DEFINE_COPROCR_RW_FUNCS(mair0, MAIR0)
  200. DEFINE_COPROCR_RW_FUNCS(mair1, MAIR1)
  201. DEFINE_COPROCR_RW_FUNCS(hmair0, HMAIR0)
  202. DEFINE_COPROCR_RW_FUNCS(ttbcr, TTBCR)
  203. DEFINE_COPROCR_RW_FUNCS(htcr, HTCR)
  204. DEFINE_COPROCR_RW_FUNCS(ttbr0, TTBR0)
  205. DEFINE_COPROCR_RW_FUNCS_64(ttbr0, TTBR0_64)
  206. DEFINE_COPROCR_RW_FUNCS(ttbr1, TTBR1)
  207. DEFINE_COPROCR_RW_FUNCS_64(httbr, HTTBR_64)
  208. DEFINE_COPROCR_RW_FUNCS(vpidr, VPIDR)
  209. DEFINE_COPROCR_RW_FUNCS(vmpidr, VMPIDR)
  210. DEFINE_COPROCR_RW_FUNCS_64(vttbr, VTTBR_64)
  211. DEFINE_COPROCR_RW_FUNCS_64(ttbr1, TTBR1_64)
  212. DEFINE_COPROCR_RW_FUNCS_64(cntvoff, CNTVOFF_64)
  213. DEFINE_COPROCR_RW_FUNCS(csselr, CSSELR)
  214. DEFINE_COPROCR_RW_FUNCS(hstr, HSTR)
  215. DEFINE_COPROCR_RW_FUNCS(cnthp_ctl_el2, CNTHP_CTL)
  216. DEFINE_COPROCR_RW_FUNCS(cnthp_tval_el2, CNTHP_TVAL)
  217. DEFINE_COPROCR_RW_FUNCS_64(cnthp_cval_el2, CNTHP_CVAL_64)
  218. #define get_cntp_ctl_enable(x) (((x) >> CNTP_CTL_ENABLE_SHIFT) & \
  219. CNTP_CTL_ENABLE_MASK)
  220. #define get_cntp_ctl_imask(x) (((x) >> CNTP_CTL_IMASK_SHIFT) & \
  221. CNTP_CTL_IMASK_MASK)
  222. #define get_cntp_ctl_istatus(x) (((x) >> CNTP_CTL_ISTATUS_SHIFT) & \
  223. CNTP_CTL_ISTATUS_MASK)
  224. #define set_cntp_ctl_enable(x) ((x) |= U(1) << CNTP_CTL_ENABLE_SHIFT)
  225. #define set_cntp_ctl_imask(x) ((x) |= U(1) << CNTP_CTL_IMASK_SHIFT)
  226. #define clr_cntp_ctl_enable(x) ((x) &= ~(U(1) << CNTP_CTL_ENABLE_SHIFT))
  227. #define clr_cntp_ctl_imask(x) ((x) &= ~(U(1) << CNTP_CTL_IMASK_SHIFT))
  228. DEFINE_COPROCR_RW_FUNCS(icc_sre_el1, ICC_SRE)
  229. DEFINE_COPROCR_RW_FUNCS(icc_sre_el2, ICC_HSRE)
  230. DEFINE_COPROCR_RW_FUNCS(icc_sre_el3, ICC_MSRE)
  231. DEFINE_COPROCR_RW_FUNCS(icc_pmr_el1, ICC_PMR)
  232. DEFINE_COPROCR_RW_FUNCS(icc_rpr_el1, ICC_RPR)
  233. DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el3, ICC_MGRPEN1)
  234. DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el1, ICC_IGRPEN1)
  235. DEFINE_COPROCR_RW_FUNCS(icc_igrpen0_el1, ICC_IGRPEN0)
  236. DEFINE_COPROCR_RW_FUNCS(icc_hppir0_el1, ICC_HPPIR0)
  237. DEFINE_COPROCR_RW_FUNCS(icc_hppir1_el1, ICC_HPPIR1)
  238. DEFINE_COPROCR_RW_FUNCS(icc_iar0_el1, ICC_IAR0)
  239. DEFINE_COPROCR_RW_FUNCS(icc_iar1_el1, ICC_IAR1)
  240. DEFINE_COPROCR_RW_FUNCS(icc_eoir0_el1, ICC_EOIR0)
  241. DEFINE_COPROCR_RW_FUNCS(icc_eoir1_el1, ICC_EOIR1)
  242. DEFINE_COPROCR_RW_FUNCS_64(icc_sgi0r_el1, ICC_SGI0R_EL1_64)
  243. DEFINE_COPROCR_WRITE_FUNC_64(icc_sgi1r, ICC_SGI1R_EL1_64)
  244. DEFINE_COPROCR_WRITE_FUNC_64(icc_asgi1r, ICC_ASGI1R_EL1_64)
  245. DEFINE_COPROCR_RW_FUNCS(sdcr, SDCR)
  246. DEFINE_COPROCR_RW_FUNCS(hdcr, HDCR)
  247. DEFINE_COPROCR_RW_FUNCS(cnthp_ctl, CNTHP_CTL)
  248. DEFINE_COPROCR_RW_FUNCS(pmcr, PMCR)
  249. /*
  250. * Address translation
  251. */
  252. DEFINE_COPROCR_WRITE_FUNC(ats1cpr, ATS1CPR)
  253. DEFINE_COPROCR_WRITE_FUNC(ats1hr, ATS1HR)
  254. DEFINE_COPROCR_RW_FUNCS_64(par, PAR_64)
  255. DEFINE_COPROCR_RW_FUNCS(nsacr, NSACR)
  256. /* AArch32 coproc registers for 32bit MMU descriptor support */
  257. DEFINE_COPROCR_RW_FUNCS(prrr, PRRR)
  258. DEFINE_COPROCR_RW_FUNCS(nmrr, NMRR)
  259. DEFINE_COPROCR_RW_FUNCS(dacr, DACR)
  260. /* Coproc registers for 32bit AMU support */
  261. DEFINE_COPROCR_READ_FUNC(amcfgr, AMCFGR)
  262. DEFINE_COPROCR_READ_FUNC(amcgcr, AMCGCR)
  263. DEFINE_COPROCR_RW_FUNCS(amcr, AMCR)
  264. DEFINE_COPROCR_RW_FUNCS(amcntenset0, AMCNTENSET0)
  265. DEFINE_COPROCR_RW_FUNCS(amcntenset1, AMCNTENSET1)
  266. DEFINE_COPROCR_RW_FUNCS(amcntenclr0, AMCNTENCLR0)
  267. DEFINE_COPROCR_RW_FUNCS(amcntenclr1, AMCNTENCLR1)
  268. /* Coproc registers for 64bit AMU support */
  269. DEFINE_COPROCR_RW_FUNCS_64(amevcntr00, AMEVCNTR00)
  270. DEFINE_COPROCR_RW_FUNCS_64(amevcntr01, AMEVCNTR01)
  271. DEFINE_COPROCR_RW_FUNCS_64(amevcntr02, AMEVCNTR02)
  272. DEFINE_COPROCR_RW_FUNCS_64(amevcntr03, AMEVCNTR03)
  273. /*
  274. * TLBI operation prototypes
  275. */
  276. DEFINE_TLBIOP_FUNC(all, TLBIALL)
  277. DEFINE_TLBIOP_FUNC(allis, TLBIALLIS)
  278. DEFINE_TLBIOP_PARAM_FUNC(mva, TLBIMVA)
  279. DEFINE_TLBIOP_PARAM_FUNC(mvaa, TLBIMVAA)
  280. DEFINE_TLBIOP_PARAM_FUNC(mvaais, TLBIMVAAIS)
  281. DEFINE_TLBIOP_PARAM_FUNC(mvahis, TLBIMVAHIS)
  282. /*
  283. * BPI operation prototypes.
  284. */
  285. DEFINE_BPIOP_FUNC(allis, BPIALLIS)
  286. /*
  287. * DC operation prototypes
  288. */
  289. DEFINE_DCOP_PARAM_FUNC(civac, DCCIMVAC)
  290. DEFINE_DCOP_PARAM_FUNC(ivac, DCIMVAC)
  291. #if ERRATA_A53_819472 || ERRATA_A53_824069 || ERRATA_A53_827319
  292. DEFINE_DCOP_PARAM_FUNC(cvac, DCCIMVAC)
  293. #else
  294. DEFINE_DCOP_PARAM_FUNC(cvac, DCCMVAC)
  295. #endif
  296. /*
  297. * DynamIQ Shared Unit power management
  298. */
  299. DEFINE_COPROCR_RW_FUNCS(clusterpwrdn, CLUSTERPWRDN)
  300. DEFINE_COPROCR_RW_FUNCS(clusterpmcr, CLUSTERPMCR)
  301. DEFINE_COPROCR_RW_FUNCS(clusterpmcntenset, CLUSTERPMCNTENSET)
  302. DEFINE_COPROCR_RW_FUNCS(clusterpmccntr, CLUSTERPMCCNTR)
  303. DEFINE_COPROCR_RW_FUNCS(clusterpmovsset, CLUSTERPMOVSSET)
  304. DEFINE_COPROCR_RW_FUNCS(clusterpmovsclr, CLUSTERPMOVSCLR)
  305. DEFINE_COPROCR_RW_FUNCS(clusterpmselr, CLUSTERPMSELR)
  306. DEFINE_COPROCR_RW_FUNCS(clusterpmxevcntr, CLUSTERPMXEVCNTR)
  307. DEFINE_COPROCR_RW_FUNCS(clusterpmxevtyper, CLUSTERPMXEVTYPER)
  308. /*
  309. * RNDR is AArch64 only, so just provide a placeholder here to make the
  310. * linker happy.
  311. */
  312. static inline u_register_t read_rndr(void)
  313. {
  314. assert(1);
  315. return 0;
  316. }
  317. /* Previously defined accessor functions with incomplete register names */
  318. #define dsb() dsbsy()
  319. #define dmb() dmbsy()
  320. /* dmb ld is not valid for armv7/thumb machines, so alias it to dmb */
  321. #if ARM_ARCH_MAJOR == 7
  322. #define dmbld() dmb()
  323. #endif
  324. #define IS_IN_SECURE() \
  325. (GET_NS_BIT(read_scr()) == 0)
  326. #define IS_IN_HYP() (GET_M32(read_cpsr()) == MODE32_hyp)
  327. #define IS_IN_SVC() (GET_M32(read_cpsr()) == MODE32_svc)
  328. #define IS_IN_MON() (GET_M32(read_cpsr()) == MODE32_mon)
  329. #define IS_IN_EL2() IS_IN_HYP()
  330. /* If EL3 is AArch32, then secure PL1 and monitor mode correspond to EL3 */
  331. #define IS_IN_EL3() \
  332. ((GET_M32(read_cpsr()) == MODE32_mon) || \
  333. (IS_IN_SECURE() && (GET_M32(read_cpsr()) != MODE32_usr)))
  334. static inline unsigned int get_current_el(void)
  335. {
  336. if (IS_IN_EL3()) {
  337. return 3U;
  338. } else if (IS_IN_EL2()) {
  339. return 2U;
  340. } else {
  341. return 1U;
  342. }
  343. }
  344. /* Macros for compatibility with AArch64 system registers */
  345. #define read_mpidr_el1() read_mpidr()
  346. #define read_scr_el3() read_scr()
  347. #define write_scr_el3(_v) write_scr(_v)
  348. #define read_hcr_el2() read_hcr()
  349. #define write_hcr_el2(_v) write_hcr(_v)
  350. #define read_cpacr_el1() read_cpacr()
  351. #define write_cpacr_el1(_v) write_cpacr(_v)
  352. #define read_cntfrq_el0() read_cntfrq()
  353. #define write_cntfrq_el0(_v) write_cntfrq(_v)
  354. #define read_isr_el1() read_isr()
  355. #define read_cntpct_el0() read64_cntpct()
  356. #define read_ctr_el0() read_ctr()
  357. #define write_icc_sgi0r_el1(_v) write64_icc_sgi0r_el1(_v)
  358. #define write_icc_sgi1r(_v) write64_icc_sgi1r(_v)
  359. #define write_icc_asgi1r(_v) write64_icc_asgi1r(_v)
  360. #define read_daif() read_cpsr()
  361. #define write_daif(flags) write_cpsr(flags)
  362. #define read_cnthp_cval_el2() read64_cnthp_cval_el2()
  363. #define write_cnthp_cval_el2(v) write64_cnthp_cval_el2(v)
  364. #define read_amcntenset0_el0() read_amcntenset0()
  365. #define read_amcntenset1_el0() read_amcntenset1()
  366. /* Helper functions to manipulate CPSR */
  367. static inline void enable_irq(void)
  368. {
  369. /*
  370. * The compiler memory barrier will prevent the compiler from
  371. * scheduling non-volatile memory access after the write to the
  372. * register.
  373. *
  374. * This could happen if some initialization code issues non-volatile
  375. * accesses to an area used by an interrupt handler, in the assumption
  376. * that it is safe as the interrupts are disabled at the time it does
  377. * that (according to program order). However, non-volatile accesses
  378. * are not necessarily in program order relatively with volatile inline
  379. * assembly statements (and volatile accesses).
  380. */
  381. COMPILER_BARRIER();
  382. __asm__ volatile ("cpsie i");
  383. isb();
  384. }
  385. static inline void enable_serror(void)
  386. {
  387. COMPILER_BARRIER();
  388. __asm__ volatile ("cpsie a");
  389. isb();
  390. }
  391. static inline void enable_fiq(void)
  392. {
  393. COMPILER_BARRIER();
  394. __asm__ volatile ("cpsie f");
  395. isb();
  396. }
  397. static inline void disable_irq(void)
  398. {
  399. COMPILER_BARRIER();
  400. __asm__ volatile ("cpsid i");
  401. isb();
  402. }
  403. static inline void disable_serror(void)
  404. {
  405. COMPILER_BARRIER();
  406. __asm__ volatile ("cpsid a");
  407. isb();
  408. }
  409. static inline void disable_fiq(void)
  410. {
  411. COMPILER_BARRIER();
  412. __asm__ volatile ("cpsid f");
  413. isb();
  414. }
  415. #endif /* ARCH_HELPERS_H */