/*
 * Copyright (c) 2017-2021, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <assert_macros.S>
#include <asm_macros.S>

        .globl  amu_group0_cnt_read_internal
        .globl  amu_group0_cnt_write_internal
        .globl  amu_group1_cnt_read_internal
        .globl  amu_group1_cnt_write_internal
        .globl  amu_group1_set_evtype_internal

        /* FEAT_AMUv1p1 virtualisation offset register functions */
        .globl  amu_group0_voffset_read_internal
        .globl  amu_group0_voffset_write_internal
        .globl  amu_group1_voffset_read_internal
        .globl  amu_group1_voffset_write_internal
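
/*
 * Note on the dispatch scheme shared by every accessor below: `idx` (in x0)
 * selects an entry in a table of identically sized instruction sequences,
 * and a computed `br` jumps directly to it. Each table entry is emitted by
 * the `read`/`write` helper macros (pulled in via asm_macros.S), which
 * expand to an 8-byte pair:
 *
 *      read  <sysreg>:  mrs  x0, <sysreg>; ret
 *      write <sysreg>:  msr  <sysreg>, x1; ret
 *
 * so entry `idx` lives at `1f + (idx << 3)`. When ENABLE_BTI is set, each
 * entry additionally begins with a 4-byte "bti j" landing pad (the dispatch
 * `br` is an indirect branch), growing entries to 12 bytes, hence the extra
 * `(idx << 2)` term in the address calculations.
 */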

/*
 * uint64_t amu_group0_cnt_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU counter
 * and return it in `x0`.
 */
func amu_group0_cnt_read_internal
        adr     x1, 1f
#if ENABLE_ASSERTIONS
        /*
         * It can be dangerous to call this function with an
         * out of bounds index. Ensure `idx` is valid.
         */
        tst     x0, #~3
        ASM_ASSERT(eq)
#endif
        /*
         * Given `idx` calculate address of mrs/ret instruction pair
         * in the table below.
         */
        add     x1, x1, x0, lsl #3      /* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
        add     x1, x1, x0, lsl #2      /* + "bti j" instruction */
#endif
        br      x1

1:      read    AMEVCNTR00_EL0          /* index 0 */
        read    AMEVCNTR01_EL0          /* index 1 */
        read    AMEVCNTR02_EL0          /* index 2 */
        read    AMEVCNTR03_EL0          /* index 3 */
endfunc amu_group0_cnt_read_internal

/*
 * void amu_group0_cnt_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU counter.
 */
func amu_group0_cnt_write_internal
        adr     x2, 1f
#if ENABLE_ASSERTIONS
        /*
         * It can be dangerous to call this function with an
         * out of bounds index. Ensure `idx` is valid.
         */
        tst     x0, #~3
        ASM_ASSERT(eq)
#endif
        /*
         * Given `idx` calculate address of msr/ret instruction pair
         * in the table below.
         */
        add     x2, x2, x0, lsl #3      /* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
        add     x2, x2, x0, lsl #2      /* + "bti j" instruction */
#endif
        br      x2

1:      write   AMEVCNTR00_EL0          /* index 0 */
        write   AMEVCNTR01_EL0          /* index 1 */
        write   AMEVCNTR02_EL0          /* index 2 */
        write   AMEVCNTR03_EL0          /* index 3 */
endfunc amu_group0_cnt_write_internal

#if ENABLE_AMU_AUXILIARY_COUNTERS
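
/*
 * Group 1 counters are the optional auxiliary activity monitors. The
 * architecture defines up to 16 of them; the number actually implemented
 * is reported by AMCGCR_EL0.CG1NC, which is why `idx` is only bounded to
 * the range [0, 15] below.
 */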

/*
 * uint64_t amu_group1_cnt_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU counter
 * and return it in `x0`.
 */
func amu_group1_cnt_read_internal
        adr     x1, 1f
#if ENABLE_ASSERTIONS
        /*
         * It can be dangerous to call this function with an
         * out of bounds index. Ensure `idx` is valid.
         */
        tst     x0, #~0xF
        ASM_ASSERT(eq)
#endif
        /*
         * Given `idx` calculate address of mrs/ret instruction pair
         * in the table below.
         */
        add     x1, x1, x0, lsl #3      /* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
        add     x1, x1, x0, lsl #2      /* + "bti j" instruction */
#endif
        br      x1

1:      read    AMEVCNTR10_EL0          /* index 0 */
        read    AMEVCNTR11_EL0          /* index 1 */
        read    AMEVCNTR12_EL0          /* index 2 */
        read    AMEVCNTR13_EL0          /* index 3 */
        read    AMEVCNTR14_EL0          /* index 4 */
        read    AMEVCNTR15_EL0          /* index 5 */
        read    AMEVCNTR16_EL0          /* index 6 */
        read    AMEVCNTR17_EL0          /* index 7 */
        read    AMEVCNTR18_EL0          /* index 8 */
        read    AMEVCNTR19_EL0          /* index 9 */
        read    AMEVCNTR1A_EL0          /* index 10 */
        read    AMEVCNTR1B_EL0          /* index 11 */
        read    AMEVCNTR1C_EL0          /* index 12 */
        read    AMEVCNTR1D_EL0          /* index 13 */
        read    AMEVCNTR1E_EL0          /* index 14 */
        read    AMEVCNTR1F_EL0          /* index 15 */
endfunc amu_group1_cnt_read_internal

/*
 * void amu_group1_cnt_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU counter.
 */
func amu_group1_cnt_write_internal
        adr     x2, 1f
#if ENABLE_ASSERTIONS
        /*
         * It can be dangerous to call this function with an
         * out of bounds index. Ensure `idx` is valid.
         */
        tst     x0, #~0xF
        ASM_ASSERT(eq)
#endif
        /*
         * Given `idx` calculate address of msr/ret instruction pair
         * in the table below.
         */
        add     x2, x2, x0, lsl #3      /* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
        add     x2, x2, x0, lsl #2      /* + "bti j" instruction */
#endif
        br      x2

1:      write   AMEVCNTR10_EL0          /* index 0 */
        write   AMEVCNTR11_EL0          /* index 1 */
        write   AMEVCNTR12_EL0          /* index 2 */
        write   AMEVCNTR13_EL0          /* index 3 */
        write   AMEVCNTR14_EL0          /* index 4 */
        write   AMEVCNTR15_EL0          /* index 5 */
        write   AMEVCNTR16_EL0          /* index 6 */
        write   AMEVCNTR17_EL0          /* index 7 */
        write   AMEVCNTR18_EL0          /* index 8 */
        write   AMEVCNTR19_EL0          /* index 9 */
        write   AMEVCNTR1A_EL0          /* index 10 */
        write   AMEVCNTR1B_EL0          /* index 11 */
        write   AMEVCNTR1C_EL0          /* index 12 */
        write   AMEVCNTR1D_EL0          /* index 13 */
        write   AMEVCNTR1E_EL0          /* index 14 */
        write   AMEVCNTR1F_EL0          /* index 15 */
endfunc amu_group1_cnt_write_internal

/*
 * void amu_group1_set_evtype_internal(int idx, unsigned int val);
 *
 * Program the AMU event type register indexed by `idx`
 * with the value `val`.
 */
func amu_group1_set_evtype_internal
        adr     x2, 1f
#if ENABLE_ASSERTIONS
        /*
         * It can be dangerous to call this function with an
         * out of bounds index. Ensure `idx` is valid.
         */
        tst     x0, #~0xF
        ASM_ASSERT(eq)

        /* `val` must fit in the 16-bit evtCount field, i.e. [0, 65535] */
        tst     x1, #~0xFFFF
        ASM_ASSERT(eq)
#endif
        /*
         * Given `idx` calculate address of msr/ret instruction pair
         * in the table below.
         */
        add     x2, x2, x0, lsl #3      /* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
        add     x2, x2, x0, lsl #2      /* + "bti j" instruction */
#endif
        br      x2

1:      write   AMEVTYPER10_EL0         /* index 0 */
        write   AMEVTYPER11_EL0         /* index 1 */
        write   AMEVTYPER12_EL0         /* index 2 */
        write   AMEVTYPER13_EL0         /* index 3 */
        write   AMEVTYPER14_EL0         /* index 4 */
        write   AMEVTYPER15_EL0         /* index 5 */
        write   AMEVTYPER16_EL0         /* index 6 */
        write   AMEVTYPER17_EL0         /* index 7 */
        write   AMEVTYPER18_EL0         /* index 8 */
        write   AMEVTYPER19_EL0         /* index 9 */
        write   AMEVTYPER1A_EL0         /* index 10 */
        write   AMEVTYPER1B_EL0         /* index 11 */
        write   AMEVTYPER1C_EL0         /* index 12 */
        write   AMEVTYPER1D_EL0         /* index 13 */
        write   AMEVTYPER1E_EL0         /* index 14 */
        write   AMEVTYPER1F_EL0         /* index 15 */
endfunc amu_group1_set_evtype_internal

#endif /* ENABLE_AMU_AUXILIARY_COUNTERS */

/*
 * Accessor functions for virtual offset registers added with FEAT_AMUv1p1
 */
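
/*
 * With FEAT_AMUv1p1, setting HCR_EL2.AMVOFFEN causes activity monitor
 * counter reads at EL0 and EL1 to return the physical count minus the
 * corresponding AMEVCNTVOFF<n>_EL2 value, allowing a hypervisor to present
 * guest-relative counts.
 */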

/*
 * uint64_t amu_group0_voffset_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU virtual offset register
 * and return it in `x0`.
 */
func amu_group0_voffset_read_internal
        adr     x1, 1f
#if ENABLE_ASSERTIONS
        /*
         * It can be dangerous to call this function with an
         * out of bounds index. Ensure `idx` is valid.
         */
        tst     x0, #~3
        ASM_ASSERT(eq)

        /* Make sure idx != 1 since AMEVCNTVOFF01_EL2 does not exist */
        cmp     x0, #1
        ASM_ASSERT(ne)
#endif
        /*
         * Given `idx` calculate address of mrs/ret instruction pair
         * in the table below.
         */
        add     x1, x1, x0, lsl #3      /* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
        add     x1, x1, x0, lsl #2      /* + "bti j" instruction */
#endif
        br      x1
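
        /*
         * Index 1 is padded with `.skip` rather than a real entry:
         * AMEVCNTVOFF01_EL2 does not exist (group 0 counter 1, the constant
         * frequency cycle counter, has no virtual offset), but the gap
         * keeps the remaining entries at their expected `idx`-scaled
         * offsets. Debug builds reject idx == 1 via the assertion above.
         */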
1:      read    AMEVCNTVOFF00_EL2       /* index 0 */
        .skip   8                       /* AMEVCNTVOFF01_EL2 does not exist */
#if ENABLE_BTI
        .skip   4
#endif
        read    AMEVCNTVOFF02_EL2       /* index 2 */
        read    AMEVCNTVOFF03_EL2       /* index 3 */
endfunc amu_group0_voffset_read_internal

/*
 * void amu_group0_voffset_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU virtual offset register.
 */
func amu_group0_voffset_write_internal
        adr     x2, 1f
#if ENABLE_ASSERTIONS
        /*
         * It can be dangerous to call this function with an
         * out of bounds index. Ensure `idx` is valid.
         */
        tst     x0, #~3
        ASM_ASSERT(eq)

        /* Make sure idx != 1 since AMEVCNTVOFF01_EL2 does not exist */
        cmp     x0, #1
        ASM_ASSERT(ne)
#endif
        /*
         * Given `idx` calculate address of msr/ret instruction pair
         * in the table below.
         */
        add     x2, x2, x0, lsl #3      /* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
        add     x2, x2, x0, lsl #2      /* + "bti j" instruction */
#endif
        br      x2

1:      write   AMEVCNTVOFF00_EL2       /* index 0 */
        .skip   8                       /* AMEVCNTVOFF01_EL2 does not exist */
#if ENABLE_BTI
        .skip   4
#endif
        write   AMEVCNTVOFF02_EL2       /* index 2 */
        write   AMEVCNTVOFF03_EL2       /* index 3 */
endfunc amu_group0_voffset_write_internal

#if ENABLE_AMU_AUXILIARY_COUNTERS
/*
 * uint64_t amu_group1_voffset_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU virtual offset register
 * and return it in `x0`.
 */
func amu_group1_voffset_read_internal
        adr     x1, 1f
#if ENABLE_ASSERTIONS
        /*
         * It can be dangerous to call this function with an
         * out of bounds index. Ensure `idx` is valid.
         */
        tst     x0, #~0xF
        ASM_ASSERT(eq)
#endif
        /*
         * Given `idx` calculate address of mrs/ret instruction pair
         * in the table below.
         */
        add     x1, x1, x0, lsl #3      /* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
        add     x1, x1, x0, lsl #2      /* + "bti j" instruction */
#endif
        br      x1

1:      read    AMEVCNTVOFF10_EL2       /* index 0 */
        read    AMEVCNTVOFF11_EL2       /* index 1 */
        read    AMEVCNTVOFF12_EL2       /* index 2 */
        read    AMEVCNTVOFF13_EL2       /* index 3 */
        read    AMEVCNTVOFF14_EL2       /* index 4 */
        read    AMEVCNTVOFF15_EL2       /* index 5 */
        read    AMEVCNTVOFF16_EL2       /* index 6 */
        read    AMEVCNTVOFF17_EL2       /* index 7 */
        read    AMEVCNTVOFF18_EL2       /* index 8 */
        read    AMEVCNTVOFF19_EL2       /* index 9 */
        read    AMEVCNTVOFF1A_EL2       /* index 10 */
        read    AMEVCNTVOFF1B_EL2       /* index 11 */
        read    AMEVCNTVOFF1C_EL2       /* index 12 */
        read    AMEVCNTVOFF1D_EL2       /* index 13 */
        read    AMEVCNTVOFF1E_EL2       /* index 14 */
        read    AMEVCNTVOFF1F_EL2       /* index 15 */
endfunc amu_group1_voffset_read_internal

/*
 * void amu_group1_voffset_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU virtual offset register.
 */
func amu_group1_voffset_write_internal
        adr     x2, 1f
#if ENABLE_ASSERTIONS
        /*
         * It can be dangerous to call this function with an
         * out of bounds index. Ensure `idx` is valid.
         */
        tst     x0, #~0xF
        ASM_ASSERT(eq)
#endif
        /*
         * Given `idx` calculate address of msr/ret instruction pair
         * in the table below.
         */
        add     x2, x2, x0, lsl #3      /* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
        add     x2, x2, x0, lsl #2      /* + "bti j" instruction */
#endif
        br      x2

1:      write   AMEVCNTVOFF10_EL2       /* index 0 */
        write   AMEVCNTVOFF11_EL2       /* index 1 */
        write   AMEVCNTVOFF12_EL2       /* index 2 */
        write   AMEVCNTVOFF13_EL2       /* index 3 */
        write   AMEVCNTVOFF14_EL2       /* index 4 */
        write   AMEVCNTVOFF15_EL2       /* index 5 */
        write   AMEVCNTVOFF16_EL2       /* index 6 */
        write   AMEVCNTVOFF17_EL2       /* index 7 */
        write   AMEVCNTVOFF18_EL2       /* index 8 */
        write   AMEVCNTVOFF19_EL2       /* index 9 */
        write   AMEVCNTVOFF1A_EL2       /* index 10 */
        write   AMEVCNTVOFF1B_EL2       /* index 11 */
        write   AMEVCNTVOFF1C_EL2       /* index 12 */
        write   AMEVCNTVOFF1D_EL2       /* index 13 */
        write   AMEVCNTVOFF1E_EL2       /* index 14 */
        write   AMEVCNTVOFF1F_EL2       /* index 15 */
endfunc amu_group1_voffset_write_internal

#endif /* ENABLE_AMU_AUXILIARY_COUNTERS */