/*
 * Copyright (c) 2015-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef SMCCC_HELPERS_H
#define SMCCC_HELPERS_H

#include <lib/smccc.h>

/* Definitions to help the assembler access the SMC/ERET args structure */
#define SMC_ARGS_SIZE	0x40	/* Total size of the args structure: 8 x 8-byte slots */
#define SMC_ARG0	0x0	/* Byte offset of each 64-bit argument slot ... */
#define SMC_ARG1	0x8
#define SMC_ARG2	0x10
#define SMC_ARG3	0x18
#define SMC_ARG4	0x20
#define SMC_ARG5	0x28
#define SMC_ARG6	0x30
#define SMC_ARG7	0x38	/* ... through to the last slot */
#define SMC_ARGS_END	0x40	/* One past the last slot; equals SMC_ARGS_SIZE */

#ifndef __ASSEMBLER__

#include <stdbool.h>

#include <context.h>

#include <platform_def.h> /* For CACHE_WRITEBACK_GRANULE */
/*
 * Convenience macros to return from SMC handler.
 *
 * SMC_RETn(_h, _x0 .. _x(n-1)) stores each return value into the saved
 * general-purpose register file of the context identified by the handle
 * _h, then returns the handle itself (cast to uint64_t).  The values
 * become X0 .. X(n-1) when that context is eventually restored.
 *
 * The macros are chained: each one writes its highest register and then
 * delegates to the next-lower macro, so SMC_RETn writes X(n-1) down to
 * X0 before the final `return` in SMC_RET0.
 */
#define SMC_RET0(_h) { \
	return (uint64_t) (_h); \
}
#define SMC_RET1(_h, _x0) { \
	write_ctx_reg((get_gpregs_ctx(_h)), (CTX_GPREG_X0), (_x0)); \
	SMC_RET0(_h); \
}
#define SMC_RET2(_h, _x0, _x1) { \
	write_ctx_reg((get_gpregs_ctx(_h)), (CTX_GPREG_X1), (_x1)); \
	SMC_RET1(_h, (_x0)); \
}
#define SMC_RET3(_h, _x0, _x1, _x2) { \
	write_ctx_reg((get_gpregs_ctx(_h)), (CTX_GPREG_X2), (_x2)); \
	SMC_RET2(_h, (_x0), (_x1)); \
}
#define SMC_RET4(_h, _x0, _x1, _x2, _x3) { \
	write_ctx_reg((get_gpregs_ctx(_h)), (CTX_GPREG_X3), (_x3)); \
	SMC_RET3(_h, (_x0), (_x1), (_x2)); \
}
#define SMC_RET5(_h, _x0, _x1, _x2, _x3, _x4) { \
	write_ctx_reg((get_gpregs_ctx(_h)), (CTX_GPREG_X4), (_x4)); \
	SMC_RET4(_h, (_x0), (_x1), (_x2), (_x3)); \
}
#define SMC_RET6(_h, _x0, _x1, _x2, _x3, _x4, _x5) { \
	write_ctx_reg((get_gpregs_ctx(_h)), (CTX_GPREG_X5), (_x5)); \
	SMC_RET5(_h, (_x0), (_x1), (_x2), (_x3), (_x4)); \
}
#define SMC_RET7(_h, _x0, _x1, _x2, _x3, _x4, _x5, _x6) { \
	write_ctx_reg((get_gpregs_ctx(_h)), (CTX_GPREG_X6), (_x6)); \
	SMC_RET6(_h, (_x0), (_x1), (_x2), (_x3), (_x4), (_x5)); \
}
#define SMC_RET8(_h, _x0, _x1, _x2, _x3, _x4, _x5, _x6, _x7) { \
	write_ctx_reg((get_gpregs_ctx(_h)), (CTX_GPREG_X7), (_x7)); \
	SMC_RET7(_h, (_x0), (_x1), (_x2), (_x3), (_x4), (_x5), (_x6)); \
}
/*
 * Convenience macros to access general purpose registers using handle provided
 * to SMC handler. These take the offset values defined in context.h
 */
#define SMC_GET_GP(_h, _g) \
	read_ctx_reg((get_gpregs_ctx(_h)), (_g))
#define SMC_SET_GP(_h, _g, _v) \
	write_ctx_reg((get_gpregs_ctx(_h)), (_g), (_v))

/*
 * Useful for SMCCCv1.2.
 *
 * SMCCC v1.2 allows up to 18 result registers (X0-X17).  This macro
 * stores X8-X17 directly and then reuses the SMC_RET8 chain for X0-X7,
 * which also performs the final return of the handle.
 */
#define SMC_RET18(_h, _x0, _x1, _x2, _x3, _x4, _x5, _x6, _x7, _x8, _x9, \
		_x10, _x11, _x12, _x13, _x14, _x15, _x16, _x17) { \
	SMC_SET_GP(_h, CTX_GPREG_X8, _x8); \
	SMC_SET_GP(_h, CTX_GPREG_X9, _x9); \
	SMC_SET_GP(_h, CTX_GPREG_X10, _x10); \
	SMC_SET_GP(_h, CTX_GPREG_X11, _x11); \
	SMC_SET_GP(_h, CTX_GPREG_X12, _x12); \
	SMC_SET_GP(_h, CTX_GPREG_X13, _x13); \
	SMC_SET_GP(_h, CTX_GPREG_X14, _x14); \
	SMC_SET_GP(_h, CTX_GPREG_X15, _x15); \
	SMC_SET_GP(_h, CTX_GPREG_X16, _x16); \
	SMC_SET_GP(_h, CTX_GPREG_X17, _x17); \
	SMC_RET8(_h, (_x0), (_x1), (_x2), (_x3), (_x4), (_x5), (_x6), \
		(_x7)); \
}
/*
 * Convenience macros to access EL3 context registers using handle provided to
 * SMC handler. These take the offset values defined in context.h
 */
#define SMC_GET_EL3(_h, _e) \
	read_ctx_reg((get_el3state_ctx(_h)), (_e))
#define SMC_SET_EL3(_h, _e, _v) \
	write_ctx_reg((get_el3state_ctx(_h)), (_e), (_v))

/*
 * Helper macro to retrieve the SMC parameters from cpu_context_t.
 *
 * Reads the saved X1-X4 of the context identified by _hdl into the four
 * lvalues _x1 .. _x4 supplied by the caller (X0 holds the function ID
 * and is read separately by handlers).  Wrapped in do/while(false) so it
 * behaves as a single statement.
 */
#define get_smc_params_from_ctx(_hdl, _x1, _x2, _x3, _x4) \
	do { \
		const gp_regs_t *regs = get_gpregs_ctx(_hdl); \
		_x1 = read_ctx_reg(regs, CTX_GPREG_X1); \
		_x2 = read_ctx_reg(regs, CTX_GPREG_X2); \
		_x3 = read_ctx_reg(regs, CTX_GPREG_X3); \
		_x4 = read_ctx_reg(regs, CTX_GPREG_X4); \
	} while (false)
/*
 * Container for the eight 64-bit SMC argument/result registers (X0-X7).
 * Aligned to CACHE_WRITEBACK_GRANULE — presumably so an instance can be
 * cache-maintained without straddling adjacent data; confirm against the
 * users of this type.
 */
typedef struct {
	uint64_t _regs[SMC_ARGS_END >> 3];	/* SMC_ARGS_END / 8 = 8 slots */
} __aligned(CACHE_WRITEBACK_GRANULE) smc_args_t;

/*
 * Ensure that the assembler's view of the size of the smc_args_t is the
 * same as the compilers.
 */
CASSERT(sizeof(smc_args_t) == SMC_ARGS_SIZE, assert_sp_args_size_mismatch);
/*
 * Issue an SMC with the given function identifier and up to seven
 * arguments, returning all eight result registers by value.
 *
 * func        - SMC function ID, placed in X0 (zero-extended from 32 bits).
 * arg0..arg6  - call arguments, placed in X1..X7.
 *
 * Returns an smc_args_t whose _regs[0.._7] hold X0..X7 as left by the
 * SMC callee.
 *
 * NOTE(review): the asm declares no "memory" clobber, so the compiler
 * may cache memory across the SMC — this looks safe only for calls that
 * pass/return everything in registers; confirm against call sites.
 */
static inline smc_args_t smc_helper(uint32_t func, uint64_t arg0,
				    uint64_t arg1, uint64_t arg2,
				    uint64_t arg3, uint64_t arg4,
				    uint64_t arg5, uint64_t arg6)
{
	smc_args_t ret_args = {0};

	/* Pin each value to the exact register required by the SMCCC. */
	register uint64_t r0 __asm__("x0") = func;
	register uint64_t r1 __asm__("x1") = arg0;
	register uint64_t r2 __asm__("x2") = arg1;
	register uint64_t r3 __asm__("x3") = arg2;
	register uint64_t r4 __asm__("x4") = arg3;
	register uint64_t r5 __asm__("x5") = arg4;
	register uint64_t r6 __asm__("x6") = arg5;
	register uint64_t r7 __asm__("x7") = arg6;

	/* Output registers, also used as inputs ('+' constraint). */
	__asm__ volatile("smc #0"
			 : "+r"(r0), "+r"(r1), "+r"(r2), "+r"(r3), "+r"(r4),
			   "+r"(r5), "+r"(r6), "+r"(r7));

	/* Copy the returned register values out of the pinned registers. */
	ret_args._regs[0] = r0;
	ret_args._regs[1] = r1;
	ret_args._regs[2] = r2;
	ret_args._regs[3] = r3;
	ret_args._regs[4] = r4;
	ret_args._regs[5] = r5;
	ret_args._regs[6] = r6;
	ret_args._regs[7] = r7;

	return ret_args;
}
#endif /*__ASSEMBLER__*/
#endif /* SMCCC_HELPERS_H */