asm_macros.S

/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ASM_MACROS_S
#define ASM_MACROS_S

#include <arch.h>
#include <common/asm_macros_common.S>
#include <lib/spinlock.h>
/*
 * TLBI instruction with type specifier that implements the workaround for
 * errata 813419 of Cortex-A57 or errata 1286807 of Cortex-A76.
 */
#if ERRATA_A57_813419 || ERRATA_A76_1286807
#define TLB_INVALIDATE(_type) \
	tlbi	_type; \
	dsb	ish; \
	tlbi	_type
#else
#define TLB_INVALIDATE(_type) \
	tlbi	_type
#endif
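
/*
 * Illustrative usage (not part of this header): translation table code
 * would typically invalidate all EL3 TLB entries after changing a mapping,
 * for example:
 *
 *	TLB_INVALIDATE(alle3)
 *	dsb	ish
 *	isb
 *
 * With the errata workaround enabled this expands to a second TLBI after
 * an intervening DSB.
 */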

	.macro	func_prologue
	stp	x29, x30, [sp, #-0x10]!
	mov	x29, sp
	.endm

	.macro	func_epilogue
	ldp	x29, x30, [sp], #0x10
	.endm
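
/*
 * Illustrative usage (hypothetical function, using the func/endfunc helpers
 * from asm_macros_common.S): a routine that calls other functions saves and
 * restores the frame record with these macros:
 *
 *	func example_helper
 *		func_prologue
 *		bl	some_other_function
 *		func_epilogue
 *		ret
 *	endfunc example_helper
 */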

	.macro	dcache_line_size  reg, tmp
	mrs	\tmp, ctr_el0
	ubfx	\tmp, \tmp, #16, #4
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm

	.macro	icache_line_size  reg, tmp
	mrs	\tmp, ctr_el0
	and	\tmp, \tmp, #0xf
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm
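
/*
 * Note on the calculation above: CTR_EL0.DminLine (bits [19:16]) and
 * CTR_EL0.IminLine (bits [3:0]) hold log2 of the line size in words, so the
 * line size in bytes is 4 << field. For example, a DminLine value of 4
 * yields 4 << 4 = 64-byte data cache lines.
 */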

	.macro	smc_check label
	mrs	x0, esr_el3
	ubfx	x0, x0, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x0, #EC_AARCH64_SMC
	b.ne	\label
	.endm
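
/*
 * Illustrative usage (hypothetical label): an EL3 synchronous exception
 * handler could branch away when the trap was not caused by an SMC:
 *
 *	smc_check not_an_smc
 *	...			// handle the SMC
 * not_an_smc:
 *	...			// handle other synchronous exceptions
 */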

/*
 * Declare the exception vector table, enforcing it is aligned on a
 * 2KB boundary, as required by the ARMv8 architecture.
 * Use zero bytes as the fill value to be stored in the padding bytes
 * so that it inserts illegal AArch64 instructions. This increases
 * security, robustness and potentially facilitates debugging.
 */
	.macro	vector_base  label, section_name=.vectors
	.section \section_name, "ax"
	.align	11, 0
	\label:
	.endm

/*
 * Create an entry in the exception vector table, enforcing it is
 * aligned on a 128-byte boundary, as required by the ARMv8 architecture.
 * Use zero bytes as the fill value to be stored in the padding bytes
 * so that it inserts illegal AArch64 instructions. This increases
 * security, robustness and potentially facilitates debugging.
 */
	.macro	vector_entry  label, section_name=.vectors
	.cfi_sections .debug_frame
	.section \section_name, "ax"
	.align	7, 0
	.type	\label, %function
	.cfi_startproc
	\label:
	.endm

/*
 * Pad the exception vector entry with bytes until it reaches its full
 * size, which is always 32 instructions. If the entry contains more than
 * 32 instructions, an assembler error is emitted.
 */
	.macro	end_vector_entry label
	.cfi_endproc
	.fill	\label + (32 * 4) - .
	.endm
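
/*
 * Illustrative usage (hypothetical vector table, names are examples only):
 *
 *	vector_base my_exceptions
 *
 *	vector_entry my_sync_sp_el0
 *		b	.		// placeholder handler
 *	end_vector_entry my_sync_sp_el0
 *
 *	... fifteen further 128-byte entries ...
 */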

/*
 * This macro calculates the base address of the current CPU's MP stack
 * using the plat_my_core_pos() index, the name of the stack storage
 * and the size of each stack.
 * Out: X0 = physical address of stack base
 * Clobber: X30, X1, X2
 */
	.macro	get_my_mp_stack _name, _size
	bl	plat_my_core_pos
	adrp	x2, (\_name + \_size)
	add	x2, x2, :lo12:(\_name + \_size)
	mov	x1, #\_size
	madd	x0, x0, x1, x2
	.endm
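
/*
 * Illustrative usage (symbol and size are hypothetical): with per-CPU
 * stacks declared elsewhere as 'platform_normal_stacks' of 0x1000 bytes
 * each, the current CPU's stack pointer could be initialised as follows:
 *
 *	get_my_mp_stack platform_normal_stacks, 0x1000
 *	mov	sp, x0
 */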

/*
 * This macro calculates the base address of a UP stack using the
 * name of the stack storage and the size of the stack.
 * Out: X0 = physical address of stack base
 */
	.macro	get_up_stack _name, _size
	adrp	x0, (\_name + \_size)
	add	x0, x0, :lo12:(\_name + \_size)
	.endm

/*
 * Helper macro to generate the best mov/movk combinations according to
 * the value to be moved. The 16 bits from '_shift' are tested and,
 * if not zero, they are moved into '_reg' without affecting
 * other bits.
 */
	.macro	_mov_imm16 _reg, _val, _shift
	.if (\_val >> \_shift) & 0xffff
		.if (\_val & ((1 << \_shift) - 1))
			movk	\_reg, (\_val >> \_shift) & 0xffff, LSL \_shift
		.else
			mov	\_reg, \_val & (0xffff << \_shift)
		.endif
	.endif
	.endm

/*
 * Helper macro to load arbitrary values into 32 or 64-bit registers,
 * generating the best mov/movk combinations. Many base addresses are
 * 64KB aligned; in that case the macro avoids updating bits 15:0.
 */
	.macro	mov_imm _reg, _val
	.if (\_val) == 0
		mov	\_reg, #0
	.else
		_mov_imm16	\_reg, (\_val), 0
		_mov_imm16	\_reg, (\_val), 16
		_mov_imm16	\_reg, (\_val), 32
		_mov_imm16	\_reg, (\_val), 48
	.endif
	.endm
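
/*
 * Illustrative expansion (values chosen as examples): 'mov_imm x0,
 * 0x82001000' emits
 *
 *	mov	x0, #0x1000
 *	movk	x0, #0x8200, LSL #16
 *
 * whereas a 64KB-aligned value such as 0x82000000 needs only a single
 * 'mov x0, #0x82000000'.
 */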

/*
 * Macro to mark instances where we're jumping to a function and don't
 * expect a return. To provide the function being jumped to with
 * additional information, we use the 'bl' instruction to jump rather
 * than 'b'.
 *
 * Debuggers infer the location of a call from where LR points to, which
 * is usually the instruction after 'bl'. If this macro expansion
 * happens to be the last location in a function, that'll cause the LR
 * to point to a location beyond the function, thereby misleading the
 * debugger back trace. We therefore insert a 'nop' after the function
 * call for debug builds, unless the 'skip_nop' parameter is non-zero.
 */
	.macro	no_ret _func:req, skip_nop=0
	bl	\_func
#if DEBUG
	.ifeq \skip_nop
	nop
	.endif
#endif
	.endm
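
/*
 * Illustrative usage: a path that never returns from the panic handler
 * might be written as
 *
 *	no_ret	plat_panic_handler
 *
 * so that LR still identifies the call site in a debugger back trace.
 */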

/*
 * Reserve space for a spin lock in an assembly file.
 */
	.macro	define_asm_spinlock _name:req
	.align	SPINLOCK_ASM_ALIGN
	\_name:
	.space	SPINLOCK_ASM_SIZE
	.endm
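
/*
 * Illustrative usage (hypothetical lock name): reserve a lock in a data
 * section and take it from assembly code:
 *
 *	.section .data.example_lock
 *	define_asm_spinlock example_lock
 *
 *	...
 *	adrp	x0, example_lock
 *	add	x0, x0, :lo12:example_lock
 *	bl	spin_lock
 */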

#if RAS_EXTENSION
	/*
	 * Error Synchronization Barrier, hand-encoded so that the macro
	 * assembles with toolchains that do not know the RAS extension.
	 */
	.macro	esb
	.inst	0xd503221f
	.endm
#endif

/*
 * Helper macro to read system register value into x0.
 */
	.macro	read reg:req
#if ENABLE_BTI
	bti	j
#endif
	mrs	x0, \reg
	ret
	.endm

/*
 * Helper macro to write value from x1 to system register.
 */
	.macro	write reg:req
#if ENABLE_BTI
	bti	j
#endif
	msr	\reg, x1
	ret
	.endm
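
/*
 * Illustrative usage (hypothetical label): each expansion of 'read' forms a
 * small routine, guarded with a BTI landing pad when ENABLE_BTI is set, that
 * returns the register value in x0 and can be branched to indirectly:
 *
 *	read_scr_el3:
 *		read	scr_el3
 */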

/*
 * Macro for using the speculation barrier instruction introduced by
 * FEAT_SB, if it's enabled.
 */
	.macro	speculation_barrier
#if ENABLE_FEAT_SB
	sb
#else
	dsb	sy
	isb
#endif
	.endm

/*
 * Macro for mitigating against speculative execution beyond ERET. Uses the
 * speculation barrier instruction introduced by FEAT_SB, if it's enabled.
 */
	.macro	exception_return
	eret
#if ENABLE_FEAT_SB
	sb
#else
	dsb	nsh
	isb
#endif
	.endm
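
/*
 * Illustrative usage: exception handlers return with
 *
 *	exception_return
 *
 * instead of a bare 'eret', so the speculation barrier (or the DSB/ISB
 * fallback) always follows the return.
 */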

#endif /* ASM_MACROS_S */