cortex_a15.S

/*
 * Copyright (c) 2016-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a15.h>
#include <cpu_macros.S>

/*
 * Cortex-A15 supports LPAE and the Virtualization Extensions.
 * Whether or not the configuration uses LPAE and VE does not matter here,
 * so we do not check ARCH_IS_ARMV7_WITH_LPAE/VE.
 */

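/*
 * assert_cache_enabled:
 * Debug-build sanity check on the state of the data cache (SCTLR.C) before
 * the set/way cache maintenance performed by the power-down handlers below.
 * Compiles to nothing unless ENABLE_ASSERTIONS is set.
 */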
	.macro assert_cache_enabled
#if ENABLE_ASSERTIONS
		ldcopr	r0, SCTLR
		tst	r0, #SCTLR_C_BIT
		ASM_ASSERT(eq)
#endif
	.endm

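/*
 * Take the calling core out of coherency: clear the ACTLR.SMP bit so the
 * core no longer takes part in cluster coherency, then complete the change
 * with a DSB (with the erratum 816470 workaround applied first if enabled).
 */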
func cortex_a15_disable_smp
	ldcopr	r0, ACTLR
	bic	r0, #CORTEX_A15_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
#if ERRATA_A15_816470
	/*
	 * Erratum 816470 workaround: perform a dummy TLB invalidate by MVA
	 * (address 0) before the DSB below.
	 */
	mov	r0, #0
	stcopr	r0, TLBIMVA
#endif
	dsb	sy
	bx	lr
endfunc cortex_a15_disable_smp

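/*
 * Re-enable coherency for the calling core by setting the ACTLR.SMP bit.
 * This must be done before the caches and MMU are enabled.
 */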
func cortex_a15_enable_smp
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A15_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
	bx	lr
endfunc cortex_a15_enable_smp

/* ----------------------------------------------------
 * Errata workaround for Cortex-A15 Erratum 816470.
 * This applies only to revision >= r3p0 of Cortex-A15.
 * ----------------------------------------------------
 */
func check_errata_816470
	/*
	 * Even though this is only needed for revision >= r3p0, it is always
	 * applied because of the low cost of the workaround.
	 */
	mov	r0, #ERRATA_APPLIES
	bx	lr
endfunc check_errata_816470

add_erratum_entry cortex_a15, ERRATUM(816470), ERRATA_A15_816470

/* ----------------------------------------------------
 * Errata workaround for Cortex-A15 Erratum 827671.
 * This applies only to revision >= r3p0 of Cortex-A15.
 * Inputs:
 * r0: variant[4:7] and revision[0:3] of current cpu.
 * Shall clobber: r0-r3
 * ----------------------------------------------------
 */
func errata_a15_827671_wa
	/*
	 * Compare r0 against revision r3p0
	 */
	mov	r2, lr
	bl	check_errata_827671
	cmp	r0, #ERRATA_NOT_APPLIES
	beq	1f
	ldcopr	r0, CORTEX_A15_ACTLR2
	orr	r0, #CORTEX_A15_ACTLR2_INV_DCC_BIT
	stcopr	r0, CORTEX_A15_ACTLR2
	isb
1:
	bx	r2
endfunc errata_a15_827671_wa

func check_errata_827671
	mov	r1, #0x30
	b	cpu_rev_var_hs
endfunc check_errata_827671

add_erratum_entry cortex_a15, ERRATUM(827671), ERRATA_A15_827671

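/* ----------------------------------------------------
 * Report whether the CVE-2017-5715 (Spectre v2) branch
 * predictor workaround is compiled in.
 * ----------------------------------------------------
 */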
func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2017_5715

add_erratum_entry cortex_a15, CVE(2017, 5715), WORKAROUND_CVE_2017_5715

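/* ----------------------------------------------------
 * Report whether the CVE-2022-23960 (Spectre-BHB)
 * workaround is compiled in.
 * ----------------------------------------------------
 */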
func check_errata_cve_2022_23960
#if WORKAROUND_CVE_2022_23960
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2022_23960

add_erratum_entry cortex_a15, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

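/* ----------------------------------------------------
 * Reset handler: applies the erratum 827671 workaround
 * when enabled and, in BL32 images with the CVE
 * workarounds enabled, sets the BTB-invalidate bit and
 * installs the wa_cve_2017_5715_icache_inv_vbar vectors,
 * before finally enabling SMP coherency.
 * ----------------------------------------------------
 */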
func cortex_a15_reset_func
	mov	r5, lr
	bl	cpu_get_rev_var

#if ERRATA_A15_827671
	bl	errata_a15_827671_wa
#endif

#if IMAGE_BL32 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960)
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A15_ACTLR_INV_BTB_BIT
	stcopr	r0, ACTLR
	ldr	r0, =wa_cve_2017_5715_icache_inv_vbar
	stcopr	r0, VBAR
	stcopr	r0, MVBAR
	/* isb will be applied in the course of the reset func */
#endif

	mov	lr, r5
	b	cortex_a15_enable_smp
endfunc cortex_a15_reset_func

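/* ----------------------------------------------------
 * Per-core power-down sequence: flush the L1 data
 * cache, then leave SMP coherency.
 * ----------------------------------------------------
 */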
func cortex_a15_core_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 cache */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a15_disable_smp
endfunc cortex_a15_core_pwr_dwn

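/* ----------------------------------------------------
 * Cluster power-down sequence: flush the L1 data cache,
 * disable the ACP via plat_disable_acp, flush the L2
 * cache, then leave SMP coherency.
 * ----------------------------------------------------
 */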
func cortex_a15_cluster_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 caches */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	bl	plat_disable_acp

	/* Flush L2 caches */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level2

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a15_disable_smp
endfunc cortex_a15_cluster_pwr_dwn

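/*
 * Register the Cortex-A15 CPU operations (reset and power-down handlers)
 * against CORTEX_A15_MIDR with the CPU framework.
 */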
declare_cpu_ops cortex_a15, CORTEX_A15_MIDR, \
	cortex_a15_reset_func, \
	cortex_a15_core_pwr_dwn, \
	cortex_a15_cluster_pwr_dwn