cortex_a17.S

/*
 * Copyright (c) 2017-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a17.h>
#include <cpu_macros.S>

	.macro assert_cache_enabled
#if ENABLE_ASSERTIONS
		ldcopr	r0, SCTLR
		tst	r0, #SCTLR_C_BIT
		ASM_ASSERT(eq)
#endif
	.endm
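
/* ---------------------------------------------
 * Take the CPU out of intra-cluster coherency
 * by clearing the SMP bit in ACTLR. Used by the
 * power down handlers below.
 * ---------------------------------------------
 */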
func cortex_a17_disable_smp
	ldcopr	r0, ACTLR
	bic	r0, #CORTEX_A17_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
	dsb	sy
	bx	lr
endfunc cortex_a17_disable_smp
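
/* ---------------------------------------------
 * Make the CPU take part in intra-cluster
 * coherency by setting the SMP bit in ACTLR.
 * Called at the end of the reset handler.
 * ---------------------------------------------
 */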
func cortex_a17_enable_smp
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A17_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
	bx	lr
endfunc cortex_a17_enable_smp

/* ----------------------------------------------------
 * Errata Workaround for Cortex A17 Errata #852421.
 * This applies only to revision <= r1p2 of Cortex A17.
 * Inputs:
 * r0: variant[4:7] and revision[0:3] of current cpu.
 * Shall clobber: r0-r3
 * ----------------------------------------------------
 */
func errata_a17_852421_wa
	/*
	 * Compare r0 against revision r1p2
	 */
	mov	r2, lr
	bl	check_errata_852421
	cmp	r0, #ERRATA_NOT_APPLIES
	beq	1f
	ldcopr	r0, CORTEX_A17_IMP_DEF_REG1
	orr	r0, r0, #(1 << 24)
	stcopr	r0, CORTEX_A17_IMP_DEF_REG1
1:
	bx	r2
endfunc errata_a17_852421_wa
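
/*
 * 0x12 encodes r1p2 (variant 1, revision 2). cpu_rev_var_ls reports
 * ERRATA_APPLIES when the revision/variant value in r0 is less than
 * or equal to the value passed in r1.
 */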
func check_errata_852421
	mov	r1, #0x12
	b	cpu_rev_var_ls
endfunc check_errata_852421

add_erratum_entry cortex_a17, ERRATUM(852421), ERRATA_A17_852421

/* ----------------------------------------------------
 * Errata Workaround for Cortex A17 Errata #852423.
 * This applies only to revision <= r1p2 of Cortex A17.
 * Inputs:
 * r0: variant[4:7] and revision[0:3] of current cpu.
 * Shall clobber: r0-r3
 * ----------------------------------------------------
 */
func errata_a17_852423_wa
	/*
	 * Compare r0 against revision r1p2
	 */
	mov	r2, lr
	bl	check_errata_852423
	cmp	r0, #ERRATA_NOT_APPLIES
	beq	1f
	ldcopr	r0, CORTEX_A17_IMP_DEF_REG1
	orr	r0, r0, #(1 << 12)
	stcopr	r0, CORTEX_A17_IMP_DEF_REG1
1:
	bx	r2
endfunc errata_a17_852423_wa

func check_errata_852423
	mov	r1, #0x12
	b	cpu_rev_var_ls
endfunc check_errata_852423

add_erratum_entry cortex_a17, ERRATUM(852423), ERRATA_A17_852423
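
/*
 * CVE-2017-5715 (branch target injection): report whether the
 * mitigation has been compiled in. There is no revision check,
 * only the build option WORKAROUND_CVE_2017_5715.
 */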
func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2017_5715

add_erratum_entry cortex_a17, CVE(2017, 5715), WORKAROUND_CVE_2017_5715
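
/* -------------------------------------------------
 * The reset handler for Cortex-A17: apply the errata
 * workarounds selected at build time, install the
 * CVE-2017-5715 workaround vectors for BL32 and
 * enable intra-cluster coherency.
 * -------------------------------------------------
 */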
func cortex_a17_reset_func
	mov	r5, lr
	bl	cpu_get_rev_var
	mov	r4, r0

#if ERRATA_A17_852421
	mov	r0, r4
	bl	errata_a17_852421_wa
#endif

#if ERRATA_A17_852423
	mov	r0, r4
	bl	errata_a17_852423_wa
#endif
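
	/*
	 * For BL32, route exceptions through the BPIALL-based
	 * CVE-2017-5715 workaround vectors by programming both
	 * VBAR and MVBAR with their base address.
	 */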
#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
	ldr	r0, =wa_cve_2017_5715_bpiall_vbar
	stcopr	r0, VBAR
	stcopr	r0, MVBAR
	/* isb will be applied in the course of the reset func */
#endif

	mov	lr, r5
	b	cortex_a17_enable_smp
endfunc cortex_a17_reset_func
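
/* ----------------------------------------------------
 * The CPU Ops core power down function for Cortex-A17.
 * Flush the L1 data cache, then exit coherency.
 * ----------------------------------------------------
 */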
func cortex_a17_core_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 cache */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a17_disable_smp
endfunc cortex_a17_core_pwr_dwn
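
/* -------------------------------------------------------
 * The CPU Ops cluster power down function for Cortex-A17.
 * Flush the L1 and L2 caches, let the platform disable the
 * ACP if present, then exit coherency.
 * -------------------------------------------------------
 */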
func cortex_a17_cluster_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 caches */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	bl	plat_disable_acp

	/* Flush L2 caches */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level2

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a17_disable_smp
endfunc cortex_a17_cluster_pwr_dwn
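
/* -------------------------------------------------------
 * Register the above handlers with the CPU framework,
 * keyed on the Cortex-A17 MIDR.
 * -------------------------------------------------------
 */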
declare_cpu_ops cortex_a17, CORTEX_A17_MIDR, \
	cortex_a17_reset_func, \
	cortex_a17_core_pwr_dwn, \
	cortex_a17_cluster_pwr_dwn