/* cortex_a32.S */
  1. /*
  2. * Copyright (c) 2016-2024, Arm Limited and Contributors. All rights reserved.
  3. *
  4. * SPDX-License-Identifier: BSD-3-Clause
  5. */
  6. #include <arch.h>
  7. #include <asm_macros.S>
  8. #include <assert_macros.S>
  9. #include <cortex_a32.h>
  10. #include <cpu_macros.S>
  11. /* ---------------------------------------------
  12. * Disable intra-cluster coherency
  13. * Clobbers: r0-r1
  14. * ---------------------------------------------
  15. */
func cortex_a32_disable_smp
	/*
	 * CPUECTLR is a 64-bit implementation-defined register; the
	 * ldcopr16/stcopr16 macros move it through the r0 (low) / r1
	 * (high) register pair.
	 */
	ldcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	/* Clear SMPEN to take this core out of coherency */
	bic	r0, r0, #CORTEX_A32_CPUECTLR_SMPEN_BIT
	stcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	/* Synchronise the context change before any further memory access */
	isb
	dsb	sy
	bx	lr
endfunc cortex_a32_disable_smp
  24. /* -------------------------------------------------
  25. * The CPU Ops reset function for Cortex-A32.
  26. * Clobbers: r0-r1
  27. * -------------------------------------------------
  28. */
func cortex_a32_reset_func
	/* ---------------------------------------------
	 * Enable the SMP bit.
	 * Set SMPEN in CPUECTLR so this core takes part
	 * in intra-cluster coherency; required before
	 * enabling the data cache.
	 * ---------------------------------------------
	 */
	ldcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	orr	r0, r0, #CORTEX_A32_CPUECTLR_SMPEN_BIT
	stcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	/* Ensure the SMPEN update is visible before continuing */
	isb
	bx	lr
endfunc cortex_a32_reset_func
  40. /* ----------------------------------------------------
  41. * The CPU Ops core power down function for Cortex-A32.
  42. * Clobbers: r0-r3
  43. * ----------------------------------------------------
  44. */
func cortex_a32_core_pwr_dwn
	/*
	 * Prepare a single core for power down: flush its L1 cache and
	 * exit coherency. The caller must have disabled the data cache
	 * already (asserted below).
	 * r12 is pushed only to meet the AAPCS 8-byte stack alignment
	 * requirement alongside lr.
	 */
	push	{r12, lr}

	/* Assert if cache is enabled */
#if ENABLE_ASSERTIONS
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif

	/* ---------------------------------------------
	 * Flush L1 caches.
	 * DC_OP_CISW = clean & invalidate by set/way.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* ---------------------------------------------
	 * Come out of intra cluster coherency.
	 * lr is restored first so disable_smp's bx lr
	 * returns directly to our caller (tail call).
	 * ---------------------------------------------
	 */
	pop	{r12, lr}
	b	cortex_a32_disable_smp
endfunc cortex_a32_core_pwr_dwn
  67. /* -------------------------------------------------------
  68. * The CPU Ops cluster power down function for Cortex-A32.
  69. * Clobbers: r0-r3
  70. * -------------------------------------------------------
  71. */
func cortex_a32_cluster_pwr_dwn
	/*
	 * Prepare the last core of a cluster for power down: flush L1,
	 * disable the (optional, platform-specific) ACP, flush L2, then
	 * exit coherency. Caches must already be disabled (asserted below).
	 * r12 is pushed only to meet the AAPCS 8-byte stack alignment
	 * requirement alongside lr.
	 */
	push	{r12, lr}

	/* Assert if cache is enabled */
#if ENABLE_ASSERTIONS
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif

	/* ---------------------------------------------
	 * Flush L1 cache.
	 * DC_OP_CISW = clean & invalidate by set/way.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* ---------------------------------------------
	 * Disable the optional ACP (platform hook; a
	 * no-op on platforms without an ACP).
	 * ---------------------------------------------
	 */
	bl	plat_disable_acp

	/* ---------------------------------------------
	 * Flush L2 cache.
	 * Must happen after the ACP is quiesced so no
	 * new allocations land in L2.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level2

	/* ---------------------------------------------
	 * Come out of intra cluster coherency.
	 * lr is restored first so disable_smp's bx lr
	 * returns directly to our caller (tail call).
	 * ---------------------------------------------
	 */
	pop	{r12, lr}
	b	cortex_a32_disable_smp
endfunc cortex_a32_cluster_pwr_dwn
/*
 * Register the Cortex-A32 cpu_ops entry: the MIDR used for runtime
 * CPU matching, plus the reset and core/cluster power-down handlers
 * defined above.
 */
declare_cpu_ops cortex_a32, CORTEX_A32_MIDR, \
	cortex_a32_reset_func, \
	cortex_a32_core_pwr_dwn, \
	cortex_a32_cluster_pwr_dwn