/*
 * Copyright (c) 2018, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>
#include <assert_macros.S>
#include <lib/xlat_tables/xlat_tables_v2.h>

	.global	enable_mmu_direct_svc_mon
	.global	enable_mmu_direct_hyp
  11. /* void enable_mmu_direct_svc_mon(unsigned int flags) */
  12. func enable_mmu_direct_svc_mon
  13. /* Assert that MMU is turned off */
  14. #if ENABLE_ASSERTIONS
  15. ldcopr r1, SCTLR
  16. tst r1, #SCTLR_M_BIT
  17. ASM_ASSERT(eq)
  18. #endif
  19. /* Invalidate TLB entries */
  20. TLB_INVALIDATE(r0, TLBIALL)
  21. mov r3, r0
  22. ldr r0, =mmu_cfg_params
  23. /* MAIR0. Only the lower 32 bits are used. */
  24. ldr r1, [r0, #(MMU_CFG_MAIR << 3)]
  25. stcopr r1, MAIR0
  26. /* TTBCR. Only the lower 32 bits are used. */
  27. ldr r2, [r0, #(MMU_CFG_TCR << 3)]
  28. stcopr r2, TTBCR
  29. /* TTBR0 */
  30. ldr r1, [r0, #(MMU_CFG_TTBR0 << 3)]
  31. ldr r2, [r0, #((MMU_CFG_TTBR0 << 3) + 4)]
  32. stcopr16 r1, r2, TTBR0_64
  33. /* TTBR1 is unused right now; set it to 0. */
  34. mov r1, #0
  35. mov r2, #0
  36. stcopr16 r1, r2, TTBR1_64
  37. /*
  38. * Ensure all translation table writes have drained into memory, the TLB
  39. * invalidation is complete, and translation register writes are
  40. * committed before enabling the MMU
  41. */
  42. dsb ish
  43. isb
  44. /* Enable enable MMU by honoring flags */
  45. ldcopr r1, SCTLR
  46. ldr r2, =(SCTLR_WXN_BIT | SCTLR_C_BIT | SCTLR_M_BIT)
  47. orr r1, r1, r2
  48. /* Clear C bit if requested */
  49. tst r3, #DISABLE_DCACHE
  50. bicne r1, r1, #SCTLR_C_BIT
  51. stcopr r1, SCTLR
  52. isb
  53. bx lr
  54. endfunc enable_mmu_direct_svc_mon
  55. /* void enable_mmu_direct_hyp(unsigned int flags) */
  56. func enable_mmu_direct_hyp
  57. /* Assert that MMU is turned off */
  58. #if ENABLE_ASSERTIONS
  59. ldcopr r1, HSCTLR
  60. tst r1, #HSCTLR_M_BIT
  61. ASM_ASSERT(eq)
  62. #endif
  63. /* Invalidate TLB entries */
  64. TLB_INVALIDATE(r0, TLBIALL)
  65. mov r3, r0
  66. ldr r0, =mmu_cfg_params
  67. /* HMAIR0 */
  68. ldr r1, [r0, #(MMU_CFG_MAIR << 3)]
  69. stcopr r1, HMAIR0
  70. /* HTCR */
  71. ldr r2, [r0, #(MMU_CFG_TCR << 3)]
  72. stcopr r2, HTCR
  73. /* HTTBR */
  74. ldr r1, [r0, #(MMU_CFG_TTBR0 << 3)]
  75. ldr r2, [r0, #((MMU_CFG_TTBR0 << 3) + 4)]
  76. stcopr16 r1, r2, HTTBR_64
  77. /*
  78. * Ensure all translation table writes have drained into memory, the TLB
  79. * invalidation is complete, and translation register writes are
  80. * committed before enabling the MMU
  81. */
  82. dsb ish
  83. isb
  84. /* Enable enable MMU by honoring flags */
  85. ldcopr r1, HSCTLR
  86. ldr r2, =(HSCTLR_WXN_BIT | HSCTLR_C_BIT | HSCTLR_M_BIT)
  87. orr r1, r1, r2
  88. /* Clear C bit if requested */
  89. tst r3, #DISABLE_DCACHE
  90. bicne r1, r1, #HSCTLR_C_BIT
  91. stcopr r1, HSCTLR
  92. isb
  93. bx lr
  94. endfunc enable_mmu_direct_hyp