/*
 * sheevaplug reboot code
 *
 * R11 is used by the loader as a temporary, so avoid it.
 */
#include "arm.s"

/*
 * Turn off MMU, then copy the new kernel to its correct location
 * in physical memory. Then jump to the start of the kernel.
 */

/* main(PADDR(entry), PADDR(code), size); */
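/*
 * assumed setup: the caller copies this trampoline to a safe spot in
 * physical memory before jumping to it; entry and code are physical
 * (PADDR) addresses, size is a byte count.
 */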
TEXT main(SB), 1, $-4
	MOVW	$setR12(SB), R12

	MOVW	R0, p1+0(FP)		/* destination, passed in R0 */

	/* copy in arguments from frame */
	MOVW	R0, R8			/* entry point */
	MOVW	p2+4(FP), R9		/* source */
	MOVW	n+8(FP), R10		/* byte count */
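
	/*
	 * WAVE is a macro from arm.s; it presumably prints a progress
	 * character on the console uart so the reboot can be traced.
	 */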
WAVE('R')
	BL	cachesoff(SB)
	/* now back in 29- or 26-bit addressing, mainly for SB */

	/* turn the MMU off */
WAVE('e')
	MOVW	$KSEGM, R7
	MOVW	$PHYSDRAM, R0
	BL	_r15warp(SB)
	BIC	R7, R12			/* SB */
	BIC	R7, R13			/* SP */
	/* don't care about R14 */

WAVE('b')
	BL	mmuinvalidate(SB)
WAVE('o')
	BL	mmudisable(SB)
WAVE('o')

	MOVW	R9, R4			/* restore regs across function calls */
	MOVW	R10, R5
	MOVW	R8, R6

	/* set up a new stack for local vars and memmove args */
	MOVW	R6, SP			/* tiny trampoline stack */
	SUB	$(0x20 + 4), SP		/* back up before a.out header */

	MOVW	R14, -48(SP)		/* store return addr */
	SUB	$48, SP			/* allocate stack frame */

	MOVW	R6, 44(SP)		/* save dest/entry */
	MOVW	R5, 40(SP)		/* save count */

WAVE('t')
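	/*
	 * stack memmove's (dest, src, size) arguments; dest is also left
	 * in R0, where 5c is assumed to pass the first argument.
	 */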
	MOVW	R6, 0(SP)
	MOVW	R6, 4(SP)		/* push dest */
	MOVW	R6, R0
	MOVW	R4, 8(SP)		/* push src */
	MOVW	R5, 12(SP)		/* push size */
	BL	memmove(SB)
	MOVW	44(SP), R6		/* restore R6 (dest/entry) */
	MOVW	40(SP), R5		/* restore R5 (count) */
WAVE('-')

	/*
	 * flush caches
	 */
	BL	cacheuwbinv(SB)

WAVE('>')
WAVE('\r');
WAVE('\n');

	/*
	 * jump to kernel entry point. Note the true kernel entry point is
	 * the virtual address KZERO|R6, but this must wait until
	 * the MMU is enabled by the kernel in l.s
	 */
	ORR	R6, R6			/* NOP: avoid link bug */
	B	(R6)

/*
 * turn the caches off, double map 0 & KZERO, invalidate TLBs, revert to
 * tiny addresses. upon return, it will be safe to turn off the mmu.
 */
TEXT cachesoff(SB), 1, $-4
	MOVW	$(PsrDirq|PsrDfiq|PsrMsvc), R0
	MOVW	R0, CPSR
	MOVW	$KADDR(0x100-4), R7	/* just before this code */
	MOVW	R14, (R7)		/* save link */

	BL	cacheuwbinv(SB)

	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpCwb|CpCicache|CpCdcache|CpCalign), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS
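	/*
	 * BARRIERS is a macro from arm.s; presumably it drains the write
	 * buffer and prefetch so the CP15 write above takes effect before
	 * the following instructions.
	 */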

	/* redo double map of 0, KZERO */
	MOVW	$(L1+L1X(PHYSDRAM)), R4	/* address of PTE for 0 */
	MOVW	$PTEDRAM, R2		/* PTE bits */
//	MOVW	$PTEIO, R2		/* PTE bits */
	MOVW	$PHYSDRAM, R3
	MOVW	$512, R5
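	/* write 512 1MiB section descriptors, identity-mapping the first 512MB of physical memory */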
_ptrdbl:
	ORR	R3, R2, R1		/* first identity-map 0 to 0, etc. */
	MOVW	R1, (R4)
	ADD	$4, R4			/* bump PTE address */
	ADD	$MiB, R3		/* bump pa */
	SUB.S	$1, R5
	BNE	_ptrdbl

	BARRIERS
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvd), CpTLBinv
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	BARRIERS

	/* back to 29- or 26-bit addressing, mainly for SB */
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpCd32|CpCi32), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS

	MOVW	$KADDR(0x100-4), R7	/* just before this code */
	MOVW	(R7), R14		/* restore link */
	RET
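
/*
 * rebase the return address: the top nibble of R14 is replaced with
 * the region base in R0 (here PHYSDRAM), so the RET below returns to
 * the caller via its physical address.
 */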
TEXT _r15warp(SB), 1, $-4
	BIC	$0xf0000000, R14
	ORR	R0, R14
	RET

TEXT mmudisable(SB), 1, $-4
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpChv|CpCmmu|CpCdcache|CpCicache|CpCwb), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS
	RET

TEXT mmuinvalidate(SB), 1, $-4	/* invalidate all */
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	BARRIERS
	RET

TEXT cacheuwbinv(SB), 1, $-4	/* D+I writeback+invalidate */
	BARRIERS
	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR
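	/*
	 * with PC as the destination register, MRC copies the coprocessor
	 * condition flags, so the loop below presumably repeats the "test,
	 * clean and invalidate" until the D-cache reports itself clean.
	 */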
_uwbinv:				/* D writeback+invalidate */
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
	BNE	_uwbinv

	MCR	CpSC, CpL2, PC, C(CpTESTCFG), C(CpTCl2flush), CpTCl2all
	BARRIERS
	MCR	CpSC, CpL2, PC, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
	BARRIERS

	MOVW	$0, R0			/* I invalidate */
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
	BARRIERS

	MOVW	$0, R0			/* drain write buffer */
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEwb), CpCACHEwait
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	RET