/* l.s */
  1. /*
  2. * Broadcom bcm2835 SoC, as used in Raspberry Pi
  3. * arm1176jzf-s processor (armv6)
  4. */
  5. #include "arm.s"
/*
 * _start: reset entry point.
 * Entered with the MMU and caches off, executing at physical
 * addresses.  Rebase SB for physical addressing, force SVC mode
 * with interrupts masked, invalidate caches and TLB, zero Mach
 * and the page tables, build the kernel map via mmuinit, enable
 * the MMU, then jump to _startpg in KZERO virtual space.
 */
TEXT _start(SB), 1, $-4
	/*
	 * load physical base for SB addressing while mmu is off
	 * keep a handy zero in R0 until first function call
	 */
	MOVW	$setR12(SB), R12
	SUB	$KZERO, R12		/* SB was linked at KZERO... */
	ADD	$PHYSDRAM, R12		/* ...rebase it to physical DRAM */
	MOVW	$0, R0

	/*
	 * SVC mode, interrupts disabled
	 */
	MOVW	$(PsrDirq|PsrDfiq|PsrMsvc), R1
	MOVW	R1, CPSR

	/*
	 * disable the mmu and L1 caches
	 * invalidate caches and tlb
	 */
	MRC	CpSC, 0, R1, C(CpCONTROL), C(0), CpMainctl
	BIC	$(CpCdcache|CpCicache|CpCpredict|CpCmmu), R1
	MCR	CpSC, 0, R1, C(CpCONTROL), C(0), CpMainctl
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvu), CpCACHEall
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	ISB

	/*
	 * clear mach and page tables
	 * zeroes physical [MACHADDR, KTZERO); R0 still holds 0
	 */
	MOVW	$PADDR(MACHADDR), R1
	MOVW	$PADDR(KTZERO), R2
_ramZ:
	MOVW	R0, (R1)
	ADD	$4, R1
	CMP	R1, R2			/* loop until R1 reaches R2 */
	BNE	_ramZ

	/*
	 * start stack at top of mach (physical addr)
	 * set up page tables for kernel
	 */
	MOVW	$PADDR(MACHADDR+MACHSIZE-4), R13
	BL	,mmuinit(SB)		/* still running at physical addresses */

	/*
	 * set up domain access control and page table base
	 */
	MOVW	$Client, R1
	MCR	CpSC, 0, R1, C(CpDAC), C(0)
	MOVW	$PADDR(L1), R1		/* TTB must be a physical address */
	MCR	CpSC, 0, R1, C(CpTTB), C(0)

	/*
	 * enable caches, mmu, and high vectors
	 */
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0), CpMainctl
	ORR	$(CpChv|CpCdcache|CpCicache|CpCmmu), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0), CpMainctl
	ISB

	/*
	 * switch SB, SP, and PC into KZERO space
	 * (writing R15 performs the jump into virtual addresses)
	 */
	MOVW	$setR12(SB), R12
	MOVW	$(MACHADDR+MACHSIZE-4), R13
	MOVW	$_startpg(SB), R15
/*
 * _startpg: first code executed at virtual (KZERO) addresses,
 * reached from _start once the MMU is on.  Enables the cycle
 * counter, then enters main.
 */
TEXT _startpg(SB), 1, $-4
	/*
	 * enable cycle counter
	 */
	MOVW	$1, R1
	MCR	CpSC, 0, R1, C(CpSPM), C(CpSPMperf), CpSPMctl

	/*
	 * call main and loop forever if it returns
	 */
	BL	,main(SB)
	B	,0(PC)			/* spin: main should never return */

	BL	_div(SB)		/* hack to load _div, etc. */
/*
 * fsrget: return the data fault status register (CP15 c5) in R0.
 */
TEXT fsrget(SB), 1, $-4			/* data fault status */
	MRC	CpSC, 0, R0, C(CpFSR), C(0), CpFSRdata
	RET
/*
 * ifsrget: return the instruction fault status register in R0.
 */
TEXT ifsrget(SB), 1, $-4		/* instruction fault status */
	MRC	CpSC, 0, R0, C(CpFSR), C(0), CpFSRinst
	RET
/*
 * farget: return the fault address register (CP15 c6) in R0.
 */
TEXT farget(SB), 1, $-4			/* fault address */
	MRC	CpSC, 0, R0, C(CpFAR), C(0x0)
	RET
/*
 * lcycles: return the low word of the cycle counter in R0
 * (counter enabled in _startpg).
 */
TEXT lcycles(SB), 1, $-4
	MRC	CpSC, 0, R0, C(CpSPM), C(CpSPMperf), CpSPMcyc
	RET
/*
 * splhi: mask IRQs (FIQs unchanged); return the previous CPSR
 * in R0 for a later splx.  The caller's pc is stashed at
 * MACHADDR+4 (NOTE(review): presumably m->splpc for spl
 * debugging — confirm against the Mach layout).
 */
TEXT splhi(SB), 1, $-4
	MOVW	$(MACHADDR+4), R2	/* save caller pc in Mach */
	MOVW	R14, 0(R2)
	MOVW	CPSR, R0		/* turn off irqs (but not fiqs) */
	ORR	$(PsrDirq), R0, R1
	MOVW	R1, CPSR
	RET
/*
 * splfhi: mask both IRQs and FIQs; return the previous CPSR in
 * R0.  Caller's pc is stashed at MACHADDR+4, as in splhi.
 */
TEXT splfhi(SB), 1, $-4
	MOVW	$(MACHADDR+4), R2	/* save caller pc in Mach */
	MOVW	R14, 0(R2)
	MOVW	CPSR, R0		/* turn off irqs and fiqs */
	ORR	$(PsrDirq|PsrDfiq), R0, R1
	MOVW	R1, CPSR
	RET
/*
 * splflo: unmask FIQs only; return the previous CPSR in R0.
 */
TEXT splflo(SB), 1, $-4
	MOVW	CPSR, R0		/* turn on fiqs */
	BIC	$(PsrDfiq), R0, R1
	MOVW	R1, CPSR
	RET
/*
 * spllo: unmask IRQs and FIQs; return the previous CPSR in R0.
 */
TEXT spllo(SB), 1, $-4
	MOVW	CPSR, R0		/* turn on irqs and fiqs */
	BIC	$(PsrDirq|PsrDfiq), R0, R1
	MOVW	R1, CPSR
	RET
/*
 * splx(sr): restore an interrupt state previously returned by
 * splhi/splfhi (passed in R0).  Returns the CPSR that was in
 * force on entry; caller's pc is stashed at MACHADDR+4 as in
 * splhi.
 */
TEXT splx(SB), 1, $-4
	MOVW	$(MACHADDR+0x04), R2	/* save caller pc in Mach */
	MOVW	R14, 0(R2)
	MOVW	R0, R1			/* reset interrupt level */
	MOVW	CPSR, R0		/* return the state being replaced */
	MOVW	R1, CPSR
	RET
/*
 * spldone: empty marker ending the spl routines, used by
 * devkprof.c to bound their text addresses.
 */
TEXT spldone(SB), 1, $0			/* end marker for devkprof.c */
	RET
/*
 * islo: return non-zero iff IRQs are currently enabled.
 * AND isolates the IRQ-disable bit, EOR inverts it: R0 is 0
 * when IRQs are masked, PsrDirq when they are deliverable.
 */
TEXT islo(SB), 1, $-4
	MOVW	CPSR, R0
	AND	$(PsrDirq), R0
	EOR	$(PsrDirq), R0
	RET
/*
 * tas/_tas(ulong *p): test-and-set.  Atomically swaps 1 into
 * *p and returns the old value in R0 (0 means the lock was
 * acquired).  SWPW is deprecated on armv6, as the inline
 * comment notes; NOTE(review): an LDREX/STREX loop is the
 * modern replacement but needs macro support from arm.s —
 * confirm before changing.
 */
TEXT tas(SB), $-4
TEXT _tas(SB), $-4
	MOVW	R0,R1			/* R1 = p; R0 will take the swap result */
	MOVW	$1,R0
	SWPW	R0,(R1)			/* fix: deprecated in armv6 */
	RET
/*
 * setlabel(Label *l): record the current sp and return pc in
 * *l for a later gotolabel.  Returns 0 (the direct return);
 * a resumed gotolabel returns 1 from the same call site.
 */
TEXT setlabel(SB), 1, $-4
	MOVW	R13, 0(R0)		/* sp */
	MOVW	R14, 4(R0)		/* pc */
	MOVW	$0, R0
	RET
/*
 * gotolabel(Label *l): resume at the context saved by setlabel.
 * Loads sp and the return pc from *l, so the RET transfers to
 * the saved pc; the resumed setlabel call appears to return 1.
 */
TEXT gotolabel(SB), 1, $-4
	MOVW	0(R0), R13		/* sp */
	MOVW	4(R0), R14		/* pc */
	MOVW	$1, R0
	RET
/*
 * getcallerpc: return the caller's saved return pc, found at
 * 0(SP) per the Plan 9 ARM calling convention; the argument
 * is ignored.
 */
TEXT getcallerpc(SB), 1, $-4
	MOVW	0(R13), R0
	RET
/*
 * idlehands: sleep until an interrupt.  Briefly drops to spllo
 * so the interrupt can be taken, issues the armv6 CP15
 * wait-for-interrupt operation, then restores the original
 * CPSR before returning.
 */
TEXT idlehands(SB), $-4
	BARRIERS
	MOVW	CPSR, R3		/* remember interrupt state */
	BIC	$(PsrDirq|PsrDfiq), R3, R1	/* spllo */
	MOVW	R1, CPSR
	MOVW	$0, R0			/* wait for interrupt */
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEintr), CpCACHEwait
	ISB
	MOVW	R3, CPSR		/* splx */
	RET
/*
 * coherence: full memory barrier (BARRIERS macro from arm.s).
 */
TEXT coherence(SB), $-4
	BARRIERS
	RET
/*
 * invalidate tlb
 * flushes the entire unified TLB, then synchronises.
 */
TEXT mmuinvalidate(SB), 1, $-4
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	BARRIERS
	RET
/*
 * mmuinvalidateaddr(va)
 * invalidate tlb entry for virtual page address va, ASID 0
 * (single-entry unified-TLB invalidate; va arrives in R0)
 */
TEXT mmuinvalidateaddr(SB), 1, $-4
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinvse
	BARRIERS
	RET
/*
 * drain write buffer
 * writeback and invalidate data cache
 * (whole-cache operation; DSB first so prior stores complete)
 */
TEXT cachedwbinv(SB), 1, $-4
	DSB
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEwbi), CpCACHEall
	RET
/*
 * cachedwbinvse(va, n)
 * drain write buffer
 * writeback and invalidate data cache range [va, va+n)
 *
 * The exclusive end va+n is decremented before rounding so the
 * inclusive-end MCRR range operation touches only lines that
 * intersect [va, va+n).
 */
TEXT cachedwbinvse(SB), 1, $-4
	MOVW	R0, R1			/* DSB clears R0 */
	DSB
	MOVW	n+4(FP), R2
	ADD	R1, R2			/* R2 = va+n (exclusive end) */
	SUB	$1, R2			/* address of last byte in range */
	BIC	$(CACHELINESZ-1), R1	/* round start down to line base */
	BIC	$(CACHELINESZ-1), R2	/* line containing the last byte */
	MCRR(CpSC, 0, 2, 1, CpCACHERANGEdwbi)
	RET
  200. /*
  201. * cachedwbse(va, n)
  202. * drain write buffer
  203. * writeback data cache range [va, va+n)
  204. */
  205. TEXT cachedwbse(SB), 1, $-4
  206. MOVW R0, R1 /* DSB clears R0 */
  207. DSB
  208. MOVW n+4(FP), R2
  209. ADD R1, R2
  210. BIC $(CACHELINESZ-1), R1
  211. BIC $(CACHELINESZ-1), R2
  212. MCRR(CpSC, 0, 2, 1, CpCACHERANGEdwb)
  213. RET
/*
 * drain write buffer and prefetch buffer
 * writeback and invalidate data cache
 * invalidate instruction cache
 * (whole-cache unified flush, e.g. before relocating code)
 */
TEXT cacheuwbinv(SB), 1, $-4
	BARRIERS
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEwbi), CpCACHEall
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
	RET
/*
 * invalidate instruction cache
 * (entire I-cache; no writeback needed, I-cache is read-only)
 */
TEXT cacheiinv(SB), 1, $-4
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
	RET