/*
 * l.s: machine-dependent assembly-language assist (ARM, Plan 9 assembler).
 */
#include "mem.h"
  2. /*
  3. * Entered here from Compaq's bootldr with MMU disabled.
  4. */
  5. TEXT _start(SB), $-4
  6. MOVW $setR12(SB), R12 /* load the SB */
  7. _main:
  8. /* SVC mode, interrupts disabled */
  9. MOVW $(PsrDirq|PsrDfiq|PsrMsvc), R1
  10. MOVW R1, CPSR
  11. /* disable the MMU */
  12. MOVW $0x130, R1
  13. MCR CpMMU, 0, R1, C(CpControl), C(0x0)
  14. /* flush caches */
  15. MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0
  16. /* drain prefetch */
  17. MOVW R0,R0
  18. MOVW R0,R0
  19. MOVW R0,R0
  20. MOVW R0,R0
  21. /* drain write buffer */
  22. MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
  23. MOVW $(MACHADDR+BY2PG), R13 /* stack */
  24. SUB $4, R13 /* link */
  25. BL main(SB)
  26. BL exit(SB)
  27. /* we shouldn't get here */
  28. _mainloop:
  29. B _mainloop
  30. BL _div(SB) /* hack to get _div etc loaded */
  31. /* flush tlb's */
  32. TEXT mmuinvalidate(SB), $-4
  33. MCR CpMMU, 0, R0, C(CpTLBFlush), C(0x7)
  34. RET
  35. /* flush tlb's */
  36. TEXT mmuinvalidateaddr(SB), $-4
  37. MCR CpMMU, 0, R0, C(CpTLBFlush), C(0x6), 1
  38. RET
  39. /* write back and invalidate i and d caches */
  40. TEXT cacheflush(SB), $-4
  41. /* write back any dirty data */
  42. MOVW $0xe0000000,R0
  43. ADD $(8*1024),R0,R1
  44. _cfloop:
  45. MOVW.P 32(R0),R2
  46. CMP.S R0,R1
  47. BNE _cfloop
  48. /* drain write buffer and invalidate i&d cache contents */
  49. MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
  50. MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0
  51. /* drain prefetch */
  52. MOVW R0,R0
  53. MOVW R0,R0
  54. MOVW R0,R0
  55. MOVW R0,R0
  56. RET
  57. /* write back d cache */
  58. TEXT cachewb(SB), $-4
  59. /* write back any dirty data */
  60. _cachewb:
  61. MOVW $0xe0000000,R0
  62. ADD $(8*1024),R0,R1
  63. _cwbloop:
  64. MOVW.P 32(R0),R2
  65. CMP.S R0,R1
  66. BNE _cwbloop
  67. /* drain write buffer */
  68. MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
  69. RET
  70. /* write back a single cache line */
  71. TEXT cachewbaddr(SB), $-4
  72. BIC $31,R0
  73. MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 1
  74. B _wbflush
  75. /* write back a region of cache lines */
  76. TEXT cachewbregion(SB), $-4
  77. MOVW 4(FP),R1
  78. CMP.S $(4*1024),R1
  79. BGT _cachewb
  80. ADD R0,R1
  81. BIC $31,R0
  82. _cwbrloop:
  83. MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 1
  84. ADD $32,R0
  85. CMP.S R0,R1
  86. BGT _cwbrloop
  87. B _wbflush
  88. /* invalidate the dcache */
  89. TEXT dcacheinvalidate(SB), $-4
  90. MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x6)
  91. RET
  92. /* invalidate the icache */
  93. TEXT icacheinvalidate(SB), $-4
  94. MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x9)
  95. RET
  96. /* drain write buffer */
  97. TEXT wbflush(SB), $-4
  98. _wbflush:
  99. MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
  100. RET
  101. /* return cpu id */
  102. TEXT getcpuid(SB), $-4
  103. MRC CpMMU, 0, R0, C(CpCPUID), C(0x0)
  104. RET
  105. /* return fault status */
  106. TEXT getfsr(SB), $-4
  107. MRC CpMMU, 0, R0, C(CpFSR), C(0x0)
  108. RET
  109. /* return fault address */
  110. TEXT getfar(SB), $-4
  111. MRC CpMMU, 0, R0, C(CpFAR), C(0x0)
  112. RET
  113. /* return fault address */
  114. TEXT putfar(SB), $-4
  115. MRC CpMMU, 0, R0, C(CpFAR), C(0x0)
  116. RET
  117. /* set the translation table base */
  118. TEXT putttb(SB), $-4
  119. MCR CpMMU, 0, R0, C(CpTTB), C(0x0)
  120. RET
  121. /*
  122. * enable mmu, i and d caches
  123. */
  124. TEXT mmuenable(SB), $-4
  125. MRC CpMMU, 0, R0, C(CpControl), C(0x0)
  126. ORR $(CpCmmuena|CpCdcache|CpCicache|CpCwb), R0
  127. MCR CpMMU, 0, R0, C(CpControl), C(0x0)
  128. RET
  129. TEXT mmudisable(SB), $-4
  130. MRC CpMMU, 0, R0, C(CpControl), C(0x0)
  131. BIC $(CpCmmuena|CpCdcache|CpCicache|CpCwb|CpCvivec), R0
  132. MCR CpMMU, 0, R0, C(CpControl), C(0x0)
  133. RET
  134. /*
  135. * use exception vectors at 0xffff0000
  136. */
  137. TEXT mappedIvecEnable(SB), $-4
  138. MRC CpMMU, 0, R0, C(CpControl), C(0x0)
  139. ORR $(CpCvivec), R0
  140. MCR CpMMU, 0, R0, C(CpControl), C(0x0)
  141. RET
  142. TEXT mappedIvecDisable(SB), $-4
  143. MRC CpMMU, 0, R0, C(CpControl), C(0x0)
  144. BIC $(CpCvivec), R0
  145. MCR CpMMU, 0, R0, C(CpControl), C(0x0)
  146. RET
  147. /* set the translation table base */
  148. TEXT putdac(SB), $-4
  149. MCR CpMMU, 0, R0, C(CpDAC), C(0x0)
  150. RET
  151. /* set address translation pid */
  152. TEXT putpid(SB), $-4
  153. MCR CpMMU, 0, R0, C(CpPID), C(0x0)
  154. RET
  155. /*
  156. * set the stack value for the mode passed in R0
  157. */
  158. TEXT setr13(SB), $-4
  159. MOVW 4(FP), R1
  160. MOVW CPSR, R2
  161. BIC $PsrMask, R2, R3
  162. ORR R0, R3
  163. MOVW R3, CPSR
  164. MOVW R13, R0
  165. MOVW R1, R13
  166. MOVW R2, CPSR
  167. RET
  168. /*
  169. * exception vectors, copied by trapinit() to somewhere useful
  170. */
  171. TEXT vectors(SB), $-4
  172. MOVW 0x18(R15), R15 /* reset */
  173. MOVW 0x18(R15), R15 /* undefined */
  174. MOVW 0x18(R15), R15 /* SWI */
  175. MOVW 0x18(R15), R15 /* prefetch abort */
  176. MOVW 0x18(R15), R15 /* data abort */
  177. MOVW 0x18(R15), R15 /* reserved */
  178. MOVW 0x18(R15), R15 /* IRQ */
  179. MOVW 0x18(R15), R15 /* FIQ */
  180. TEXT vtable(SB), $-4
  181. WORD $_vsvc(SB) /* reset, in svc mode already */
  182. WORD $_vund(SB) /* undefined, switch to svc mode */
  183. WORD $_vsvc(SB) /* swi, in svc mode already */
  184. WORD $_vpabt(SB) /* prefetch abort, switch to svc mode */
  185. WORD $_vdabt(SB) /* data abort, switch to svc mode */
  186. WORD $_vsvc(SB) /* reserved */
  187. WORD $_virq(SB) /* IRQ, switch to svc mode */
  188. WORD $_vfiq(SB) /* FIQ, switch to svc mode */
  189. TEXT _vrst(SB), $-4
  190. BL resettrap(SB)
  191. TEXT _vsvc(SB), $-4 /* SWI */
  192. MOVW.W R14, -4(R13) /* ureg->pc = interupted PC */
  193. MOVW SPSR, R14 /* ureg->psr = SPSR */
  194. MOVW.W R14, -4(R13) /* ... */
  195. MOVW $PsrMsvc, R14 /* ureg->type = PsrMsvc */
  196. MOVW.W R14, -4(R13) /* ... */
  197. MOVM.DB.W.S [R0-R14], (R13) /* save user level registers, at end r13 points to ureg */
  198. MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
  199. MOVW R13, R0 /* first arg is pointer to ureg */
  200. SUB $8, R13 /* space for argument+link */
  201. BL syscall(SB)
  202. ADD $(8+4*15), R13 /* make r13 point to ureg->type */
  203. MOVW 8(R13), R14 /* restore link */
  204. MOVW 4(R13), R0 /* restore SPSR */
  205. MOVW R0, SPSR /* ... */
  206. MOVM.DB.S (R13), [R0-R14] /* restore registers */
  207. ADD $8, R13 /* pop past ureg->{type+psr} */
  208. RFE /* MOVM.IA.S.W (R13), [R15] */
  209. TEXT _vund(SB), $-4 /* undefined */
  210. MOVM.IA [R0-R4], (R13) /* free some working space */
  211. MOVW $PsrMund, R0
  212. B _vswitch
  213. TEXT _vpabt(SB), $-4 /* prefetch abort */
  214. MOVM.IA [R0-R4], (R13) /* free some working space */
  215. MOVW $PsrMabt, R0 /* r0 = type */
  216. B _vswitch
  217. TEXT _vdabt(SB), $-4 /* prefetch abort */
  218. MOVM.IA [R0-R4], (R13) /* free some working space */
  219. MOVW $(PsrMabt+1), R0 /* r0 = type */
  220. B _vswitch
  221. TEXT _virq(SB), $-4 /* IRQ */
  222. MOVM.IA [R0-R4], (R13) /* free some working space */
  223. MOVW $PsrMirq, R0 /* r0 = type */
  224. B _vswitch
  225. /*
  226. * come here with type in R0 and R13 pointing above saved [r0-r4]
  227. * and type in r0. we'll switch to SVC mode and then call trap.
  228. */
  229. _vswitch:
  230. MOVW SPSR, R1 /* save SPSR for ureg */
  231. MOVW R14, R2 /* save interrupted pc for ureg */
  232. MOVW R13, R3 /* save pointer to where the original [R0-R3] are */
  233. /* switch to svc mode */
  234. MOVW CPSR, R14
  235. BIC $PsrMask, R14
  236. ORR $(PsrDirq|PsrDfiq|PsrMsvc), R14
  237. MOVW R14, CPSR
  238. /* interupted code kernel or user? */
  239. AND.S $0xf, R1, R4
  240. BEQ _userexcep
  241. /* here for trap from SVC mode */
  242. MOVM.DB.W [R0-R2], (R13) /* set ureg->{type, psr, pc}; r13 points to ureg->type */
  243. MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
  244. MOVM.DB.W [R0-R14], (R13) /* save kernel level registers, at end r13 points to ureg */
  245. MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
  246. MOVW R13, R0 /* first arg is pointer to ureg */
  247. SUB $8, R13 /* space for argument+link (for debugger) */
  248. MOVW $0xdeaddead,R11 /* marker */
  249. BL trap(SB)
  250. ADD $(8+4*15), R13 /* make r13 point to ureg->type */
  251. MOVW 8(R13), R14 /* restore link */
  252. MOVW 4(R13), R0 /* restore SPSR */
  253. MOVW R0, SPSR /* ... */
  254. MOVM.DB (R13), [R0-R14] /* restore registers */
  255. ADD $8, R13 /* pop past ureg->{type+psr} */
  256. RFE /* MOVM.IA.S.W (R13), [R15] */
  257. /* here for trap from USER mode */
  258. _userexcep:
  259. MOVM.DB.W [R0-R2], (R13) /* set ureg->{type, psr, pc}; r13 points to ureg->type */
  260. MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
  261. MOVM.DB.W.S [R0-R14], (R13) /* save kernel level registers, at end r13 points to ureg */
  262. MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
  263. MOVW R13, R0 /* first arg is pointer to ureg */
  264. SUB $8, R13 /* space for argument+link (for debugger) */
  265. BL trap(SB)
  266. ADD $(8+4*15), R13 /* make r13 point to ureg->type */
  267. MOVW 8(R13), R14 /* restore link */
  268. MOVW 4(R13), R0 /* restore SPSR */
  269. MOVW R0, SPSR /* ... */
  270. MOVM.DB.S (R13), [R0-R14] /* restore registers */
  271. ADD $8, R13 /* pop past ureg->{type+psr} */
  272. RFE /* MOVM.IA.S.W (R13), [R15] */
  273. TEXT _vfiq(SB), $-4 /* FIQ */
  274. RFE /* FIQ is special, ignore it for now */
  275. /*
  276. * This is the first jump from kernel to user mode.
  277. * Fake a return from interrupt.
  278. *
  279. * Enter with R0 containing the user stack pointer.
  280. * UTZERO + 0x20 is always the entry point.
  281. *
  282. */
  283. TEXT touser(SB),$-4
  284. /* store the user stack pointer into the USR_r13 */
  285. MOVM.DB.W [R0], (R13)
  286. MOVM.S.IA.W (R13),[R13]
  287. /* set up a PSR for user level */
  288. MOVW $(PsrMusr), R0
  289. MOVW R0,SPSR
  290. /* save the PC on the stack */
  291. MOVW $(UTZERO+0x20), R0
  292. MOVM.DB.W [R0],(R13)
  293. /* return from interrupt */
  294. RFE /* MOVM.IA.S.W (R13), [R15] */
  295. /*
  296. * here to jump to a newly forked process
  297. */
  298. TEXT forkret(SB),$-4
  299. ADD $(4*15), R13 /* make r13 point to ureg->type */
  300. MOVW 8(R13), R14 /* restore link */
  301. MOVW 4(R13), R0 /* restore SPSR */
  302. MOVW R0, SPSR /* ... */
  303. MOVM.DB.S (R13), [R0-R14] /* restore registers */
  304. ADD $8, R13 /* pop past ureg->{type+psr} */
  305. RFE /* MOVM.IA.S.W (R13), [R15] */
  306. TEXT splhi(SB), $-4
  307. /* save caller pc in Mach */
  308. MOVW $(MACHADDR+0x04),R2
  309. MOVW R14,0(R2)
  310. /* turn off interrupts */
  311. MOVW CPSR, R0
  312. ORR $(PsrDfiq|PsrDirq), R0, R1
  313. MOVW R1, CPSR
  314. RET
  315. TEXT spllo(SB), $-4
  316. MOVW CPSR, R0
  317. BIC $(PsrDfiq|PsrDirq), R0, R1
  318. MOVW R1, CPSR
  319. RET
  320. TEXT splx(SB), $-4
  321. /* save caller pc in Mach */
  322. MOVW $(MACHADDR+0x04),R2
  323. MOVW R14,0(R2)
  324. /* reset interrupt level */
  325. MOVW R0, R1
  326. MOVW CPSR, R0
  327. MOVW R1, CPSR
  328. RET
  329. TEXT splxpc(SB), $-4 /* for iunlock */
  330. MOVW R0, R1
  331. MOVW CPSR, R0
  332. MOVW R1, CPSR
  333. RET
  334. TEXT spldone(SB), $0
  335. RET
  336. TEXT islo(SB), $-4
  337. MOVW CPSR, R0
  338. AND $(PsrDfiq|PsrDirq), R0
  339. EOR $(PsrDfiq|PsrDirq), R0
  340. RET
  341. TEXT cpsrr(SB), $-4
  342. MOVW CPSR, R0
  343. RET
  344. TEXT spsrr(SB), $-4
  345. MOVW SPSR, R0
  346. RET
  347. TEXT getcallerpc(SB), $-4
  348. MOVW 0(R13), R0
  349. RET
  350. TEXT tas(SB), $-4
  351. MOVW R0, R1
  352. MOVW $0xDEADDEAD, R2
  353. SWPW R2, (R1), R0
  354. RET
  355. TEXT setlabel(SB), $-4
  356. MOVW R13, 0(R0) /* sp */
  357. MOVW R14, 4(R0) /* pc */
  358. MOVW $0, R0
  359. RET
  360. TEXT gotolabel(SB), $-4
  361. MOVW 0(R0), R13 /* sp */
  362. MOVW 4(R0), R14 /* pc */
  363. MOVW $1, R0
  364. RET
  365. /* The first MCR instruction of this function needs to be on a cache-line
  366. * boundary; to make this happen, it will be copied (in trap.c).
  367. *
  368. * Doze puts the machine into idle mode. Any interrupt will get it out
  369. * at the next instruction (the RET, to be precise).
  370. */
  371. TEXT _doze(SB), $-4
  372. MOVW $UCDRAMZERO, R1
  373. MOVW R0,R0
  374. MOVW R0,R0
  375. MOVW R0,R0
  376. MOVW R0,R0
  377. MOVW R0,R0
  378. MOVW R0,R0
  379. MOVW R0,R0
  380. MCR CpPWR, 0, R0, C(CpTest), C(0x2), 2
  381. MOVW (R1), R0
  382. MCR CpPWR, 0, R0, C(CpTest), C(0x8), 2
  383. RET