/*
 * sheevaplug machine assist
 * arm926ej-s processor at 1.2GHz
 *
 * loader uses R11 as scratch.
 */

#include "arm.s"

/*
 * MCR and MRC are counter-intuitively named.
 *	MCR	coproc, opcode1, Rd, CRn, CRm[, opcode2]	# arm -> coproc
 *	MRC	coproc, opcode1, Rd, CRn, CRm[, opcode2]	# coproc -> arm
 */
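/*
 * for example, both directions appear below (in cpidget and mmuenable):
 *	MRC	CpSC, 0, R0, C(CpID), C(0), CpIDid	# coproc -> arm: R0 = main ID reg.
 *	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)		# arm -> coproc: control reg. = R0
 */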
/*
 * Entered here from Das U-Boot with MMU disabled.
 * Until the MMU is enabled it is OK to call functions provided
 * they are within ±32MiB relative and do not require any
 * local variables or more than one argument (i.e. there is
 * no stack).
 */
TEXT _start(SB), 1, $-4
	MOVW	$setR12(SB), R12		/* load the SB */
_main:
	/* SVC mode, interrupts disabled */
	MOVW	$(PsrDirq|PsrDfiq|PsrMsvc), R1
	MOVW	R1, CPSR
	BARRIERS

	/*
	 * disable the MMU & caches,
	 * switch to system permission & 32-bit addresses.
	 */
	MOVW	$(CpCsystem|CpCd32|CpCi32), R1
	MCR	CpSC, 0, R1, C(CpCONTROL), C(0)
	BARRIERS

	/*
	 * disable the Sheevaplug's L2 cache, invalidate all caches
	 */

	/* flush caches. 926ejs manual says we have to do it iteratively. */
_dwbinv0:
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
	BNE	_dwbinv0
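	/*
	 * the MRC above is the 926's "test, clean and invalidate DCache"
	 * op (CP15 C7 with Rd = PC): each pass writes back one dirty
	 * line, and the Z flag is set only once the whole cache is
	 * clean, hence the BNE loop.
	 */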
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	/* make the l2 cache pay attention */
	MOVW	$(PHYSIO+0x20100), R1	/* CPUCSREG */
	MOVW	(4*10)(R1), R2
	ORR	$(1<<3), R2		/* cpu->l2cfg |= L2exists */
	MOVW	R2, (4*10)(R1)
	BARRIERS
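	/*
	 * (4*10)(R1) is word 10 of the cpu control & status registers,
	 * i.e. PHYSIO+0x20128, which the kernel's comment calls
	 * cpu->l2cfg; presumably bit 3 is its L2exists bit.
	 */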
	/* invalidate l2 cache */
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
	BARRIERS

	/* disable l2 cache. do this while l1 caches are off */
	MRC	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	/* disabling write allocation is probably for cortex-a8 errata 460075 */
	/* l2 off, no wr alloc, no streaming */
	BIC	$(CpTCl2ena | CpTCl2wralloc | CpTCldcstream), R1
	MCR	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	BARRIERS

	/* flush caches. 926ejs manual says we have to do it iteratively. */
_dwbinv1:
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
	BNE	_dwbinv1
	BARRIERS

	WAVE('\r')
	/* clear Mach */
	MOVW	$PADDR(MACHADDR), R4		/* address of Mach */
_machZ:
	MOVW	R0, (R4)
	ADD	$4, R4				/* bump address */
	CMP.S	$PADDR(L1+L1X(0)), R4		/* stop at the start of the L1 page table */
	BNE	_machZ
	/*
	 * set up the MMU page table
	 */

	/* clear all PTEs first, to provide a default */
	WAVE('\n')
	MOVW	$PADDR(L1+L1X(0)), R4		/* address of PTE for 0 */
_ptenv0:
	ZEROPTE()
	CMP.S	$PADDR(L1+16*KiB), R4
	BNE	_ptenv0
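	/*
	 * ZEROPTE() is a macro from arm.s; presumably it stores the
	 * default descriptor (R0) and advances the PTE pointer, roughly:
	 *	MOVW	R0, (R4)
	 *	ADD	$4, R4
	 */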
	/* double map of PHYSDRAM, KZERO to PHYSDRAM for first few MBs */
	MOVW	$PTEDRAM, R2			/* PTE bits */
	MOVW	$PHYSDRAM, R3			/* pa */
	MOVW	$PADDR(L1+L1X(PHYSDRAM)), R4	/* address of PTE for PHYSDRAM */
	MOVW	$16, R5
_ptdbl:
	FILLPTE()
	SUB.S	$1, R5
	BNE	_ptdbl
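	/*
	 * FILLPTE() (also from arm.s) presumably matches the open-coded
	 * _ptrdbl loop in _reset below: build a section descriptor from
	 * the PTE bits in R2 and the pa in R3, store it, and step to
	 * the next MiB section, roughly:
	 *	ORR	R3, R2, R1
	 *	MOVW	R1, (R4)
	 *	ADD	$4, R4
	 *	ADD	$MiB, R3
	 */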
	/*
	 * back up and fill in PTEs for memory at KZERO
	 * there is 1 bank of 512MB of SDRAM at PHYSDRAM
	 */
	MOVW	$PTEDRAM, R2			/* PTE bits */
	MOVW	$PHYSDRAM, R3
	MOVW	$PADDR(L1+L1X(KZERO)), R4	/* start with PTE for KZERO */
	MOVW	$512, R5			/* inner loop count */
_ptekrw:					/* set PTEs for 512MiB */
	FILLPTE()
	SUB.S	$1, R5
	BNE	_ptekrw

	/*
	 * back up and fill in PTE for MMIO
	 */
	MOVW	$PTEIO, R2			/* PTE bits */
	MOVW	$PHYSIO, R3
	MOVW	$PADDR(L1+L1X(VIRTIO)), R4	/* start with PTE for VIRTIO */
	FILLPTE()
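	/*
	 * L1X(va) turns a virtual address into its byte offset in the
	 * level-1 table; with 1MiB sections it is presumably
	 * ((va)>>20)*4, so L1+L1X(va) is va's section descriptor.
	 */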
	/* mmu.c sets up the vectors later */

	/*
	 * set up a temporary stack; avoid data & bss segments
	 */
	MOVW	$(PHYSDRAM | (128*1024*1024)), R13

	WAVE('P')
	/* set the domain access control */
	MOVW	$Client, R0
	BL	dacput(SB)

	/* set the translation table base */
	MOVW	$PADDR(L1), R0
	BL	ttbput(SB)

	MOVW	$0, R0
	BL	pidput(SB)		/* paranoia */

	/* the little dance to turn the MMU & caches on */
	WAVE('l')
	BL	cacheuwbinv(SB)
	BL	mmuinvalidate(SB)
	BL	mmuenable(SB)
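	/*
	 * the double map is what makes this dance safe: the PC is still
	 * fetching from physical PHYSDRAM addresses when the MMU comes
	 * on, so those addresses must stay mapped until _r15warp below
	 * moves the PC (and SP) into the KZERO map.
	 */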
	WAVE('a')
	/* warp the PC into the virtual map */
	MOVW	$KZERO, R0
	BL	_r15warp(SB)

	/*
	 * now running at KZERO+something!
	 */

	MOVW	$setR12(SB), R12	/* reload the SB */

	/*
	 * set up temporary stack again, in case we've just switched
	 * to a new register set.
	 */
	MOVW	$(KZERO|(128*1024*1024)), R13

	/* can now execute arbitrary C code */

	BL	cacheuwbinv(SB)

	/* undo double map of 0, KZERO */
	MOVW	$PADDR(L1+L1X(0)), R4	/* address of PTE for 0 */
	MOVW	$0, R0
	MOVW	$16, R5
_ptudbl:
	MOVW	R0, (R4)
	ADD	$4, R4			/* bump PTE address */
	ADD	$MiB, R0		/* bump pa */
	SUB.S	$1, R5
	BNE	_ptudbl
	BARRIERS
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvd), CpTLBinvse
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	BARRIERS

	WAVE('n')
	WAVE(' ')

	/* pass Mach to main and set up the stack */
	MOVW	$(MACHADDR), R0		/* Mach */
	MOVW	R0, R13
	ADD	$(MACHSIZE), R13	/* stack pointer */
	SUB	$4, R13			/* space for link register */
	BL	main(SB)		/* void main(Mach*) */
	/* fall through */

/* not used */
TEXT _reset(SB), 1, $-4
	/* turn the caches off */
	MOVW	$(PsrDirq|PsrDfiq|PsrMsvc), R0
	MOVW	R0, CPSR
	BARRIERS
	BL	cacheuwbinv(SB)
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpCwb|CpCicache|CpCdcache|CpCalign), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS

	WAVE('R')
	/* redo double map of 0, KZERO */
	MOVW	$(L1+L1X(0)), R4	/* address of PTE for 0 */
	MOVW	$PTEDRAM, R2		/* PTE bits */
	MOVW	$0, R3
	MOVW	$16, R5
_ptrdbl:
	ORR	R3, R2, R1		/* first identity-map 0 to 0, etc. */
	MOVW	R1, (R4)
	ADD	$4, R4			/* bump PTE address */
	ADD	$MiB, R3		/* bump pa */
	SUB.S	$1, R5
	BNE	_ptrdbl
	BARRIERS

	WAVE('e')
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvd), CpTLBinv
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	BARRIERS

	/* back to 29- or 26-bit addressing, mainly for SB */
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpCd32|CpCi32), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS

	/* turn the MMU off */
	MOVW	$PHYSDRAM, R0
	BL	_r15warp(SB)
	BL	mmuinvalidate(SB)
	BL	mmudisable(SB)

	WAVE('s')
	/* set new reset vector */
	MOVW	$0, R2
	MOVW	$0xe59ff018, R3		/* MOVW 0x18(R15), R15 */
	MOVW	R3, (R2)

	WAVE('e')
	MOVW	$PHYSBOOTROM, R3
	MOVW	R3, 0x20(R2)		/* where $0xe59ff018 jumps to */
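	/*
	 * 0xe59ff018 is arm LDR PC, [PC, #0x18].  because the PC reads
	 * as the instruction's address + 8, executing it at 0 loads the
	 * PC from 0+8+0x18 = 0x20, where PHYSBOOTROM was just stored.
	 */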
	BARRIERS

	WAVE('t')
	WAVE('\r')
	WAVE('\n')

	/* ...and jump to it */
	MOVW	R2, R15			/* software reboot */
_limbo:					/* should not get here... */
	B	_limbo			/* ... and can't get out */
	BL	_div(SB)		/* hack to load _div, etc. */

TEXT _r15warp(SB), 1, $-4
	BIC	$KSEGM, R14
	ORR	R0, R14

	BIC	$KSEGM, R13
	ORR	R0, R13
	RET
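/*
 * _r15warp moves the return address (R14) and the stack pointer into
 * the segment whose base is in R0: BIC clears the KSEGM bits and ORR
 * substitutes the new base, so the RET "returns" into the other map.
 */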
/* clobbers R1, R6 */
TEXT myputc(SB), 1, $-4
	MOVW	$PHYSCONS, R6
_busy:
	MOVW	20(R6), R1
	BIC.S	$~(1<<5), R1		/* (x->lsr & LSRthre) == 0? */
	BEQ	_busy
	MOVW	R3, (R6)		/* print */
	BARRIERS
	RET
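/*
 * the console uart is 16550-like with word-spaced registers: offset
 * 20 (0x14) is the line status register and bit 5 (LSRthre) means the
 * transmit holding register is empty.  the character arrives in R3,
 * presumably loaded there by the WAVE macro in arm.s.
 */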
/*
 * l1 caches
 */

TEXT l1cacheson(SB), 1, $-4
	MOVW	CPSR, R5
	ORR	$(PsrDirq|PsrDfiq), R5, R4
	MOVW	R4, CPSR		/* splhi */
	BARRIERS

	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	ORR	$(CpCdcache|CpCicache|CpCwb), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS

	MOVW	R5, CPSR		/* splx */
	BARRIERS
	RET

TEXT l1cachesoff(SB), 1, $-4
	MOVM.DB.W [R14], (SP)		/* save lr on stack */

	MOVW	CPSR, R5
	ORR	$(PsrDirq|PsrDfiq), R5, R4
	MOVW	R4, CPSR		/* splhi */
	BARRIERS

	BL	cacheuwbinv(SB)

	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpCdcache|CpCicache|CpCwb), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS

	MOVW	R5, CPSR		/* splx */
	BARRIERS

	MOVM.IA.W (SP), [R14]		/* restore lr */
	RET

/*
 * cache* functions affect only the L1 caches, which are VIVT.
 */

TEXT cachedwb(SB), 1, $-4		/* D writeback */
	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR
	BARRIERS

	/* keep writing back dirty cache lines until no more exist */
_dwb:
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwb), CpCACHEtest
	BNE	_dwb
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	BARRIERS
	RET

TEXT cachedwbse(SB), 1, $-4		/* D writeback SE */
	MOVW	R0, R2			/* first arg: address */

	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR
	BARRIERS

	MOVW	4(FP), R1		/* second arg: size */

//	CMP.S	$(4*1024), R1
//	BGT	_dwb
	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_dwbse:
	MCR	CpSC, 0, R2, C(CpCACHE), C(CpCACHEwb), CpCACHEse
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_dwbse
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	BARRIERS
	RET
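/*
 * the *se ("single entry") routines, here and below, share one
 * pattern: compute the end address from the address and size args,
 * round the start down to a cache-line boundary, then issue one
 * per-line coprocessor op every CACHELINESZ bytes until the end.
 */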
TEXT cachedwbinv(SB), 1, $-4		/* D writeback+invalidate */
	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR
	BARRIERS

	/* keep writing back dirty cache lines until no more exist */
_dwbinv:
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
	BNE	_dwbinv
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	BARRIERS
	RET

TEXT cachedwbinvse(SB), 1, $-4		/* D writeback+invalidate SE */
	MOVW	R0, R2			/* first arg: address */

	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR
	BARRIERS

	MOVW	4(FP), R1		/* second arg: size */

//	CMP.S	$(4*1024), R1
//	BGT	_dwbinv
	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_dwbinvse:
	MCR	CpSC, 0, R2, C(CpCACHE), C(CpCACHEwbi), CpCACHEse
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_dwbinvse
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	BARRIERS
	RET

TEXT cachedinvse(SB), 1, $-4		/* D invalidate SE */
	MOVW	R0, R2			/* first arg: address */

	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR
	BARRIERS

	MOVW	4(FP), R1		/* second arg: size */

//	CMP.S	$(4*1024), R1
//	BGT	_dinv
	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_dinvse:
	MCR	CpSC, 0, R2, C(CpCACHE), C(CpCACHEinvd), CpCACHEse
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_dinvse
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	BARRIERS
	RET

TEXT cacheuwbinv(SB), 1, $-4		/* D+I writeback+invalidate */
	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR
	BARRIERS

	/* keep writing back dirty cache lines until no more exist */
_uwbinv:				/* D writeback+invalidate */
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
	BNE	_uwbinv
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	$0, R0			/* I invalidate */
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	BARRIERS
	RET

TEXT cacheiinv(SB), 1, $-4		/* I invalidate */
	BARRIERS
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS
	RET

TEXT cachedinv(SB), 1, $-4		/* D invalidate */
_dinv:
	BARRIERS
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvd), CpCACHEall
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS
	RET
/*
 * l2 cache
 */

/* enable l2 cache in config coproc. reg. do this while l1 caches are off. */
TEXT l2cachecfgon(SB), 1, $-4
	BARRIERS
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
	BARRIERS

	MRC	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	ORR	$(CpTCl2ena | CpTCl2prefdis), R1	/* l2 on, prefetch off */
	MCR	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	BARRIERS
	RET

/* disable l2 cache in config coproc. reg. do this while l1 caches are off. */
TEXT l2cachecfgoff(SB), 1, $-4
	BARRIERS
	MRC	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	BIC	$CpTCl2ena, R1
	MCR	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	BARRIERS

	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
	BARRIERS
	RET

TEXT l2cacheuwb(SB), 1, $-4		/* L2 unified writeback */
	BARRIERS
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2flush), CpTCl2all
	BARRIERS
	RET

TEXT l2cacheuwbse(SB), 1, $-4		/* L2 unified writeback SE */
	MOVW	R0, R2			/* first arg: address */

	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR
	BARRIERS

	MOVW	4(FP), R1		/* second arg: size */

	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_l2wbse:
	MCR	CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2flush), CpTCl2seva
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_l2wbse
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	BARRIERS
	RET

TEXT l2cacheuwbinv(SB), 1, $-4		/* L2 unified writeback+invalidate */
	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR
	BARRIERS

	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2flush), CpTCl2all
	BARRIERS
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	BARRIERS
	RET

TEXT l2cacheuwbinvse(SB), 1, $-4	/* L2 unified writeback+invalidate SE */
	MOVW	R0, R2			/* first arg: address */

	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR
	BARRIERS

	MOVW	4(FP), R1		/* second arg: size */

	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_l2wbinvse:
	MCR	CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2flush), CpTCl2seva
	BARRIERS
	MCR	CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2inv), CpTCl2seva
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_l2wbinvse
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	BARRIERS
	RET

TEXT l2cacheuinv(SB), 1, $-4		/* L2 unified invalidate */
	BARRIERS
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
	BARRIERS
	RET

TEXT l2cacheuinvse(SB), 1, $-4		/* L2 unified invalidate SE */
	MOVW	R0, R2			/* first arg: address */

	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR
	BARRIERS

	MOVW	4(FP), R1		/* second arg: size */

	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_l2invse:
	MCR	CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2inv), CpTCl2seva
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_l2invse
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	BARRIERS
	RET
/*
 * enable mmu, i and d caches, and high vector
 */
TEXT mmuenable(SB), 1, $-4
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	ORR	$(CpChv|CpCmmu|CpCdcache|CpCicache|CpCwb|CpCsystem), R0
	BIC	$(CpCrom), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS
	RET

TEXT mmudisable(SB), 1, $-4
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpChv|CpCmmu|CpCdcache|CpCicache|CpCwb), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS
	RET

TEXT mmuinvalidate(SB), 1, $-4		/* invalidate all */
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	BARRIERS
	RET

TEXT mmuinvalidateaddr(SB), 1, $-4	/* invalidate single entry */
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinvse
	BARRIERS
	RET

TEXT cpidget(SB), 1, $-4		/* main ID */
	MRC	CpSC, 0, R0, C(CpID), C(0), CpIDid
	RET

TEXT cpctget(SB), 1, $-4		/* cache type */
	MRC	CpSC, 0, R0, C(CpID), C(0), CpIDct
	RET

TEXT controlget(SB), 1, $-4		/* control */
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	RET

TEXT ttbget(SB), 1, $-4			/* translation table base */
	MRC	CpSC, 0, R0, C(CpTTB), C(0)
	RET

TEXT ttbput(SB), 1, $-4			/* translation table base */
	MCR	CpSC, 0, R0, C(CpTTB), C(0)
	BARRIERS
	RET

TEXT dacget(SB), 1, $-4			/* domain access control */
	MRC	CpSC, 0, R0, C(CpDAC), C(0)
	RET

TEXT dacput(SB), 1, $-4			/* domain access control */
	MCR	CpSC, 0, R0, C(CpDAC), C(0)
	BARRIERS
	RET

TEXT fsrget(SB), 1, $-4			/* fault status */
	MRC	CpSC, 0, R0, C(CpFSR), C(0)
	RET

TEXT farget(SB), 1, $-4			/* fault address */
	MRC	CpSC, 0, R0, C(CpFAR), C(0x0)
	RET

TEXT pidget(SB), 1, $-4			/* address translation pid */
	MRC	CpSC, 0, R0, C(CpPID), C(0x0)
	RET

TEXT pidput(SB), 1, $-4			/* address translation pid */
	MCR	CpSC, 0, R0, C(CpPID), C(0x0)
	BARRIERS
	RET
TEXT splhi(SB), 1, $-4
	MOVW	$(MACHADDR+4), R2	/* save caller pc in Mach */
	MOVW	R14, 0(R2)

	MOVW	CPSR, R3		/* turn off interrupts */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR
	BARRIERS
	MOVW	R3, R0
	RET

TEXT spllo(SB), 1, $-4
	MOVW	CPSR, R3
	BIC	$(PsrDirq), R3, R1
	MOVW	R1, CPSR
	BARRIERS
	MOVW	R3, R0
	RET

TEXT splx(SB), 1, $-4
	MOVW	$(MACHADDR+0x04), R2	/* save caller pc in Mach */
	MOVW	R14, 0(R2)

	MOVW	R0, R1			/* reset interrupt level */
	MOVW	CPSR, R3
	MOVW	R1, CPSR
	BARRIERS
	MOVW	R3, R0
	RET

TEXT splxpc(SB), 1, $-4			/* for iunlock */
	MOVW	R0, R1
	MOVW	CPSR, R3
	MOVW	R1, CPSR
	BARRIERS
	MOVW	R3, R0
	RET

TEXT spldone(SB), 1, $0
	RET

TEXT islo(SB), 1, $-4
	MOVW	CPSR, R0
	AND	$(PsrDirq), R0
	EOR	$(PsrDirq), R0
	RET
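/*
 * AND leaves only the PsrDirq bit; the EOR then yields non-zero
 * (PsrDirq) when interrupts are enabled and 0 when they are masked.
 */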
TEXT splfhi(SB), $-4
	MOVW	CPSR, R3
	ORR	$(PsrDfiq|PsrDirq), R3, R1
	MOVW	R1, CPSR
	BARRIERS
	MOVW	R3, R0
	RET

//TEXT splflo(SB), $-4
//	MOVW	CPSR, R3
//	BIC	$(PsrDfiq), R3, R1
//	MOVW	R1, CPSR
//	BARRIERS
//	MOVW	R3, R0
//	RET

TEXT _tas(SB), $-4
	MOVW	R0, R1
	BARRIERS
	MOVW	$1, R0
	SWPW	R0, (R1)		/* fix: deprecated in armv7 */
	MOVW	R0, R3
	BARRIERS
	MOVW	R3, R0
	RET
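/*
 * SWPW atomically swaps $1 with the lock word and returns the old
 * value in R0.  the armv5 926ej-s has no LDREX/STREX, so SWP is the
 * only atomic primitive here, despite its deprecation from armv6 on.
 */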
//TEXT tas32(SB), 1, $-4
//	MOVW	R0, R1
//	MOVW	$0xDEADDEAD, R0
//	MOVW	R0, R3
//	SWPW	R0, (R1)
//	CMP.S	R0, R3
//	BEQ	_tasout
//	EOR	R3, R3			/* R3 = 0 */
//	CMP.S	R0, R3
//	BEQ	_tasout
//	MOVW	$1, R15			/* abort: lock != 0 && lock != $0xDEADDEAD */
//_tasout:
//	RET

TEXT setlabel(SB), 1, $-4
	MOVW	R13, 0(R0)		/* sp */
	MOVW	R14, 4(R0)		/* pc */
	BARRIERS
	MOVW	$0, R0
	RET

TEXT gotolabel(SB), 1, $-4
	MOVW	0(R0), R13		/* sp */
	MOVW	4(R0), R14		/* pc */
	BARRIERS
	MOVW	$1, R0
	RET
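/*
 * setlabel/gotolabel are setjmp/longjmp analogues: only the sp and
 * the return pc are saved in the Label, and gotolabel's $1 return
 * lets setlabel's caller tell the resumed path from the initial one.
 */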
TEXT getcallerpc(SB), 1, $-4
	MOVW	0(R13), R0
	RET

TEXT _idlehands(SB), 1, $-4
	MOVW	CPSR, R3
	ORR	$(PsrDirq|PsrDfiq), R3, R1	/* splhi */
	MOVW	R1, CPSR
	BARRIERS

	MOVW	$0, R0			/* wait for interrupt */
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEintr), CpCACHEwait
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	BARRIERS
	RET
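/*
 * the MCR above is the 926's CP15 "wait for interrupt": the core
 * sleeps until an interrupt is pending (even with IRQs masked by the
 * splhi), and the following splx restores the caller's level.
 */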
TEXT barriers(SB), 1, $-4
	BARRIERS
	RET