/*
 * sheevaplug machine assist
 * arm926ej-s processor at 1.2GHz
 *
 * loader uses R11 as scratch.
 * R9 and R10 are used for `extern register' variables.
 *
 * ARM v7 arch. ref. man. (I know, this is v5) §B1.3.3 says that
 * we don't need barriers around moves to CPSR.  The ARM v6 manual
 * seems to be silent on the subject.
 */
#include "arm.s"

/*
 * MCR and MRC are counter-intuitively named.
 *	MCR	coproc, opcode1, Rd, CRn, CRm[, opcode2]	# arm -> coproc
 *	MRC	coproc, opcode1, Rd, CRn, CRm[, opcode2]	# coproc -> arm
 */
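
/*
 * For example, cpidget below reads the main ID register into R0 with
 *	MRC	CpSC, 0, R0, C(CpID), C(0), CpIDid
 * (coproc -> arm); an MCR with the same operand order would move R0 the
 * other way, into a (writable) coprocessor register.
 */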

/*
 * Entered here from Das U-Boot with MMU disabled.
 * Until the MMU is enabled it is OK to call functions provided
 * they are within ±32MiB relative and do not require any
 * local variables or more than one argument (i.e. there is
 * no stack).
 */
TEXT _start(SB), 1, $-4
	MOVW	$setR12(SB), R12		/* load the SB */
_main:
	/* SVC mode, interrupts disabled */
	MOVW	$(PsrDirq|PsrDfiq|PsrMsvc), R1
	MOVW	R1, CPSR
	BARRIERS

	/*
	 * disable the MMU & caches,
	 * switch to system permission & 32-bit addresses.
	 */
	MOVW	$(CpCsystem|CpCd32|CpCi32), R1
	MCR	CpSC, 0, R1, C(CpCONTROL), C(0)
	ISB

	/*
	 * disable the Sheevaplug's L2 cache, invalidate all caches
	 */

	/* flush caches.  926ejs manual says we have to do it iteratively. */
_dwbinv0:
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
	BNE	_dwbinv0
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	/* make the l2 cache pay attention */
	MOVW	$(PHYSIO+0x20100), R1		/* CPUCSREG */
	MOVW	(4*10)(R1), R2
	ORR	$(1<<3), R2			/* cpu->l2cfg |= L2exists */
	MOVW	R2, (4*10)(R1)
	ISB

	/* invalidate l2 cache */
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
	ISB

	/* disable l2 cache.  do this while l1 caches are off */
	MRC	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	/* disabling write allocation is probably for cortex-a8 errata 460075 */
	/* l2 off, no wr alloc, no streaming */
	BIC	$(CpTCl2ena | CpTCl2wralloc | CpTCldcstream), R1
	MCR	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	BARRIERS

	/* flush caches.  926ejs manual says we have to do it iteratively. */
_dwbinv1:
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
	BNE	_dwbinv1
	BARRIERS

	PUTC('\r')
	/* clear Mach */
	MOVW	$PADDR(MACHADDR), R4		/* address of Mach */
_machZ:
	MOVW	R0, (R4)
	ADD	$4, R4				/* bump address */
	CMP.S	$PADDR(L1+L1X(0)), R4
	BNE	_machZ

	/*
	 * set up the MMU page table
	 */
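
	/*
	 * Each entry filled in here is presumably a 1MiB ARMv5 "section"
	 * descriptor (assuming PTEDRAM and PTEIO in arm.s encode the usual
	 * fields): bits 31-20 hold the section's physical base address, the
	 * low bits hold the access permissions, domain and C/B cacheability
	 * bits, and bits 1-0 are 10 to mark a section.  FILLPTE() presumably
	 * does what the open-coded _ptrdbl loop in _reset does below: OR the
	 * PTE bits in R2 with the pa in R3, store the result at (R4), then
	 * advance R4 by 4 and R3 by a MiB.
	 */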

	/* clear all PTEs first, to provide a default */
	PUTC('\n')
	MOVW	$PADDR(L1+L1X(0)), R4		/* address of PTE for 0 */
_ptenv0:
	ZEROPTE()
	CMP.S	$PADDR(L1+16*KiB), R4
	BNE	_ptenv0

	/* double map of PHYSDRAM, KZERO to PHYSDRAM for first few MBs */
	MOVW	$PTEDRAM, R2			/* PTE bits */
	MOVW	$PHYSDRAM, R3			/* pa */
	MOVW	$PADDR(L1+L1X(PHYSDRAM)), R4	/* address of PTE for PHYSDRAM */
	MOVW	$16, R5
_ptdbl:
	FILLPTE()
	SUB.S	$1, R5
	BNE	_ptdbl

	/*
	 * back up and fill in PTEs for memory at KZERO
	 * there is 1 bank of 512MB of SDRAM at PHYSDRAM
	 */
	MOVW	$PTEDRAM, R2			/* PTE bits */
	MOVW	$PHYSDRAM, R3
	MOVW	$PADDR(L1+L1X(KZERO)), R4	/* start with PTE for KZERO */
	MOVW	$512, R5			/* inner loop count */
_ptekrw:					/* set PTEs for 512MiB */
	FILLPTE()
	SUB.S	$1, R5
	BNE	_ptekrw

	/*
	 * back up and fill in PTE for MMIO
	 */
	MOVW	$PTEIO, R2			/* PTE bits */
	MOVW	$PHYSIO, R3
	MOVW	$PADDR(L1+L1X(VIRTIO)), R4	/* start with PTE for VIRTIO */
	FILLPTE()

	/* mmu.c sets up the vectors later */

	/*
	 * set up a temporary stack; avoid data & bss segments
	 */
	MOVW	$(PHYSDRAM | (128*1024*1024)), R13

	PUTC('P')
	/* set the domain access control */
	MOVW	$Client, R0
	BL	dacput(SB)

	/* set the translation table base */
	MOVW	$PADDR(L1), R0
	BL	ttbput(SB)

	MOVW	$0, R0
	BL	pidput(SB)			/* paranoia */

	/* the little dance to turn the MMU & caches on */
	PUTC('l')
	BL	cacheuwbinv(SB)
	BL	mmuinvalidate(SB)
	BL	mmuenable(SB)

	PUTC('a')
	/* warp the PC into the virtual map */
	MOVW	$KZERO, R0
	BL	_r15warp(SB)

	/*
	 * now running at KZERO+something!
	 */

	MOVW	$setR12(SB), R12		/* reload the SB */

	/*
	 * set up temporary stack again, in case we've just switched
	 * to a new register set.
	 */
	MOVW	$(KZERO|(128*1024*1024)), R13

	/* can now execute arbitrary C code */

	BL	cacheuwbinv(SB)

	PUTC('n')
	/* undo double map of 0, KZERO */
	MOVW	$PADDR(L1+L1X(0)), R4		/* address of PTE for 0 */
	MOVW	$0, R0
	MOVW	$16, R5
_ptudbl:
	MOVW	R0, (R4)
	ADD	$4, R4				/* bump PTE address */
	ADD	$MiB, R0			/* bump pa */
	SUB.S	$1, R5
	BNE	_ptudbl
	BARRIERS
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvd), CpTLBinvse
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	BARRIERS

	PUTC(' ')
	/* pass Mach to main and set up the stack */
	MOVW	$(MACHADDR), R0			/* Mach */
	MOVW	R0, R13
	ADD	$(MACHSIZE), R13		/* stack pointer */
	SUB	$4, R13				/* space for link register */
	BL	main(SB)			/* void main(Mach*) */
	/* fall through */

/* not used */
TEXT _reset(SB), 1, $-4
	/* turn the caches off */
	MOVW	$(PsrDirq|PsrDfiq|PsrMsvc), R0
	MOVW	R0, CPSR
	BARRIERS
	BL	cacheuwbinv(SB)
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpCwb|CpCicache|CpCdcache|CpCalign), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS
	PUTC('R')

	/* redo double map of 0, KZERO */
	MOVW	$(L1+L1X(0)), R4		/* address of PTE for 0 */
	MOVW	$PTEDRAM, R2			/* PTE bits */
	MOVW	$0, R3
	MOVW	$16, R5
_ptrdbl:
	ORR	R3, R2, R1			/* first identity-map 0 to 0, etc. */
	MOVW	R1, (R4)
	ADD	$4, R4				/* bump PTE address */
	ADD	$MiB, R3			/* bump pa */
	SUB.S	$1, R5
	BNE	_ptrdbl

	BARRIERS
	PUTC('e')
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvd), CpTLBinv
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	BARRIERS

	/* back to 29- or 26-bit addressing, mainly for SB */
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpCd32|CpCi32), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS

	/* turn the MMU off */
	MOVW	$PHYSDRAM, R0
	BL	_r15warp(SB)
	BL	mmuinvalidate(SB)
	BL	mmudisable(SB)

	PUTC('s')
	/* set new reset vector */
	MOVW	$0, R2
	MOVW	$0xe59ff018, R3			/* MOVW 0x18(R15), R15 */
	MOVW	R3, (R2)

	PUTC('e')
	MOVW	$PHYSBOOTROM, R3
	MOVW	R3, 0x20(R2)			/* where $0xe59ff018 jumps to */
	BARRIERS

	PUTC('t')
	PUTC('\r')
	PUTC('\n')
	/* ...and jump to it */
	MOVW	R2, R15				/* software reboot */
_limbo:						/* should not get here... */
	B	_limbo				/* ... and can't get out */
	BL	_div(SB)			/* hack to load _div, etc. */

TEXT _r15warp(SB), 1, $-4
	BIC	$KSEGM, R14
	ORR	R0, R14
	BIC	$KSEGM, R13
	ORR	R0, R13
	RET

/* clobbers R1, R6 */
TEXT myputc(SB), 1, $-4
	MOVW	$PHYSCONS, R6
_busy:
	MOVW	20(R6), R1
	BIC.S	$~(1<<5), R1		/* (x->lsr & LSRthre) == 0? */
	BEQ	_busy
	MOVW	R3, (R6)		/* print */
	ISB
	RET

/*
 * l1 caches
 */

TEXT l1cacheson(SB), 1, $-4
	MOVW	CPSR, R5
	ORR	$(PsrDirq|PsrDfiq), R5, R4
	MOVW	R4, CPSR		/* splhi */

	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	ORR	$(CpCdcache|CpCicache|CpCwb), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS

	MOVW	R5, CPSR		/* splx */
	RET

TEXT l1cachesoff(SB), 1, $-4
	MOVM.DB.W [R14], (SP)		/* save lr on stack */

	MOVW	CPSR, R5
	ORR	$(PsrDirq|PsrDfiq), R5, R4
	MOVW	R4, CPSR		/* splhi */

	BL	cacheuwbinv(SB)

	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpCdcache|CpCicache|CpCwb), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS

	MOVW	R5, CPSR		/* splx */
	MOVM.IA.W (SP), [R14]		/* restore lr */
	RET

/*
 * cache* functions affect only the L1 caches, which are VIVT.
 */
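
/*
 * In the test-and-clean loops below, the MRC with PC as the destination
 * register copies the coprocessor's result into the CPSR condition flags,
 * so the BNE that follows repeats the operation until the 926 reports
 * that no dirty lines remain.
 */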

TEXT cachedwb(SB), 1, $-4		/* D writeback */
	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	BARRIERS			/* force outstanding stores to cache */
	/* keep writing back dirty cache lines until no more exist */
_dwb:
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwb), CpCACHEtest
	BNE	_dwb
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	RET

TEXT cachedwbse(SB), 1, $-4		/* D writeback SE */
	MOVW	R0, R2			/* first arg: address */

	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	BARRIERS			/* force outstanding stores to cache */
	MOVW	4(FP), R1		/* second arg: size */

//	CMP.S	$(4*1024), R1
//	BGT	_dwb
	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_dwbse:
	MCR	CpSC, 0, R2, C(CpCACHE), C(CpCACHEwb), CpCACHEse
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_dwbse
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	RET

TEXT cachedwbinv(SB), 1, $-4		/* D writeback+invalidate */
	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	BARRIERS			/* force outstanding stores to cache */
	/* keep writing back dirty cache lines until no more exist */
_dwbinv:
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
	BNE	_dwbinv
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	RET

TEXT cachedwbinvse(SB), 1, $-4		/* D writeback+invalidate SE */
	MOVW	R0, R2			/* first arg: address */

	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	BARRIERS			/* force outstanding stores to cache */
	MOVW	4(FP), R1		/* second arg: size */
	DSB

//	CMP.S	$(4*1024), R1
//	BGT	_dwbinv
	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_dwbinvse:
	MCR	CpSC, 0, R2, C(CpCACHE), C(CpCACHEwbi), CpCACHEse
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_dwbinvse
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	RET

TEXT cachedinvse(SB), 1, $-4		/* D invalidate SE */
	MOVW	R0, R2			/* first arg: address */

	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	MOVW	4(FP), R1		/* second arg: size */
	DSB

//	CMP.S	$(4*1024), R1
//	BGT	_dinv
	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_dinvse:
	MCR	CpSC, 0, R2, C(CpCACHE), C(CpCACHEinvd), CpCACHEse
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_dinvse
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	RET

TEXT cacheuwbinv(SB), 1, $-4		/* D+I writeback+invalidate */
	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	BARRIERS			/* force outstanding stores to cache */
	/* keep writing back dirty cache lines until no more exist */
_uwbinv:				/* D writeback+invalidate */
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
	BNE	_uwbinv
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	$0, R0			/* I invalidate */
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR		/* splx */
	RET

TEXT cacheiinv(SB), 1, $-4		/* I invalidate */
	BARRIERS
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS
	RET

TEXT cachedinv(SB), 1, $-4		/* D invalidate */
_dinv:
	BARRIERS
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvd), CpCACHEall
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS
	RET

/*
 * l2 cache
 *
 * these functions assume that the necessary l1 cache operations have been
 * or will be done explicitly by the caller.
 */
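
/*
 * Typical use (a sketch only; nothing here enforces it): to push data out
 * to DRAM, write back L1 first (e.g. cachedwbse) and then the L2 range;
 * to discard stale data before reading memory a device has written,
 * invalidate L2 first and then L1.
 */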

/* enable l2 cache in config coproc. reg.  do this while l1 caches are off. */
TEXT l2cachecfgon(SB), 1, $-4
	BARRIERS
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
	BARRIERS

	MRC	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	ORR	$(CpTCl2ena | CpTCl2prefdis), R1	/* l2 on, prefetch off */
	MCR	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	BARRIERS
	RET

/* disable l2 cache in config coproc. reg.  do this while l1 caches are off. */
TEXT l2cachecfgoff(SB), 1, $-4
	BARRIERS
	MRC	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	BIC	$CpTCl2ena, R1
	MCR	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	BARRIERS

	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
	BARRIERS
	RET

TEXT l2cacheuwb(SB), 1, $-4		/* L2 unified writeback */
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2flush), CpTCl2all
	ISB
	RET

TEXT l2cacheuwbse(SB), 1, $-4		/* L2 unified writeback SE */
	MOVW	R0, R2			/* first arg: address */

	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	MOVW	4(FP), R1		/* second arg: size */
	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_l2wbse:
	MCR	CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2flush), CpTCl2seva
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_l2wbse
	ISB

	MOVW	R3, CPSR		/* splx */
	RET

TEXT l2cacheuwbinv(SB), 1, $-4		/* L2 unified writeback+invalidate */
	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2flush), CpTCl2all
	ISB
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
	ISB

	MOVW	R3, CPSR		/* splx */
	RET

TEXT l2cacheuwbinvse(SB), 1, $-4	/* L2 unified writeback+invalidate SE */
	MOVW	R0, R2			/* first arg: address */

	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	MOVW	4(FP), R1		/* second arg: size */
	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_l2wbinvse:
	MCR	CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2flush), CpTCl2seva
	ISB
	MCR	CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2inv), CpTCl2seva
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_l2wbinvse
	ISB

	MOVW	R3, CPSR		/* splx */
	RET

TEXT l2cacheuinv(SB), 1, $-4		/* L2 unified invalidate */
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
	ISB
	RET

TEXT l2cacheuinvse(SB), 1, $-4		/* L2 unified invalidate SE */
	MOVW	R0, R2			/* first arg: address */

	MOVW	CPSR, R3		/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	MOVW	4(FP), R1		/* second arg: size */
	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_l2invse:
	MCR	CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2inv), CpTCl2seva
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_l2invse
	ISB

	MOVW	R3, CPSR		/* splx */
	RET

/*
 * enable mmu, i and d caches, and high vector
 */
TEXT mmuenable(SB), 1, $-4
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	ORR	$(CpChv|CpCmmu|CpCdcache|CpCicache|CpCwb|CpCsystem), R0
	BIC	$(CpCrom), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS
	RET

TEXT mmudisable(SB), 1, $-4
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpChv|CpCmmu|CpCdcache|CpCicache|CpCwb), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS
	RET

TEXT mmuinvalidate(SB), 1, $-4		/* invalidate all */
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	BARRIERS
	RET

TEXT mmuinvalidateaddr(SB), 1, $-4	/* invalidate single entry */
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinvse
	BARRIERS
	RET

TEXT cpidget(SB), 1, $-4		/* main ID */
	MRC	CpSC, 0, R0, C(CpID), C(0), CpIDid
	RET

TEXT cpctget(SB), 1, $-4		/* cache type */
	MRC	CpSC, 0, R0, C(CpID), C(0), CpIDct
	RET

TEXT controlget(SB), 1, $-4		/* control */
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	RET

TEXT ttbget(SB), 1, $-4			/* translation table base */
	MRC	CpSC, 0, R0, C(CpTTB), C(0)
	RET

TEXT ttbput(SB), 1, $-4			/* translation table base */
	MCR	CpSC, 0, R0, C(CpTTB), C(0)
	ISB
	RET

TEXT dacget(SB), 1, $-4			/* domain access control */
	MRC	CpSC, 0, R0, C(CpDAC), C(0)
	RET

TEXT dacput(SB), 1, $-4			/* domain access control */
	MCR	CpSC, 0, R0, C(CpDAC), C(0)
	ISB
	RET

TEXT fsrget(SB), 1, $-4			/* fault status */
	MRC	CpSC, 0, R0, C(CpFSR), C(0)
	RET

TEXT farget(SB), 1, $-4			/* fault address */
	MRC	CpSC, 0, R0, C(CpFAR), C(0x0)
	RET

TEXT pidget(SB), 1, $-4			/* address translation pid */
	MRC	CpSC, 0, R0, C(CpPID), C(0x0)
	RET

TEXT pidput(SB), 1, $-4			/* address translation pid */
	MCR	CpSC, 0, R0, C(CpPID), C(0x0)
	ISB
	RET

TEXT splhi(SB), 1, $-4
	MOVW	$(MACHADDR+4), R2	/* save caller pc in Mach */
	MOVW	R14, 0(R2)

	MOVW	CPSR, R0		/* turn off interrupts */
	ORR	$(PsrDirq), R0, R1
	MOVW	R1, CPSR
	RET

TEXT spllo(SB), 1, $-4
	MOVW	CPSR, R0
	BIC	$(PsrDirq), R0, R1
	MOVW	R1, CPSR
	RET

TEXT splx(SB), 1, $-4
	MOVW	$(MACHADDR+0x04), R2	/* save caller pc in Mach */
	MOVW	R14, 0(R2)

	MOVW	R0, R1			/* reset interrupt level */
	MOVW	CPSR, R0
	MOVW	R1, CPSR
	RET

TEXT splxpc(SB), 1, $-4			/* for iunlock */
	MOVW	R0, R1
	MOVW	CPSR, R0
	MOVW	R1, CPSR
	RET

TEXT spldone(SB), 1, $0
	RET

TEXT islo(SB), 1, $-4
	MOVW	CPSR, R0
	AND	$(PsrDirq), R0
	EOR	$(PsrDirq), R0
	RET

TEXT splfhi(SB), $-4
	MOVW	CPSR, R0
	ORR	$(PsrDfiq|PsrDirq), R0, R1
	MOVW	R1, CPSR
	RET

//TEXT splflo(SB), $-4
//	MOVW	CPSR, R0
//	BIC	$(PsrDfiq), R0, R1
//	MOVW	R1, CPSR
//	RET

TEXT tas(SB), $-4
TEXT _tas(SB), $-4
	MOVW	R0, R1
	MOVW	$1, R0
	SWPW	R0, (R1)		/* fix: deprecated in armv7 */
	RET
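
/*
 * On ARMv6 and later, where SWP is deprecated, tas would presumably be
 * built from exclusive loads and stores instead (a sketch in generic ARM
 * syntax, not 5a, and unnecessary on this ARMv5 part):
 *
 *	1:	ldrex	r1, [r0]	@ read current lock word
 *		mov	r2, #1
 *		strex	r3, r2, [r0]	@ try to store 1
 *		cmp	r3, #0
 *		bne	1b		@ retry if the exclusive store failed
 *		mov	r0, r1		@ return the old value
 */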

//TEXT tas32(SB), 1, $-4
//	MOVW	R0, R1
//	MOVW	$0xDEADDEAD, R0
//	MOVW	R0, R3
//	SWPW	R0, (R1)
//	CMP.S	R0, R3
//	BEQ	_tasout
//	EOR	R3, R3			/* R3 = 0 */
//	CMP.S	R0, R3
//	BEQ	_tasout
//	MOVW	$1, R15			/* abort: lock != 0 && lock != $0xDEADDEAD */
//_tasout:
//	RET

TEXT clz(SB), 1, $-4
	CLZ(0, 0)			/* 0 is R0 */
	RET

TEXT setlabel(SB), 1, $-4
	MOVW	R13, 0(R0)		/* sp */
	MOVW	R14, 4(R0)		/* pc */
	BARRIERS
	MOVW	$0, R0
	RET

TEXT gotolabel(SB), 1, $-4
	MOVW	0(R0), R13		/* sp */
	MOVW	4(R0), R14		/* pc */
	BARRIERS
	MOVW	$1, R0
	RET

TEXT getcallerpc(SB), 1, $-4
	MOVW	0(R13), R0
	RET

TEXT _idlehands(SB), 1, $-4
	MOVW	CPSR, R3
//	ORR	$PsrDirq, R3, R1	/* splhi */
	BIC	$PsrDirq, R3, R1	/* spllo */
	MOVW	R1, CPSR

	MOVW	$0, R0			/* wait for interrupt */
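	/*
	 * This MCR is presumably the ARM926 "wait for interrupt" operation:
	 * the core idles in low power until an interrupt arrives, which is
	 * why interrupts were just enabled above.
	 */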
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEintr), CpCACHEwait
	ISB

	MOVW	R3, CPSR		/* splx */
	RET

TEXT barriers(SB), 1, $-4
	BARRIERS
	RET