#include "mem.h"

/*
 * Entered here from Compaq's bootldr with MMU disabled.
 */
TEXT _start(SB), $-4
	MOVW	$setR12(SB), R12	/* load the SB */
_main:
	/* SVC mode, interrupts disabled */
	MOVW	$(PsrDirq|PsrDfiq|PsrMsvc), R1
	MOVW	R1, CPSR

	/* disable the MMU */
	MOVW	$0x130, R1
	MCR	CpMMU, 0, R1, C(CpControl), C(0x0)

	/* flush caches */
	MCR	CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0
	/* drain prefetch */
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0

	/* drain write buffer */
	MCR	CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4

	MOVW	$(MACHADDR+4*BY2PG), R13	/* stack */
	SUB	$4, R13				/* link */
	BL	main(SB)
	BL	exit(SB)
	/* we shouldn't get here */
_mainloop:
	B	_mainloop
	BL	_div(SB)	/* hack to get _div etc loaded */

/* flush tlb's */
TEXT mmuinvalidate(SB), $-4
	MCR	CpMMU, 0, R0, C(CpTLBFlush), C(0x7)
	RET
/* flush a single tlb entry, for the address in R0 */
TEXT mmuinvalidateaddr(SB), $-4
	MCR	CpMMU, 0, R0, C(CpTLBFlush), C(0x6), 1
	RET
/* write back and invalidate i and d caches */
TEXT cacheflush(SB), $-4
	/* splhi */
	MOVW	CPSR, R3
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	/* write back any dirty data */
	MOVW	$0xe0000000,R0
	ADD	$(8*1024),R0,R1
_cfloop:
	MOVW.P	32(R0),R2
	CMP.S	R0,R1
	BGE	_cfloop

	/* drain write buffer and invalidate i cache contents */
	MCR	CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
	MCR	CpMMU, 0, R0, C(CpCacheFlush), C(0x5), 0

	/* drain prefetch */
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0

	/* splx */
	MOVW	R3, CPSR
	RET

/* write back d cache */
TEXT cachewb(SB), $-4
	/* write back any dirty data */
_cachewb:
	MOVW	$0xe0000000,R0
	ADD	$(8*1024),R0,R1
_cwbloop:
	MOVW.P	32(R0),R2
	CMP.S	R0,R1
	BGE	_cwbloop

	/* drain write buffer */
	MCR	CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
	RET

/* write back a single cache line */
TEXT cachewbaddr(SB), $-4
	BIC	$31,R0
	MCR	CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 1
	B	_wbflush

/* write back a region of cache lines */
TEXT cachewbregion(SB), $-4
	MOVW	4(FP),R1
	CMP.S	$(4*1024),R1
	BGT	_cachewb
	ADD	R0,R1
	BIC	$31,R0
_cwbrloop:
	MCR	CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 1
	ADD	$32,R0
	CMP.S	R0,R1
	BGT	_cwbrloop
	B	_wbflush
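/*
 * The same logic in C, for reference (a sketch, not part of this file;
 * cachewbline() stands in for the single-line MCR above):
 *
 *	void
 *	cachewbregion(ulong addr, int len)
 *	{
 *		ulong end;
 *
 *		if(len > 4*1024){
 *			cachewb();	// whole d-cache is cheaper
 *			return;
 *		}
 *		end = addr+len;
 *		for(addr &= ~31; addr < end; addr += 32)
 *			cachewbline(addr);	// clean one 32-byte line
 *		wbflush();	// drain the write buffer
 *	}
 */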
/* invalidate the dcache */
TEXT dcacheinvalidate(SB), $-4
	MCR	CpMMU, 0, R0, C(CpCacheFlush), C(0x6)
	RET

/* invalidate the icache */
TEXT icacheinvalidate(SB), $-4
	MCR	CpMMU, 0, R0, C(CpCacheFlush), C(0x9)
	RET

/* drain write buffer */
TEXT wbflush(SB), $-4
_wbflush:
	MCR	CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
	RET

/* return cpu id */
TEXT getcpuid(SB), $-4
	MRC	CpMMU, 0, R0, C(CpCPUID), C(0x0)
	RET

/* return fault status */
TEXT getfsr(SB), $-4
	MRC	CpMMU, 0, R0, C(CpFSR), C(0x0)
	RET

/* return mmu control register */
TEXT getcontrol(SB), $-4
	SUB	R0, R0
	MRC	CpMMU, 0, R0, C(CpControl), C(0x0)
	RET

/* return mmu dac register */
TEXT getdac(SB), $-4
	SUB	R0, R0
	MRC	CpMMU, 0, R0, C(CpDAC), C(0x0)
	RET

/* return mmu ttb register */
TEXT getttb(SB), $-4
	SUB	R0, R0
	MRC	CpMMU, 0, R0, C(CpTTB), C(0x0)
	RET

/* return fault address */
TEXT getfar(SB), $-4
	MRC	CpMMU, 0, R0, C(CpFAR), C(0x0)
	RET

/* set the translation table base */
TEXT putttb(SB), $-4
	MCR	CpMMU, 0, R0, C(CpTTB), C(0x0)
	RET

/*
 * enable mmu, i and d caches
 */
TEXT mmuenable(SB), $-4
	MRC	CpMMU, 0, R0, C(CpControl), C(0x0)
	ORR	$(CpCmmuena|CpCdcache|CpCicache|CpCwb|CpCsystem), R0
	BIC	$(CpCrom), R0
	MCR	CpMMU, 0, R0, C(CpControl), C(0x0)
	MOVW	R0, R0
	MOVW	R0, R0
	MOVW	R0, R0
	MOVW	R0, R0
	RET

TEXT mmudisable(SB), $-4
	MRC	CpMMU, 0, R0, C(CpControl), C(0x0)
	BIC	$(CpCmmuena|CpCdcache|CpCicache|CpCwb|CpCvivec), R0
	MCR	CpMMU, 0, R0, C(CpControl), C(0x0)
	RET

/*
 * use exception vectors at 0xffff0000
 */
TEXT mappedIvecEnable(SB), $-4
	MRC	CpMMU, 0, R0, C(CpControl), C(0x0)
	ORR	$(CpCvivec), R0
	MCR	CpMMU, 0, R0, C(CpControl), C(0x0)
	RET

TEXT mappedIvecDisable(SB), $-4
	MRC	CpMMU, 0, R0, C(CpControl), C(0x0)
	BIC	$(CpCvivec), R0
	MCR	CpMMU, 0, R0, C(CpControl), C(0x0)
	RET
/* set the domain access control register */
TEXT putdac(SB), $-4
	MCR	CpMMU, 0, R0, C(CpDAC), C(0x0)
	RET
/* set address translation pid */
TEXT putpid(SB), $-4
	MCR	CpMMU, 0, R0, C(CpPID), C(0x0)
	RET

/*
 * set the stack value for the mode passed in R0
 */
TEXT setr13(SB), $-4
	MOVW	4(FP), R1
	MOVW	CPSR, R2
	BIC	$PsrMask, R2, R3
	ORR	R0, R3
	MOVW	R3, CPSR	/* switch to the target mode */
	MOVW	R13, R0		/* return the old stack pointer */
	MOVW	R1, R13		/* install the new one */
	MOVW	R2, CPSR	/* switch back */
	RET
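/*
 * setr13 swaps in a new stack pointer for the banked R13 of the mode
 * given in the first argument and returns the old one.  Typical use at
 * trap initialization (a sketch; the stack name is illustrative):
 *
 *	setr13(PsrMirq, irqstack);	// give IRQ mode its own stack
 */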
/*
 * exception vectors, copied by trapinit() to somewhere useful
 */
TEXT vectors(SB), $-4
	MOVW	0x18(R15), R15	/* reset */
	MOVW	0x18(R15), R15	/* undefined */
	MOVW	0x18(R15), R15	/* SWI */
	MOVW	0x18(R15), R15	/* prefetch abort */
	MOVW	0x18(R15), R15	/* data abort */
	MOVW	0x18(R15), R15	/* reserved */
	MOVW	0x18(R15), R15	/* IRQ */
	MOVW	0x18(R15), R15	/* FIQ */

TEXT vtable(SB), $-4
	WORD	$_vsvc(SB)	/* reset, in svc mode already */
	WORD	$_vund(SB)	/* undefined, switch to svc mode */
	WORD	$_vsvc(SB)	/* swi, in svc mode already */
	WORD	$_vpabt(SB)	/* prefetch abort, switch to svc mode */
	WORD	$_vdabt(SB)	/* data abort, switch to svc mode */
	WORD	$_vsvc(SB)	/* reserved */
	WORD	$_virq(SB)	/* IRQ, switch to svc mode */
	WORD	$_vfiq(SB)	/* FIQ, switch to svc mode */
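/*
 * Each vector above is MOVW 0x18(R15), R15.  Reading R15 on the ARM
 * yields the instruction's own address plus 8, so the vector at offset
 * 4*n loads its new PC from offset 4*n + 0x18 + 8 = 4*n + 0x20, which
 * is the matching vtable entry as long as vtable is copied immediately
 * after the 0x20 bytes of vectors.  A sketch of the copy trapinit()
 * must do (EVECS, the vector base address, is illustrative):
 *
 *	memmove((void*)EVECS, vectors, 8*4);
 *	memmove((void*)(EVECS+0x20), vtable, 8*4);
 */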
TEXT _vrst(SB), $-4
	BL	resettrap(SB)

TEXT _vsvc(SB), $-4		/* SWI */
	MOVW.W	R14, -4(R13)	/* ureg->pc = interrupted PC */
	MOVW	SPSR, R14	/* ureg->psr = SPSR */
	MOVW.W	R14, -4(R13)	/* ... */
	MOVW	$PsrMsvc, R14	/* ureg->type = PsrMsvc */
	MOVW.W	R14, -4(R13)	/* ... */
	MOVM.DB.W.S [R0-R14], (R13)	/* save user level registers, at end r13 points to ureg */
	MOVW	$setR12(SB), R12	/* make sure we've got the kernel's SB loaded */
	MOVW	R13, R0		/* first arg is pointer to ureg */
	SUB	$8, R13		/* space for argument+link */
	BL	syscall(SB)
	ADD	$(8+4*15), R13	/* make r13 point to ureg->type */
	MOVW	8(R13), R14	/* restore link */
	MOVW	4(R13), R0	/* restore SPSR */
	MOVW	R0, SPSR	/* ... */
	MOVM.DB.S (R13), [R0-R14]	/* restore registers */
	ADD	$8, R13		/* pop past ureg->{type+psr} */
	RFE			/* MOVM.IA.S.W (R13), [R15] */
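/*
 * The frame built above is what the C side sees as a Ureg: the three
 * words pushed first end up above the fifteen saved registers.
 * Roughly (cf. ureg.h; layout read off the stores above):
 *
 *	struct Ureg {
 *		ulong	r0, r1, r2, r3, r4, r5, r6, r7;
 *		ulong	r8, r9, r10, r11, r12, r13, r14;
 *		ulong	type;	// exception type
 *		ulong	psr;	// saved SPSR
 *		ulong	pc;	// interrupted PC
 *	};
 *
 * which is why the handlers use ADD $(8+4*15), R13 to step over the
 * argument+link space and the 15 registers to reach ureg->type.
 */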
TEXT _vund(SB), $-4		/* undefined */
	MOVM.IA	[R0-R4], (R13)	/* free some working space */
	MOVW	$PsrMund, R0
	B	_vswitch

TEXT _vpabt(SB), $-4		/* prefetch abort */
	MOVM.IA	[R0-R4], (R13)	/* free some working space */
	MOVW	$PsrMabt, R0	/* r0 = type */
	B	_vswitch
TEXT _vdabt(SB), $-4		/* data abort */
	MOVM.IA	[R0-R4], (R13)	/* free some working space */
	MOVW	$(PsrMabt+1), R0	/* r0 = type */
	B	_vswitch
TEXT _virq(SB), $-4		/* IRQ */
	MOVM.IA	[R0-R4], (R13)	/* free some working space */
	MOVW	$PsrMirq, R0	/* r0 = type */
	B	_vswitch
/*
 * come here with type in R0 and R13 pointing above the saved [r0-r4].
 * we'll switch to SVC mode and then call trap.
 */
_vswitch:
	MOVW	SPSR, R1	/* save SPSR for ureg */
	MOVW	R14, R2		/* save interrupted pc for ureg */
	MOVW	R13, R3		/* save pointer to where the original [R0-R4] are */

	/* switch to svc mode */
	MOVW	CPSR, R14
	BIC	$PsrMask, R14
	ORR	$(PsrDirq|PsrDfiq|PsrMsvc), R14
	MOVW	R14, CPSR

	/* interrupted code kernel or user? */
	AND.S	$0xf, R1, R4
	BEQ	_userexcep
	/* here for trap from SVC mode */
	MOVM.DB.W [R0-R2], (R13)	/* set ureg->{type, psr, pc}; r13 points to ureg->type */
	MOVM.IA	(R3), [R0-R4]	/* restore [R0-R4] from previous mode's stack */
	MOVM.DB.W [R0-R14], (R13)	/* save kernel level registers, at end r13 points to ureg */
	MOVW	$setR12(SB), R12	/* make sure we've got the kernel's SB loaded */
	MOVW	R13, R0		/* first arg is pointer to ureg */
	SUB	$8, R13		/* space for argument+link (for debugger) */
	MOVW	$0xdeaddead,R11	/* marker */
	BL	trap(SB)
	ADD	$(8+4*15), R13	/* make r13 point to ureg->type */
	MOVW	8(R13), R14	/* restore link */
	MOVW	4(R13), R0	/* restore SPSR */
	MOVW	R0, SPSR	/* ... */
	MOVM.DB	(R13), [R0-R14]	/* restore registers */
	ADD	$8, R13		/* pop past ureg->{type+psr} */
	RFE			/* MOVM.IA.S.W (R13), [R15] */

	/* here for trap from USER mode */
_userexcep:
	MOVM.DB.W [R0-R2], (R13)	/* set ureg->{type, psr, pc}; r13 points to ureg->type */
	MOVM.IA	(R3), [R0-R4]	/* restore [R0-R4] from previous mode's stack */
	MOVM.DB.W.S [R0-R14], (R13)	/* save user level registers, at end r13 points to ureg */
	MOVW	$setR12(SB), R12	/* make sure we've got the kernel's SB loaded */
	MOVW	R13, R0		/* first arg is pointer to ureg */
	SUB	$8, R13		/* space for argument+link (for debugger) */
	BL	trap(SB)
	ADD	$(8+4*15), R13	/* make r13 point to ureg->type */
	MOVW	8(R13), R14	/* restore link */
	MOVW	4(R13), R0	/* restore SPSR */
	MOVW	R0, SPSR	/* ... */
	MOVM.DB.S (R13), [R0-R14]	/* restore registers */
	ADD	$8, R13		/* pop past ureg->{type+psr} */
	RFE			/* MOVM.IA.S.W (R13), [R15] */

TEXT _vfiq(SB), $-4		/* FIQ */
	RFE			/* FIQ is special, ignore it for now */
/*
 * This is the first jump from kernel to user mode.
 * Fake a return from interrupt.
 *
 * Enter with R0 containing the user stack pointer.
 * UTZERO + 0x20 is always the entry point.
 */
TEXT touser(SB),$-4
	/* store the user stack pointer into the USR_r13 */
	MOVM.DB.W [R0], (R13)
	MOVM.S.IA.W (R13),[R13]

	/* set up a PSR for user level */
	MOVW	$(PsrMusr), R0
	MOVW	R0,SPSR

	/* save the PC on the stack */
	MOVW	$(UTZERO+0x20), R0
	MOVM.DB.W [R0],(R13)

	/* return from interrupt */
	RFE			/* MOVM.IA.S.W (R13), [R15] */
/*
 * here to jump to a newly forked process
 */
TEXT forkret(SB),$-4
	ADD	$(4*15), R13	/* make r13 point to ureg->type */
	MOVW	8(R13), R14	/* restore link */
	MOVW	4(R13), R0	/* restore SPSR */
	MOVW	R0, SPSR	/* ... */
	MOVM.DB.S (R13), [R0-R14]	/* restore registers */
	ADD	$8, R13		/* pop past ureg->{type+psr} */
	RFE			/* MOVM.IA.S.W (R13), [R15] */
TEXT splhi(SB), $-4
	/* save caller pc in Mach */
	MOVW	$(MACHADDR+0x04),R2
	MOVW	R14,0(R2)
	/* turn off interrupts */
	MOVW	CPSR, R0
	ORR	$(PsrDirq), R0, R1
	MOVW	R1, CPSR
	RET

TEXT spllo(SB), $-4
	MOVW	CPSR, R0
	BIC	$(PsrDirq), R0, R1
	MOVW	R1, CPSR
	RET

TEXT splx(SB), $-4
	/* save caller pc in Mach */
	MOVW	$(MACHADDR+0x04),R2
	MOVW	R14,0(R2)
	/* reset interrupt level */
	MOVW	R0, R1
	MOVW	CPSR, R0
	MOVW	R1, CPSR
	RET
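/*
 * splhi returns the previous PSR and splx restores it, so interrupt
 * critical sections follow the usual Plan 9 idiom:
 *
 *	int s;
 *
 *	s = splhi();
 *	// ... code that must run with interrupts off ...
 *	splx(s);
 */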
TEXT splxpc(SB), $-4		/* for iunlock */
	MOVW	R0, R1
	MOVW	CPSR, R0
	MOVW	R1, CPSR
	RET

TEXT spldone(SB), $0
	RET

TEXT islo(SB), $-4
	MOVW	CPSR, R0
	AND	$(PsrDirq), R0
	EOR	$(PsrDirq), R0
	RET

TEXT cpsrr(SB), $-4
	MOVW	CPSR, R0
	RET

TEXT spsrr(SB), $-4
	MOVW	SPSR, R0
	RET

TEXT getsp(SB), $-4
	MOVW	R13, R0
	RET

TEXT getlink(SB), $-4
	MOVW	R14, R0
	RET

TEXT getcallerpc(SB), $-4
	MOVW	0(R13), R0
	RET
TEXT tas(SB), $-4
	MOVW	R0, R1		/* R1 = lock address */
	MOVW	$0xDEADDEAD, R0	/* the "locked" value */
	MOVW	R0, R3
	SWPW	R0, (R1)	/* R0 = old value; lock = 0xDEADDEAD */
	CMP.S	R0, R3
	BEQ	_tasout		/* was already locked */
	EOR	R3, R3		/* R3 = 0 */
	CMP.S	R0, R3
	BEQ	_tasout		/* was free; we hold it now */
	MOVW	$1,R15		/* neither value: corrupted lock; jump to 1 and fault */
_tasout:
	RET
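/*
 * tas returns 0 when the lock word was free (and is now held) and
 * non-zero when it was already held; any other value is a corrupted
 * lock and crashes above.  A spin lock is then roughly:
 *
 *	while(tas(&l->key) != 0)
 *		;	// spin until the holder releases it
 */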
TEXT setlabel(SB), $-4
	MOVW	R13, 0(R0)	/* sp */
	MOVW	R14, 4(R0)	/* pc */
	MOVW	$0, R0
	RET

TEXT gotolabel(SB), $-4
	MOVW	0(R0), R13	/* sp */
	MOVW	4(R0), R14	/* pc */
	MOVW	$1, R0
	RET
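/*
 * setlabel/gotolabel are the kernel's setjmp/longjmp over {sp, pc}:
 * setlabel returns 0 when recording the label, and gotolabel restarts
 * there with setlabel appearing to return 1.  The scheduler idiom is
 * roughly:
 *
 *	if(setlabel(&up->sched) == 0)
 *		gotolabel(&m->sched);	// resumed later via up->sched
 */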
/*
 * save the state machine in power_state[] for an upcoming suspend
 */
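/*
 * Layout of power_state[], in byte offsets, as read from the stores
 * below:
 *
 *	0-52	svc R1-R14
 *	56	svc SPSR
 *	60	CPSR
 *	64-76	irq SPSR, R12-R14
 *	80-92	und SPSR, R12-R14
 *	96-108	abt SPSR, R12-R14
 *	112-140	fiq SPSR, R8-R14
 *	144-164	MMU: DAC, TTB, control, FSR, FAR, PID
 *	168-180	usr SPSR, R12-R14
 */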
TEXT setpowerlabel(SB), $-4
	MOVW	$power_state+0(SB), R0
	/* svc */
	MOVW	R1, 0(R0)
	MOVW	R2, 4(R0)
	MOVW	R3, 8(R0)
	MOVW	R4, 12(R0)
	MOVW	R5, 16(R0)
	MOVW	R6, 20(R0)
	MOVW	R7, 24(R0)
	MOVW	R8, 28(R0)
	MOVW	R9, 32(R0)
	MOVW	R10,36(R0)
	MOVW	R11,40(R0)
	MOVW	R12,44(R0)
	MOVW	R13,48(R0)
	MOVW	R14,52(R0)
	MOVW	SPSR, R1
	MOVW	R1, 56(R0)
	MOVW	CPSR, R2
	MOVW	R2, 60(R0)
	/* copro */
	MRC	CpMMU, 0, R3, C(CpDAC), C(0x0)
	MOVW	R3, 144(R0)
	MRC	CpMMU, 0, R3, C(CpTTB), C(0x0)
	MOVW	R3, 148(R0)
	MRC	CpMMU, 0, R3, C(CpControl), C(0x0)
	MOVW	R3, 152(R0)
	MRC	CpMMU, 0, R3, C(CpFSR), C(0x0)
	MOVW	R3, 156(R0)
	MRC	CpMMU, 0, R3, C(CpFAR), C(0x0)
	MOVW	R3, 160(R0)
	MRC	CpMMU, 0, R3, C(CpPID), C(0x0)
	MOVW	R3, 164(R0)
	/* usr */
	BIC	$(PsrMask), R2, R3
	ORR	$(0xdf), R3
	MOVW	R3, CPSR
	MOVW	SPSR, R11
	MOVW	R11, 168(R0)
	MOVW	R12, 172(R0)
	MOVW	R13, 176(R0)
	MOVW	R14, 180(R0)
	/* irq */
	BIC	$(PsrMask), R2, R3
	ORR	$(0xd2), R3
	MOVW	R3, CPSR
	MOVW	SPSR, R11
	MOVW	R11, 64(R0)
	MOVW	R12, 68(R0)
	MOVW	R13, 72(R0)
	MOVW	R14, 76(R0)
	/* und */
	BIC	$(PsrMask), R2, R3
	ORR	$(0xdb), R3
	MOVW	R3, CPSR
	MOVW	SPSR, R11
	MOVW	R11, 80(R0)
	MOVW	R12, 84(R0)
	MOVW	R13, 88(R0)
	MOVW	R14, 92(R0)
	/* abt */
	BIC	$(PsrMask), R2, R3
	ORR	$(0xd7), R3
	MOVW	R3, CPSR
	MOVW	SPSR, R11
	MOVW	R11, 96(R0)
	MOVW	R12, 100(R0)
	MOVW	R13, 104(R0)
	MOVW	R14, 108(R0)
	/* fiq */
	BIC	$(PsrMask), R2, R3
	ORR	$(0xd1), R3
	MOVW	R3, CPSR
	MOVW	SPSR, R7
	MOVW	R7, 112(R0)
	MOVW	R8, 116(R0)
	MOVW	R9, 120(R0)
	MOVW	R10,124(R0)
	MOVW	R11,128(R0)
	MOVW	R12,132(R0)
	MOVW	R13,136(R0)
	MOVW	R14,140(R0)
	/* done */
	MOVW	R2, CPSR
	MOVW	R1, SPSR
	MOVW	$0, R0
	RET
/*
 * Entered after a resume from suspend state.
 * The bootldr jumps here after a processor reset.
 */
TEXT power_resume(SB), $-4
	MOVW	$setR12(SB), R12	/* load the SB */
	/* SVC mode, interrupts disabled */
	MOVW	$(PsrDirq|PsrDfiq|PsrMsvc), R1
	MOVW	R1, CPSR
	/* gotopowerlabel() */
	/* svc */
	MOVW	$power_state+0(SB), R0
	MOVW	56(R0), R1	/* R1: SPSR, R2: CPSR */
	MOVW	60(R0), R2
	MOVW	R1, SPSR
	MOVW	R2, CPSR
	/* copro */
	/* flush caches */
	MCR	CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0
	/* drain prefetch */
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	/* drain write buffer */
	MCR	CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
	MCR	CpMMU, 0, R0, C(CpTLBFlush), C(0x7)
	MOVW	144(R0), R3
	MCR	CpMMU, 0, R3, C(CpDAC), C(0x0)
	MOVW	148(R0), R3
	MCR	CpMMU, 0, R3, C(CpTTB), C(0x0)
	MOVW	156(R0), R3
	MCR	CpMMU, 0, R3, C(CpFSR), C(0x0)
	MOVW	160(R0), R3
	MCR	CpMMU, 0, R3, C(CpFAR), C(0x0)
	MOVW	164(R0), R3
	MCR	CpMMU, 0, R3, C(CpPID), C(0x0)
	MOVW	152(R0), R3
	MCR	CpMMU, 0, R3, C(CpControl), C(0x0)	/* enable cache */
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	/* flush i&d caches */
	MCR	CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0
	/* flush tlb */
	MCR	CpMMU, 0, R0, C(CpTLBFlush), C(0x7), 0
	/* drain prefetch */
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	/* usr */
	BIC	$(PsrMask), R2, R3
	ORR	$(0xdf), R3
	MOVW	R3, CPSR
	MOVW	168(R0), R11
	MOVW	172(R0), R12
	MOVW	176(R0), R13
	MOVW	180(R0), R14
	MOVW	R11, SPSR
	/* irq */
	BIC	$(PsrMask), R2, R3
	ORR	$(0xd2), R3
	MOVW	R3, CPSR
	MOVW	64(R0), R11
	MOVW	68(R0), R12
	MOVW	72(R0), R13
	MOVW	76(R0), R14
	MOVW	R11, SPSR
	/* und */
	BIC	$(PsrMask), R2, R3
	ORR	$(0xdb), R3
	MOVW	R3, CPSR
	MOVW	80(R0), R11
	MOVW	84(R0), R12
	MOVW	88(R0), R13
	MOVW	92(R0), R14
	MOVW	R11, SPSR
	/* abt */
	BIC	$(PsrMask), R2, R3
	ORR	$(0xd7), R3
	MOVW	R3, CPSR
	MOVW	96(R0), R11
	MOVW	100(R0), R12
	MOVW	104(R0), R13
	MOVW	108(R0), R14
	MOVW	R11, SPSR
	/* fiq */
	BIC	$(PsrMask), R2, R3
	ORR	$(0xd1), R3
	MOVW	R3, CPSR
	MOVW	112(R0), R7
	MOVW	116(R0), R8
	MOVW	120(R0), R9
	MOVW	124(R0), R10
	MOVW	128(R0), R11
	MOVW	132(R0), R12
	MOVW	136(R0), R13
	MOVW	140(R0), R14
	MOVW	R7, SPSR
	/* svc */
	MOVW	56(R0), R1
	MOVW	60(R0), R2
	MOVW	R1, SPSR
	MOVW	R2, CPSR
	MOVW	0(R0), R1
	MOVW	4(R0), R2
	MOVW	8(R0), R3
	MOVW	12(R0),R4
	MOVW	16(R0),R5
	MOVW	20(R0),R6
	MOVW	24(R0),R7
	MOVW	28(R0),R8
	MOVW	32(R0),R9
	MOVW	36(R0),R10
	MOVW	40(R0),R11
	MOVW	44(R0),R12
	MOVW	48(R0),R13
	MOVW	52(R0),R14
	RET

loop:
	B	loop
TEXT power_down(SB), $-4
TEXT sa1100_power_off<>+0(SB),$8
	MOVW	resetregs+0(SB),R7
	MOVW	gpioregs+0(SB),R6
	MOVW	memconfregs+0(SB),R5
	MOVW	powerregs+0(SB),R3

	/* wakeup on power | rtc */
	MOVW	$(PWR_rtc|PWR_gpio0),R2
	MOVW	R2,0xc(R3)
	/* clear reset status */
	MOVW	$(RCSR_all), R2
	MOVW	R2, 0x4(R7)
	/* float */
	MOVW	$(PCFR_opde|PCFR_fp|PCFR_fs), R2
	MOVW	R2,0x10(R3)
	/* sleep state */
	MOVW	$0,R2
	MOVW	R2,0x18(R3)
	/* set resume address (pspr) */
	MOVW	$resumeaddr+0(SB),R1
	MOVW	0x0(R1), R2
	MOVW	R2,0x8(R3)
	BL	cacheflush(SB)
	/* disable clock switching */
	MCR	CpPWR, 0, R1, C(CpTest), C(0x2), 2
	/* adjust mem timing */
	MOVW	memconfregs+0(SB),R5
	MOVW	0x1c(R5), R2
	ORR	$(MDREFR_k1db2), R2
	MOVW	R2, 0x1c(R5)
	/* set PLL to lower speed w/ delay (ppcr = 0) */
	MOVW	powerregs+0(SB),R3
	MOVW	$(120*206),R0
l11:	SUB	$1,R0
	BGT	l11
	MOVW	$0, R2
	MOVW	R2, 0x14(R3)
	MOVW	$(120*206),R0
l12:	SUB	$1,R0
	BGT	l12
	/* setup registers for suspend procedure:
	 * 1. clear RT in mscx (R1, R7, R8)
	 * 2. clear DRI in mdrefr (R4)
	 * 3. set slfrsh in mdrefr (R6)
	 * 4. clear DE in mdcnfg (R9)
	 * 5. clear dram refresh (R10)
	 * 6. force sleep (R2)
	 */
	/* 1 */
	MOVW	0x10(R5), R2
	BIC	$(MSC_rt), R2
	MOVW	R2, R1
	MOVW	0x14(R5), R2
	BIC	$(MSC_rt), R2
	MOVW	R2, R7
	MOVW	0x2c(R5), R2
	BIC	$(MSC_rt), R2
	MOVW	R2, R8
	/* 2 */
	MOVW	0x1c(R5), R2
	BIC	$(0xff00), R2
	BIC	$(0x00f0), R2
	MOVW	R2, R4
	/* 3 */
	ORR	$(MDREFR_slfrsh), R2, R6
	/* 4 */
	MOVW	0x0(R5), R9
	BIC	$(MDCFNG_de), R9, R9
	/* 5 */
	MOVW	R4, R2
	BIC	$(MDREFR_slfrsh), R2, R2
	BIC	$(MDREFR_e1pin), R2, R2
	MOVW	R2, R10
	/* 6 */
	MOVW	$1,R2
TEXT power_magic(SB), $-4
	/* power_code gets copied into the area of no-ops below,
	 * at a cache-line boundary (8 instructions)
	 */
	MOVW	R0, R0
	MOVW	R0, R0
	MOVW	R0, R0
	MOVW	R0, R0
	MOVW	R0, R0
	MOVW	R0, R0
	MOVW	R0, R0
	MOVW	R0, R0
	MOVW	R0, R0
	MOVW	R0, R0
	MOVW	R0, R0
	MOVW	R0, R0
	MOVW	R0, R0
	MOVW	R0, R0
	MOVW	R0, R0
	MOVW	R0, R0
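/*
 * A sketch of that copy in C (illustrative only: "codesize" stands for
 * the length of power_code, and the power-management C code is assumed
 * to do the equivalent before calling power_down):
 *
 *	ulong dst;
 *
 *	dst = ((ulong)power_magic + 31) & ~31;	// first cache-line boundary
 *	memmove((void*)dst, (void*)power_code, codesize);
 *	cacheflush();
 */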
TEXT power_code(SB), $-4
	/* Follow the procedure; this code gets copied to the no-op
	 * area preceding this code
	 */
	/* 1 */
	MOVW	R1, 0x10(R5)
	MOVW	R7, 0x14(R5)
	MOVW	R8, 0x2c(R5)
	/* 2 */
	MOVW	R4, 0x1c(R5)
	/* 3 */
	MOVW	R6, 0x1c(R5)
	/* 4 */
	MOVW	R9, 0x0(R5)
	/* 5 */
	MOVW	R10, 0x1c(R5)
	/* 6 */
	MOVW	R2, 0x0(R3)
slloop:
	B	slloop		/* loop waiting for sleep */
/* The first MCR instruction of this function needs to be on a cache-line
 * boundary; to make this happen, it will be copied to the first cache-line
 * boundary 8 words from the start of doze.
 *
 * Doze puts the machine into idle mode.  Any interrupt will get it out
 * at the next instruction (the RET, to be precise).
 */
TEXT doze(SB), $-4
	MOVW	$UCDRAMZERO, R1
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	MOVW	R0,R0
	RET

TEXT doze_code(SB), $-4
	MCR	CpPWR, 0, R0, C(CpTest), C(0x2), 2
	MOVW	(R1), R0
	MCR	CpPWR, 0, R0, C(CpTest), C(0x8), 2