/*
 * arm exception handlers
 */
#include "arm.s"

#undef B			/* B is for 'botch' */

/*
 * exception vectors, copied by trapinit() to somewhere useful
 */
TEXT vectors(SB), 1, $-4
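	/*
	 * each vector loads the PC from the word 0x20 bytes past itself:
	 * the arm pc reads as . + 8, so 0x18(R15) addresses the matching
	 * vtable entry laid out just after these 8 instructions.
	 */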
	MOVW	0x18(R15), R15		/* reset */
	MOVW	0x18(R15), R15		/* undefined instr. */
	MOVW	0x18(R15), R15		/* SWI & SMC */
	MOVW	0x18(R15), R15		/* prefetch abort */
	MOVW	0x18(R15), R15		/* data abort */
	MOVW	0x18(R15), R15		/* hypervisor call */
	MOVW	0x18(R15), R15		/* IRQ */
	MOVW	0x18(R15), R15		/* FIQ */

TEXT vtable(SB), 1, $-4
	WORD	$_vrst-KZERO(SB)	/* reset, in svc mode already */
	WORD	$_vund(SB)		/* undefined, switch to svc mode */
	WORD	$_vsvc(SB)		/* swi, in svc mode already */
	WORD	$_vpabt(SB)		/* prefetch abort, switch to svc mode */
	WORD	$_vdabt(SB)		/* data abort, switch to svc mode */
	WORD	$_vhype(SB)		/* hypervisor call */
	WORD	$_virq(SB)		/* IRQ, switch to svc mode */
	WORD	$_vfiq(SB)		/* FIQ, switch to svc mode */

/*
 * reset - start additional cpus
 */
TEXT _vrst(SB), 1, $-4
	/* running in the zero segment (pc is lower 256MB) */
	CPSMODE(PsrMsvc)		/* should be redundant */
	CPSID
	CPSAE
	SETEND(0)			/* force little-endian */
	BARRIERS
	SETZSB
	MOVW	$PsrMsvc, SPSR
	MOVW	$0, R14

	/* invalidate i-cache and branch-target cache */
	MTCP	CpSC, 0, PC, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
	BARRIERS

	BL	cpureset(SB)
spin:
	B	spin

/*
 * system call
 */
TEXT _vsvc(SB), 1, $-4			/* SWI */
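	/*
	 * clear the exclusive monitor so an interrupted LDREX cannot
	 * pair with a STREX executed after this exception
	 */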
	CLREX
	BARRIERS
	/* stack is m->stack */
	MOVW.W	R14, -4(R13)		/* ureg->pc = interrupted PC */
	MOVW	SPSR, R14		/* ureg->psr = SPSR */
	MOVW.W	R14, -4(R13)		/* ... */
	MOVW	$PsrMsvc, R14		/* ureg->type = PsrMsvc */
	MOVW.W	R14, -4(R13)		/* ... */

	/* avoid the ambiguity described in notes/movm.w. */
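	/* the .S bit stores the user-mode (banked) copies of r13 and r14 */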
	MOVM.DB.S [R0-R14], (R13)	/* save user level registers */
	SUB	$(NREGS*4), R13		/* r13 now points to ureg */

	MOVW	$setR12(SB), R12	/* Make sure we've got the kernel's SB loaded */

	/*
	 * set up m and up registers since user registers could contain anything
	 */
	CPUID(R1)
	SLL	$2, R1			/* convert to word index */
	MOVW	$machaddr(SB), R2
	ADD	R1, R2
	MOVW	(R2), R(MACH)		/* m = machaddr[cpuid] */
	CMP	$0, R(MACH)
	MOVW.EQ	$MACHADDR, R(MACH)	/* paranoia: use MACHADDR if 0 */
	MOVW	8(R(MACH)), R(USER)	/* up = m->proc */

	MOVW	((NREGS+1)*4)(R13), R2	/* saved SPSR (user mode) */
	MOVW	R13, R0			/* first arg is pointer to ureg */
	SUB	$8, R13			/* space for argument+link */

	BL	syscall(SB)
	/*
	 * caller saves on plan 9, so registers other than 9, 10, 13 & 14
	 * may have been trashed when we get here.
	 */
	MOVW	$setR12(SB), R12	/* reload kernel's SB */

	ADD	$(8+4*NREGS), R13	/* make r13 point to ureg->type */
	MOVW	8(R13), R14		/* restore link */
	MOVW	4(R13), R0		/* restore SPSR */

/*
 * return from user-mode exception.
 * expects new SPSR in R0.  R13 must point to ureg->type.
 */
_rfue:
TEXT rfue(SB), 1, $-4
	MOVW	R0, SPSR		/* ... */

	/*
	 * order on stack is type, psr, pc, but RFEV7 needs pc, psr.
	 * step on type and previous word to hold temporary values.
	 * we could instead change the order in which psr & pc are pushed.
	 */
	MOVW	4(R13), R1		/* psr */
	MOVW	8(R13), R2		/* pc */
	MOVW	R2, 4(R13)		/* pc */
	MOVW	R1, 8(R13)		/* psr */

	MOVM.DB.S (R13), [R0-R14]	/* restore user registers */
	ADD	$4, R13			/* pop type, sp -> pc */
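	/* RFEV7W (from arm.s): v7 RFE with writeback; loads pc, then psr, from (r13) */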
	RFEV7W(13)

TEXT _vund(SB), 1, $-4			/* undefined */
	/* sp is m->sund */
	MOVM.IA	[R0-R4], (R13)		/* free some working space */
	MOVW	$PsrMund, R0
	B	_vswitch

TEXT _vpabt(SB), 1, $-4			/* prefetch abort */
	/* sp is m->sabt */
	MOVM.IA	[R0-R4], (R13)		/* free some working space */
	MOVW	$PsrMabt, R0		/* r0 = type */
	B	_vswitch

TEXT _vdabt(SB), 1, $-4			/* data abort */
	/* sp is m->sabt */
	MOVM.IA	[R0-R4], (R13)		/* free some working space */
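	/* PsrMabt+1 distinguishes a data abort from a prefetch abort */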
	MOVW	$(PsrMabt+1), R0	/* r0 = type */
	B	_vswitch

TEXT _virq(SB), 1, $-4			/* IRQ */
	/* sp is m->sirq */
	MOVM.IA	[R0-R4], (R13)		/* free some working space */
	MOVW	$PsrMirq, R0		/* r0 = type */
	B	_vswitch

/*
 * come here with type in R0 and R13 pointing above saved [r0-r4].
 * we'll switch to SVC mode and then call trap.
 */
_vswitch:
//TEXT _vswtch(SB), 1, $-4		/* make symbol visible to debuggers */
	CLREX
	BARRIERS
	MOVW	SPSR, R1		/* save SPSR for ureg */
	/*
	 * R12 needs to be set before using PsrMbz, so BIGENDCHECK code has
	 * been moved below.
	 */
	MOVW	R14, R2			/* save interrupted pc for ureg */
	MOVW	R13, R3			/* save pointer to where the original [R0-R4] are */

	/*
	 * switch processor to svc mode.  this switches the banked registers
	 * (r13 [sp] and r14 [link]) to those of svc mode (so we must be sure
	 * to never get here already in svc mode).
	 */
	CPSMODE(PsrMsvc)		/* switch! */
	CPSID
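	/* the low 4 mode bits of the psr are zero only for user mode (PsrMusr = 0x10) */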
	AND.S	$0xf, R1, R4		/* interrupted code kernel or user? */
	BEQ	_userexcep

/*
 * here for trap from SVC mode
 */
	/*
	 * push ureg->{type, psr, pc} onto Msvc stack.
	 * r13 points to ureg->type after.
	 */
	MOVM.DB.W [R0-R2], (R13)
	MOVM.IA	(R3), [R0-R4]		/* restore [R0-R4] from previous mode's stack */

	/*
	 * avoid the ambiguity described in notes/movm.w.
	 * In order to get a predictable value in R13 after the stores,
	 * separate the store-multiple from the stack-pointer adjustment.
	 * We'll assume that the old value of R13 should be stored on the stack.
	 */
	/* save kernel level registers, at end r13 points to ureg */
	MOVM.DB	[R0-R14], (R13)
	SUB	$(NREGS*4), R13		/* SP now points to saved R0 */

	MOVW	$setR12(SB), R12	/* Make sure we've got the kernel's SB loaded */

	/* previous mode was svc, so the saved spsr should be sane. */
	MOVW	((NREGS+1)*4)(R13), R1
	MOVM.IA	(R13), [R0-R8]		/* restore a few user registers */

	MOVW	R13, R0			/* first arg is pointer to ureg */
	SUB	$(4*2), R13		/* space for argument+link (for debugger) */
	MOVW	$0xdeaddead, R11	/* marker */

	BL	trap(SB)		/* trap(ureg) */
	/*
	 * caller saves on plan 9, so registers other than 9, 10, 13 & 14
	 * may have been trashed when we get here.
	 */
	MOVW	$setR12(SB), R12	/* reload kernel's SB */

	ADD	$(4*2+4*NREGS), R13	/* make r13 point to ureg->type */

	/*
	 * if we interrupted a previous trap's handler and are now
	 * returning to it, we need to propagate the current R(MACH) (R10)
	 * by overriding the saved one on the stack, since we may have
	 * been rescheduled and be on a different processor now than
	 * at entry.
	 */
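	/* r13 points at ureg->type (ureg+NREGS*4), so this writes the saved r10 */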
	MOVW	R(MACH), (-(NREGS-MACH)*4)(R13) /* restore current cpu's MACH */

	MOVW	8(R13), R14		/* restore link */
	MOVW	4(R13), R0		/* restore SPSR */

	/* return from kernel-mode exception */
	MOVW	R0, SPSR		/* ... */

	/*
	 * order on stack is type, psr, pc, but RFEV7 needs pc, psr.
	 * step on type and previous word to hold temporary values.
	 * we could instead change the order in which psr & pc are pushed.
	 */
	MOVW	4(R13), R1		/* psr */
	MOVW	8(R13), R2		/* pc */
	MOVW	R2, 4(R13)		/* pc */
	MOVW	R1, 8(R13)		/* psr */

	/* restore kernel regs other than SP; we're using it */
	SUB	$(NREGS*4), R13
	MOVM.IA.W (R13), [R0-R12]
	ADD	$4, R13			/* skip saved kernel SP */
	MOVM.IA.W (R13), [R14]
	ADD	$4, R13			/* pop type, sp -> pc */
	BARRIERS
	RFEV7W(13)

/*
 * here for trap from USER mode
 */
_userexcep:
	MOVM.DB.W [R0-R2], (R13)	/* set ureg->{type, psr, pc}; r13 points to ureg->type */
	MOVM.IA	(R3), [R0-R4]		/* restore [R0-R4] from previous mode's stack */

	/* avoid the ambiguity described in notes/movm.w. */
	MOVM.DB.S [R0-R14], (R13)	/* save user level registers */
	SUB	$(NREGS*4), R13		/* r13 now points to ureg */

	MOVW	$setR12(SB), R12	/* Make sure we've got the kernel's SB loaded */

	/*
	 * set up m and up registers since user registers could contain anything
	 */
	CPUID(R1)
	SLL	$2, R1			/* convert to word index */
	MOVW	$machaddr(SB), R2
	ADD	R1, R2
	MOVW	(R2), R(MACH)		/* m = machaddr[cpuid] */
	CMP	$0, R(MACH)
	MOVW.EQ	$MACHADDR, R(MACH)	/* paranoia: use MACHADDR if 0 */
	MOVW	8(R(MACH)), R(USER)	/* up = m->proc */

	MOVW	((NREGS+1)*4)(R13), R2	/* saved SPSR */
	MOVW	R13, R0			/* first arg is pointer to ureg */
	SUB	$(4*2), R13		/* space for argument+link (for debugger) */

	BL	trap(SB)		/* trap(ureg) */
	/*
	 * caller saves on plan 9, so registers other than 9, 10, 13 & 14
	 * may have been trashed when we get here.
	 */
	ADD	$(4*2+4*NREGS), R13	/* make r13 point to ureg->type */
	MOVW	8(R13), R14		/* restore link */
	MOVW	4(R13), R0		/* restore SPSR */
	B	_rfue

TEXT _vfiq(SB), 1, $-4			/* FIQ */
	PUTC('?')
	PUTC('f')
	PUTC('i')
	PUTC('q')
	RFE				/* FIQ is special, ignore it for now */

TEXT _vhype(SB), 1, $-4
	PUTC('?')
	PUTC('h')
	PUTC('y')
	PUTC('p')
	RFE

/*
 * set the stack value for the mode passed in R0
 */
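/*
 * the trick: briefly switch CPSR to the target mode, with PsrDirq|PsrDfiq
 * keeping interrupts masked, swap the banked r13, then switch back.
 */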
TEXT setr13(SB), 1, $-4
	MOVW	4(FP), R1

	MOVW	CPSR, R2
	BIC	$(PsrMask|PsrMbz), R2, R3
	ORR	$(PsrDirq|PsrDfiq), R3
	ORR	R0, R3
	MOVW	R3, CPSR		/* switch to new mode */

	MOVW	R13, R0			/* return old sp */
	MOVW	R1, R13			/* install new one */

	MOVW	R2, CPSR		/* switch back to old mode */
	RET