/* lexception.s */
/*
 * arm exception handlers
 */
  4. #include "arm.s"
/*
 * exception vectors, copied by trapinit() to somewhere useful
 */
  8. TEXT vectors(SB), 1, $-4
  9. MOVW 0x18(R15), R15 /* reset */
  10. MOVW 0x18(R15), R15 /* undefined instr. */
  11. MOVW 0x18(R15), R15 /* SWI & SMC */
  12. MOVW 0x18(R15), R15 /* prefetch abort */
  13. MOVW 0x18(R15), R15 /* data abort */
  14. MOVW 0x18(R15), R15 /* reserved */
  15. MOVW 0x18(R15), R15 /* IRQ */
  16. MOVW 0x18(R15), R15 /* FIQ */
  17. TEXT vtable(SB), 1, $-4
  18. WORD $_vsvc(SB) /* reset, in svc mode already */
  19. WORD $_vund(SB) /* undefined, switch to svc mode */
  20. WORD $_vsvc(SB) /* swi, in svc mode already */
  21. WORD $_vpabt(SB) /* prefetch abort, switch to svc mode */
  22. WORD $_vdabt(SB) /* data abort, switch to svc mode */
  23. WORD $_vsvc(SB) /* reserved */
  24. WORD $_virq(SB) /* IRQ, switch to svc mode */
  25. WORD $_vfiq(SB) /* FIQ, switch to svc mode */
  26. TEXT _vsvc(SB), 1, $-4 /* SWI */
  27. MOVW.W R14, -4(R13) /* ureg->pc = interrupted PC */
  28. MOVW SPSR, R14 /* ureg->psr = SPSR */
  29. MOVW.W R14, -4(R13) /* ... */
  30. MOVW $PsrMsvc, R14 /* ureg->type = PsrMsvc */
  31. MOVW.W R14, -4(R13) /* ... */
  32. /* avoid the ambiguity described in notes/movm.w. */
  33. MOVM.DB.S [R0-R14], (R13) /* save user level registers */
  34. SUB $(15*4), R13 /* r13 now points to ureg */
  35. MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
  36. // MOVW $(KSEG0+16*KiB-MACHSIZE), R10 /* m */
  37. MOVW $(MACHADDR), R10 /* m */
  38. MOVW 8(R10), R9 /* up */
  39. MOVW R13, R0 /* first arg is pointer to ureg */
  40. SUB $8, R13 /* space for argument+link */
  41. BL syscall(SB)
  42. ADD $(8+4*15), R13 /* make r13 point to ureg->type */
  43. MOVW 8(R13), R14 /* restore link */
  44. MOVW 4(R13), R0 /* restore SPSR */
  45. MOVW R0, SPSR /* ... */
  46. MOVM.DB.S (R13), [R0-R14] /* restore registers */
  47. ADD $8, R13 /* pop past ureg->{type+psr} */
  48. RFE /* MOVM.IA.S.W (R13), [R15] */
  49. TEXT _vund(SB), 1, $-4 /* undefined */
  50. MOVM.IA [R0-R4], (R13) /* free some working space */
  51. MOVW $PsrMund, R0
  52. B _vswitch
  53. TEXT _vpabt(SB), 1, $-4 /* prefetch abort */
  54. MOVM.IA [R0-R4], (R13) /* free some working space */
  55. MOVW $PsrMabt, R0 /* r0 = type */
  56. B _vswitch
  57. TEXT _vdabt(SB), 1, $-4 /* data abort */
  58. MOVM.IA [R0-R4], (R13) /* free some working space */
  59. MOVW $(PsrMabt+1), R0 /* r0 = type */
  60. B _vswitch
  61. TEXT _virq(SB), 1, $-4 /* IRQ */
  62. MOVM.IA [R0-R4], (R13) /* free some working space */
  63. MOVW $PsrMirq, R0 /* r0 = type */
  64. B _vswitch
  65. /*
  66. * come here with type in R0 and R13 pointing above saved [r0-r4].
  67. * we'll switch to SVC mode and then call trap.
  68. */
  69. _vswitch:
  70. MOVW SPSR, R1 /* save SPSR for ureg */
  71. MOVW R14, R2 /* save interrupted pc for ureg */
  72. MOVW R13, R3 /* save pointer to where the original [R0-R4] are */
  73. /*
  74. * switch processor to svc mode. this switches the banked registers
  75. * (r13 [sp] and r14 [link]) to those of svc mode.
  76. */
  77. MOVW CPSR, R14
  78. BIC $PsrMask, R14
  79. ORR $(PsrDirq|PsrMsvc), R14
  80. MOVW R14, CPSR /* switch! */
  81. AND.S $0xf, R1, R4 /* interrupted code kernel or user? */
  82. BEQ _userexcep
  83. /* here for trap from SVC mode */
  84. MOVM.DB.W [R0-R2], (R13) /* set ureg->{type, psr, pc}; r13 points to ureg->type */
  85. MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
  86. /*
  87. * avoid the ambiguity described in notes/movm.w.
  88. * In order to get a predictable value in R13 after the stores,
  89. * separate the store-multiple from the stack-pointer adjustment.
  90. * We'll assume that the old value of R13 should be stored on the stack.
  91. */
  92. /* save kernel level registers, at end r13 points to ureg */
  93. MOVM.DB [R0-R14], (R13)
  94. SUB $(15*4), R13 /* SP now points to saved R0 */
  95. MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
  96. MOVW R13, R0 /* first arg is pointer to ureg */
  97. SUB $(4*2), R13 /* space for argument+link (for debugger) */
  98. MOVW $0xdeaddead, R11 /* marker */
  99. BL trap(SB)
  100. ADD $(4*2+4*15), R13 /* make r13 point to ureg->type */
  101. MOVW 8(R13), R14 /* restore link */
  102. MOVW 4(R13), R0 /* restore SPSR */
  103. MOVW R0, SPSR /* ... */
  104. MOVM.DB (R13), [R0-R14] /* restore registers */
  105. ADD $(4*2), R13 /* pop past ureg->{type+psr} to pc */
  106. RFE /* MOVM.IA.S.W (R13), [R15] */
  107. /* here for trap from USER mode */
  108. _userexcep:
  109. MOVM.DB.W [R0-R2], (R13) /* set ureg->{type, psr, pc}; r13 points to ureg->type */
  110. MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
  111. /* avoid the ambiguity described in notes/movm.w. */
  112. MOVM.DB.S [R0-R14], (R13) /* save kernel level registers */
  113. SUB $(15*4), R13 /* r13 now points to ureg */
  114. MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
  115. // MOVW $(KSEG0+16*KiB-MACHSIZE), R10 /* m */
  116. MOVW $(MACHADDR), R10 /* m */
  117. MOVW 8(R10), R9 /* up */
  118. MOVW R13, R0 /* first arg is pointer to ureg */
  119. SUB $(4*2), R13 /* space for argument+link (for debugger) */
  120. BL trap(SB)
  121. ADD $(4*2+4*15), R13 /* make r13 point to ureg->type */
  122. MOVW 8(R13), R14 /* restore link */
  123. MOVW 4(R13), R0 /* restore SPSR */
  124. MOVW R0, SPSR /* ... */
  125. MOVM.DB.S (R13), [R0-R14] /* restore registers */
  126. ADD $(4*2), R13 /* pop past ureg->{type+psr} */
  127. RFE /* MOVM.IA.S.W (R13), [R15] */
  128. TEXT _vfiq(SB), 1, $-4 /* FIQ */
  129. MOVW $PsrMfiq, R8 /* trap type */
  130. MOVW SPSR, R9 /* interrupted psr */
  131. MOVW R14, R10 /* interrupted pc */
  132. MOVM.DB.W [R8-R10], (R13) /* save in ureg */
  133. MOVM.DB.W.S [R0-R14], (R13) /* save interrupted regs */
  134. MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
  135. MOVW $(MACHADDR), R10 /* m */
  136. MOVW 8(R10), R9 /* up */
  137. MOVW R13, R0 /* first arg is pointer to ureg */
  138. SUB $(4*2), R13 /* space for argument+link (for debugger) */
  139. BL fiq(SB)
  140. ADD $(8+4*15), R13 /* make r13 point to ureg->type */
  141. MOVW 8(R13), R14 /* restore link */
  142. MOVW 4(R13), R0 /* restore SPSR */
  143. MOVW R0, SPSR /* ... */
  144. MOVM.DB.S (R13), [R0-R14] /* restore registers */
  145. ADD $8, R13 /* pop past ureg->{type+psr} */
  146. RFE /* MOVM.IA.S.W (R13), [R15] */
  147. /*
  148. * set the stack value for the mode passed in R0
  149. */
  150. TEXT setr13(SB), 1, $-4
  151. MOVW 4(FP), R1
  152. MOVW CPSR, R2
  153. BIC $PsrMask, R2, R3
  154. ORR R0, R3
  155. MOVW R3, CPSR /* switch to new mode */
  156. MOVW R13, R0 /* return old sp */
  157. MOVW R1, R13 /* install new one */
  158. MOVW R2, CPSR /* switch back to old mode */
  159. RET