/*
 *  arm exception handlers
 */
#include "mem.h"
#include "arm.h"
#undef B			/* B is for 'botch' */
  7. /*
  8. * exception vectors, copied by trapinit() to somewhere useful
  9. */
  10. TEXT vectors(SB), 1, $-4
  11. MOVW 0x18(R15), R15 /* reset */
  12. MOVW 0x18(R15), R15 /* undefined instr. */
  13. MOVW 0x18(R15), R15 /* SWI & SMC */
  14. MOVW 0x18(R15), R15 /* prefetch abort */
  15. MOVW 0x18(R15), R15 /* data abort */
  16. MOVW 0x18(R15), R15 /* reserved */
  17. MOVW 0x18(R15), R15 /* IRQ */
  18. MOVW 0x18(R15), R15 /* FIQ */
  19. TEXT vtable(SB), 1, $-4
  20. WORD $_vsvc(SB) /* reset, in svc mode already */
  21. WORD $_vund(SB) /* undefined, switch to svc mode */
  22. WORD $_vsvc(SB) /* swi, in svc mode already */
  23. WORD $_vpabt(SB) /* prefetch abort, switch to svc mode */
  24. WORD $_vdabt(SB) /* data abort, switch to svc mode */
  25. WORD $_vsvc(SB) /* reserved */
  26. WORD $_virq(SB) /* IRQ, switch to svc mode */
  27. WORD $_vfiq(SB) /* FIQ, switch to svc mode */
  28. TEXT _vrst(SB), 1, $-4
  29. BL _reset(SB)
  30. TEXT _vsvc(SB), 1, $-4 /* SWI */
  31. MOVW.W R14, -4(R13) /* ureg->pc = interupted PC */
  32. MOVW SPSR, R14 /* ureg->psr = SPSR */
  33. MOVW.W R14, -4(R13) /* ... */
  34. MOVW $PsrMsvc, R14 /* ureg->type = PsrMsvc */
  35. MOVW.W R14, -4(R13) /* ... */
  36. // MOVM.DB.W.S [R0-R14], (R13) /* save user level registers, at end r13 points to ureg */
  37. MOVM.DB.S [R0-R14], (R13) /* save user level registers */
  38. SUB $(15*4), R13 /* r13 now points to ureg */
  39. MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
  40. MOVW $(KSEG0+16*KiB-MACHSIZE), R10 /* m */
  41. MOVW 8(R10), R9 /* up */
  42. MOVW R13, R0 /* first arg is pointer to ureg */
  43. SUB $8, R13 /* space for argument+link */
  44. BL syscall(SB)
  45. ADD $(8+4*15), R13 /* make r13 point to ureg->type */
  46. MOVW 8(R13), R14 /* restore link */
  47. MOVW 4(R13), R0 /* restore SPSR */
  48. MOVW R0, SPSR /* ... */
  49. MOVM.DB.S (R13), [R0-R14] /* restore registers */
  50. ADD $8, R13 /* pop past ureg->{type+psr} */
  51. RFE /* MOVM.IA.S.W (R13), [R15] */
  52. TEXT _vund(SB), 1, $-4 /* undefined */
  53. MOVM.IA [R0-R4], (R13) /* free some working space */
  54. MOVW $PsrMund, R0
  55. B _vswitch
  56. TEXT _vpabt(SB), 1, $-4 /* prefetch abort */
  57. MOVM.IA [R0-R4], (R13) /* free some working space */
  58. MOVW $PsrMabt, R0 /* r0 = type */
  59. B _vswitch
  60. TEXT _vdabt(SB), 1, $-4 /* data abort */
  61. MOVM.IA [R0-R4], (R13) /* free some working space */
  62. MOVW $(PsrMabt+1), R0 /* r0 = type */
  63. B _vswitch
  64. TEXT _virq(SB), 1, $-4 /* IRQ */
  65. MOVM.IA [R0-R4], (R13) /* free some working space */
  66. MOVW $PsrMirq, R0 /* r0 = type */
  67. B _vswitch
  68. /*
  69. * come here with type in R0 and R13 pointing above saved [r0-r4].
  70. * we'll switch to SVC mode and then call trap.
  71. */
  72. _vswitch:
  73. MOVW SPSR, R1 /* save SPSR for ureg */
  74. MOVW R14, R2 /* save interrupted pc for ureg */
  75. MOVW R13, R3 /* save pointer to where the original [R0-R4] are */
  76. /*
  77. * switch processor to svc mode. this switches the banked registers
  78. * (r13 [sp] and r14 [link]) to those of svc mode.
  79. */
  80. MOVW CPSR, R14
  81. BIC $PsrMask, R14
  82. ORR $(PsrDirq|PsrDfiq|PsrMsvc), R14
  83. MOVW R14, CPSR /* switch! */
  84. AND.S $0xf, R1, R4 /* interrupted code kernel or user? */
  85. BEQ _userexcep
  86. /* here for trap from SVC mode */
  87. MOVM.DB.W [R0-R2], (R13) /* set ureg->{type, psr, pc}; r13 points to ureg->type */
  88. MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
  89. /*
  90. * In order to get a predictable value in R13 after the stores,
  91. * separate the store-multiple from the stack-pointer adjustment.
  92. * We'll assume that the old value of R13 should be stored on the stack.
  93. */
  94. /* save kernel level registers, at end r13 points to ureg */
  95. // MOVM.DB.W [R0-R14], (R13)
  96. MOVM.DB [R0-R14], (R13)
  97. SUB $(15*4), R13 /* SP now points to saved R0 */
  98. MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
  99. MOVW R13, R0 /* first arg is pointer to ureg */
  100. // BL printr0(SB)
  101. SUB $(4*2), R13 /* space for argument+link (for debugger) */
  102. MOVW $0xdeaddead, R11 /* marker */
  103. BL trap(SB)
  104. ADD $(4*2+4*15), R13 /* make r13 point to ureg->type */
  105. MOVW 8(R13), R14 /* restore link */
  106. MOVW 4(R13), R0 /* restore SPSR */
  107. MOVW R0, SPSR /* ... */
  108. MOVM.DB (R13), [R0-R14] /* restore registers */
  109. ADD $(4*2), R13 /* pop past ureg->{type+psr} to pc */
  110. RFE /* MOVM.IA.S.W (R13), [R15] */
  111. /* here for trap from USER mode */
  112. _userexcep:
  113. MOVM.DB.W [R0-R2], (R13) /* set ureg->{type, psr, pc}; r13 points to ureg->type */
  114. MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
  115. // MOVM.DB.W.S [R0-R14], (R13) /* save kernel level registers, at end r13 points to ureg */
  116. MOVM.DB.S [R0-R14], (R13) /* save kernel level registers */
  117. SUB $(15*4), R13 /* r13 now points to ureg */
  118. MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
  119. MOVW $(KSEG0+16*KiB-MACHSIZE), R10 /* m */
  120. MOVW 8(R10), R9 /* up */
  121. MOVW R13, R0 /* first arg is pointer to ureg */
  122. SUB $(4*2), R13 /* space for argument+link (for debugger) */
  123. BL trap(SB)
  124. ADD $(4*2+4*15), R13 /* make r13 point to ureg->type */
  125. MOVW 8(R13), R14 /* restore link */
  126. MOVW 4(R13), R0 /* restore SPSR */
  127. MOVW R0, SPSR /* ... */
  128. MOVM.DB.S (R13), [R0-R14] /* restore registers */
  129. ADD $(4*2), R13 /* pop past ureg->{type+psr} */
  130. RFE /* MOVM.IA.S.W (R13), [R15] */
  131. TEXT _vfiq(SB), 1, $-4 /* FIQ */
  132. RFE /* FIQ is special, ignore it for now */
  133. /*
  134. * set the stack value for the mode passed in R0
  135. */
  136. TEXT setr13(SB), 1, $-4
  137. MOVW 4(FP), R1
  138. MOVW CPSR, R2
  139. BIC $PsrMask, R2, R3
  140. ORR R0, R3
  141. MOVW R3, CPSR
  142. MOVW R13, R0
  143. MOVW R1, R13
  144. MOVW R2, CPSR
  145. RET