/*
 * lexception.s — low-level ARM exception vectors and trap glue.
 */
/*
 * arm exception handlers
 */
#include "arm.s"

#undef B			/* B is for 'botch' */

/*
 * exception vectors, copied by trapinit() to somewhere useful
 */
/*
 * the eight hardware vectors.  each slot is a single load into PC:
 * on ARM, reading R15 yields the instruction address + 8, so for
 * slot i (at offset 4*i) the value 0x18(R15) addresses offset
 * 4*i + 8 + 0x18 = 32 + 4*i, i.e. vtable slot i, which immediately
 * follows these 32 bytes of vectors.  vectors and vtable must be
 * copied together for the offsets to stay valid.
 */
TEXT vectors(SB), 1, $-4
	MOVW	0x18(R15), R15		/* reset */
	MOVW	0x18(R15), R15		/* undefined instr. */
	MOVW	0x18(R15), R15		/* SWI & SMC */
	MOVW	0x18(R15), R15		/* prefetch abort */
	MOVW	0x18(R15), R15		/* data abort */
	MOVW	0x18(R15), R15		/* reserved */
	MOVW	0x18(R15), R15		/* IRQ */
	MOVW	0x18(R15), R15		/* FIQ */
/*
 * jump targets for the vectors above: vector slot i loads its new
 * PC from vtable slot i.  must directly follow vectors (see the
 * 0x18 offset there).
 * NOTE(review): the reset slot points at _vsvc, not _vrst, leaving
 * _vrst unreferenced by this table — confirm that is intended.
 */
TEXT vtable(SB), 1, $-4
	WORD	$_vsvc(SB)		/* reset, in svc mode already */
	WORD	$_vund(SB)		/* undefined, switch to svc mode */
	WORD	$_vsvc(SB)		/* swi, in svc mode already */
	WORD	$_vpabt(SB)		/* prefetch abort, switch to svc mode */
	WORD	$_vdabt(SB)		/* data abort, switch to svc mode */
	WORD	$_vsvc(SB)		/* reserved */
	WORD	$_virq(SB)		/* IRQ, switch to svc mode */
	WORD	$_vfiq(SB)		/* FIQ, handled in place (see _vfiq) */
/*
 * reset handler: hand off to _reset (defined elsewhere);
 * presumably never returns — confirm against the port's startup code.
 */
TEXT _vrst(SB), 1, $-4
	BL	_reset(SB)
/*
 * system-call (SWI) trap.  we arrive already in svc mode with
 * R14 = interrupted PC and SPSR = interrupted PSR, so the Ureg can
 * be built directly on the svc stack: push pc, psr, type, then the
 * 15 user-bank registers below them, and call syscall(ureg).
 * on return, reload pc/psr from the (possibly edited) Ureg and RFE.
 */
TEXT _vsvc(SB), 1, $-4			/* SWI */
	MOVW.W	R14, -4(R13)		/* ureg->pc = interrupted PC */
	MOVW	SPSR, R14		/* ureg->psr = SPSR */
	MOVW.W	R14, -4(R13)		/* ... */
	MOVW	$PsrMsvc, R14		/* ureg->type = PsrMsvc */
	MOVW.W	R14, -4(R13)		/* ... */

	/*
	 * .S selects the user-bank R13/R14.  the store-multiple is kept
	 * separate from the SP adjustment (no .W) so the stored registers
	 * are predictable — see the matching note in _vswitch.
	 */
	// MOVM.DB.W.S [R0-R14], (R13)	/* save user level registers, at end r13 points to ureg */
	MOVM.DB.S [R0-R14], (R13)	/* save user level registers */
	SUB	$(15*4), R13		/* r13 now points to ureg */

	MOVW	$setR12(SB), R12	/* Make sure we've got the kernel's SB loaded */
	MOVW	$(L1-MACHSIZE), R10	/* m — Mach page below L1; TODO confirm layout */
	MOVW	8(R10), R9		/* up — presumably m's proc pointer at offset 8; verify */
	MOVW	R13, R0			/* first arg is pointer to ureg */
	SUB	$8, R13			/* space for argument+link */

	BL	syscall(SB)

	/* unwind from the Ureg, which syscall() may have modified */
	ADD	$(8+4*15), R13		/* make r13 point to ureg->type */
	MOVW	8(R13), R14		/* restore link (ureg->pc) */
	MOVW	4(R13), R0		/* restore SPSR */
	MOVW	R0, SPSR		/* ... */
	MOVM.DB.S (R13), [R0-R14]	/* restore user-bank registers */
	ADD	$8, R13			/* pop past ureg->{type+psr} */
	RFE				/* MOVM.IA.S.W (R13), [R15] */
/*
 * undefined-instruction trap: stash R0-R4 on the und-mode stack
 * (no writeback — R13 still points at the saved R0) and join the
 * common path with the trap type in R0.
 */
TEXT _vund(SB), 1, $-4			/* undefined */
	MOVM.IA	[R0-R4], (R13)		/* free some working space */
	MOVW	$PsrMund, R0		/* r0 = type */
	B	_vswitch
/*
 * prefetch-abort trap: stash R0-R4 on the abt-mode stack
 * (no writeback) and join the common path with the type in R0.
 */
TEXT _vpabt(SB), 1, $-4			/* prefetch abort */
	MOVM.IA	[R0-R4], (R13)		/* free some working space */
	MOVW	$PsrMabt, R0		/* r0 = type */
	B	_vswitch
/*
 * data-abort trap: stash R0-R4 on the abt-mode stack (no
 * writeback) and join the common path.  the type is PsrMabt+1 to
 * distinguish data aborts from prefetch aborts in trap().
 */
TEXT _vdabt(SB), 1, $-4			/* data abort */
	MOVM.IA	[R0-R4], (R13)		/* free some working space */
	MOVW	$(PsrMabt+1), R0	/* r0 = type */
	B	_vswitch
/*
 * IRQ trap: stash R0-R4 on the irq-mode stack (no writeback) and
 * join the common path with the trap type in R0.
 */
TEXT _virq(SB), 1, $-4			/* IRQ */
	MOVM.IA	[R0-R4], (R13)		/* free some working space */
	MOVW	$PsrMirq, R0		/* r0 = type */
	B	_vswitch
/*
 * come here with type in R0 and R13 pointing above saved [r0-r4].
 * we'll switch to SVC mode and then call trap.
 * on entry (from _vund/_vpabt/_vdabt/_virq):
 *	R0   = trap type
 *	R13  = exception-mode sp, base of the saved [R0-R4]
 *	R14  = interrupted PC, SPSR = interrupted PSR
 */
_vswitch:
	MOVW	SPSR, R1		/* save SPSR for ureg */
	MOVW	R14, R2			/* save interrupted pc for ureg */
	MOVW	R13, R3			/* save pointer to where the original [R0-R4] are */

	/*
	 * switch processor to svc mode.  this switches the banked registers
	 * (r13 [sp] and r14 [link]) to those of svc mode, with IRQ and
	 * FIQ masked.
	 */
	MOVW	CPSR, R14
	BIC	$PsrMask, R14
	ORR	$(PsrDirq|PsrDfiq|PsrMsvc), R14
	MOVW	R14, CPSR		/* switch! */
	DSB; ISB			/* force new cpsr to take effect */

	/* low nibble of the saved PSR mode bits is 0 only for user mode */
	AND.S	$0xf, R1, R4		/* interrupted code kernel or user? */
	BEQ	_userexcep

	/* here for trap from SVC mode */
	MOVM.DB.W [R0-R2], (R13)	/* set ureg->{type, psr, pc}; r13 points to ureg->type */
	MOVM.IA	(R3), [R0-R4]		/* restore [R0-R4] from previous mode's stack */
	/*
	 * In order to get a predictable value in R13 after the stores,
	 * separate the store-multiple from the stack-pointer adjustment.
	 * We'll assume that the old value of R13 should be stored on the stack.
	 */
	/* save kernel level registers, at end r13 points to ureg */
	// MOVM.DB.W [R0-R14], (R13)
	MOVM.DB [R0-R14], (R13)
	SUB	$(15*4), R13		/* SP now points to saved R0 */

	MOVW	$setR12(SB), R12	/* Make sure we've got the kernel's SB loaded */
	MOVW	R13, R0			/* first arg is pointer to ureg */
	SUB	$(4*2), R13		/* space for argument+link (for debugger) */
	MOVW	$0xdeaddead, R11	/* marker */

	BL	trap(SB)

	/* unwind from the Ureg and return to the interrupted kernel code */
	ADD	$(4*2+4*15), R13	/* make r13 point to ureg->type */
	MOVW	8(R13), R14		/* restore link (ureg->pc) */
	MOVW	4(R13), R0		/* restore SPSR */
	MOVW	R0, SPSR		/* ... */
	MOVM.DB (R13), [R0-R14]		/* restore registers */
	ADD	$(4*2), R13		/* pop past ureg->{type+psr} to pc */
	RFE				/* MOVM.IA.S.W (R13), [R15] */

	/* here for trap from USER mode */
_userexcep:
	MOVM.DB.W [R0-R2], (R13)	/* set ureg->{type, psr, pc}; r13 points to ureg->type */
	MOVM.IA	(R3), [R0-R4]		/* restore [R0-R4] from previous mode's stack */

	/* .S selects the user-bank R13/R14; no .W for predictability */
	// MOVM.DB.W.S [R0-R14], (R13)	/* save kernel level registers, at end r13 points to ureg */
	MOVM.DB.S [R0-R14], (R13)	/* save user level registers */
	SUB	$(15*4), R13		/* r13 now points to ureg */

	MOVW	$setR12(SB), R12	/* Make sure we've got the kernel's SB loaded */
	MOVW	$(L1-MACHSIZE), R10	/* m — Mach page below L1; TODO confirm layout */
	MOVW	8(R10), R9		/* up — presumably m's proc pointer at offset 8; verify */
	MOVW	R13, R0			/* first arg is pointer to ureg */
	SUB	$(4*2), R13		/* space for argument+link (for debugger) */

	BL	trap(SB)

	/* unwind from the Ureg and return to the interrupted user code */
	ADD	$(4*2+4*15), R13	/* make r13 point to ureg->type */
	MOVW	8(R13), R14		/* restore link (ureg->pc) */
	MOVW	4(R13), R0		/* restore SPSR */
	MOVW	R0, SPSR		/* ... */
	MOVM.DB.S (R13), [R0-R14]	/* restore user-bank registers */
	ADD	$(4*2), R13		/* pop past ureg->{type+psr} */
	RFE				/* MOVM.IA.S.W (R13), [R15] */
/*
 * FIQ trap: not handled; announce it and return to the interrupted
 * code.  WAVE is presumably a debug-output macro from arm.s — verify.
 */
TEXT _vfiq(SB), 1, $-4			/* FIQ */
	WAVE('%')
	RFE				/* FIQ is special, ignore it for now */
/*
 * set the stack value for the mode passed in R0
 *	R0    = processor-mode bits (first argument)
 *	4(FP) = new stack pointer for that mode
 * returns (in R0) the mode's previous stack pointer.
 * briefly switches CPSR into the target mode to reach its banked
 * R13, then switches back; BARRIERS after each CPSR write makes
 * the mode change take effect before the R13 access.
 */
TEXT setr13(SB), 1, $-4
	MOVW	4(FP), R1		/* new sp */
	MOVW	CPSR, R2		/* remember current psr */
	BIC	$PsrMask, R2, R3
	ORR	R0, R3
	MOVW	R3, CPSR		/* switch to target mode */
	BARRIERS
	MOVW	R13, R3			/* old sp of that mode */
	MOVW	R1, R13			/* install new sp */
	MOVW	R2, CPSR		/* switch back */
	BARRIERS
	MOVW	R3, R0			/* return old sp */
	RET