l64syscall.s 1.5 KB

#include "mem.h"
#include "amd64.h"
.code64
/*
 * Enter user mode.  Load the user data selectors, set the return
 * ip, flags and stack pointer, then SYSRETQ to user space.
 * The argument in %rdi is the initial user stack pointer.
 */
.globl touser
touser:
        CLI
        SWAPGS
        MOVQ    $SSEL(SiUDS, SsRPL3), %rax
        MOVW    %ax, %DS
        MOVW    %ax, %ES
        MOVW    %ax, %FS
        MOVW    %ax, %GS
        MOVQ    $(UTZERO+0x28), %rcx            /* ip */
        MOVQ    $If, %r11                       /* flags */
        MOVQ    %rdi, %rsp                      /* sp */
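        /*
         * The 0x48 byte is a REX.W prefix: it turns the SYSRET below into
         * the 64-bit SYSRETQ, presumably spelled this way for assemblers
         * that do not accept the sysretq mnemonic.
         */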
        .byte 0x48; SYSRET                      /* SYSRETQ */
/*
 * System call entry point, reached via SYSCALL: the user ip is in %rcx
 * and the user flags are in %r11.  Switch to the process's kernel
 * stack, build a Ureg there and call syscall().
 */
.globl syscallentry
syscallentry:
        SWAPGS
        .byte 0x65; MOVQ 0, %r15                /* m-> (MOVQ GS:0x0, R15) */
        MOVQ    16(%r15), %r14                  /* m->proc */
        MOVQ    %rsp, %r13
        MOVQ    16(RUSER), %rsp                 /* m->proc->kstack */
        ADDQ    $KSTACK, %rsp
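        /*
         * %rsp now points at the top of the process's kernel stack
         * (stacks grow down); the pushes below build the Ureg.
         */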
        PUSHQ   $SSEL(SiUDS, SsRPL3)            /* old stack segment */
        PUSHQ   %r13                            /* old sp */
        PUSHQ   %r11                            /* old flags */
        PUSHQ   $SSEL(SiUCS, SsRPL3)            /* old code segment */
        PUSHQ   %rcx                            /* old ip */
        SUBQ    $(18*8), %rsp                   /* unsaved registers */
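        /*
         * The 18 reserved quads presumably cover the rest of the Ureg:
         * 15 general registers, the packed ds/es/fs/gs selectors at
         * offset 15*8, and the type and error fields.  Only the
         * selectors are filled in here.
         */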
        MOVW    $SSEL(SiUDS, SsRPL3), (15*8+0)(%rsp)
        MOVW    %ES, (15*8+2)(%rsp)
        MOVW    %FS, (15*8+4)(%rsp)
        MOVW    %GS, (15*8+6)(%rsp)
        PUSHQ   %rsp                            /* Ureg* */
        PUSHQ   %rdi                            /* system call number */
        CALL    syscall
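        /* no RET: when syscall returns, execution falls through into syscallreturn */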
.globl syscallreturn
syscallreturn:
        MOVQ    16(%rsp), %rax                  /* Ureg.ax */
        MOVQ    (16+6*8)(%rsp), %rbp            /* Ureg.bp */
_syscallreturn:
        ADDQ    $(17*8), %rsp                   /* registers + arguments */
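        /*
         * %rsp now points at the saved segment selectors: the ADDQ above
         * skipped the two pushed arguments and the 15 general-register slots.
         */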
        CLI
        SWAPGS
        MOVW    0(%rsp), %DS
        MOVW    2(%rsp), %ES
        MOVW    4(%rsp), %FS
        MOVW    6(%rsp), %GS
        MOVQ    24(%rsp), %rcx                  /* ip */
        MOVQ    40(%rsp), %r11                  /* flags */
        MOVQ    48(%rsp), %rsp                  /* sp */
        .byte 0x48; SYSRET                      /* SYSRETQ */
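/*
 * Return path for the child side of rfork: clear %rax (presumably the
 * child's system call return value) and take the common return path.
 */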
.globl sysrforkret
sysrforkret:
        /* DEBUG: spin with interrupts off; this blocks the normal return below */
        CLI
1:      JMP     1b
        MOVQ    $0, %rax
        JMP     _syscallreturn