memcpy.s

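/*
 * memcpy(p1, p2, n): copy n bytes from p2 to p1 and return p1.
 * Despite the name, overlap with the source below the destination
 * is handled by copying backwards.
 */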
TEXT memcpy(SB), $0
	MOVQ	RARG, DI
	MOVQ	DI, AX			/* return value */
	MOVQ	p2+8(FP), SI
	MOVL	n+16(FP), BX
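	/* DI = dst, SI = src, BX = byte count; the string moves use DI, SI and CX */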
	CMPL	BX, $0
	JGT	_ok
	JEQ	_return			/* nothing to do if n == 0 */
	MOVL	$0, SI			/* fault if n < 0 */

/*
 * check and set for backwards:
 *	(p2 < p1) && ((p2+n) > p1)
 * i.e. the source starts below the destination and overlaps it,
 * so a forward copy would clobber source bytes before they are read
 */
_ok:
	CMPQ	SI, DI
	JGT	_forward
	JEQ	_return			/* nothing to do if p2 == p1 */
	MOVQ	SI, DX
	ADDQ	BX, DX
	CMPQ	DX, DI
	JGT	_back

/*
 * copy whole longs if aligned
 */
_forward:
	CLD
	MOVQ	SI, DX
	ORQ	DI, DX
	ANDL	$3, DX
	JNE	c3f
	MOVQ	BX, CX
	SHRQ	$2, CX
	ANDL	$3, BX
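	/* CX = number of whole longs, BX = leftover bytes; ZF set if none remain */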
	REP;	MOVSL
/*
 * copy the rest, by bytes
 */
	JEQ	_return			/* flags set by above ANDL */
c3f:
	MOVL	BX, CX
	REP;	MOVSB
	RET

/*
 * whole thing backwards has
 * adjusted addresses
 */
_back:
	ADDQ	BX, DI
	ADDQ	BX, SI
	STD
	SUBQ	$4, DI
	SUBQ	$4, SI
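/*
 * DI/SI now point at the last long of dst and src;
 * with the direction flag set the string moves run downwards
 */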
/*
 * copy whole longs, if aligned
 */
	MOVQ	DI, DX
	ORQ	SI, DX
	ANDL	$3, DX
	JNE	c3b
	MOVL	BX, CX
	SHRQ	$2, CX
	ANDL	$3, BX
	REP;	MOVSL
/*
 * copy the rest, by bytes
 */
	JEQ	_return			/* flags set by above ANDL */
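/*
 * DI/SI sit 4 below the end of what is left to copy;
 * add 3 so they point at the last remaining byte
 */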
c3b:
	ADDQ	$3, DI
	ADDQ	$3, SI
	MOVL	BX, CX
	REP;	MOVSB
_return:
	RET