atomic.h

#ifndef _INTERNAL_ATOMIC_H
#define _INTERNAL_ATOMIC_H

#include <stdint.h>

/* Count trailing zeros of a 64-bit value: scan the low half first; if it
 * is zero, scan the high half and add 32. Result is undefined for x==0.
 * The output needs the early-clobber "&" since %0 is written before %2
 * is consumed. */
static inline int a_ctz_64(uint64_t x)
{
	int r;
	__asm__( "bsf %1,%0 ; jnz 1f ; bsf %2,%0 ; addl $32,%0\n1:"
		: "=&r"(r) : "r"((unsigned)x), "r"((unsigned)(x>>32)) );
	return r;
}

/* Count trailing zeros of a long; like bsf itself, undefined for x==0. */
static inline int a_ctz_l(unsigned long x)
{
	long r;
	__asm__( "bsf %1,%0" : "=r"(r) : "r"(x) );
	return r;
}

/* 64-bit and/or done as two locked 32-bit operations: each half is
 * updated atomically, but the pair is not one atomic 64-bit access. */
static inline void a_and_64(volatile uint64_t *p, uint64_t v)
{
	__asm__( "lock ; andl %1, (%0) ; lock ; andl %2, 4(%0)"
		: : "r"((long *)p), "r"((unsigned)v), "r"((unsigned)(v>>32)) : "memory" );
}

static inline void a_or_64(volatile uint64_t *p, uint64_t v)
{
	__asm__( "lock ; orl %1, (%0) ; lock ; orl %2, 4(%0)"
		: : "r"((long *)p), "r"((unsigned)v), "r"((unsigned)(v>>32)) : "memory" );
}

/* A plain aligned store is atomic on x86; the "memory" clobber stops the
 * compiler from reordering other accesses around it. */
static inline void a_store_l(volatile void *p, long x)
{
	__asm__( "movl %1, %0" : "=m"(*(long *)p) : "r"(x) : "memory" );
}

static inline void a_or_l(volatile void *p, long v)
{
	__asm__( "lock ; orl %1, %0"
		: "=m"(*(long *)p) : "r"(v) : "memory" );
}

/* Compare-and-swap: if *p equals t, store s; always returns the old value
 * of *p. cmpxchg takes the expected value in eax (the "a" constraint). */
static inline void *a_cas_p(volatile void *p, void *t, void *s)
{
	__asm__( "lock ; cmpxchg %3, %1"
		: "=a"(t), "=m"(*(long *)p) : "a"(t), "r"(s) : "memory" );
	return t;
}

static inline long a_cas_l(volatile void *p, long t, long s)
{
	__asm__( "lock ; cmpxchg %3, %1"
		: "=a"(t), "=m"(*(long *)p) : "a"(t), "r"(s) : "memory" );
	return t;
}

static inline int a_cas(volatile int *p, int t, int s)
{
	__asm__( "lock ; cmpxchg %3, %1"
		: "=a"(t), "=m"(*p) : "a"(t), "r"(s) : "memory" );
	return t;
}

/* Atomic exchange; xchg with a memory operand is implicitly locked, so no
 * lock prefix is needed. */
static inline void *a_swap_p(void *volatile *x, void *v)
{
	__asm__( "xchg %0, %1" : "=r"(v), "=m"(*(void **)x) : "0"(v) : "memory" );
	return v;
}

static inline long a_swap_l(volatile void *x, long v)
{
	__asm__( "xchg %0, %1" : "=r"(v), "=m"(*(long *)x) : "0"(v) : "memory" );
	return v;
}

static inline void a_or(volatile void *p, int v)
{
	__asm__( "lock ; orl %1, %0"
		: "=m"(*(int *)p) : "r"(v) : "memory" );
}

static inline void a_and(volatile void *p, int v)
{
	__asm__( "lock ; andl %1, %0"
		: "=m"(*(int *)p) : "r"(v) : "memory" );
}

static inline int a_swap(volatile int *x, int v)
{
	__asm__( "xchg %0, %1" : "=r"(v), "=m"(*x) : "0"(v) : "memory" );
	return v;
}
#define a_xchg a_swap

/* Atomic fetch-and-add; lock xadd leaves the old value in the register. */
static inline int a_fetch_add(volatile int *x, int v)
{
	__asm__( "lock ; xadd %0, %1" : "=r"(v), "=m"(*x) : "0"(v) : "memory" );
	return v;
}

static inline void a_inc(volatile int *x)
{
	__asm__( "lock ; incl %0" : "=m"(*x) : "m"(*x) : "memory" );
}

static inline void a_dec(volatile int *x)
{
	__asm__( "lock ; decl %0" : "=m"(*x) : "m"(*x) : "memory" );
}

static inline void a_store(volatile int *p, int x)
{
	__asm__( "movl %1, %0" : "=m"(*p) : "r"(x) : "memory" );
}

/* Spin-loop hint; pause reduces power use and avoids memory-order
 * mis-speculation penalties when the loop exits. */
static inline void a_spin(void)
{
	__asm__ __volatile__( "pause" : : : "memory" );
}

/* hlt is privileged; executing it in user mode faults, so this reliably
 * terminates the process. */
static inline void a_crash(void)
{
	__asm__ __volatile__( "hlt" : : : "memory" );
}

#endif
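
As a usage sketch (not part of the header): a minimal spinlock can be built
from a_cas, a_spin, and a_store. The names spin_lock and spin_unlock below
are illustrative, not musl API.

static void spin_lock(volatile int *l)
{
	/* a_cas returns the old value: 0 means the lock was free and is now held */
	while (a_cas(l, 0, 1))
		a_spin(); /* pause hint while contended */
}

static void spin_unlock(volatile int *l)
{
	a_store(l, 0); /* plain atomic store; x86 stores have release ordering */
}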