/*
 * processor.h - x86 CPU helpers: control/debug/segment register access,
 * MSRs, CPUID, TSC, and miscellaneous privileged-instruction wrappers.
 */
#ifndef LIBCFLAT_PROCESSOR_H
#define LIBCFLAT_PROCESSOR_H

#include "libcflat.h"
#include "msr.h"
#include <stdint.h>

/*
 * Word-size helpers for building inline-asm templates:
 *   R - register name prefix ("r" for 64-bit, "e" for 32-bit)
 *   W - operand-size instruction suffix ("q" / "l")
 *   S - native word size in bytes, as a string ("8" / "4")
 */
#ifdef __x86_64__
# define R "r"
# define W "q"
# define S "8"
#else
# define R "e"
# define W "l"
# define S "4"
#endif
/* CR0 bits. */
#define X86_CR0_PE 0x00000001	/* protection enable */
#define X86_CR0_MP 0x00000002	/* monitor coprocessor */
#define X86_CR0_TS 0x00000008	/* task switched */
#define X86_CR0_WP 0x00010000	/* write protect in supervisor mode */
#define X86_CR0_AM 0x00040000	/* alignment mask */
#define X86_CR0_PG 0x80000000	/* paging enable */

/* CR4 bits. */
#define X86_CR4_TSD 0x00000004	/* time-stamp disable (RDTSC is privileged) */
#define X86_CR4_DE 0x00000008	/* debugging extensions */
#define X86_CR4_PSE 0x00000010	/* page-size extension */
#define X86_CR4_PAE 0x00000020	/* physical address extension */
#define X86_CR4_VMXE 0x00002000	/* VMX enable */
#define X86_CR4_PCIDE 0x00020000	/* PCID enable */
#define X86_CR4_SMAP 0x00200000	/* supervisor-mode access prevention */
#define X86_CR4_PKE 0x00400000	/* protection keys enable */

/* RFLAGS/EFLAGS bits. */
#define X86_EFLAGS_CF 0x00000001	/* carry */
#define X86_EFLAGS_FIXED 0x00000002	/* bit 1: reserved, always set */
#define X86_EFLAGS_PF 0x00000004	/* parity */
#define X86_EFLAGS_AF 0x00000010	/* auxiliary carry */
#define X86_EFLAGS_ZF 0x00000040	/* zero */
#define X86_EFLAGS_SF 0x00000080	/* sign */
#define X86_EFLAGS_TF 0x00000100	/* trap (single-step) */
#define X86_EFLAGS_IF 0x00000200	/* interrupt enable */
#define X86_EFLAGS_DF 0x00000400	/* direction */
#define X86_EFLAGS_OF 0x00000800	/* overflow */
#define X86_EFLAGS_NT 0x00004000	/* nested task */
#define X86_EFLAGS_AC 0x00040000	/* alignment check / SMAP access control */

/* EFER MSR index and bits. */
#define X86_IA32_EFER 0xc0000080
/*
 * NOTE(review): per the Intel SDM, EFER bit 8 is LME (long mode enable);
 * LMA (long mode active) is bit 10.  Confirm whether callers actually
 * want LME here before renaming or changing the value.
 */
#define X86_EFER_LMA (1UL << 8)
/* 16:32 far pointer (offset then selector), e.g. an ljmp/lcall operand. */
struct far_pointer32 {
	u32 offset;
	u16 selector;
} __attribute__((packed));

/* In-memory operand format for LGDT/SGDT/LIDT/SIDT. */
struct descriptor_table_ptr {
	u16 limit;
	ulong base;
} __attribute__((packed));
/* Compiler-only barrier: forbids reordering memory accesses across it. */
static inline void barrier(void)
{
	asm volatile ("" : : : "memory");
}

/* CLAC (raw 0F 01 CA encoding for old assemblers): clear EFLAGS.AC. */
static inline void clac(void)
{
	asm volatile (".byte 0x0f, 0x01, 0xca" : : : "memory");
}

/* STAC (raw 0F 01 CB encoding): set EFLAGS.AC. */
static inline void stac(void)
{
	asm volatile (".byte 0x0f, 0x01, 0xcb" : : : "memory");
}
/*
 * Segment-register readers.  The selector is 16 bits; the asm output may
 * go through a wider "unsigned", but the u16 return type keeps only the
 * low 16 bits either way.
 */
static inline u16 read_cs(void)
{
	unsigned val;

	asm volatile ("mov %%cs, %0" : "=mr"(val));
	return val;
}

static inline u16 read_ds(void)
{
	unsigned val;

	asm volatile ("mov %%ds, %0" : "=mr"(val));
	return val;
}

static inline u16 read_es(void)
{
	unsigned val;

	asm volatile ("mov %%es, %0" : "=mr"(val));
	return val;
}

static inline u16 read_ss(void)
{
	unsigned val;

	asm volatile ("mov %%ss, %0" : "=mr"(val));
	return val;
}

static inline u16 read_fs(void)
{
	unsigned val;

	asm volatile ("mov %%fs, %0" : "=mr"(val));
	return val;
}

static inline u16 read_gs(void)
{
	unsigned val;

	asm volatile ("mov %%gs, %0" : "=mr"(val));
	return val;
}
/* Read RFLAGS (EFLAGS on 32-bit) via pushf/pop. */
static inline unsigned long read_rflags(void)
{
	unsigned long f;

	asm volatile ("pushf; pop %0\n\t" : "=rm"(f));
	return f;
}
/*
 * Segment-register writers.  The "memory" clobber orders the load against
 * surrounding memory accesses, which may go through the changed segment.
 */
static inline void write_ds(unsigned val)
{
	asm volatile ("mov %0, %%ds" : : "rm"(val) : "memory");
}

static inline void write_es(unsigned val)
{
	asm volatile ("mov %0, %%es" : : "rm"(val) : "memory");
}

static inline void write_ss(unsigned val)
{
	asm volatile ("mov %0, %%ss" : : "rm"(val) : "memory");
}

static inline void write_fs(unsigned val)
{
	asm volatile ("mov %0, %%fs" : : "rm"(val) : "memory");
}

static inline void write_gs(unsigned val)
{
	asm volatile ("mov %0, %%gs" : : "rm"(val) : "memory");
}
/* Load RFLAGS/EFLAGS from 'f' via push/popf (may change IF, AC, TF, ...). */
static inline void write_rflags(unsigned long f)
{
	asm volatile ("push %0; popf\n\t" : : "rm"(f));
}
/* Read the 64-bit MSR 'index' via RDMSR (result in the EDX:EAX pair). */
static inline u64 rdmsr(u32 index)
{
	u32 a, d;

	asm volatile ("rdmsr" : "=a"(a), "=d"(d) : "c"(index) : "memory");
	return a | ((u64)d << 32);
}

/* Write 'val' to the 64-bit MSR 'index' via WRMSR (EDX:EAX pair). */
static inline void wrmsr(u32 index, u64 val)
{
	u32 a = val, d = val >> 32;

	asm volatile ("wrmsr" : : "a"(a), "d"(d), "c"(index) : "memory");
}
/* Read performance-monitoring counter 'index' via RDPMC (EDX:EAX pair). */
static inline uint64_t rdpmc(uint32_t index)
{
	uint32_t a, d;

	asm volatile ("rdpmc" : "=a"(a), "=d"(d) : "c"(index));
	return a | ((uint64_t)d << 32);
}
/*
 * Control-register accessors.  The "memory" clobbers order the moves
 * against surrounding memory accesses (e.g. CR0/CR3/CR4 writes change
 * translation or protection behavior).
 */
static inline void write_cr0(ulong val)
{
	asm volatile ("mov %0, %%cr0" : : "r"(val) : "memory");
}

static inline ulong read_cr0(void)
{
	ulong val;

	asm volatile ("mov %%cr0, %0" : "=r"(val) : : "memory");
	return val;
}

/* CR2 holds the faulting linear address after a #PF. */
static inline void write_cr2(ulong val)
{
	asm volatile ("mov %0, %%cr2" : : "r"(val) : "memory");
}

static inline ulong read_cr2(void)
{
	ulong val;

	asm volatile ("mov %%cr2, %0" : "=r"(val) : : "memory");
	return val;
}

/* CR3: page-table base; writing it flushes non-global TLB entries. */
static inline void write_cr3(ulong val)
{
	asm volatile ("mov %0, %%cr3" : : "r"(val) : "memory");
}

static inline ulong read_cr3(void)
{
	ulong val;

	asm volatile ("mov %%cr3, %0" : "=r"(val) : : "memory");
	return val;
}

static inline void write_cr4(ulong val)
{
	asm volatile ("mov %0, %%cr4" : : "r"(val) : "memory");
}

static inline ulong read_cr4(void)
{
	ulong val;

	asm volatile ("mov %%cr4, %0" : "=r"(val) : : "memory");
	return val;
}

/* CR8: task-priority register (TPR). */
static inline void write_cr8(ulong val)
{
	asm volatile ("mov %0, %%cr8" : : "r"(val) : "memory");
}

static inline ulong read_cr8(void)
{
	ulong val;

	asm volatile ("mov %%cr8, %0" : "=r"(val) : : "memory");
	return val;
}
/* Load the GDT register from *ptr. */
static inline void lgdt(const struct descriptor_table_ptr *ptr)
{
	asm volatile ("lgdt %0" : : "m"(*ptr));
}

/* Store the GDT register into *ptr. */
static inline void sgdt(struct descriptor_table_ptr *ptr)
{
	asm volatile ("sgdt %0" : "=m"(*ptr));
}

/* Load the IDT register from *ptr. */
static inline void lidt(const struct descriptor_table_ptr *ptr)
{
	asm volatile ("lidt %0" : : "m"(*ptr));
}

/* Store the IDT register into *ptr. */
static inline void sidt(struct descriptor_table_ptr *ptr)
{
	asm volatile ("sidt %0" : "=m"(*ptr));
}

/* Load the LDT selector. */
static inline void lldt(unsigned val)
{
	asm volatile ("lldt %0" : : "rm"(val));
}

/* Store the LDT selector. */
static inline u16 sldt(void)
{
	u16 val;

	asm volatile ("sldt %0" : "=rm"(val));
	return val;
}

/* Load the task register (TSS selector). */
static inline void ltr(u16 val)
{
	asm volatile ("ltr %0" : : "rm"(val));
}

/* Store the task register (TSS selector). */
static inline u16 str(void)
{
	u16 val;

	asm volatile ("str %0" : "=rm"(val));
	return val;
}
/* DR6 (debug status) and DR7 (debug control) accessors. */
static inline void write_dr6(ulong val)
{
	asm volatile ("mov %0, %%dr6" : : "r"(val) : "memory");
}

static inline ulong read_dr6(void)
{
	ulong val;

	asm volatile ("mov %%dr6, %0" : "=r"(val));
	return val;
}

static inline void write_dr7(ulong val)
{
	asm volatile ("mov %0, %%dr7" : : "r"(val) : "memory");
}

static inline ulong read_dr7(void)
{
	ulong val;

	asm volatile ("mov %%dr7, %0" : "=r"(val));
	return val;
}
/* CPUID output registers: EAX, EBX, ECX, EDX. */
struct cpuid { u32 a, b, c, d; };

/* Execute CPUID with EAX=function, ECX=index.  No range checking. */
static inline struct cpuid raw_cpuid(u32 function, u32 index)
{
	struct cpuid r;

	asm volatile ("cpuid"
		      : "=a"(r.a), "=b"(r.b), "=c"(r.c), "=d"(r.d)
		      : "0"(function), "2"(index));
	return r;
}
  261. static inline struct cpuid cpuid_indexed(u32 function, u32 index)
  262. {
  263. u32 level = raw_cpuid(function & 0xf0000000, 0).a;
  264. if (level < function)
  265. return (struct cpuid) { 0, 0, 0, 0 };
  266. return raw_cpuid(function, index);
  267. }
  268. static inline struct cpuid cpuid(u32 function)
  269. {
  270. return cpuid_indexed(function, 0);
  271. }
  272. static inline u8 cpuid_maxphyaddr(void)
  273. {
  274. if (raw_cpuid(0x80000000, 0).a < 0x80000008)
  275. return 36;
  276. return raw_cpuid(0x80000008, 0).a & 0xff;
  277. }
/* PAUSE: spin-wait hint for busy loops. */
static inline void pause(void)
{
	asm volatile ("pause");
}

/* Disable maskable interrupts. */
static inline void cli(void)
{
	asm volatile ("cli");
}

/* Enable maskable interrupts. */
static inline void sti(void)
{
	asm volatile ("sti");
}
  290. static inline unsigned long long rdtsc()
  291. {
  292. long long r;
  293. #ifdef __x86_64__
  294. unsigned a, d;
  295. asm volatile ("rdtsc" : "=a"(a), "=d"(d));
  296. r = a | ((long long)d << 32);
  297. #else
  298. asm volatile ("rdtsc" : "=A"(r));
  299. #endif
  300. return r;
  301. }
/*
 * Read the TSC via RDTSCP; the ECX output (IA32_TSC_AUX) is stored into
 * *aux.  Unlike RDTSC, RDTSCP waits for prior instructions to complete.
 */
static inline unsigned long long rdtscp(u32 *aux)
{
	long long r;

#ifdef __x86_64__
	unsigned a, d;

	asm volatile ("rdtscp" : "=a"(a), "=d"(d), "=c"(*aux));
	r = a | ((long long)d << 32);
#else
	asm volatile ("rdtscp" : "=A"(r), "=c"(*aux));
#endif
	return r;
}
/* Set the TSC by writing MSR 0x10 (IA32_TIME_STAMP_COUNTER). */
static inline void wrtsc(u64 tsc)
{
	unsigned a = tsc, d = tsc >> 32;

	asm volatile("wrmsr" : : "a"(a), "d"(d), "c"(0x10));
}
/* Disable maskable interrupts (alias of cli()). */
static inline void irq_disable(void)
{
	asm volatile("cli");
}

/*
 * Note that irq_enable() does not ensure an interrupt shadow due
 * to the vagaries of compiler optimizations.  If you need the
 * shadow, use a single asm with "sti" and the instruction after it.
 */
static inline void irq_enable(void)
{
	asm volatile("sti");
}
/* Invalidate the TLB entry for the page containing 'va'. */
static inline void invlpg(volatile void *va)
{
	asm volatile("invlpg (%0)" ::"r" (va) : "memory");
}

/*
 * Enable interrupts and halt in a single asm, so HLT executes inside the
 * STI interrupt shadow (see the irq_enable() comment above).
 */
static inline void safe_halt(void)
{
	asm volatile("sti; hlt");
}
/*
 * Read the protection-key rights register via RDPKRU (raw 0F 01 EE
 * encoding for old assemblers).  ECX is set to 0 as the instruction
 * requires; the result is taken from EAX and the EDX output is ignored.
 */
static inline u32 read_pkru(void)
{
	unsigned int eax, edx;
	unsigned int ecx = 0;
	unsigned int pkru;

	asm volatile(".byte 0x0f,0x01,0xee\n\t"
		     : "=a" (eax), "=d" (edx)
		     : "c" (ecx));
	pkru = eax;
	return pkru;
}

/*
 * Write the protection-key rights register via WRPKRU (raw 0F 01 EF
 * encoding).  EAX carries the new PKRU value; ECX and EDX are set to 0
 * as the instruction requires.
 */
static inline void write_pkru(u32 pkru)
{
	unsigned int eax = pkru;
	unsigned int ecx = 0;
	unsigned int edx = 0;

	asm volatile(".byte 0x0f,0x01,0xef\n\t"
		     : : "a" (eax), "c" (ecx), "d" (edx));
}
  358. static inline bool is_canonical(u64 addr)
  359. {
  360. return (s64)(addr << 16) >> 16 == addr;
  361. }
  362. #endif