emulator.c 33 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168
  1. #include "ioram.h"
  2. #include "vm.h"
  3. #include "libcflat.h"
  4. #include "desc.h"
  5. #include "types.h"
  6. #include "processor.h"
  7. #define memset __builtin_memset
  8. #define TESTDEV_IO_PORT 0xe0
/* Number of exceptions observed by the test exception handlers. */
static int exceptions;

/*
 * Full general-purpose register snapshot, laid out to match the
 * "xchg %reg, OFFSET+save" sequence in INSN_XCHG_ALL below (rax at
 * offset 0 ... r15 at 120, rflags at 136).
 */
struct regs {
    u64 rax, rbx, rcx, rdx;
    u64 rsi, rdi, rsp, rbp;
    u64 r8, r9, r10, r11;
    u64 r12, r13, r14, r15;
    u64 rip, rflags;
};

/* Register state fed into / read back from trap_emulator(). */
struct regs inregs, outregs, save;

/* Describes one test instruction blob: start address and byte length. */
struct insn_desc {
    u64 ptr;
    size_t len;
};

/* Source buffer for the string-I/O tests. */
static char st1[] = "abcdefghijklmnop";
/*
 * Exercise "rep outsb" with DF clear (forward) and DF set (backward)
 * against the test device port, then read one byte back; the device is
 * expected to return the last byte it was sent.
 */
void test_stringio()
{
    unsigned char r = 0;
    /* Forward: the last byte written is st1[len - 1]. */
    asm volatile("cld \n\t"
                 "movw %0, %%dx \n\t"
                 "rep outsb \n\t"
                 : : "i"((short)TESTDEV_IO_PORT),
                   "S"(st1), "c"(sizeof(st1) - 1));
    asm volatile("inb %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT));
    report("outsb up", r == st1[sizeof(st1) - 2]); /* last char */
    /* Backward: start at the last char; the last byte written is st1[0]. */
    asm volatile("std \n\t"
                 "movw %0, %%dx \n\t"
                 "rep outsb \n\t"
                 : : "i"((short)TESTDEV_IO_PORT),
                   "S"(st1 + sizeof(st1) - 2), "c"(sizeof(st1) - 1));
    /* Restore DF=0 so later string ops run forward again. */
    asm volatile("cld \n\t" : : );
    asm volatile("in %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT));
    report("outsb down", r == st1[0]);
}
/*
 * Exercise repe/cmps{b,w,l,q} on two buffers. The callers arrange for
 * m1 and m3 to agree in bytes [0,100) and differ from byte 100 on, so
 * the first group of checks (count kept within the matching region)
 * must exhaust RCX, and the second group must stop at the first
 * mismatching element and leave the residual count in RCX.
 */
void test_cmps_one(unsigned char *m1, unsigned char *m3)
{
    void *rsi, *rdi;
    long rcx, tmp;
    /* --- counts entirely within the matching prefix: RCX reaches 0 --- */
    rsi = m1; rdi = m3; rcx = 30;
    /* xor clears ZF beforehand; repe must still run the first iteration */
    asm volatile("xor %[tmp], %[tmp] \n\t"
                 "repe/cmpsb"
                 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
                 : : "cc");
    report("repe/cmpsb (1)", rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30);
    rsi = m1; rdi = m3; rcx = 30;
    asm volatile("or $1, %[tmp]\n\t" // clear ZF
                 "repe/cmpsb"
                 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
                 : : "cc");
    report("repe/cmpsb (1.zf)", rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30);
    rsi = m1; rdi = m3; rcx = 15;
    asm volatile("xor %[tmp], %[tmp] \n\t"
                 "repe/cmpsw"
                 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
                 : : "cc");
    report("repe/cmpsw (1)", rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30);
    rsi = m1; rdi = m3; rcx = 7;
    asm volatile("xor %[tmp], %[tmp] \n\t"
                 "repe/cmpsl"
                 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
                 : : "cc");
    /* NOTE(review): label says "cmpll"; presumably meant "cmpsl". */
    report("repe/cmpll (1)", rcx == 0 && rsi == m1 + 28 && rdi == m3 + 28);
    rsi = m1; rdi = m3; rcx = 4;
    asm volatile("xor %[tmp], %[tmp] \n\t"
                 "repe/cmpsq"
                 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
                 : : "cc");
    report("repe/cmpsq (1)", rcx == 0 && rsi == m1 + 32 && rdi == m3 + 32);
    /* --- counts crossing the first mismatch at element index 100 --- */
    rsi = m1; rdi = m3; rcx = 130;
    asm volatile("xor %[tmp], %[tmp] \n\t"
                 "repe/cmpsb"
                 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
                 : : "cc");
    report("repe/cmpsb (2)",
           rcx == 29 && rsi == m1 + 101 && rdi == m3 + 101);
    rsi = m1; rdi = m3; rcx = 65;
    asm volatile("xor %[tmp], %[tmp] \n\t"
                 "repe/cmpsw"
                 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
                 : : "cc");
    report("repe/cmpsw (2)",
           rcx == 14 && rsi == m1 + 102 && rdi == m3 + 102);
    rsi = m1; rdi = m3; rcx = 32;
    asm volatile("xor %[tmp], %[tmp] \n\t"
                 "repe/cmpsl"
                 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
                 : : "cc");
    report("repe/cmpll (2)",
           rcx == 6 && rsi == m1 + 104 && rdi == m3 + 104);
    rsi = m1; rdi = m3; rcx = 16;
    asm volatile("xor %[tmp], %[tmp] \n\t"
                 "repe/cmpsq"
                 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
                 : : "cc");
    report("repe/cmpsq (2)",
           rcx == 3 && rsi == m1 + 104 && rdi == m3 + 104);
}
  105. void test_cmps(void *mem)
  106. {
  107. unsigned char *m1 = mem, *m2 = mem + 1024;
  108. unsigned char m3[1024];
  109. for (int i = 0; i < 100; ++i)
  110. m1[i] = m2[i] = m3[i] = i;
  111. for (int i = 100; i < 200; ++i)
  112. m1[i] = (m3[i] = m2[i] = i) + 1;
  113. test_cmps_one(m1, m3);
  114. test_cmps_one(m1, m2);
  115. }
/*
 * Exercise scas{b,w,l,q} against a known 8-byte pattern at *mem
 * (0x00077665544332211), checking both the match/mismatch ZF result and
 * that RDI advances by the operand size. The high bytes loaded into
 * RAX beyond the operand size (e.g. 0xff in 0xff11) must be ignored.
 */
void test_scas(void *mem)
{
    bool z;
    void *di;
    *(ulong *)mem = 0x77665544332211;
    di = mem;
    asm ("scasb; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff11));
    report("scasb match", di == mem + 1 && z);
    di = mem;
    asm ("scasb; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff54));
    report("scasb mismatch", di == mem + 1 && !z);
    di = mem;
    asm ("scasw; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff2211));
    report("scasw match", di == mem + 2 && z);
    di = mem;
    asm ("scasw; setz %0" : "=rm"(z), "+D"(di) : "a"(0xffdd11));
    report("scasw mismatch", di == mem + 2 && !z);
    di = mem;
    asm ("scasl; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff44332211ul));
    report("scasd match", di == mem + 4 && z);
    di = mem;
    asm ("scasl; setz %0" : "=rm"(z), "+D"(di) : "a"(0x45332211));
    report("scasd mismatch", di == mem + 4 && !z);
    di = mem;
    asm ("scasq; setz %0" : "=rm"(z), "+D"(di) : "a"(0x77665544332211ul));
    report("scasq match", di == mem + 8 && z);
    di = mem;
    asm ("scasq; setz %0" : "=rm"(z), "+D"(di) : "a"(3));
    report("scasq mismatch", di == mem + 8 && !z);
}
/*
 * Write 3 to CR8 (the TPR) and read it back: the round trip must
 * preserve the value and leave the source register untouched.
 */
void test_cr8(void)
{
    unsigned long src, dst;
    dst = 777; /* poison so a failed read-back is visible */
    src = 3;
    asm volatile("mov %[src], %%cr8; mov %%cr8, %[dst]"
                 : [dst]"+r"(dst), [src]"+r"(src));
    report("mov %%cr8", dst == 3 && src == 3);
}
/*
 * Exercise the four push forms (sign-extended imm8, register, memory
 * operand, imm32) on a private stack carved out of mem, then verify
 * each pushed value in place. The real RSP is saved/restored inside
 * the single asm block.
 */
void test_push(void *mem)
{
    unsigned long tmp;
    unsigned long *stack_top = mem + 4096;
    unsigned long *new_stack_top;
    unsigned long memw = 0x123456789abcdeful;
    memset(mem, 0x55, (void *)stack_top - mem);
    asm volatile("mov %%rsp, %[tmp] \n\t"
                 "mov %[stack_top], %%rsp \n\t"
                 "pushq $-7 \n\t"
                 "pushq %[reg] \n\t"
                 "pushq (%[mem]) \n\t"
                 "pushq $-7070707 \n\t"
                 "mov %%rsp, %[new_stack_top] \n\t"
                 "mov %[tmp], %%rsp"
                 : [tmp]"=&r"(tmp), [new_stack_top]"=r"(new_stack_top)
                 : [stack_top]"r"(stack_top),
                   [reg]"r"(-17l), [mem]"r"(&memw)
                 : "memory");
    report("push $imm8", stack_top[-1] == -7ul);
    report("push %%reg", stack_top[-2] == -17ul);
    report("push mem", stack_top[-3] == 0x123456789abcdeful);
    report("push $imm", stack_top[-4] == -7070707);
}
/*
 * Exercise pop to memory and register, plus the other stack-consuming
 * instructions: ret, leave, and enter. Each asm block swaps in a
 * private stack (at mem + 4096) and restores the real RSP before
 * returning to C.
 */
void test_pop(void *mem)
{
    unsigned long tmp, tmp3, rsp, rbp;
    unsigned long *stack_top = mem + 4096;
    unsigned long memw = 0x123456789abcdeful;
    static unsigned long tmp2;
    memset(mem, 0x55, (void *)stack_top - mem);
    /* pop to a memory destination, using the regular stack */
    asm volatile("pushq %[val] \n\t"
                 "popq (%[mem])"
                 : : [val]"m"(memw), [mem]"r"(mem) : "memory");
    report("pop mem", *(unsigned long *)mem == memw);
    memw = 7 - memw; /* new value so each test is independent */
    asm volatile("mov %%rsp, %[tmp] \n\t"
                 "mov %[stack_top], %%rsp \n\t"
                 "pushq %[val] \n\t"
                 "popq %[tmp2] \n\t"
                 "mov %[tmp], %%rsp"
                 : [tmp]"=&r"(tmp), [tmp2]"=m"(tmp2)
                 : [val]"r"(memw), [stack_top]"r"(stack_top)
                 : "memory");
    report("pop mem (2)", tmp2 == memw);
    memw = 129443 - memw;
    asm volatile("mov %%rsp, %[tmp] \n\t"
                 "mov %[stack_top], %%rsp \n\t"
                 "pushq %[val] \n\t"
                 "popq %[tmp2] \n\t"
                 "mov %[tmp], %%rsp"
                 : [tmp]"=&r"(tmp), [tmp2]"=r"(tmp2)
                 : [val]"r"(memw), [stack_top]"r"(stack_top)
                 : "memory");
    report("pop reg", tmp2 == memw);
    /* ret: push the label address and return to it; the 2: loop would
       hang forever if ret jumped anywhere else */
    asm volatile("mov %%rsp, %[tmp] \n\t"
                 "mov %[stack_top], %%rsp \n\t"
                 "push $1f \n\t"
                 "ret \n\t"
                 "2: jmp 2b \n\t"
                 "1: mov %[tmp], %%rsp"
                 : [tmp]"=&r"(tmp) : [stack_top]"r"(stack_top)
                 : "memory");
    report("ret", 1);
    /* leave: rsp <- rbp; pop rbp. Run with rbp = stack_top - 1 so it
       pops the 0x778899 planted there. */
    stack_top[-1] = 0x778899;
    asm volatile("mov %[stack_top], %%r8 \n\t"
                 "mov %%rsp, %%r9 \n\t"
                 "xchg %%rbp, %%r8 \n\t"
                 "leave \n\t"
                 "xchg %%rsp, %%r9 \n\t"
                 "xchg %%rbp, %%r8 \n\t"
                 "mov %%r9, %[tmp] \n\t"
                 "mov %%r8, %[tmp3]"
                 : [tmp]"=&r"(tmp), [tmp3]"=&r"(tmp3) : [stack_top]"r"(stack_top-1)
                 : "memory", "r8", "r9");
    report("leave", tmp == (ulong)stack_top && tmp3 == 0x778899);
    /* enter $0x1238, $0: push rbp, set rbp = rsp, reserve 0x1238 bytes */
    rbp = 0xaa55aa55bb66bb66ULL;
    rsp = (unsigned long)stack_top;
    asm volatile("mov %[rsp], %%r8 \n\t"
                 "mov %[rbp], %%r9 \n\t"
                 "xchg %%rsp, %%r8 \n\t"
                 "xchg %%rbp, %%r9 \n\t"
                 "enter $0x1238, $0 \n\t"
                 "xchg %%rsp, %%r8 \n\t"
                 "xchg %%rbp, %%r9 \n\t"
                 "xchg %%r8, %[rsp] \n\t"
                 "xchg %%r9, %[rbp]"
                 : [rsp]"+a"(rsp), [rbp]"+b"(rbp) : : "memory", "r8", "r9");
    report("enter",
           rsp == (unsigned long)stack_top - 8 - 0x1238
           && rbp == (unsigned long)stack_top - 8
           && stack_top[-1] == 0xaa55aa55bb66bb66ULL);
}
/*
 * Far indirect jump: build a 10-byte far pointer in mem (64-bit offset
 * of the local label followed by the current %cs selector) and execute
 * "rex64/ljmp *mem". If the jump lands on jmpf, res stays 1; falling
 * through to "res = 0" would mean the jump did not happen.
 */
void test_ljmp(void *mem)
{
    unsigned char *m = mem;
    volatile int res = 1;

    /* offset part: address of the landing label */
    *(unsigned long**)m = &&jmpf;
    /* selector part: store the current %cs just past the offset */
    asm volatile ("data16/mov %%cs, %0":"=m"(*(m + sizeof(unsigned long))));
    asm volatile ("rex64/ljmp *%0"::"m"(*m));
    res = 0;
jmpf:
    report("ljmp", res);
}
/*
 * Exercise inc/dec in byte and dword forms, with and without a LOCK
 * prefix, plus locked neg/not in quad and byte forms. The byte-size
 * inc/dec operate on the low byte of *m, which is 0 or 1 at that
 * point, so the full-word comparisons still hold.
 */
void test_incdecnotneg(void *mem)
{
    unsigned long *m = mem, v = 1234;
    unsigned char *mb = mem, vb = 66;
    *m = 0;
    asm volatile ("incl %0":"+m"(*m));
    report("incl", *m == 1);
    asm volatile ("decl %0":"+m"(*m));
    report("decl", *m == 0);
    asm volatile ("incb %0":"+m"(*m));
    report("incb", *m == 1);
    asm volatile ("decb %0":"+m"(*m));
    report("decb", *m == 0);
    asm volatile ("lock incl %0":"+m"(*m));
    report("lock incl", *m == 1);
    asm volatile ("lock decl %0":"+m"(*m));
    report("lock decl", *m == 0);
    asm volatile ("lock incb %0":"+m"(*m));
    report("lock incb", *m == 1);
    asm volatile ("lock decb %0":"+m"(*m));
    report("lock decb", *m == 0);
    /* locked two's-complement negate and bitwise not, mirrored in C */
    *m = v;
    asm ("lock negq %0" : "+m"(*m)); v = -v;
    report("lock negl", *m == v);
    asm ("lock notq %0" : "+m"(*m)); v = ~v;
    report("lock notl", *m == v);
    *mb = vb;
    asm ("lock negb %0" : "+m"(*mb)); vb = -vb;
    report("lock negb", *mb == vb);
    asm ("lock notb %0" : "+m"(*mb)); vb = ~vb;
    report("lock notb", *mb == vb);
}
/*
 * smsw (store machine status word, i.e. the low 16 bits of CR0) to a
 * register, to normal memory (checking no neighboring words are
 * clobbered), and to h_mem — presumably a page that forces a VM exit
 * so the hypervisor emulates the store; verify only the low word of
 * *h_mem changes.
 */
void test_smsw(uint64_t *h_mem)
{
    char mem[16];
    unsigned short msw, msw_orig, *pmsw;
    int i, zero;

    msw_orig = read_cr0();
    asm("smsw %0" : "=r"(msw));
    report("smsw (1)", msw == msw_orig);
    memset(mem, 0, 16);
    pmsw = (void *)mem;
    asm("smsw %0" : "=m"(pmsw[4]));
    /* all words except pmsw[4] must remain zero */
    zero = 1;
    for (i = 0; i < 8; ++i)
        if (i != 4 && pmsw[i])
            zero = 0;
    report("smsw (2)", msw == pmsw[4] && zero);
    /* Trigger exit on smsw */
    *h_mem = 0x12345678abcdeful;
    asm volatile("smsw %0" : "+m"(*h_mem));
    report("smsw (3)", msw == (unsigned short)*h_mem &&
           (*h_mem & ~0xfffful) == 0x12345678ab0000ul);
}
/*
 * lmsw (load machine status word) from register and memory operands:
 * flipping CR0.TS (bit 3) and CR0.EM (bit 2) must take effect, but
 * lmsw must not be able to clear CR0.PE (bit 0). CR0 is restored at
 * the end.
 */
void test_lmsw(void)
{
    char mem[16];
    unsigned short msw, *pmsw;
    unsigned long cr0;

    cr0 = read_cr0();
    /* flip TS via the register form */
    msw = cr0 ^ 8;
    asm("lmsw %0" : : "r"(msw));
    printf("before %lx after %lx\n", cr0, read_cr0());
    report("lmsw (1)", (cr0 ^ read_cr0()) == 8);
    /* restore via the memory form */
    pmsw = (void *)mem;
    *pmsw = cr0;
    asm("lmsw %0" : : "m"(*pmsw));
    printf("before %lx after %lx\n", cr0, read_cr0());
    report("lmsw (2)", cr0 == read_cr0());
    /* lmsw can't clear cr0.pe */
    msw = (cr0 & ~1ul) ^ 4; /* change EM to force trap */
    asm("lmsw %0" : : "r"(msw));
    report("lmsw (3)", (cr0 ^ read_cr0()) == 4 && (cr0 & 1));
    /* back to normal */
    msw = cr0;
    asm("lmsw %0" : : "r"(msw));
}
/*
 * xchg between a register and memory at all four operand sizes. Each
 * case seeds *memq with 0x0123456789abcdef and rax with
 * 0xfedcba9876543210, swaps, and checks both sides. The 32-bit case
 * also verifies that writing %eax zero-extends into the upper half of
 * rax (expected rax is 0x89abcdef with high bits clear).
 */
void test_xchg(void *mem)
{
    unsigned long *memq = mem;
    unsigned long rax;

    asm volatile("mov $0x123456789abcdef, %%rax\n\t"
                 "mov %%rax, (%[memq])\n\t"
                 "mov $0xfedcba9876543210, %%rax\n\t"
                 "xchg %%al, (%[memq])\n\t"
                 "mov %%rax, %[rax]\n\t"
                 : [rax]"=r"(rax)
                 : [memq]"r"(memq)
                 : "memory", "rax");
    report("xchg reg, r/m (1)",
           rax == 0xfedcba98765432ef && *memq == 0x123456789abcd10);
    asm volatile("mov $0x123456789abcdef, %%rax\n\t"
                 "mov %%rax, (%[memq])\n\t"
                 "mov $0xfedcba9876543210, %%rax\n\t"
                 "xchg %%ax, (%[memq])\n\t"
                 "mov %%rax, %[rax]\n\t"
                 : [rax]"=r"(rax)
                 : [memq]"r"(memq)
                 : "memory", "rax");
    report("xchg reg, r/m (2)",
           rax == 0xfedcba987654cdef && *memq == 0x123456789ab3210);
    asm volatile("mov $0x123456789abcdef, %%rax\n\t"
                 "mov %%rax, (%[memq])\n\t"
                 "mov $0xfedcba9876543210, %%rax\n\t"
                 "xchg %%eax, (%[memq])\n\t"
                 "mov %%rax, %[rax]\n\t"
                 : [rax]"=r"(rax)
                 : [memq]"r"(memq)
                 : "memory", "rax");
    report("xchg reg, r/m (3)",
           rax == 0x89abcdef && *memq == 0x123456776543210);
    asm volatile("mov $0x123456789abcdef, %%rax\n\t"
                 "mov %%rax, (%[memq])\n\t"
                 "mov $0xfedcba9876543210, %%rax\n\t"
                 "xchg %%rax, (%[memq])\n\t"
                 "mov %%rax, %[rax]\n\t"
                 : [rax]"=r"(rax)
                 : [memq]"r"(memq)
                 : "memory", "rax");
    report("xchg reg, r/m (4)",
           rax == 0x123456789abcdef && *memq == 0xfedcba9876543210);
}
/*
 * xadd between a register and memory at all four operand sizes: the
 * register receives the old memory value and memory receives the sum.
 * Same seed values as test_xchg; the 32-bit case again checks the
 * implicit zero-extension of the destination register.
 */
void test_xadd(void *mem)
{
    unsigned long *memq = mem;
    unsigned long rax;

    asm volatile("mov $0x123456789abcdef, %%rax\n\t"
                 "mov %%rax, (%[memq])\n\t"
                 "mov $0xfedcba9876543210, %%rax\n\t"
                 "xadd %%al, (%[memq])\n\t"
                 "mov %%rax, %[rax]\n\t"
                 : [rax]"=r"(rax)
                 : [memq]"r"(memq)
                 : "memory", "rax");
    report("xadd reg, r/m (1)",
           rax == 0xfedcba98765432ef && *memq == 0x123456789abcdff);
    asm volatile("mov $0x123456789abcdef, %%rax\n\t"
                 "mov %%rax, (%[memq])\n\t"
                 "mov $0xfedcba9876543210, %%rax\n\t"
                 "xadd %%ax, (%[memq])\n\t"
                 "mov %%rax, %[rax]\n\t"
                 : [rax]"=r"(rax)
                 : [memq]"r"(memq)
                 : "memory", "rax");
    report("xadd reg, r/m (2)",
           rax == 0xfedcba987654cdef && *memq == 0x123456789abffff);
    asm volatile("mov $0x123456789abcdef, %%rax\n\t"
                 "mov %%rax, (%[memq])\n\t"
                 "mov $0xfedcba9876543210, %%rax\n\t"
                 "xadd %%eax, (%[memq])\n\t"
                 "mov %%rax, %[rax]\n\t"
                 : [rax]"=r"(rax)
                 : [memq]"r"(memq)
                 : "memory", "rax");
    report("xadd reg, r/m (3)",
           rax == 0x89abcdef && *memq == 0x1234567ffffffff);
    asm volatile("mov $0x123456789abcdef, %%rax\n\t"
                 "mov %%rax, (%[memq])\n\t"
                 "mov $0xfedcba9876543210, %%rax\n\t"
                 "xadd %%rax, (%[memq])\n\t"
                 "mov %%rax, %[rax]\n\t"
                 : [rax]"=r"(rax)
                 : [memq]"r"(memq)
                 : "memory", "rax");
    report("xadd reg, r/m (4)",
           rax == 0x123456789abcdef && *memq == 0xffffffffffffffff);
}
/*
 * btc (bit test-and-complement) addressing semantics on a zeroed
 * 4-dword array:
 *  - imm8 offsets wrap modulo the operand size, so "btcl $32" toggles
 *    bit 0 of a[0] (expected a[0] == 1);
 *  - register offsets are signed and may address outside the named
 *    dword: offset 66 from a[0] lands in a[2] bit 2, offset -1 from
 *    a[3] lands in a[2] bit 31, and offset -1 (64-bit) from a[2]
 *    lands in a[1] bit 31.
 */
void test_btc(void *mem)
{
    unsigned int *a = mem;

    memset(mem, 0, 4 * sizeof(unsigned int));

    asm ("btcl $32, %0" :: "m"(a[0]) : "memory");
    asm ("btcl $1, %0" :: "m"(a[1]) : "memory");
    asm ("btcl %1, %0" :: "m"(a[0]), "r"(66) : "memory");
    report("btcl imm8, r/m", a[0] == 1 && a[1] == 2 && a[2] == 4);

    asm ("btcl %1, %0" :: "m"(a[3]), "r"(-1) : "memory");
    report("btcl reg, r/m", a[0] == 1 && a[1] == 2 && a[2] == 0x80000004);

    asm ("btcq %1, %0" : : "m"(a[2]), "r"(-1l) : "memory");
    report("btcq reg, r/m", a[0] == 1 && a[1] == 0x80000002 &&
           a[2] == 0x80000004 && a[3] == 0);
}
/*
 * bsf/bsr (bit scan forward/reverse) at 16-, 32- and 64-bit widths.
 * Each operand has exactly two set bits (0xc...), so bsf must report
 * the lower index and bsr the higher. A zero source must set ZF
 * (checked via setz).
 */
void test_bsfbsr(void *mem)
{
    unsigned long rax, *memq = mem;
    unsigned eax, *meml = mem;
    unsigned short ax, *memw = mem;
    unsigned char z;

    *memw = 0xc000;
    asm("bsfw %[mem], %[a]" : [a]"=a"(ax) : [mem]"m"(*memw));
    report("bsfw r/m, reg", ax == 14);
    *meml = 0xc0000000;
    asm("bsfl %[mem], %[a]" : [a]"=a"(eax) : [mem]"m"(*meml));
    report("bsfl r/m, reg", eax == 30);
    *memq = 0xc00000000000;
    asm("bsfq %[mem], %[a]" : [a]"=a"(rax) : [mem]"m"(*memq));
    report("bsfq r/m, reg", rax == 46);
    /* zero source: ZF must be set */
    *memq = 0;
    asm("bsfq %[mem], %[a]; setz %[z]"
        : [a]"=a"(rax), [z]"=rm"(z) : [mem]"m"(*memq));
    report("bsfq r/m, reg", z == 1);
    *memw = 0xc000;
    asm("bsrw %[mem], %[a]" : [a]"=a"(ax) : [mem]"m"(*memw));
    report("bsrw r/m, reg", ax == 15);
    *meml = 0xc0000000;
    asm("bsrl %[mem], %[a]" : [a]"=a"(eax) : [mem]"m"(*meml));
    report("bsrl r/m, reg", eax == 31);
    *memq = 0xc00000000000;
    asm("bsrq %[mem], %[a]" : [a]"=a"(rax) : [mem]"m"(*memq));
    report("bsrq r/m, reg", rax == 47);
    *memq = 0;
    asm("bsrq %[mem], %[a]; setz %[z]"
        : [a]"=a"(rax), [z]"=rm"(z) : [mem]"m"(*memq));
    report("bsrq r/m, reg", z == 1);
}
/*
 * Two- and three-operand imul with a memory source, at 16/32/64-bit
 * widths and with imm8 (51) and imm16/32 (311) immediates. The
 * expected values encode the width-specific truncation: the 16-bit
 * form leaves the upper 48 bits of rax intact, the 32-bit form
 * zero-extends, the 64-bit form uses the full register.
 */
static void test_imul(ulong *mem)
{
    ulong a;

    *mem = 51; a = 0x1234567812345678UL;
    asm ("imulw %1, %%ax" : "+a"(a) : "m"(*mem));
    report("imul ax, mem", a == 0x12345678123439e8);

    *mem = 51; a = 0x1234567812345678UL;
    asm ("imull %1, %%eax" : "+a"(a) : "m"(*mem));
    report("imul eax, mem", a == 0xa06d39e8);

    *mem = 51; a = 0x1234567812345678UL;
    asm ("imulq %1, %%rax" : "+a"(a) : "m"(*mem));
    report("imul rax, mem", a == 0xA06D39EBA06D39E8UL);

    *mem = 0x1234567812345678UL; a = 0x8765432187654321L;
    asm ("imulw $51, %1, %%ax" : "+a"(a) : "m"(*mem));
    report("imul ax, mem, imm8", a == 0x87654321876539e8);

    *mem = 0x1234567812345678UL;
    asm ("imull $51, %1, %%eax" : "+a"(a) : "m"(*mem));
    report("imul eax, mem, imm8", a == 0xa06d39e8);

    *mem = 0x1234567812345678UL;
    asm ("imulq $51, %1, %%rax" : "+a"(a) : "m"(*mem));
    report("imul rax, mem, imm8", a == 0xA06D39EBA06D39E8UL);

    *mem = 0x1234567812345678UL; a = 0x8765432187654321L;
    asm ("imulw $311, %1, %%ax" : "+a"(a) : "m"(*mem));
    report("imul ax, mem, imm", a == 0x8765432187650bc8);

    *mem = 0x1234567812345678UL;
    asm ("imull $311, %1, %%eax" : "+a"(a) : "m"(*mem));
    report("imul eax, mem, imm", a == 0x1d950bc8);

    *mem = 0x1234567812345678UL;
    asm ("imulq $311, %1, %%rax" : "+a"(a) : "m"(*mem));
    report("imul rax, mem, imm", a == 0x1D950BDE1D950BC8L);
}
/*
 * divq with a zero divisor (must fault — ASM_TRY catches the #DE and
 * the "ex" flag stays set because the movb after the div is skipped),
 * divq with a 128-bit dividend, and mul at all four widths against
 * fixed patterns.
 */
static void test_muldiv(long *mem)
{
    long a, d, aa, dd;
    u8 ex = 1;

    /* division by zero: expect a fault, leaving a/d/ex untouched */
    *mem = 0; a = 1; d = 2;
    asm (ASM_TRY("1f") "divq %3; movb $0, %2; 1:"
         : "+a"(a), "+d"(d), "+q"(ex) : "m"(*mem));
    report("divq (fault)", a == 1 && d == 2 && ex);

    /* normal 128/64 division: quotient in rax, remainder in rdx */
    *mem = 987654321098765UL; a = 123456789012345UL; d = 123456789012345UL;
    asm (ASM_TRY("1f") "divq %3; movb $0, %2; 1:"
         : "+a"(a), "+d"(d), "+q"(ex) : "m"(*mem));
    report("divq (1)",
           a == 0x1ffffffb1b963b33ul && d == 0x273ba4384ede2ul && !ex);

    /* mulb writes ax only (d untouched); wider forms write rdx:rax */
    aa = 0x1111111111111111; dd = 0x2222222222222222;
    *mem = 0x3333333333333333; a = aa; d = dd;
    asm("mulb %2" : "+a"(a), "+d"(d) : "m"(*mem));
    report("mulb mem", a == 0x1111111111110363 && d == dd);
    *mem = 0x3333333333333333; a = aa; d = dd;
    asm("mulw %2" : "+a"(a), "+d"(d) : "m"(*mem));
    report("mulw mem", a == 0x111111111111c963 && d == 0x2222222222220369);
    *mem = 0x3333333333333333; a = aa; d = dd;
    asm("mull %2" : "+a"(a), "+d"(d) : "m"(*mem));
    report("mull mem", a == 0x962fc963 && d == 0x369d036);
    *mem = 0x3333333333333333; a = aa; d = dd;
    asm("mulq %2" : "+a"(a), "+d"(d) : "m"(*mem));
    report("mulq mem", a == 0x2fc962fc962fc963 && d == 0x369d0369d0369d0);
}
  531. typedef unsigned __attribute__((vector_size(16))) sse128;
  532. typedef union {
  533. sse128 sse;
  534. unsigned u[4];
  535. } sse_union;
  536. static bool sseeq(sse_union *v1, sse_union *v2)
  537. {
  538. bool ok = true;
  539. int i;
  540. for (i = 0; i < 4; ++i) {
  541. ok &= v1->u[i] == v2->u[i];
  542. }
  543. return ok;
  544. }
/*
 * SSE loads/stores through movdqu/movaps/movapd in both directions
 * (register->memory and memory->register). CR0.EM/TS are cleared and
 * CR4.OSFXSR set first so SSE instructions don't fault.
 */
static void test_sse(sse_union *mem)
{
    sse_union v;

    write_cr0(read_cr0() & ~6); /* EM, TS */
    write_cr4(read_cr4() | 0x200); /* OSFXSR */
    v.u[0] = 1; v.u[1] = 2; v.u[2] = 3; v.u[3] = 4;
    asm("movdqu %1, %0" : "=m"(*mem) : "x"(v.sse));
    report("movdqu (read)", sseeq(&v, mem));
    mem->u[0] = 5; mem->u[1] = 6; mem->u[2] = 7; mem->u[3] = 8;
    asm("movdqu %1, %0" : "=x"(v.sse) : "m"(*mem));
    report("movdqu (write)", sseeq(mem, &v));
    v.u[0] = 1; v.u[1] = 2; v.u[2] = 3; v.u[3] = 4;
    asm("movaps %1, %0" : "=m"(*mem) : "x"(v.sse));
    report("movaps (read)", sseeq(mem, &v));
    mem->u[0] = 5; mem->u[1] = 6; mem->u[2] = 7; mem->u[3] = 8;
    asm("movaps %1, %0" : "=x"(v.sse) : "m"(*mem));
    report("movaps (write)", sseeq(&v, mem));
    v.u[0] = 1; v.u[1] = 2; v.u[2] = 3; v.u[3] = 4;
    asm("movapd %1, %0" : "=m"(*mem) : "x"(v.sse));
    report("movapd (read)", sseeq(mem, &v));
    mem->u[0] = 5; mem->u[1] = 6; mem->u[2] = 7; mem->u[3] = 8;
    asm("movapd %1, %0" : "=x"(v.sse) : "m"(*mem));
    report("movapd (write)", sseeq(&v, mem));
}
/*
 * MMX movq in both directions between an %mm register ("y" constraint)
 * and memory. CR0.EM/TS are cleared and the FPU initialized first so
 * MMX instructions don't fault.
 */
static void test_mmx(uint64_t *mem)
{
    uint64_t v;

    write_cr0(read_cr0() & ~6); /* EM, TS */
    asm volatile("fninit");
    v = 0x0102030405060708ULL;
    asm("movq %1, %0" : "=m"(*mem) : "y"(v));
    report("movq (mmx, read)", v == *mem);
    *mem = 0x8070605040302010ull;
    asm("movq %1, %0" : "=y"(v) : "m"(*mem));
    report("movq (mmx, write)", v == *mem);
}
/*
 * RIP-relative addressing: hand-assemble "movb $1, disp32(%rip)" into
 * insn_ram, where disp32 is computed so the effective address is byte
 * 2 of *mem (the displacement is relative to the end of the 7-byte
 * instruction), then call it. Writing 0x01 to byte 2 of the zeroed
 * dword makes *mem == 0x10000.
 */
static void test_rip_relative(unsigned *mem, char *insn_ram)
{
    /* movb $1, mem+2(%rip) */
    insn_ram[0] = 0xc6;          /* opcode */
    insn_ram[1] = 0x05;          /* modrm: rip-relative, /0 */
    *(unsigned *)&insn_ram[2] = 2 + (char *)mem - (insn_ram + 7);
    insn_ram[6] = 0x01;          /* immediate */
    /* ret */
    insn_ram[7] = 0xc3;
    *mem = 0;
    asm("callq *%1" : "+m"(*mem) : "r"(insn_ram));
    report("movb $imm, 0(%%rip)", *mem == 0x10000);
}
/*
 * shld/shrd with the count in %cl: the destination is shifted by 3 and
 * the vacated bits are filled from the top/bottom of the second
 * operand (0b101... from 0xaaaaaaaa, 0b101 from 0x55555555).
 */
static void test_shld_shrd(u32 *mem)
{
    *mem = 0x12345678;
    asm("shld %2, %1, %0" : "+m"(*mem) : "r"(0xaaaaaaaaU), "c"((u8)3));
    report("shld (cl)", *mem == ((0x12345678 << 3) | 5));
    *mem = 0x12345678;
    asm("shrd %2, %1, %0" : "+m"(*mem) : "r"(0x55555555U), "c"((u8)3));
    report("shrd (cl)", *mem == ((0x12345678 >> 3) | (5u << 29)));
}
/*
 * cmovne with a false condition (cmpl %eax,%eax sets ZF): the move
 * must not happen, but the expected value 0x12345678 (upper 32 bits
 * clear) checks that the 32-bit destination is still zero-extended —
 * the test would see 0x1234567812345678 if rax were left untouched.
 */
static void test_cmov(u32 *mem)
{
    u64 val;
    *mem = 0xabcdef12u;
    asm ("movq $0x1234567812345678, %%rax\n\t"
         "cmpl %%eax, %%eax\n\t"
         "cmovnel (%[mem]), %%eax\n\t"
         "movq %%rax, %[val]\n\t"
         : [val]"=r"(val) : [mem]"r"(mem) : "%rax", "cc");
    report("cmovnel", val == 0x12345678ul);
}
/*
 * Swap all 16 GPRs with the `save` register block (offsets match
 * struct regs). Used on entry/exit of the trap page below so a test
 * instruction runs with fully controlled register state.
 */
#define INSN_XCHG_ALL \
    "xchg %rax, 0+save \n\t" \
    "xchg %rbx, 8+save \n\t" \
    "xchg %rcx, 16+save \n\t" \
    "xchg %rdx, 24+save \n\t" \
    "xchg %rsi, 32+save \n\t" \
    "xchg %rdi, 40+save \n\t" \
    "xchg %rsp, 48+save \n\t" \
    "xchg %rbp, 56+save \n\t" \
    "xchg %r8, 64+save \n\t" \
    "xchg %r9, 72+save \n\t" \
    "xchg %r10, 80+save \n\t" \
    "xchg %r11, 88+save \n\t" \
    "xchg %r12, 96+save \n\t" \
    "xchg %r13, 104+save \n\t" \
    "xchg %r14, 112+save \n\t" \
    "xchg %r15, 120+save \n\t"

/*
 * Page-aligned trap page used by trap_emulator(). Called at offset 0
 * it returns immediately (used to prime the code TLB); called at
 * offset 1 it loads rflags and all GPRs from `save`, executes the
 * instruction at test_insn (patched in by trap_emulator), then stores
 * everything back and returns.
 */
asm(
    ".align 4096\n\t"
    "insn_page:\n\t"
    "ret\n\t"
    /* entry point at insn_page + 1 */
    "pushf\n\t"
    "push 136+save \n\t"        /* load rflags from save.rflags */
    "popf \n\t"
    INSN_XCHG_ALL
    "test_insn:\n\t"
    "in (%dx),%al\n\t"
    ".skip 31, 0x90\n\t"        /* NOP padding: room for a substituted insn */
    "test_insn_end:\n\t"
    INSN_XCHG_ALL
    "pushf \n\t"
    "pop 136+save \n\t"         /* store resulting rflags into save.rflags */
    "popf \n\t"
    "ret \n\t"
    "insn_page_end:\n\t"
    ".align 4096\n\t"
);
/*
 * Emit an instruction blob named insn_code_<name> containing `str`,
 * plus a struct insn_desc `insn_<name>` (start address and length)
 * describing it, for use with trap_emulator().
 */
#define MK_INSN(name, str) \
    asm ( \
         ".pushsection .data.insn  \n\t" \
         "insn_" #name ": \n\t" \
         ".quad 1001f, 1002f - 1001f \n\t" \
         ".popsection \n\t" \
         ".pushsection .text.insn, \"ax\" \n\t" \
         "1001: \n\t" \
         "insn_code_" #name ": " str " \n\t" \
         "1002: \n\t" \
         ".popsection" \
    ); \
    extern struct insn_desc insn_##name;
/*
 * Execute the instruction described by alt_insn under forced
 * emulation: copy insn_page to alt_insn_page with the test instruction
 * substituted, prime the ITLB with the original mapping, then remap
 * the page so the fetch traps and the hypervisor decodes from
 * alt_insn_page instead. Register state is passed in via `inregs` and
 * returned in `outregs`.
 */
static void trap_emulator(uint64_t *mem, void *alt_insn_page,
                          struct insn_desc *alt_insn)
{
    ulong *cr3 = (ulong *)read_cr3();
    void *insn_ram;
    extern u8 insn_page[], test_insn[];

    insn_ram = vmap(virt_to_phys(insn_page), 4096);
    /* patch the test instruction into the copy at the test_insn slot */
    memcpy(alt_insn_page, insn_page, 4096);
    memcpy(alt_insn_page + (test_insn - insn_page),
           (void *)(alt_insn->ptr), alt_insn->len);
    save = inregs;

    /* Load the code TLB with insn_page, but point the page tables at
       alt_insn_page (and keep the data TLB clear, for AMD decode assist).
       This will make the CPU trap on the insn_page instruction but the
       hypervisor will see alt_insn_page. */
    install_page(cr3, virt_to_phys(insn_page), insn_ram);
    invlpg(insn_ram);
    /* Load code TLB */
    asm volatile("call *%0" : : "r"(insn_ram));
    install_page(cr3, virt_to_phys(alt_insn_page), insn_ram);
    /* Trap, let hypervisor emulate at alt_insn_page */
    asm volatile("call *%0": : "r"(insn_ram+1));

    outregs = save;
}
/* Byte length of the faulting instruction, set by each test before arming. */
static unsigned long rip_advance;

/* Exception handler: count the fault and skip over the faulting insn. */
static void advance_rip_and_note_exception(struct ex_regs *regs)
{
    ++exceptions;
    regs->rip += rip_advance;
}
/*
 * An emulated MMX movq must still deliver a pending x87 #MF: unmask
 * all FPU exceptions, provoke a divide-by-zero with fdivp, then run
 * "movq %mm0, (%rax)" through trap_emulator and check that exactly
 * one #MF was raised.
 */
static void test_mmx_movq_mf(uint64_t *mem, uint8_t *insn_page,
                             uint8_t *alt_insn_page, void *insn_ram)
{
    uint16_t fcw = 0; /* all exceptions unmasked */
    /* movq %mm0, (%rax) */
    void *stack = alloc_page();

    write_cr0(read_cr0() & ~6); /* TS, EM */
    exceptions = 0;
    handle_exception(MF_VECTOR, advance_rip_and_note_exception);
    asm volatile("fninit; fldcw %0" : : "m"(fcw));
    asm volatile("fldz; fldz; fdivp"); /* generate exception */

    MK_INSN(mmx_movq_mf, "movq %mm0, (%rax) \n\t");
    rip_advance = insn_mmx_movq_mf.len;
    inregs = (struct regs){ .rsp=(u64)stack+1024 };
    trap_emulator(mem, alt_insn_page, &insn_mmx_movq_mf);
    /* exit MMX mode */
    asm volatile("fnclex; emms");
    report("movq mmx generates #MF", exceptions == 1);
    handle_exception(MF_VECTOR, 0);
}
/*
 * An indirect jump to a non-canonical address (0x1111111111111111)
 * must raise #GP. The handler advances RIP past the jmp (the span
 * between the two local labels) so execution resumes after it.
 */
static void test_jmp_noncanonical(uint64_t *mem)
{
    extern char nc_jmp_start, nc_jmp_end;

    *mem = 0x1111111111111111ul;

    exceptions = 0;
    rip_advance = &nc_jmp_end - &nc_jmp_start;
    handle_exception(GP_VECTOR, advance_rip_and_note_exception);
    asm volatile ("nc_jmp_start: jmp *%0; nc_jmp_end:" : : "m"(*mem));
    report("jump to non-canonical address", exceptions == 1);
    handle_exception(GP_VECTOR, 0);
}
/*
 * Emulation of a 64-bit immediate move ("movabs"): run it through
 * trap_emulator and check the full 8-byte immediate landed in rcx.
 */
static void test_movabs(uint64_t *mem, uint8_t *insn_page,
                        uint8_t *alt_insn_page, void *insn_ram)
{
    /* mov $0x9090909090909090, %rcx */
    MK_INSN(movabs, "mov $0x9090909090909090, %rcx\n\t");
    inregs = (struct regs){ 0 };
    trap_emulator(mem, alt_insn_page, &insn_movabs);
    report("64-bit mov imm2", outregs.rcx == 0x9090909090909090);
}
/*
 * Emulated smsw to a register at 16/32/64-bit widths: the 16-bit form
 * must preserve the destination's upper 48 bits, the 32-bit form
 * zero-extends, and the 64-bit form stores all of CR0.
 */
static void test_smsw_reg(uint64_t *mem, uint8_t *insn_page,
                          uint8_t *alt_insn_page, void *insn_ram)
{
    unsigned long cr0 = read_cr0();

    inregs = (struct regs){ .rax = 0x1234567890abcdeful };
    MK_INSN(smsww, "smsww %ax\n\t");
    trap_emulator(mem, alt_insn_page, &insn_smsww);
    report("16-bit smsw reg", (u16)outregs.rax == (u16)cr0 &&
           outregs.rax >> 16 == inregs.rax >> 16);

    MK_INSN(smswl, "smswl %eax\n\t");
    trap_emulator(mem, alt_insn_page, &insn_smswl);
    report("32-bit smsw reg", outregs.rax == (u32)cr0);

    MK_INSN(smswq, "smswq %rax\n\t");
    trap_emulator(mem, alt_insn_page, &insn_smswq);
    report("64-bit smsw reg", outregs.rax == cr0);
}
/* Emulated nop must leave the register state (here rax) untouched. */
static void test_nop(uint64_t *mem, uint8_t *insn_page,
                     uint8_t *alt_insn_page, void *insn_ram)
{
    inregs = (struct regs){ .rax = 0x1234567890abcdeful };
    MK_INSN(nop, "nop\n\t");
    trap_emulator(mem, alt_insn_page, &insn_nop);
    report("nop", outregs.rax == inregs.rax);
}
/*
 * Test emulation of mov to/from a debug register: write 0 to DR6, read
 * it back, and expect only the architectural fixed-1 bits.  The expected
 * mask depends on RTM support (CPUID.07H:EBX bit 11): with RTM, bit 16
 * is not fixed to 1 (0xfffe0ff0), otherwise it is (0xffff0ff0).
 */
static void test_mov_dr(uint64_t *mem, uint8_t *insn_page,
			uint8_t *alt_insn_page, void *insn_ram)
{
	bool rtm_support = cpuid(7).b & (1 << 11);
	unsigned long dr6_fixed_1 = rtm_support ? 0xfffe0ff0ul : 0xffff0ff0ul;

	inregs = (struct regs){ .rax = 0 };
	MK_INSN(mov_to_dr6, "movq %rax, %dr6\n\t");
	trap_emulator(mem, alt_insn_page, &insn_mov_to_dr6);
	MK_INSN(mov_from_dr6, "movq %dr6, %rax\n\t");
	trap_emulator(mem, alt_insn_page, &insn_mov_from_dr6);
	report("mov_dr6", outregs.rax == dr6_fixed_1);
}
  770. static void test_push16(uint64_t *mem)
  771. {
  772. uint64_t rsp1, rsp2;
  773. uint16_t r;
  774. asm volatile ( "movq %%rsp, %[rsp1]\n\t"
  775. "pushw %[v]\n\t"
  776. "popw %[r]\n\t"
  777. "movq %%rsp, %[rsp2]\n\t"
  778. "movq %[rsp1], %%rsp\n\t" :
  779. [rsp1]"=r"(rsp1), [rsp2]"=r"(rsp2), [r]"=r"(r)
  780. : [v]"m"(*mem) : "memory");
  781. report("push16", rsp1 == rsp2);
  782. }
  783. static void test_crosspage_mmio(volatile uint8_t *mem)
  784. {
  785. volatile uint16_t w, *pw;
  786. pw = (volatile uint16_t *)&mem[4095];
  787. mem[4095] = 0x99;
  788. mem[4096] = 0x77;
  789. asm volatile("mov %1, %0" : "=r"(w) : "m"(*pw) : "memory");
  790. report("cross-page mmio read", w == 0x7799);
  791. asm volatile("mov %1, %0" : "=m"(*pw) : "r"((uint16_t)0x88aa));
  792. report("cross-page mmio write", mem[4095] == 0xaa && mem[4096] == 0x88);
  793. }
/*
 * rep insb into an MMIO buffer that crosses a page boundary (mmio starts
 * 64 bytes before the end of the first page; 1024 bytes spill well into
 * the second).  The outw primes the test device port; presumably the
 * device then feeds 0x99 bytes back on insb — confirm against testdev.
 */
static void test_string_io_mmio(volatile uint8_t *mem)
{
	/* Cross MMIO pages.*/
	volatile uint8_t *mmio = mem + 4032;

	asm volatile("outw %%ax, %%dx \n\t" : : "a"(0x9999), "d"(TESTDEV_IO_PORT));
	asm volatile ("cld; rep insb" : : "d" (TESTDEV_IO_PORT), "D" (mmio), "c" (1024));
	report("string_io_mmio", mmio[1023] == 0x99);
}
/* kvm doesn't allow lidt/lgdt from mmio, so the test is disabled */
#if 0
/*
 * (Disabled.)  Load GDTR/IDTR from a descriptor-table pointer stored in
 * MMIO memory, read the result back with sgdt/sidt, then restore the
 * original tables.  Interrupts are masked across each swap because the
 * temporary tables contain garbage bases/limits.
 */
static void test_lgdt_lidt(volatile uint8_t *mem)
{
	struct descriptor_table_ptr orig, fresh = {};

	sgdt(&orig);
	*(struct descriptor_table_ptr *)mem = (struct descriptor_table_ptr) {
		.limit = 0xf234,
		.base = 0x12345678abcd,
	};
	cli();
	asm volatile("lgdt %0" : : "m"(*(struct descriptor_table_ptr *)mem));
	sgdt(&fresh);
	lgdt(&orig);
	sti();
	report("lgdt (long address)", orig.limit == fresh.limit && orig.base == fresh.base);

	sidt(&orig);
	*(struct descriptor_table_ptr *)mem = (struct descriptor_table_ptr) {
		.limit = 0x432f,
		.base = 0xdbca87654321,
	};
	cli();
	asm volatile("lidt %0" : : "m"(*(struct descriptor_table_ptr *)mem));
	sidt(&fresh);
	lidt(&orig);
	sti();
	report("lidt (long address)", orig.limit == fresh.limit && orig.base == fresh.base);
}
#endif
/*
 * #GP handler for test_sreg: count the fault and resume execution at the
 * ss_bad_rpl_cont asm label, skipping the faulting mov-to-%ss.
 */
static void ss_bad_rpl(struct ex_regs *regs)
{
	extern char ss_bad_rpl_cont;

	++exceptions;
	regs->rip = (ulong)&ss_bad_rpl_cont;
}
/*
 * Test segment-register loads of %ss through memory: a null selector
 * must load successfully, while a null selector whose RPL differs from
 * CPL must raise #GP (leaving %ss at the previously loaded null value).
 * The original %ss is restored at the end.
 */
static void test_sreg(volatile uint16_t *mem)
{
	u16 ss = read_ss();

	// check for null segment load
	*mem = 0;
	asm volatile("mov %0, %%ss" : : "m"(*mem));
	report("mov null, %%ss", read_ss() == 0);

	// check for exception when ss.rpl != cpl on null segment load
	exceptions = 0;
	handle_exception(GP_VECTOR, ss_bad_rpl);
	*mem = 3;	/* null selector, RPL 3 != CPL 0 */
	asm volatile("mov %0, %%ss; ss_bad_rpl_cont:" : : "m"(*mem));
	report("mov null, %%ss (with ss.rpl != cpl)", exceptions == 1 && read_ss() == 0);
	handle_exception(GP_VECTOR, 0);
	write_ss(ss);
}
/* Broken emulation causes triple fault, which skips the other tests. */
#if 0
/*
 * (Disabled.)  Install a minimal GDT containing an LDT descriptor, load
 * LDTR from a selector stored in MMIO memory, and verify sldt returns
 * that selector.  The original GDT is restored before re-enabling
 * interrupts.
 */
static void test_lldt(volatile uint16_t *mem)
{
	u64 gdt[] = { 0, /* null descriptor */
#ifdef __X86_64__
		      0, /* ldt descriptor is 16 bytes in long mode */
#endif
		      0x0000f82000000ffffull /* ldt descriptor */ };
	struct descriptor_table_ptr gdt_ptr = { .limit = sizeof(gdt) - 1,
						.base = (ulong)&gdt };
	struct descriptor_table_ptr orig_gdt;

	cli();
	sgdt(&orig_gdt);
	lgdt(&gdt_ptr);
	*mem = 0x8;
	asm volatile("lldt %0" : : "m"(*mem));
	lgdt(&orig_gdt);
	sti();
	report("lldt", sldt() == *mem);
}
#endif
/*
 * Test ltr emulation: reload TR with its current selector, taken from a
 * memory operand.  TR can only be loaded from an available (non-busy)
 * TSS descriptor, so the busy bit (bit 41 of the descriptor) is cleared
 * in the GDT first; a successful ltr must set it again and leave str()
 * unchanged.
 */
static void test_ltr(volatile uint16_t *mem)
{
	struct descriptor_table_ptr gdt_ptr;
	uint64_t *gdt, *trp;
	uint16_t tr = str();
	uint64_t busy_mask = (uint64_t)1 << 41;

	sgdt(&gdt_ptr);
	gdt = (uint64_t *)gdt_ptr.base;
	trp = &gdt[tr >> 3];	/* descriptor currently loaded in TR */
	*trp &= ~busy_mask;	/* mark available so ltr doesn't #GP */
	*mem = tr;		/* NOTE(review): mem is MMIO-backed, presumably to force emulation — confirm */
	asm volatile("ltr %0" : : "m"(*mem) : "memory");
	report("ltr", str() == tr && (*trp & busy_mask));
}
  889. static void test_simplealu(u32 *mem)
  890. {
  891. *mem = 0x1234;
  892. asm("or %1, %0" : "+m"(*mem) : "r"(0x8001));
  893. report("or", *mem == 0x9235);
  894. asm("add %1, %0" : "+m"(*mem) : "r"(2));
  895. report("add", *mem == 0x9237);
  896. asm("xor %1, %0" : "+m"(*mem) : "r"(0x1111));
  897. report("xor", *mem == 0x8326);
  898. asm("sub %1, %0" : "+m"(*mem) : "r"(0x26));
  899. report("sub", *mem == 0x8300);
  900. asm("clc; adc %1, %0" : "+m"(*mem) : "r"(0x100));
  901. report("adc(0)", *mem == 0x8400);
  902. asm("stc; adc %1, %0" : "+m"(*mem) : "r"(0x100));
  903. report("adc(0)", *mem == 0x8501);
  904. asm("clc; sbb %1, %0" : "+m"(*mem) : "r"(0));
  905. report("sbb(0)", *mem == 0x8501);
  906. asm("stc; sbb %1, %0" : "+m"(*mem) : "r"(0));
  907. report("sbb(1)", *mem == 0x8500);
  908. asm("and %1, %0" : "+m"(*mem) : "r"(0xfe77));
  909. report("and", *mem == 0x8400);
  910. asm("test %1, %0" : "+m"(*mem) : "r"(0xf000));
  911. report("test", *mem == 0x8400);
  912. }
  913. static void illegal_movbe_handler(struct ex_regs *regs)
  914. {
  915. extern char bad_movbe_cont;
  916. ++exceptions;
  917. regs->rip = (ulong)&bad_movbe_cont;
  918. }
/*
 * Execute a movbe with a register-register ModRM encoding (0f 38 f0 c0),
 * which is an invalid form and must raise #UD.  Skipped when the CPU
 * lacks MOVBE (CPUID.01H:ECX bit 22), since the opcode would #UD for the
 * wrong reason.
 */
static void test_illegal_movbe(void)
{
	if (!(cpuid(1).c & (1 << 22))) {
		report_skip("illegal movbe");
		return;
	}

	exceptions = 0;
	handle_exception(UD_VECTOR, illegal_movbe_handler);
	asm volatile(".byte 0x0f; .byte 0x38; .byte 0xf0; .byte 0xc0;\n\t"
		     " bad_movbe_cont:" : : : "rax");
	report("illegal movbe", exceptions == 1);
	handle_exception(UD_VECTOR, 0);
}
/*
 * Test entry point.  Sets up paging and the IDT, maps the test-device
 * MMIO page twice back-to-back (for the cross-page cases), allocates the
 * scratch pages used by trap_emulator, runs an inline mov r/m smoke
 * test, then invokes each emulation test in turn.
 */
int main()
{
	void *mem;
	void *insn_page, *alt_insn_page;
	void *insn_ram;
	unsigned long t1, t2;

	setup_vm();
	setup_idt();
	mem = alloc_vpages(2);
	install_page((void *)read_cr3(), IORAM_BASE_PHYS, mem);
	// install the page twice to test cross-page mmio
	install_page((void *)read_cr3(), IORAM_BASE_PHYS, mem + 4096);
	insn_page = alloc_page();
	alt_insn_page = alloc_page();
	insn_ram = vmap(virt_to_phys(insn_page), 4096);

	// test mov reg, r/m and mov r/m, reg
	t1 = 0x123456789abcdef;
	asm volatile("mov %[t1], (%[mem]) \n\t"
		     "mov (%[mem]), %[t2]"
		     : [t2]"=r"(t2)
		     : [t1]"r"(t1), [mem]"r"(mem)
		     : "memory");
	report("mov reg, r/m (1)", t2 == 0x123456789abcdef);

	test_simplealu(mem);
	test_cmps(mem);
	test_scas(mem);
	test_push(mem);
	test_pop(mem);
	test_xchg(mem);
	test_xadd(mem);
	test_cr8();
	test_smsw(mem);
	test_lmsw();
	test_ljmp(mem);
	test_stringio();
	test_incdecnotneg(mem);
	test_btc(mem);
	test_bsfbsr(mem);
	test_imul(mem);
	test_muldiv(mem);
	test_sse(mem);
	test_mmx(mem);
	test_rip_relative(mem, insn_ram);
	test_shld_shrd(mem);
	//test_lgdt_lidt(mem);
	test_sreg(mem);
	//test_lldt(mem);
	test_ltr(mem);
	test_cmov(mem);
	test_mmx_movq_mf(mem, insn_page, alt_insn_page, insn_ram);
	test_movabs(mem, insn_page, alt_insn_page, insn_ram);
	test_smsw_reg(mem, insn_page, alt_insn_page, insn_ram);
	test_nop(mem, insn_page, alt_insn_page, insn_ram);
	test_mov_dr(mem, insn_page, alt_insn_page, insn_ram);
	test_push16(mem);
	test_crosspage_mmio(mem);
	test_string_io_mmio(mem);
	test_jmp_noncanonical(mem);
	test_illegal_movbe();
	return report_summary();
}