codegen.rs 54 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809
  1. use cpu::cpu::{
  2. tlb_data, FLAG_CARRY, FLAG_OVERFLOW, FLAG_SIGN, FLAG_ZERO, TLB_GLOBAL, TLB_HAS_CODE,
  3. TLB_NO_USER, TLB_READONLY, TLB_VALID,
  4. };
  5. use cpu::global_pointers;
  6. use cpu::imports::mem8;
  7. use cpu::memory;
  8. use jit::JitContext;
  9. use modrm;
  10. use modrm::ModrmByte;
  11. use profiler;
  12. use regs;
  13. use wasmgen::wasm_builder::{WasmBuilder, WasmLocal, WasmLocalI64};
/// Add the CS segment base (loaded from its fixed memory offset) to the i32
/// offset currently on top of the wasm stack, producing a linear address.
pub fn gen_add_cs_offset(ctx: &mut JitContext) {
    ctx.builder
        .load_fixed_i32(global_pointers::get_seg_offset(regs::CS));
    ctx.builder.add_i32();
}
/// Push the current value of the emulated instruction pointer onto the wasm stack.
fn gen_get_eip(builder: &mut WasmBuilder) {
    builder.load_fixed_i32(global_pointers::instruction_pointer as u32);
}
/// Store `instruction_pointer + n` into `previous_ip`.
pub fn gen_set_previous_eip_offset_from_eip(builder: &mut WasmBuilder, n: u32) {
    // previous_ip = instruction_pointer + n
    builder.const_i32(global_pointers::previous_ip as i32);
    gen_get_eip(builder);
    if n != 0 {
        // skip the add entirely for n == 0 to emit fewer wasm instructions
        builder.const_i32(n as i32);
        builder.add_i32();
    }
    builder.store_aligned_i32(0);
}
/// Overwrite the low 12 bits of the runtime `instruction_pointer` with the low
/// 12 bits of `ctx.cpu.eip`, keeping the runtime page part.
/// NOTE(review): per the function name, `ctx.cpu.eip` at codegen time appears to
/// already point past the current instruction — confirm against the decoder.
pub fn gen_set_eip_to_after_current_instruction(ctx: &mut JitContext) {
    ctx.builder
        .const_i32(global_pointers::instruction_pointer as i32);
    // instruction_pointer = instruction_pointer & ~0xFFF | ctx.cpu.eip & 0xFFF
    gen_get_eip(ctx.builder);
    ctx.builder.const_i32(!0xFFF);
    ctx.builder.and_i32();
    ctx.builder.const_i32(ctx.cpu.eip as i32 & 0xFFF);
    ctx.builder.or_i32();
    ctx.builder.store_aligned_i32(0);
}
/// Store the runtime instruction pointer's page combined with the statically
/// known `low_bits` into `previous_ip`.
pub fn gen_set_previous_eip_offset_from_eip_with_low_bits(
    builder: &mut WasmBuilder,
    low_bits: i32,
) {
    // previous_ip = instruction_pointer & ~0xFFF | low_bits;
    builder.const_i32(global_pointers::previous_ip as i32);
    gen_get_eip(builder);
    builder.const_i32(!0xFFF);
    builder.and_i32();
    builder.const_i32(low_bits);
    builder.or_i32();
    builder.store_aligned_i32(0);
}
/// Emit `instruction_pointer += n`.
pub fn gen_increment_instruction_pointer(builder: &mut WasmBuilder, n: u32) {
    builder.const_i32(global_pointers::instruction_pointer as i32);
    gen_get_eip(builder);
    builder.const_i32(n as i32);
    builder.add_i32();
    builder.store_aligned_i32(0);
}
/// Emit `instruction_pointer += n` for a relative jump (signed displacement).
pub fn gen_relative_jump(builder: &mut WasmBuilder, n: i32) {
    // add n to instruction_pointer (without setting the offset as above)
    builder.const_i32(global_pointers::instruction_pointer as i32);
    gen_get_eip(builder);
    builder.const_i32(n);
    builder.add_i32();
    builder.store_aligned_i32(0);
}
/// Emit an absolute indirect jump to the address held in `new_eip` (the local is
/// consumed and freed). The target is stored into `instruction_pointer`; if it
/// lies in the same 4K page as `previous_ip`, the generated code asks
/// `jit_find_cache_entry_in_page` for a basic block in the current wasm module
/// and branches back to the dispatch loop instead of exiting to the host.
pub fn gen_absolute_indirect_jump(ctx: &mut JitContext, new_eip: WasmLocal) {
    ctx.builder
        .const_i32(global_pointers::instruction_pointer as i32);
    ctx.builder.get_local(&new_eip);
    ctx.builder.store_aligned_i32(0);
    // same page <=> (new_eip ^ previous_ip) & ~0xFFF == 0
    ctx.builder.get_local(&new_eip);
    ctx.builder
        .load_fixed_i32(global_pointers::previous_ip as u32);
    ctx.builder.xor_i32();
    ctx.builder.const_i32(!0xFFF);
    ctx.builder.and_i32();
    ctx.builder.eqz_i32();
    ctx.builder.if_void();
    {
        // try staying in same page
        ctx.builder.get_local(&new_eip);
        ctx.builder.free_local(new_eip);
        ctx.builder
            .const_i32(ctx.start_of_current_instruction as i32);
        ctx.builder.const_i32(ctx.our_wasm_table_index as i32);
        ctx.builder.const_i32(ctx.state_flags.to_u32() as i32);
        ctx.builder.call_fn4_ret("jit_find_cache_entry_in_page");
        let new_basic_block_index = ctx.builder.tee_new_local();
        // a non-negative result is a valid basic-block index for this module
        ctx.builder.const_i32(0);
        ctx.builder.ge_i32();
        ctx.builder.if_void();
        ctx.builder.get_local(&new_basic_block_index);
        ctx.builder.set_local(ctx.basic_block_index_local);
        ctx.builder.br(ctx.current_brtable_depth + 2); // to the loop
        ctx.builder.block_end();
        ctx.builder.free_local(new_basic_block_index);
    }
    ctx.builder.block_end();
}
/// Emit `timestamp_counter += n`.
pub fn gen_increment_timestamp_counter(builder: &mut WasmBuilder, n: i32) {
    builder.increment_fixed_i32(global_pointers::timestamp_counter as u32, n)
}
  107. pub fn gen_get_reg8(ctx: &mut JitContext, r: u32) {
  108. match r {
  109. regs::AL | regs::CL | regs::DL | regs::BL => {
  110. ctx.builder.get_local(&ctx.register_locals[r as usize]);
  111. ctx.builder.const_i32(0xFF);
  112. ctx.builder.and_i32();
  113. },
  114. regs::AH | regs::CH | regs::DH | regs::BH => {
  115. ctx.builder
  116. .get_local(&ctx.register_locals[(r - 4) as usize]);
  117. ctx.builder.const_i32(8);
  118. ctx.builder.shr_u_i32();
  119. ctx.builder.const_i32(0xFF);
  120. ctx.builder.and_i32();
  121. },
  122. _ => assert!(false),
  123. }
  124. }
/// Return a new local referencing one of the 8 bit registers or a direct reference to one of the
/// register locals. Higher bits might be garbage (suitable for gen_cmp8 etc.). Must be freed with
/// gen_free_reg8_or_alias.
pub fn gen_get_reg8_or_alias_to_reg32(ctx: &mut JitContext, r: u32) -> WasmLocal {
    match r {
        // low-byte registers alias the 32-bit register local directly (no copy)
        regs::AL | regs::CL | regs::DL | regs::BL => ctx.register_locals[r as usize].unsafe_clone(),
        // high-byte registers need a shifted copy in a fresh local
        regs::AH | regs::CH | regs::DH | regs::BH => {
            ctx.builder
                .get_local(&ctx.register_locals[(r - 4) as usize]);
            ctx.builder.const_i32(8);
            ctx.builder.shr_u_i32();
            ctx.builder.set_new_local()
        },
        _ => panic!(),
    }
}
/// Release a local obtained from `gen_get_reg8_or_alias_to_reg32`. Aliases to
/// register locals (low-byte registers) must not be freed; shifted copies
/// (high-byte registers) are.
pub fn gen_free_reg8_or_alias(ctx: &mut JitContext, r: u32, local: WasmLocal) {
    match r {
        regs::AL | regs::CL | regs::DL | regs::BL => {},
        regs::AH | regs::CH | regs::DH | regs::BH => ctx.builder.free_local(local),
        _ => panic!(),
    }
}
/// Push the value of 16-bit register `r`, zero-extended to 32 bit, onto the wasm stack.
pub fn gen_get_reg16(ctx: &mut JitContext, r: u32) {
    ctx.builder.get_local(&ctx.register_locals[r as usize]);
    ctx.builder.const_i32(0xFFFF);
    ctx.builder.and_i32();
}
/// Push the value of 32-bit register `r` onto the wasm stack.
pub fn gen_get_reg32(ctx: &mut JitContext, r: u32) {
    ctx.builder.get_local(&ctx.register_locals[r as usize]);
}
  156. pub fn gen_set_reg8(ctx: &mut JitContext, r: u32) {
  157. match r {
  158. regs::AL | regs::CL | regs::DL | regs::BL => {
  159. // reg32[r] = stack_value & 0xFF | reg32[r] & ~0xFF
  160. ctx.builder.const_i32(0xFF);
  161. ctx.builder.and_i32();
  162. ctx.builder.get_local(&ctx.register_locals[r as usize]);
  163. ctx.builder.const_i32(!0xFF);
  164. ctx.builder.and_i32();
  165. ctx.builder.or_i32();
  166. ctx.builder.set_local(&ctx.register_locals[r as usize]);
  167. },
  168. regs::AH | regs::CH | regs::DH | regs::BH => {
  169. // reg32[r] = stack_value << 8 & 0xFF00 | reg32[r] & ~0xFF00
  170. ctx.builder.const_i32(8);
  171. ctx.builder.shl_i32();
  172. ctx.builder.const_i32(0xFF00);
  173. ctx.builder.and_i32();
  174. ctx.builder
  175. .get_local(&ctx.register_locals[(r - 4) as usize]);
  176. ctx.builder.const_i32(!0xFF00);
  177. ctx.builder.and_i32();
  178. ctx.builder.or_i32();
  179. ctx.builder
  180. .set_local(&ctx.register_locals[(r - 4) as usize]);
  181. },
  182. _ => assert!(false),
  183. }
  184. }
/// Pop an i32 off the wasm stack and store its low 16 bits into register `r`.
pub fn gen_set_reg16(ctx: &mut JitContext, r: u32) {
    gen_set_reg16_local(ctx.builder, &ctx.register_locals[r as usize]);
}
/// Pop an i32 off the wasm stack and merge its low 16 bits into `local`,
/// preserving the upper 16 bits.
pub fn gen_set_reg16_local(builder: &mut WasmBuilder, local: &WasmLocal) {
    // reg32[r] = v & 0xFFFF | reg32[r] & ~0xFFFF
    builder.const_i32(0xFFFF);
    builder.and_i32();
    builder.get_local(local);
    builder.const_i32(!0xFFFF);
    builder.and_i32();
    builder.or_i32();
    builder.set_local(local);
}
/// Pop an i32 off the wasm stack and store it into 32-bit register `r`.
pub fn gen_set_reg32(ctx: &mut JitContext, r: u32) {
    ctx.builder.set_local(&ctx.register_locals[r as usize]);
}
  201. pub fn decr_exc_asize(ctx: &mut JitContext) {
  202. gen_get_reg32(ctx, regs::ECX);
  203. ctx.builder.const_i32(1);
  204. ctx.builder.sub_i32();
  205. if ctx.cpu.asize_32() {
  206. gen_set_reg32(ctx, regs::ECX);
  207. }
  208. else {
  209. gen_set_reg16(ctx, regs::CX);
  210. }
  211. }
  212. pub fn gen_read_reg_xmm128_into_scratch(ctx: &mut JitContext, r: u32) {
  213. ctx.builder
  214. .const_i32(global_pointers::sse_scratch_register as i32);
  215. let dest = global_pointers::get_reg_xmm_offset(r);
  216. ctx.builder.const_i32(dest as i32);
  217. ctx.builder.load_aligned_i64(0);
  218. ctx.builder.store_aligned_i64(0);
  219. ctx.builder
  220. .const_i32(global_pointers::sse_scratch_register as i32 + 8);
  221. let dest = global_pointers::get_reg_xmm_offset(r) + 8;
  222. ctx.builder.const_i32(dest as i32);
  223. ctx.builder.load_aligned_i64(0);
  224. ctx.builder.store_aligned_i64(0);
  225. }
/// Push the selector value of segment register `r` onto the wasm stack.
pub fn gen_get_sreg(ctx: &mut JitContext, r: u32) {
    ctx.builder
        .load_fixed_u16(global_pointers::get_sreg_offset(r))
}
/// Push the SS segment base onto the wasm stack.
pub fn gen_get_ss_offset(ctx: &mut JitContext) {
    ctx.builder
        .load_fixed_i32(global_pointers::get_seg_offset(regs::SS));
}
/// Push the emulated `flags` register onto the wasm stack.
pub fn gen_get_flags(builder: &mut WasmBuilder) {
    builder.load_fixed_i32(global_pointers::flags as u32);
}
/// Push the `flags_changed` bookkeeping value (lazy flag computation) onto the wasm stack.
pub fn gen_get_flags_changed(builder: &mut WasmBuilder) {
    builder.load_fixed_i32(global_pointers::flags_changed as u32);
}
/// Push `last_result` (lazy flag computation state) onto the wasm stack.
pub fn gen_get_last_result(builder: &mut WasmBuilder) {
    builder.load_fixed_i32(global_pointers::last_result as u32);
}
/// Push `last_op_size` (lazy flag computation state) onto the wasm stack.
pub fn gen_get_last_op_size(builder: &mut WasmBuilder) {
    builder.load_fixed_i32(global_pointers::last_op_size as u32);
}
/// Push `last_op1` (lazy flag computation state) onto the wasm stack.
pub fn gen_get_last_op1(builder: &mut WasmBuilder) {
    builder.load_fixed_i32(global_pointers::last_op1 as u32);
}
/// Push the `page_fault` flag (u8, zero-extended) onto the wasm stack.
pub fn gen_get_page_fault(builder: &mut WasmBuilder) {
    builder.load_fixed_u8(global_pointers::page_fault as u32);
}
  252. /// sign-extend a byte value on the stack and leave it on the stack
  253. pub fn sign_extend_i8(builder: &mut WasmBuilder) {
  254. builder.const_i32(24);
  255. builder.shl_i32();
  256. builder.const_i32(24);
  257. builder.shr_s_i32();
  258. }
  259. /// sign-extend a two byte value on the stack and leave it on the stack
  260. pub fn sign_extend_i16(builder: &mut WasmBuilder) {
  261. builder.const_i32(16);
  262. builder.shl_i32();
  263. builder.const_i32(16);
  264. builder.shr_s_i32();
  265. }
/// Emit a call to the zero-argument imported function `name`.
pub fn gen_fn0_const(builder: &mut WasmBuilder, name: &str) { builder.call_fn0(name) }
  267. pub fn gen_fn1_const(builder: &mut WasmBuilder, name: &str, arg0: u32) {
  268. builder.const_i32(arg0 as i32);
  269. builder.call_fn1(name);
  270. }
  271. pub fn gen_fn2_const(builder: &mut WasmBuilder, name: &str, arg0: u32, arg1: u32) {
  272. builder.const_i32(arg0 as i32);
  273. builder.const_i32(arg1 as i32);
  274. builder.call_fn2(name);
  275. }
  276. pub fn gen_fn3_const(builder: &mut WasmBuilder, name: &str, arg0: u32, arg1: u32, arg2: u32) {
  277. builder.const_i32(arg0 as i32);
  278. builder.const_i32(arg1 as i32);
  279. builder.const_i32(arg2 as i32);
  280. builder.call_fn3(name);
  281. }
// helper functions for gen/generate_jit.js
/// Call imported `name` with the single value already on the wasm stack.
pub fn gen_modrm_fn0(builder: &mut WasmBuilder, name: &str) {
    // generates: fn( _ )
    builder.call_fn1(name);
}
  287. pub fn gen_modrm_fn1(builder: &mut WasmBuilder, name: &str, arg0: u32) {
  288. // generates: fn( _, arg0 )
  289. builder.const_i32(arg0 as i32);
  290. builder.call_fn2(name);
  291. }
  292. pub fn gen_modrm_fn2(builder: &mut WasmBuilder, name: &str, arg0: u32, arg1: u32) {
  293. // generates: fn( _, arg0, arg1 )
  294. builder.const_i32(arg0 as i32);
  295. builder.const_i32(arg1 as i32);
  296. builder.call_fn3(name);
  297. }
/// Emit code computing the effective address of `modrm_byte`'s memory operand,
/// leaving it on the wasm stack.
pub fn gen_modrm_resolve(ctx: &mut JitContext, modrm_byte: ModrmByte) {
    modrm::gen(ctx, modrm_byte)
}
/// Emit a register-to-register 8-bit move.
pub fn gen_set_reg8_r(ctx: &mut JitContext, dest: u32, src: u32) {
    // generates: reg8[r_dest] = reg8[r_src]
    gen_get_reg8(ctx, src);
    gen_set_reg8(ctx, dest);
}
/// Emit a register-to-register 16-bit move.
pub fn gen_set_reg16_r(ctx: &mut JitContext, dest: u32, src: u32) {
    // generates: reg16[r_dest] = reg16[r_src]
    gen_get_reg16(ctx, src);
    gen_set_reg16(ctx, dest);
}
/// Emit a register-to-register 32-bit move.
pub fn gen_set_reg32_r(ctx: &mut JitContext, dest: u32, src: u32) {
    // generates: reg32[r_dest] = reg32[r_src]
    gen_get_reg32(ctx, src);
    gen_set_reg32(ctx, dest);
}
/// Resolve the modrm memory operand, then emit a guarded 8-bit read of it;
/// the result is left on the wasm stack.
pub fn gen_modrm_resolve_safe_read8(ctx: &mut JitContext, modrm_byte: ModrmByte) {
    gen_modrm_resolve(ctx, modrm_byte);
    let address_local = ctx.builder.set_new_local();
    gen_safe_read8(ctx, &address_local);
    ctx.builder.free_local(address_local);
}
/// Resolve the modrm memory operand, then emit a guarded 16-bit read of it;
/// the result is left on the wasm stack.
pub fn gen_modrm_resolve_safe_read16(ctx: &mut JitContext, modrm_byte: ModrmByte) {
    gen_modrm_resolve(ctx, modrm_byte);
    let address_local = ctx.builder.set_new_local();
    gen_safe_read16(ctx, &address_local);
    ctx.builder.free_local(address_local);
}
/// Resolve the modrm memory operand, then emit a guarded 32-bit read of it;
/// the result is left on the wasm stack.
pub fn gen_modrm_resolve_safe_read32(ctx: &mut JitContext, modrm_byte: ModrmByte) {
    gen_modrm_resolve(ctx, modrm_byte);
    let address_local = ctx.builder.set_new_local();
    gen_safe_read32(ctx, &address_local);
    ctx.builder.free_local(address_local);
}
/// Resolve the modrm memory operand, then emit a guarded 64-bit read of it;
/// the result is left on the wasm stack as i64.
pub fn gen_modrm_resolve_safe_read64(ctx: &mut JitContext, modrm_byte: ModrmByte) {
    gen_modrm_resolve(ctx, modrm_byte);
    let address_local = ctx.builder.set_new_local();
    gen_safe_read64(ctx, &address_local);
    ctx.builder.free_local(address_local);
}
/// Resolve the modrm memory operand, then emit a guarded 128-bit read of it.
/// The value is written to memory at offset `where_to_write` rather than left
/// on the stack.
pub fn gen_modrm_resolve_safe_read128(
    ctx: &mut JitContext,
    modrm_byte: ModrmByte,
    where_to_write: u32,
) {
    gen_modrm_resolve(ctx, modrm_byte);
    let address_local = ctx.builder.set_new_local();
    gen_safe_read128(ctx, &address_local, where_to_write);
    ctx.builder.free_local(address_local);
}
/// Emit a guarded 8-bit virtual-memory read of the address in `address_local`.
pub fn gen_safe_read8(ctx: &mut JitContext, address_local: &WasmLocal) {
    gen_safe_read(ctx, BitSize::BYTE, address_local, None);
}
/// Emit a guarded 16-bit virtual-memory read of the address in `address_local`.
pub fn gen_safe_read16(ctx: &mut JitContext, address_local: &WasmLocal) {
    gen_safe_read(ctx, BitSize::WORD, address_local, None);
}
/// Emit a guarded 32-bit virtual-memory read of the address in `address_local`.
pub fn gen_safe_read32(ctx: &mut JitContext, address_local: &WasmLocal) {
    gen_safe_read(ctx, BitSize::DWORD, address_local, None);
}
  359. pub fn gen_safe_read64(ctx: &mut JitContext, address_local: &WasmLocal) {
  360. gen_safe_read(ctx, BitSize::QWORD, &address_local, None);
  361. }
  362. pub fn gen_safe_read128(ctx: &mut JitContext, address_local: &WasmLocal, where_to_write: u32) {
  363. gen_safe_read(ctx, BitSize::DQWORD, &address_local, Some(where_to_write));
  364. }
// only used internally for gen_safe_write
/// The value operand of a guarded write: one i32 local, one i64 local, or a
/// pair of i64 locals (low, high) for 128-bit stores.
enum GenSafeWriteValue<'a> {
    I32(&'a WasmLocal),
    I64(&'a WasmLocalI64),
    TwoI64s(&'a WasmLocalI64, &'a WasmLocalI64),
}
/// Owned value local used by the read-modify-write path; width picks the variant.
enum GenSafeReadWriteValue {
    I32(WasmLocal),
    I64(WasmLocalI64),
}
  375. #[derive(Copy, Clone, Eq, PartialEq)]
  376. pub enum BitSize {
  377. BYTE,
  378. WORD,
  379. DWORD,
  380. QWORD,
  381. DQWORD,
  382. }
  383. impl BitSize {
  384. pub fn bytes(&self) -> u32 {
  385. match self {
  386. BitSize::BYTE => 1,
  387. BitSize::WORD => 2,
  388. BitSize::DWORD => 4,
  389. BitSize::QWORD => 8,
  390. BitSize::DQWORD => 16,
  391. }
  392. }
  393. }
/// Emit a guarded 8-bit write of `value_local` to the address in `address_local`.
pub fn gen_safe_write8(ctx: &mut JitContext, address_local: &WasmLocal, value_local: &WasmLocal) {
    gen_safe_write(
        ctx,
        BitSize::BYTE,
        address_local,
        GenSafeWriteValue::I32(value_local),
    )
}
/// Emit a guarded 16-bit write of `value_local` to the address in `address_local`.
pub fn gen_safe_write16(ctx: &mut JitContext, address_local: &WasmLocal, value_local: &WasmLocal) {
    gen_safe_write(
        ctx,
        BitSize::WORD,
        address_local,
        GenSafeWriteValue::I32(value_local),
    )
}
/// Emit a guarded 32-bit write of `value_local` to the address in `address_local`.
pub fn gen_safe_write32(ctx: &mut JitContext, address_local: &WasmLocal, value_local: &WasmLocal) {
    gen_safe_write(
        ctx,
        BitSize::DWORD,
        address_local,
        GenSafeWriteValue::I32(value_local),
    )
}
/// Emit a guarded 64-bit write of the i64 `value_local` to the address in
/// `address_local`.
pub fn gen_safe_write64(
    ctx: &mut JitContext,
    address_local: &WasmLocal,
    value_local: &WasmLocalI64,
) {
    gen_safe_write(
        ctx,
        BitSize::QWORD,
        address_local,
        GenSafeWriteValue::I64(value_local),
    )
}
/// Emit a guarded 128-bit write of the (low, high) i64 pair to the address in
/// `address_local`.
pub fn gen_safe_write128(
    ctx: &mut JitContext,
    address_local: &WasmLocal,
    value_local_low: &WasmLocalI64,
    value_local_high: &WasmLocalI64,
) {
    gen_safe_write(
        ctx,
        BitSize::DQWORD,
        address_local,
        GenSafeWriteValue::TwoI64s(value_local_low, value_local_high),
    )
}
/// Emit a guarded virtual-memory read of `bits` width from the address in
/// `address_local`. The fast path translates through the TLB and reads
/// directly from guest memory; everything else falls back to the
/// `safe_read*_slow_jit` imports. For DQWORD, `where_to_write` is the memory
/// offset receiving the value; all other widths leave the result on the stack.
fn gen_safe_read(
    ctx: &mut JitContext,
    bits: BitSize,
    address_local: &WasmLocal,
    where_to_write: Option<u32>,
) {
    // Execute a virtual memory read. All slow paths (memory-mapped IO, tlb miss, page fault and
    // read across page boundary are handled in safe_read_jit_slow
    //    entry <- tlb_data[addr >> 12 << 2]
    //    if entry & MASK == TLB_VALID && (addr & 0xFFF) <= 0x1000 - bytes: goto fast
    //    entry <- safe_read_jit_slow(addr, instruction_pointer)
    //    if page_fault: goto exit-with-pagefault
    //    fast: mem[(entry & ~0xFFF) ^ addr]
    ctx.builder.block_void();
    // tlb_data[addr >> 12 << 2]
    ctx.builder.get_local(&address_local);
    ctx.builder.const_i32(12);
    ctx.builder.shr_u_i32();
    ctx.builder.const_i32(2);
    ctx.builder.shl_i32();
    ctx.builder
        .load_aligned_i32(unsafe { &tlb_data[0] as *const i32 as u32 });
    let entry_local = ctx.builder.tee_new_local();
    // mask away the entry bits that don't matter for a read at the current
    // privilege level, then compare against TLB_VALID
    ctx.builder.const_i32(
        (0xFFF
            & !TLB_READONLY
            & !TLB_GLOBAL
            & !TLB_HAS_CODE
            & !(if ctx.cpu.cpl3() { 0 } else { TLB_NO_USER })) as i32,
    );
    ctx.builder.and_i32();
    ctx.builder.const_i32(TLB_VALID as i32);
    ctx.builder.eq_i32();
    if bits != BitSize::BYTE {
        // also require that the access doesn't cross a page boundary
        ctx.builder.get_local(&address_local);
        ctx.builder.const_i32(0xFFF);
        ctx.builder.and_i32();
        ctx.builder.const_i32(0x1000 - bits.bytes() as i32);
        ctx.builder.le_i32();
        ctx.builder.and_i32();
    }
    // fast path: skip the slow call below
    ctx.builder.br_if(0);
    if cfg!(feature = "profiler") {
        ctx.builder.get_local(&address_local);
        ctx.builder.get_local(&entry_local);
        ctx.builder.call_fn2("report_safe_read_jit_slow");
    }
    // slow path: call the appropriate safe_read*_slow_jit import
    ctx.builder.get_local(&address_local);
    ctx.builder
        .const_i32(ctx.start_of_current_instruction as i32 & 0xFFF);
    match bits {
        BitSize::BYTE => {
            ctx.builder.call_fn2_ret("safe_read8_slow_jit");
        },
        BitSize::WORD => {
            ctx.builder.call_fn2_ret("safe_read16_slow_jit");
        },
        BitSize::DWORD => {
            ctx.builder.call_fn2_ret("safe_read32s_slow_jit");
        },
        BitSize::QWORD => {
            ctx.builder.call_fn2_ret("safe_read64s_slow_jit");
        },
        BitSize::DQWORD => {
            ctx.builder.call_fn2_ret("safe_read128s_slow_jit");
        },
    }
    // bit 0 of the returned entry signals a page fault
    ctx.builder.tee_local(&entry_local);
    ctx.builder.const_i32(1);
    ctx.builder.and_i32();
    if cfg!(feature = "profiler") {
        ctx.builder.if_void();
        gen_debug_track_jit_exit(ctx.builder, ctx.start_of_current_instruction);
        ctx.builder.block_end();
        ctx.builder.get_local(&entry_local);
        ctx.builder.const_i32(1);
        ctx.builder.and_i32();
    }
    // -2 for the exit-with-pagefault block, +1 for leaving the nested if from this function
    ctx.builder.br_if(ctx.current_brtable_depth - 2 + 1);
    ctx.builder.block_end();
    gen_profiler_stat_increment(ctx.builder, profiler::stat::SAFE_READ_FAST); // XXX: Both fast and slow
    // fast path: physical pointer = mem8 + ((entry & ~0xFFF) ^ addr)
    ctx.builder.get_local(&entry_local);
    ctx.builder.const_i32(!0xFFF);
    ctx.builder.and_i32();
    ctx.builder.get_local(&address_local);
    ctx.builder.xor_i32();
    // where_to_write is only used by dqword
    dbg_assert!((where_to_write != None) == (bits == BitSize::DQWORD));
    ctx.builder.const_i32(unsafe { mem8 } as i32);
    ctx.builder.add_i32();
    match bits {
        BitSize::BYTE => {
            ctx.builder.load_u8(0);
        },
        BitSize::WORD => {
            ctx.builder.load_unaligned_u16(0);
        },
        BitSize::DWORD => {
            ctx.builder.load_unaligned_i32(0);
        },
        BitSize::QWORD => {
            ctx.builder.load_unaligned_i64(0);
        },
        BitSize::DQWORD => {
            // copy both 64-bit halves to the fixed destination offset
            let where_to_write = where_to_write.unwrap();
            let virt_address_local = ctx.builder.set_new_local();
            ctx.builder.const_i32(0);
            ctx.builder.get_local(&virt_address_local);
            ctx.builder.load_unaligned_i64(0);
            ctx.builder.store_unaligned_i64(where_to_write);
            ctx.builder.const_i32(0);
            ctx.builder.get_local(&virt_address_local);
            ctx.builder.load_unaligned_i64(8);
            ctx.builder.store_unaligned_i64(where_to_write + 8);
            ctx.builder.free_local(virt_address_local);
        },
    }
    ctx.builder.free_local(entry_local);
}
/// Emit a guarded virtual-memory write of `bits` width to the address in
/// `address_local`, with the value supplied via `value_local`. The fast path
/// translates through the TLB and stores directly into guest memory; slow
/// cases fall back to the `safe_write*_slow_jit` imports.
fn gen_safe_write(
    ctx: &mut JitContext,
    bits: BitSize,
    address_local: &WasmLocal,
    value_local: GenSafeWriteValue,
) {
    // Execute a virtual memory write. All slow paths (memory-mapped IO, tlb miss, page fault,
    // write across page boundary and page containing jitted code are handled in safe_write_jit_slow
    //    entry <- tlb_data[addr >> 12 << 2]
    //    if entry & MASK == TLB_VALID && (addr & 0xFFF) <= 0x1000 - bytes: goto fast
    //    entry <- safe_write_jit_slow(addr, value, instruction_pointer)
    //    if page_fault: goto exit-with-pagefault
    //    fast: mem[(entry & ~0xFFF) ^ addr] <- value
    ctx.builder.block_void();
    // tlb_data[addr >> 12 << 2]
    ctx.builder.get_local(&address_local);
    ctx.builder.const_i32(12);
    ctx.builder.shr_u_i32();
    ctx.builder.const_i32(2);
    ctx.builder.shl_i32();
    ctx.builder
        .load_aligned_i32(unsafe { &tlb_data[0] as *const i32 as u32 });
    let entry_local = ctx.builder.tee_new_local();
    // unlike gen_safe_read, TLB_READONLY and TLB_HAS_CODE stay in the mask:
    // either bit forces the slow path for writes
    ctx.builder
        .const_i32((0xFFF & !TLB_GLOBAL & !(if ctx.cpu.cpl3() { 0 } else { TLB_NO_USER })) as i32);
    ctx.builder.and_i32();
    ctx.builder.const_i32(TLB_VALID as i32);
    ctx.builder.eq_i32();
    if bits != BitSize::BYTE {
        // also require that the access doesn't cross a page boundary
        ctx.builder.get_local(&address_local);
        ctx.builder.const_i32(0xFFF);
        ctx.builder.and_i32();
        ctx.builder.const_i32(0x1000 - bits.bytes() as i32);
        ctx.builder.le_i32();
        ctx.builder.and_i32();
    }
    // fast path: skip the slow call below
    ctx.builder.br_if(0);
    if cfg!(feature = "profiler") {
        ctx.builder.get_local(&address_local);
        ctx.builder.get_local(&entry_local);
        ctx.builder.call_fn2("report_safe_write_jit_slow");
    }
    // slow path: safe_write*_slow_jit(addr, value…, instruction offset)
    ctx.builder.get_local(&address_local);
    match value_local {
        GenSafeWriteValue::I32(local) => ctx.builder.get_local(local),
        GenSafeWriteValue::I64(local) => ctx.builder.get_local_i64(local),
        GenSafeWriteValue::TwoI64s(local1, local2) => {
            ctx.builder.get_local_i64(local1);
            ctx.builder.get_local_i64(local2)
        },
    }
    ctx.builder
        .const_i32(ctx.start_of_current_instruction as i32 & 0xFFF);
    match bits {
        BitSize::BYTE => {
            ctx.builder.call_fn3_ret("safe_write8_slow_jit");
        },
        BitSize::WORD => {
            ctx.builder.call_fn3_ret("safe_write16_slow_jit");
        },
        BitSize::DWORD => {
            ctx.builder.call_fn3_ret("safe_write32_slow_jit");
        },
        BitSize::QWORD => {
            ctx.builder
                .call_fn3_i32_i64_i32_ret("safe_write64_slow_jit");
        },
        BitSize::DQWORD => {
            ctx.builder
                .call_fn4_i32_i64_i64_i32_ret("safe_write128_slow_jit");
        },
    }
    // bit 0 of the returned entry signals a page fault
    ctx.builder.tee_local(&entry_local);
    ctx.builder.const_i32(1);
    ctx.builder.and_i32();
    if cfg!(feature = "profiler") {
        ctx.builder.if_void();
        gen_debug_track_jit_exit(ctx.builder, ctx.start_of_current_instruction);
        ctx.builder.block_end();
        ctx.builder.get_local(&entry_local);
        ctx.builder.const_i32(1);
        ctx.builder.and_i32();
    }
    // -2 for the exit-with-pagefault block, +1 for leaving the nested if from this function
    ctx.builder.br_if(ctx.current_brtable_depth - 2 + 1);
    ctx.builder.block_end();
    gen_profiler_stat_increment(ctx.builder, profiler::stat::SAFE_WRITE_FAST); // XXX: Both fast and slow
    // fast path: physical pointer = mem8 + ((entry & ~0xFFF) ^ addr)
    ctx.builder.get_local(&entry_local);
    ctx.builder.const_i32(!0xFFF);
    ctx.builder.and_i32();
    ctx.builder.get_local(&address_local);
    ctx.builder.xor_i32();
    ctx.builder.const_i32(unsafe { mem8 } as i32);
    ctx.builder.add_i32();
    match value_local {
        GenSafeWriteValue::I32(local) => ctx.builder.get_local(local),
        GenSafeWriteValue::I64(local) => ctx.builder.get_local_i64(local),
        GenSafeWriteValue::TwoI64s(local1, local2) => {
            // 128-bit store is done here in two halves; the final `match bits`
            // below intentionally does nothing for DQWORD
            assert!(bits == BitSize::DQWORD);
            let virt_address_local = ctx.builder.tee_new_local();
            ctx.builder.get_local_i64(local1);
            ctx.builder.store_unaligned_i64(0);
            ctx.builder.get_local(&virt_address_local);
            ctx.builder.get_local_i64(local2);
            ctx.builder.store_unaligned_i64(8);
            ctx.builder.free_local(virt_address_local);
        },
    }
    match bits {
        BitSize::BYTE => {
            ctx.builder.store_u8(0);
        },
        BitSize::WORD => {
            ctx.builder.store_unaligned_u16(0);
        },
        BitSize::DWORD => {
            ctx.builder.store_unaligned_i32(0);
        },
        BitSize::QWORD => {
            ctx.builder.store_unaligned_i64(0);
        },
        BitSize::DQWORD => {}, // handled above
    }
    ctx.builder.free_local(entry_local);
}
/// Emit a virtual-memory read-modify-write of `bits` size at the address held in
/// `address_local`: the value is loaded, transformed by `f` (which operates on the
/// wasm value stack), and stored back to the same physical address.
///
/// `f` receives the loaded value on the wasm stack and must leave the new value
/// on the stack (i32 for BYTE/WORD/DWORD, i64 for QWORD; DQWORD is not supported).
pub fn gen_safe_read_write(
    ctx: &mut JitContext,
    bits: BitSize,
    address_local: &WasmLocal,
    f: &dyn Fn(&mut JitContext),
) {
    // Execute a virtual memory read+write. All slow paths (memory-mapped IO, tlb miss, page fault,
    // write across page boundary and page containing jitted code are handled in
    // safe_read_write_jit_slow
    // entry <- tlb_data[addr >> 12 << 2]
    // can_use_fast_path <- entry & MASK == TLB_VALID && (addr & 0xFFF) <= 0x1000 - bytes
    // if can_use_fast_path: goto fast
    // entry <- safe_read_write_jit_slow(addr, instruction_pointer)
    // if page_fault: goto exit-with-pagefault
    // fast: value <- f(mem[(entry & ~0xFFF) ^ addr])
    // if !can_use_fast_path { safe_write_jit_slow(addr, value, instruction_pointer) }
    // mem[(entry & ~0xFFF) ^ addr] <- value
    ctx.builder.block_void();
    // index the software TLB by page number: tlb_data[(addr >> 12) << 2]
    ctx.builder.get_local(address_local);
    ctx.builder.const_i32(12);
    ctx.builder.shr_u_i32();
    ctx.builder.const_i32(2);
    ctx.builder.shl_i32();
    ctx.builder
        .load_aligned_i32(unsafe { &tlb_data[0] as *const i32 as u32 });
    let entry_local = ctx.builder.tee_new_local();
    // keep only the flag bits that decide fast-path eligibility; TLB_GLOBAL is
    // always ignored, TLB_NO_USER only matters when running at cpl3
    ctx.builder
        .const_i32((0xFFF & !TLB_GLOBAL & !(if ctx.cpu.cpl3() { 0 } else { TLB_NO_USER })) as i32);
    ctx.builder.and_i32();
    ctx.builder.const_i32(TLB_VALID as i32);
    ctx.builder.eq_i32();
    if bits != BitSize::BYTE {
        // multi-byte access must additionally stay within a single 4K page
        ctx.builder.get_local(&address_local);
        ctx.builder.const_i32(0xFFF);
        ctx.builder.and_i32();
        ctx.builder.const_i32(0x1000 - bits.bytes() as i32);
        ctx.builder.le_i32();
        ctx.builder.and_i32();
    }
    let can_use_fast_path_local = ctx.builder.tee_new_local();
    // fast path: jump over the slow call directly to the load below
    ctx.builder.br_if(0);
    if cfg!(feature = "profiler") {
        ctx.builder.get_local(&address_local);
        ctx.builder.get_local(&entry_local);
        ctx.builder.call_fn2("report_safe_read_write_jit_slow");
    }
    // slow path: let the runtime helper resolve the access; it returns a new
    // entry value whose low bit signals a page fault
    ctx.builder.get_local(&address_local);
    ctx.builder
        .const_i32(ctx.start_of_current_instruction as i32 & 0xFFF);
    match bits {
        BitSize::BYTE => {
            ctx.builder.call_fn2_ret("safe_read_write8_slow_jit");
        },
        BitSize::WORD => {
            ctx.builder.call_fn2_ret("safe_read_write16_slow_jit");
        },
        BitSize::DWORD => {
            ctx.builder.call_fn2_ret("safe_read_write32s_slow_jit");
        },
        BitSize::QWORD => {
            ctx.builder.call_fn2_ret("safe_read_write64_slow_jit");
        },
        BitSize::DQWORD => dbg_assert!(false),
    }
    ctx.builder.tee_local(&entry_local);
    ctx.builder.const_i32(1);
    ctx.builder.and_i32();
    if cfg!(feature = "profiler") {
        ctx.builder.if_void();
        gen_debug_track_jit_exit(ctx.builder, ctx.start_of_current_instruction);
        ctx.builder.block_end();
        // the if above consumed the flag; recompute it for the br_if below
        ctx.builder.get_local(&entry_local);
        ctx.builder.const_i32(1);
        ctx.builder.and_i32();
    }
    // -2 for the exit-with-pagefault block, +1 for leaving the two nested ifs from this function
    ctx.builder.br_if(ctx.current_brtable_depth - 2 + 1);
    ctx.builder.block_end();
    gen_profiler_stat_increment(ctx.builder, profiler::stat::SAFE_READ_WRITE_FAST); // XXX: Also slow
    // physical address: (entry & ~0xFFF) ^ addr, offset into the guest memory at mem8
    ctx.builder.get_local(&entry_local);
    ctx.builder.const_i32(!0xFFF);
    ctx.builder.and_i32();
    ctx.builder.get_local(&address_local);
    ctx.builder.xor_i32();
    ctx.builder.const_i32(unsafe { mem8 } as i32);
    ctx.builder.add_i32();
    ctx.builder.free_local(entry_local);
    let phys_addr_local = ctx.builder.tee_new_local();
    match bits {
        BitSize::BYTE => {
            ctx.builder.load_u8(0);
        },
        BitSize::WORD => {
            ctx.builder.load_unaligned_u16(0);
        },
        BitSize::DWORD => {
            ctx.builder.load_unaligned_i32(0);
        },
        BitSize::QWORD => {
            ctx.builder.load_unaligned_i64(0);
        },
        BitSize::DQWORD => assert!(false), // not used
    }
    // value is now on stack
    f(ctx);
    // TODO: Could get rid of this local by returning one from f
    let value_local = if bits == BitSize::QWORD {
        GenSafeReadWriteValue::I64(ctx.builder.set_new_local_i64())
    }
    else {
        GenSafeReadWriteValue::I32(ctx.builder.set_new_local())
    };
    // if the fast path was not taken, the write must also go through the slow
    // helper (e.g. memory-mapped IO or a page containing jitted code)
    ctx.builder.get_local(&can_use_fast_path_local);
    ctx.builder.eqz_i32();
    ctx.builder.if_void();
    {
        ctx.builder.get_local(&address_local);
        match &value_local {
            GenSafeReadWriteValue::I32(l) => ctx.builder.get_local(l),
            GenSafeReadWriteValue::I64(l) => ctx.builder.get_local_i64(l),
        }
        // NOTE(review): the read-write slow call above masks the instruction
        // pointer with 0xFFF, this one passes it unmasked — confirm intentional
        ctx.builder
            .const_i32(ctx.start_of_current_instruction as i32);
        match bits {
            BitSize::BYTE => {
                ctx.builder.call_fn3_ret("safe_write8_slow_jit");
            },
            BitSize::WORD => {
                ctx.builder.call_fn3_ret("safe_write16_slow_jit");
            },
            BitSize::DWORD => {
                ctx.builder.call_fn3_ret("safe_write32_slow_jit");
            },
            BitSize::QWORD => {
                ctx.builder
                    .call_fn3_i32_i64_i32_ret("safe_write64_slow_jit");
            },
            BitSize::DQWORD => dbg_assert!(false),
        }
        ctx.builder.const_i32(1);
        ctx.builder.and_i32();
        ctx.builder.if_void();
        {
            // A page fault here should be impossible: the read of the same
            // address above already went through the fault checks ("handled above")
            if cfg!(debug_assertions) {
                ctx.builder.const_i32(match bits {
                    BitSize::BYTE => 8,
                    BitSize::WORD => 16,
                    BitSize::DWORD => 32,
                    BitSize::QWORD => 64,
                    _ => {
                        dbg_assert!(false);
                        0
                    },
                });
                ctx.builder.get_local(&address_local);
                ctx.builder.call_fn2("bug_gen_safe_read_write_page_fault");
            }
            else {
                ctx.builder.unreachable();
            }
        }
        ctx.builder.block_end();
    }
    ctx.builder.block_end();
    // commit the new value to the physical address computed earlier
    ctx.builder.get_local(&phys_addr_local);
    match &value_local {
        GenSafeReadWriteValue::I32(l) => ctx.builder.get_local(l),
        GenSafeReadWriteValue::I64(l) => ctx.builder.get_local_i64(l),
    }
    match bits {
        BitSize::BYTE => {
            ctx.builder.store_u8(0);
        },
        BitSize::WORD => {
            ctx.builder.store_unaligned_u16(0);
        },
        BitSize::DWORD => {
            ctx.builder.store_unaligned_i32(0);
        },
        BitSize::QWORD => {
            ctx.builder.store_unaligned_i64(0);
        },
        BitSize::DQWORD => dbg_assert!(false),
    }
    match value_local {
        GenSafeReadWriteValue::I32(l) => ctx.builder.free_local(l),
        GenSafeReadWriteValue::I64(l) => ctx.builder.free_local_i64(l),
    }
    ctx.builder.free_local(can_use_fast_path_local);
    ctx.builder.free_local(phys_addr_local);
}
/// Debug-build helper called from code generated by `gen_safe_read_write` when the
/// slow-path write page-faults even though the preceding read of the same address
/// succeeded — an invariant violation. Logs the access size and address, then asserts.
#[cfg(debug_assertions)]
#[no_mangle]
pub fn bug_gen_safe_read_write_page_fault(bits: i32, addr: u32) {
    dbg_log!("bug: gen_safe_read_write_page_fault {} {:x}", bits, addr);
    dbg_assert!(false);
}
/// Emit a 16-bit relative jump: the instruction pointer is adjusted by `rel16`,
/// wrapping modulo 0x10000 relative to the CS segment base.
pub fn gen_jmp_rel16(builder: &mut WasmBuilder, rel16: u16) {
    let cs_offset_addr = global_pointers::get_seg_offset(regs::CS);
    builder.load_fixed_i32(cs_offset_addr);
    let local = builder.set_new_local();
    // generate:
    // *instruction_pointer = cs_offset + ((*instruction_pointer - cs_offset + rel16) & 0xFFFF);
    {
        builder.const_i32(global_pointers::instruction_pointer as i32);
        gen_get_eip(builder);
        builder.get_local(&local);
        builder.sub_i32();
        builder.const_i32(rel16 as i32);
        builder.add_i32();
        // wrap within the 64K segment before re-adding the base
        builder.const_i32(0xFFFF);
        builder.and_i32();
        builder.get_local(&local);
        builder.add_i32();
        builder.store_aligned_i32(0);
    }
    builder.free_local(local);
}
/// Emit a 16-bit pop with a 16-bit stack segment: reads the value at SS:SP,
/// increments SP by 2, and leaves the popped value on the wasm stack.
pub fn gen_pop16_ss16(ctx: &mut JitContext) {
    // sp = segment_offsets[SS] + reg16[SP] (or just reg16[SP] if has_flat_segmentation)
    gen_get_reg16(ctx, regs::SP);
    if !ctx.cpu.has_flat_segmentation() {
        gen_get_ss_offset(ctx);
        ctx.builder.add_i32();
    }
    // result = safe_read16(sp)
    let address_local = ctx.builder.set_new_local();
    gen_safe_read16(ctx, &address_local);
    ctx.builder.free_local(address_local);
    // reg16[SP] += 2;
    gen_get_reg16(ctx, regs::SP);
    ctx.builder.const_i32(2);
    ctx.builder.add_i32();
    gen_set_reg16(ctx, regs::SP);
    // return value is already on stack
}
/// Emit a 16-bit pop with a 32-bit stack segment: reads the value at SS:ESP,
/// increments ESP by 2, and leaves the popped value on the wasm stack.
pub fn gen_pop16_ss32(ctx: &mut JitContext) {
    // esp = segment_offsets[SS] + reg32[ESP] (or just reg32[ESP] if has_flat_segmentation)
    gen_get_reg32(ctx, regs::ESP);
    if !ctx.cpu.has_flat_segmentation() {
        gen_get_ss_offset(ctx);
        ctx.builder.add_i32();
    }
    // result = safe_read16(esp)
    let address_local = ctx.builder.set_new_local();
    gen_safe_read16(ctx, &address_local);
    ctx.builder.free_local(address_local);
    // reg32[ESP] += 2;
    gen_get_reg32(ctx, regs::ESP);
    ctx.builder.const_i32(2);
    ctx.builder.add_i32();
    gen_set_reg32(ctx, regs::ESP);
    // return value is already on stack
}
  941. pub fn gen_pop16(ctx: &mut JitContext) {
  942. if ctx.cpu.ssize_32() {
  943. gen_pop16_ss32(ctx);
  944. }
  945. else {
  946. gen_pop16_ss16(ctx);
  947. }
  948. }
/// Emit a 32-bit pop with a 16-bit stack segment: reads the value at SS:SP,
/// increments SP by 4, and leaves the popped value on the wasm stack.
pub fn gen_pop32s_ss16(ctx: &mut JitContext) {
    // sp = reg16[SP]
    gen_get_reg16(ctx, regs::SP);
    // result = safe_read32s(segment_offsets[SS] + sp) (or just sp if has_flat_segmentation)
    if !ctx.cpu.has_flat_segmentation() {
        gen_get_ss_offset(ctx);
        ctx.builder.add_i32();
    }
    let address_local = ctx.builder.set_new_local();
    gen_safe_read32(ctx, &address_local);
    ctx.builder.free_local(address_local);
    // reg16[SP] = sp + 4;
    gen_get_reg16(ctx, regs::SP);
    ctx.builder.const_i32(4);
    ctx.builder.add_i32();
    gen_set_reg16(ctx, regs::SP);
    // return value is already on stack
}
/// Emit a 32-bit pop with a 32-bit stack segment: reads the value at SS:ESP,
/// increments ESP by 4, and leaves the popped value on the wasm stack.
pub fn gen_pop32s_ss32(ctx: &mut JitContext) {
    if !ctx.cpu.has_flat_segmentation() {
        gen_get_reg32(ctx, regs::ESP);
        gen_get_ss_offset(ctx);
        ctx.builder.add_i32();
        let address_local = ctx.builder.set_new_local();
        gen_safe_read32(ctx, &address_local);
        ctx.builder.free_local(address_local);
    }
    else {
        // flat segmentation: ESP's register local already holds the linear
        // address, so read through it directly without an extra local
        let reg = ctx.register_locals[regs::ESP as usize].unsafe_clone();
        gen_safe_read32(ctx, &reg);
    }
    gen_get_reg32(ctx, regs::ESP);
    ctx.builder.const_i32(4);
    ctx.builder.add_i32();
    gen_set_reg32(ctx, regs::ESP);
    // return value is already on stack
}
  986. pub fn gen_pop32s(ctx: &mut JitContext) {
  987. if ctx.cpu.ssize_32() {
  988. gen_pop32s_ss32(ctx);
  989. }
  990. else {
  991. gen_pop32s_ss16(ctx);
  992. }
  993. }
  994. pub fn gen_adjust_stack_reg(ctx: &mut JitContext, offset: u32) {
  995. if ctx.cpu.ssize_32() {
  996. gen_get_reg32(ctx, regs::ESP);
  997. ctx.builder.const_i32(offset as i32);
  998. ctx.builder.add_i32();
  999. gen_set_reg32(ctx, regs::ESP);
  1000. }
  1001. else {
  1002. gen_get_reg16(ctx, regs::SP);
  1003. ctx.builder.const_i32(offset as i32);
  1004. ctx.builder.add_i32();
  1005. gen_set_reg16(ctx, regs::SP);
  1006. }
  1007. }
/// Emit the LEAVE instruction: restore [E]BP from the stack slot it points to,
/// then set [E]SP just past that slot. `os32` selects the operand size (32/16 bit
/// frame pointer read), while the stack-pointer width follows ssize_32.
pub fn gen_leave(ctx: &mut JitContext, os32: bool) {
    // [e]bp = safe_read{16,32}([e]bp)
    if ctx.cpu.ssize_32() {
        gen_get_reg32(ctx, regs::EBP);
    }
    else {
        gen_get_reg16(ctx, regs::BP);
    }
    // remember the pre-read [e]bp value; it becomes the basis for the new [e]sp
    let old_vbp = ctx.builder.tee_new_local();
    if !ctx.cpu.has_flat_segmentation() {
        gen_get_ss_offset(ctx);
        ctx.builder.add_i32();
    }
    if os32 {
        let address_local = ctx.builder.set_new_local();
        gen_safe_read32(ctx, &address_local);
        ctx.builder.free_local(address_local);
        gen_set_reg32(ctx, regs::EBP);
    }
    else {
        let address_local = ctx.builder.set_new_local();
        gen_safe_read16(ctx, &address_local);
        ctx.builder.free_local(address_local);
        gen_set_reg16(ctx, regs::BP);
    }
    // [e]sp = [e]bp + (os32 ? 4 : 2)
    if ctx.cpu.ssize_32() {
        ctx.builder.get_local(&old_vbp);
        ctx.builder.const_i32(if os32 { 4 } else { 2 });
        ctx.builder.add_i32();
        gen_set_reg32(ctx, regs::ESP);
    }
    else {
        ctx.builder.get_local(&old_vbp);
        ctx.builder.const_i32(if os32 { 4 } else { 2 });
        ctx.builder.add_i32();
        gen_set_reg16(ctx, regs::SP);
    }
    ctx.builder.free_local(old_vbp);
}
/// Emit a CR0.EM/CR0.TS check: when either flag is set, the generated code spills
/// registers back to memory, calls task_switch_test_jit and returns to the
/// interpreter loop.
pub fn gen_task_switch_test(ctx: &mut JitContext) {
    // generate if(cr[0] & (CR0_EM | CR0_TS)) { task_switch_test_void(); return; }
    let cr0_offset = global_pointers::get_creg_offset(0);
    // Parses as (CR0_EM | CR0_TS) <= 0xFF — `|` binds tighter than `<=` in Rust —
    // guaranteeing both flags fit into the single byte loaded below
    dbg_assert!(regs::CR0_EM | regs::CR0_TS <= 0xFF);
    ctx.builder.load_fixed_u8(cr0_offset);
    ctx.builder.const_i32((regs::CR0_EM | regs::CR0_TS) as i32);
    ctx.builder.and_i32();
    ctx.builder.if_void();
    gen_debug_track_jit_exit(ctx.builder, ctx.start_of_current_instruction);
    gen_set_previous_eip_offset_from_eip_with_low_bits(
        ctx.builder,
        ctx.start_of_current_instruction as i32 & 0xFFF,
    );
    gen_move_registers_from_locals_to_memory(ctx);
    gen_fn0_const(ctx.builder, "task_switch_test_jit");
    ctx.builder.return_();
    ctx.builder.block_end();
}
/// Like `gen_task_switch_test`, but calls the MMX variant of the runtime helper
/// (task_switch_test_mmx_jit) when CR0.EM or CR0.TS is set.
pub fn gen_task_switch_test_mmx(ctx: &mut JitContext) {
    // generate if(cr[0] & (CR0_EM | CR0_TS)) { task_switch_test_mmx_void(); return; }
    let cr0_offset = global_pointers::get_creg_offset(0);
    // Parses as (CR0_EM | CR0_TS) <= 0xFF (`|` binds tighter than `<=` in Rust)
    dbg_assert!(regs::CR0_EM | regs::CR0_TS <= 0xFF);
    ctx.builder.load_fixed_u8(cr0_offset);
    ctx.builder.const_i32((regs::CR0_EM | regs::CR0_TS) as i32);
    ctx.builder.and_i32();
    ctx.builder.if_void();
    gen_debug_track_jit_exit(ctx.builder, ctx.start_of_current_instruction);
    gen_set_previous_eip_offset_from_eip_with_low_bits(
        ctx.builder,
        ctx.start_of_current_instruction as i32 & 0xFFF,
    );
    gen_move_registers_from_locals_to_memory(ctx);
    gen_fn0_const(ctx.builder, "task_switch_test_mmx_jit");
    ctx.builder.return_();
    ctx.builder.block_end();
}
/// Emit a 16-bit push of `value_local`: decrement SP/ESP by 2, write the value
/// to the new stack top, then commit the decremented pointer to the register.
pub fn gen_push16(ctx: &mut JitContext, value_local: &WasmLocal) {
    if ctx.cpu.ssize_32() {
        gen_get_reg32(ctx, regs::ESP);
    }
    else {
        gen_get_reg16(ctx, regs::SP);
    };
    ctx.builder.const_i32(2);
    ctx.builder.sub_i32();
    let reg_updated_local = if !ctx.cpu.ssize_32() || !ctx.cpu.has_flat_segmentation() {
        // general path: keep the updated register value, then build the linear
        // address for the write (16-bit wrap and/or SS base addition)
        let reg_updated_local = ctx.builder.tee_new_local();
        if !ctx.cpu.ssize_32() {
            ctx.builder.const_i32(0xFFFF);
            ctx.builder.and_i32();
        }
        if !ctx.cpu.has_flat_segmentation() {
            gen_get_ss_offset(ctx);
            ctx.builder.add_i32();
        }
        let sp_local = ctx.builder.set_new_local();
        gen_safe_write16(ctx, &sp_local, &value_local);
        ctx.builder.free_local(sp_local);
        // reload the updated register value for the store into [E]SP below
        ctx.builder.get_local(&reg_updated_local);
        reg_updated_local
    }
    else {
        // short path: The address written to is equal to ESP/SP minus two
        let reg_updated_local = ctx.builder.tee_new_local();
        gen_safe_write16(ctx, &reg_updated_local, &value_local);
        reg_updated_local
    };
    if ctx.cpu.ssize_32() {
        gen_set_reg32(ctx, regs::ESP);
    }
    else {
        gen_set_reg16(ctx, regs::SP);
    };
    ctx.builder.free_local(reg_updated_local);
}
/// Emit a 32-bit push of `value_local`: decrement SP/ESP by 4, write the value
/// to the new stack top, then commit the decremented pointer to the register.
pub fn gen_push32(ctx: &mut JitContext, value_local: &WasmLocal) {
    if ctx.cpu.ssize_32() {
        gen_get_reg32(ctx, regs::ESP);
    }
    else {
        gen_get_reg16(ctx, regs::SP);
    };
    ctx.builder.const_i32(4);
    ctx.builder.sub_i32();
    let new_sp_local = if !ctx.cpu.ssize_32() || !ctx.cpu.has_flat_segmentation() {
        // general path: keep the updated register value, then build the linear
        // address for the write (16-bit wrap and/or SS base addition)
        let new_sp_local = ctx.builder.tee_new_local();
        if !ctx.cpu.ssize_32() {
            ctx.builder.const_i32(0xFFFF);
            ctx.builder.and_i32();
        }
        if !ctx.cpu.has_flat_segmentation() {
            gen_get_ss_offset(ctx);
            ctx.builder.add_i32();
        }
        let sp_local = ctx.builder.set_new_local();
        gen_safe_write32(ctx, &sp_local, &value_local);
        ctx.builder.free_local(sp_local);
        // reload the updated register value for the store into [E]SP below
        ctx.builder.get_local(&new_sp_local);
        new_sp_local
    }
    else {
        // short path: The address written to is equal to ESP/SP minus four
        let new_sp_local = ctx.builder.tee_new_local();
        gen_safe_write32(ctx, &new_sp_local, &value_local);
        new_sp_local
    };
    if ctx.cpu.ssize_32() {
        gen_set_reg32(ctx, regs::ESP);
    }
    else {
        gen_set_reg16(ctx, regs::SP);
    };
    ctx.builder.free_local(new_sp_local);
}
/// Emit code leaving eip relative to the CS segment base (i.e. the eip value as
/// the guest program sees it) on the wasm stack.
pub fn gen_get_real_eip(ctx: &mut JitContext) {
    gen_get_eip(ctx.builder);
    ctx.builder
        .load_fixed_i32(global_pointers::get_seg_offset(regs::CS));
    ctx.builder.sub_i32();
}
/// Emit a store of `source` into the lazy-flags `last_op1` global.
pub fn gen_set_last_op1(builder: &mut WasmBuilder, source: &WasmLocal) {
    builder.const_i32(global_pointers::last_op1 as i32);
    builder.get_local(&source);
    builder.store_aligned_i32(0);
}
/// Emit a store of `source` into the lazy-flags `last_result` global.
pub fn gen_set_last_result(builder: &mut WasmBuilder, source: &WasmLocal) {
    builder.const_i32(global_pointers::last_result as i32);
    builder.get_local(&source);
    builder.store_aligned_i32(0);
}
/// Emit a store of the constant `value` into the lazy-flags `last_op_size` global.
pub fn gen_set_last_op_size(builder: &mut WasmBuilder, value: i32) {
    builder.const_i32(global_pointers::last_op_size as i32);
    builder.const_i32(value);
    builder.store_aligned_i32(0);
}
/// Emit a store of the constant `value` into the `flags_changed` global, which
/// records which flags must be lazily recomputed from last_op1/last_result.
pub fn gen_set_flags_changed(builder: &mut WasmBuilder, value: i32) {
    builder.const_i32(global_pointers::flags_changed as i32);
    builder.const_i32(value);
    builder.store_aligned_i32(0);
}
/// Emit `flags_changed &= !bits_to_clear`, marking those flags as no longer
/// lazily computed.
pub fn gen_clear_flags_changed_bits(builder: &mut WasmBuilder, bits_to_clear: i32) {
    builder.const_i32(global_pointers::flags_changed as i32);
    gen_get_flags_changed(builder);
    builder.const_i32(!bits_to_clear);
    builder.and_i32();
    builder.store_aligned_i32(0);
}
/// Emit `flags |= bits_to_set` on the flags global.
pub fn gen_set_flags_bits(builder: &mut WasmBuilder, bits_to_set: i32) {
    builder.const_i32(global_pointers::flags as i32);
    gen_get_flags(builder);
    builder.const_i32(bits_to_set);
    builder.or_i32();
    builder.store_aligned_i32(0);
}
/// Emit `flags &= !bits_to_clear` on the flags global.
pub fn gen_clear_flags_bits(builder: &mut WasmBuilder, bits_to_clear: i32) {
    builder.const_i32(global_pointers::flags as i32);
    gen_get_flags(builder);
    builder.const_i32(!bits_to_clear);
    builder.and_i32();
    builder.store_aligned_i32(0);
}
/// Emit code leaving the zero flag on the wasm stack (non-zero iff ZF set).
/// Lazy-flags scheme: if FLAG_ZERO is pending in flags_changed, ZF is derived
/// from last_result; otherwise it is read directly from the flags register.
pub fn gen_getzf(builder: &mut WasmBuilder) {
    gen_get_flags_changed(builder);
    builder.const_i32(FLAG_ZERO);
    builder.and_i32();
    builder.if_i32();
    // computes ((~r & (r - 1)) >> last_op_size) & 1 — a bit trick whose result
    // bit at position last_op_size is 1 iff the operand-sized part of r is zero
    gen_get_last_result(builder);
    let last_result = builder.tee_new_local();
    builder.const_i32(-1);
    builder.xor_i32();
    builder.get_local(&last_result);
    builder.free_local(last_result);
    builder.const_i32(1);
    builder.sub_i32();
    builder.and_i32();
    gen_get_last_op_size(builder);
    builder.shr_u_i32();
    builder.const_i32(1);
    builder.and_i32();
    builder.else_();
    gen_get_flags(builder);
    builder.const_i32(FLAG_ZERO);
    builder.and_i32();
    builder.block_end();
}
/// Emit code leaving the carry flag on the wasm stack (non-zero iff CF set).
/// Lazy-flags scheme: if FLAG_CARRY is pending in flags_changed, CF is derived
/// from last_op1/last_result; otherwise it is read from the flags register.
pub fn gen_getcf(builder: &mut WasmBuilder) {
    gen_get_flags_changed(builder);
    let flags_changed = builder.tee_new_local();
    builder.const_i32(FLAG_CARRY);
    builder.and_i32();
    builder.if_i32();
    // sub_mask = flags_changed >> 31 (arithmetic): 0 or all-ones depending on the
    // sign bit of flags_changed (NOTE(review): presumably set for subtraction-like
    // ops, so the xor below conditionally inverts both comparison operands)
    builder.get_local(&flags_changed);
    builder.const_i32(31);
    builder.shr_s_i32();
    builder.free_local(flags_changed);
    let sub_mask = builder.set_new_local();
    // CF = (last_result ^ sub_mask) <u (last_op1 ^ sub_mask)
    gen_get_last_result(builder);
    builder.get_local(&sub_mask);
    builder.xor_i32();
    gen_get_last_op1(builder);
    builder.get_local(&sub_mask);
    builder.xor_i32();
    builder.ltu_i32();
    builder.else_();
    gen_get_flags(builder);
    builder.const_i32(FLAG_CARRY);
    builder.and_i32();
    builder.block_end();
    builder.free_local(sub_mask);
}
/// Emit code leaving the sign flag on the wasm stack (non-zero iff SF set).
/// Lazy-flags scheme: if FLAG_SIGN is pending, SF is the top bit of the
/// operand-sized last_result ((last_result >> last_op_size) & 1); otherwise it
/// is read from the flags register.
pub fn gen_getsf(builder: &mut WasmBuilder) {
    gen_get_flags_changed(builder);
    builder.const_i32(FLAG_SIGN);
    builder.and_i32();
    builder.if_i32();
    {
        gen_get_last_result(builder);
        gen_get_last_op_size(builder);
        builder.shr_u_i32();
        builder.const_i32(1);
        builder.and_i32();
    }
    builder.else_();
    {
        gen_get_flags(builder);
        builder.const_i32(FLAG_SIGN);
        builder.and_i32();
    }
    builder.block_end();
}
/// Emit code leaving the overflow flag on the wasm stack (non-zero iff OF set).
/// Lazy-flags scheme: if FLAG_OVERFLOW is pending, OF is reconstructed from
/// last_op1, last_result and the sign bit of flags_changed; otherwise it is
/// read from the flags register.
pub fn gen_getof(builder: &mut WasmBuilder) {
    gen_get_flags_changed(builder);
    let flags_changed = builder.tee_new_local();
    builder.const_i32(FLAG_OVERFLOW);
    builder.and_i32();
    builder.if_i32();
    {
        // computes (((op1 ^ r) & ((r - op1 - (flags_changed >> 31)) ^ r))
        //           >> last_op_size) & 1
        // NOTE(review): flags_changed >> 31 reconstructs the second operand
        // (r - op1 - carry-ish term) — confirm against the flag-recording sites
        gen_get_last_op1(builder);
        let last_op1 = builder.tee_new_local();
        gen_get_last_result(builder);
        let last_result = builder.tee_new_local();
        builder.xor_i32();
        builder.get_local(&last_result);
        builder.get_local(&last_op1);
        builder.sub_i32();
        gen_get_flags_changed(builder);
        builder.const_i32(31);
        builder.shr_u_i32();
        builder.sub_i32();
        builder.get_local(&last_result);
        builder.xor_i32();
        builder.and_i32();
        gen_get_last_op_size(builder);
        builder.shr_u_i32();
        builder.const_i32(1);
        builder.and_i32();
        builder.free_local(last_op1);
        builder.free_local(last_result);
    }
    builder.else_();
    {
        gen_get_flags(builder);
        builder.const_i32(FLAG_OVERFLOW);
        builder.and_i32();
    }
    builder.block_end();
    builder.free_local(flags_changed);
}
/// Emit the "below or equal" condition (CF | ZF) onto the wasm stack.
pub fn gen_test_be(builder: &mut WasmBuilder) {
    // TODO: A more efficient implementation is possible
    gen_getcf(builder);
    gen_getzf(builder);
    builder.or_i32();
}
/// Emit the "less" condition (SF != OF) onto the wasm stack,
/// computed as (!SF) ^ (!OF) so the operands are normalized to 0/1 first.
pub fn gen_test_l(builder: &mut WasmBuilder) {
    // TODO: A more efficient implementation is possible
    gen_getsf(builder);
    builder.eqz_i32();
    gen_getof(builder);
    builder.eqz_i32();
    builder.xor_i32();
}
/// Emit the "less or equal" condition ((SF != OF) | ZF) onto the wasm stack.
pub fn gen_test_le(builder: &mut WasmBuilder) {
    // TODO: A more efficient implementation is possible
    gen_test_l(builder);
    gen_getzf(builder);
    builder.or_i32();
}
/// Emit the LOOPNZ condition: [e]cx != 0 && !ZF,
/// computed as !(([e]cx == 0) | ZF).
pub fn gen_test_loopnz(ctx: &mut JitContext, is_asize_32: bool) {
    gen_test_loop(ctx, is_asize_32);
    ctx.builder.eqz_i32();
    gen_getzf(&mut ctx.builder);
    ctx.builder.or_i32();
    ctx.builder.eqz_i32();
}
/// Emit the LOOPZ condition: [e]cx != 0 && ZF,
/// computed as !(([e]cx == 0) | !ZF).
pub fn gen_test_loopz(ctx: &mut JitContext, is_asize_32: bool) {
    gen_test_loop(ctx, is_asize_32);
    ctx.builder.eqz_i32();
    gen_getzf(&mut ctx.builder);
    ctx.builder.eqz_i32();
    ctx.builder.or_i32();
    ctx.builder.eqz_i32();
}
  1351. pub fn gen_test_loop(ctx: &mut JitContext, is_asize_32: bool) {
  1352. if is_asize_32 {
  1353. gen_get_reg32(ctx, regs::ECX);
  1354. }
  1355. else {
  1356. gen_get_reg16(ctx, regs::CX);
  1357. }
  1358. }
  1359. pub fn gen_test_jcxz(ctx: &mut JitContext, is_asize_32: bool) {
  1360. if is_asize_32 {
  1361. gen_get_reg32(ctx, regs::ECX);
  1362. }
  1363. else {
  1364. gen_get_reg16(ctx, regs::CX);
  1365. }
  1366. ctx.builder.eqz_i32();
  1367. }
/// Emit code pushing FPU register ST(i) onto the wasm stack in its two-part
/// representation: fpu_get_sti writes the value into sse_scratch_register, then
/// the low i64 and the u16 at offset 8 are loaded (f80 passed as i64 + u16).
pub fn gen_fpu_get_sti(ctx: &mut JitContext, i: u32) {
    ctx.builder
        .const_i32(global_pointers::sse_scratch_register as i32);
    ctx.builder.const_i32(i as i32);
    ctx.builder.call_fn2("fpu_get_sti");
    ctx.builder
        .load_fixed_i64(global_pointers::sse_scratch_register as u32);
    ctx.builder
        .load_fixed_u16(global_pointers::sse_scratch_register as u32 + 8);
}
/// Emit a load of an f32 memory operand converted to f80: reads 32 bits via the
/// modrm-resolved address, converts through f32_to_f80 into sse_scratch_register,
/// then pushes the i64 + u16 parts onto the wasm stack.
pub fn gen_fpu_load_m32(ctx: &mut JitContext, modrm_byte: ModrmByte) {
    ctx.builder
        .const_i32(global_pointers::sse_scratch_register as i32);
    gen_modrm_resolve_safe_read32(ctx, modrm_byte);
    ctx.builder.call_fn2("f32_to_f80");
    ctx.builder
        .load_fixed_i64(global_pointers::sse_scratch_register as u32);
    ctx.builder
        .load_fixed_u16(global_pointers::sse_scratch_register as u32 + 8);
}
/// Emit a load of an f64 memory operand converted to f80: reads 64 bits via the
/// modrm-resolved address, converts through f64_to_f80 into sse_scratch_register,
/// then pushes the i64 + u16 parts onto the wasm stack.
pub fn gen_fpu_load_m64(ctx: &mut JitContext, modrm_byte: ModrmByte) {
    ctx.builder
        .const_i32(global_pointers::sse_scratch_register as i32);
    gen_modrm_resolve_safe_read64(ctx, modrm_byte);
    ctx.builder.call_fn2_i32_i64("f64_to_f80");
    ctx.builder
        .load_fixed_i64(global_pointers::sse_scratch_register as u32);
    ctx.builder
        .load_fixed_u16(global_pointers::sse_scratch_register as u32 + 8);
}
/// Emit a load of a signed 16-bit integer memory operand converted to f80:
/// reads 16 bits, sign-extends to i32, converts through i32_to_f80 into
/// sse_scratch_register, then pushes the i64 + u16 parts onto the wasm stack.
pub fn gen_fpu_load_i16(ctx: &mut JitContext, modrm_byte: ModrmByte) {
    ctx.builder
        .const_i32(global_pointers::sse_scratch_register as i32);
    gen_modrm_resolve_safe_read16(ctx, modrm_byte);
    sign_extend_i16(ctx.builder);
    ctx.builder.call_fn2("i32_to_f80");
    ctx.builder
        .load_fixed_i64(global_pointers::sse_scratch_register as u32);
    ctx.builder
        .load_fixed_u16(global_pointers::sse_scratch_register as u32 + 8);
}
/// Emit a load of a signed 32-bit integer memory operand converted to f80:
/// reads 32 bits, converts through i32_to_f80 into sse_scratch_register, then
/// pushes the i64 + u16 parts onto the wasm stack.
pub fn gen_fpu_load_i32(ctx: &mut JitContext, modrm_byte: ModrmByte) {
    ctx.builder
        .const_i32(global_pointers::sse_scratch_register as i32);
    gen_modrm_resolve_safe_read32(ctx, modrm_byte);
    ctx.builder.call_fn2("i32_to_f80");
    ctx.builder
        .load_fixed_i64(global_pointers::sse_scratch_register as u32);
    ctx.builder
        .load_fixed_u16(global_pointers::sse_scratch_register as u32 + 8);
}
/// Emit a load of a signed 64-bit integer memory operand converted to f80:
/// reads 64 bits, converts through i64_to_f80 into sse_scratch_register, then
/// pushes the i64 + u16 parts onto the wasm stack.
pub fn gen_fpu_load_i64(ctx: &mut JitContext, modrm_byte: ModrmByte) {
    ctx.builder
        .const_i32(global_pointers::sse_scratch_register as i32);
    gen_modrm_resolve_safe_read64(ctx, modrm_byte);
    ctx.builder.call_fn2_i32_i64("i64_to_f80");
    ctx.builder
        .load_fixed_i64(global_pointers::sse_scratch_register as u32);
    ctx.builder
        .load_fixed_u16(global_pointers::sse_scratch_register as u32 + 8);
}
/// Emit a divide-error exception: spills registers, records previous_eip,
/// calls trigger_de and returns from the generated function.
pub fn gen_trigger_de(ctx: &mut JitContext) {
    gen_move_registers_from_locals_to_memory(ctx);
    gen_set_previous_eip_offset_from_eip_with_low_bits(
        ctx.builder,
        ctx.start_of_current_instruction as i32 & 0xFFF,
    );
    gen_fn0_const(ctx.builder, "trigger_de");
    gen_debug_track_jit_exit(ctx.builder, ctx.start_of_current_instruction);
    ctx.builder.return_();
}
/// Emit an undefined-opcode exception: spills registers, records previous_eip,
/// calls trigger_ud and returns from the generated function.
pub fn gen_trigger_ud(ctx: &mut JitContext) {
    gen_move_registers_from_locals_to_memory(ctx);
    gen_set_previous_eip_offset_from_eip_with_low_bits(
        ctx.builder,
        ctx.start_of_current_instruction as i32 & 0xFFF,
    );
    gen_fn0_const(ctx.builder, "trigger_ud");
    gen_debug_track_jit_exit(ctx.builder, ctx.start_of_current_instruction);
    ctx.builder.return_();
}
/// Emit a general-protection exception with the given `error_code`: spills
/// registers, records previous_eip, calls trigger_gp and returns from the
/// generated function.
pub fn gen_trigger_gp(ctx: &mut JitContext, error_code: u32) {
    gen_move_registers_from_locals_to_memory(ctx);
    gen_set_previous_eip_offset_from_eip_with_low_bits(
        ctx.builder,
        ctx.start_of_current_instruction as i32 & 0xFFF,
    );
    gen_fn1_const(ctx.builder, "trigger_gp", error_code);
    gen_debug_track_jit_exit(ctx.builder, ctx.start_of_current_instruction);
    ctx.builder.return_();
}
/// Emit the boolean test for the x86 condition encoded in `condition`, leaving
/// the result on the wasm stack. `condition` is the opcode byte: 0x0X/0x7X/0x8X
/// select the standard Jcc conditions by their low nibble (even = condition,
/// odd = its negation), 0xE0-0xE3 are LOOPNZ/LOOPZ/LOOP/JCXZ.
pub fn gen_condition_fn(ctx: &mut JitContext, condition: u8) {
    if condition & 0xF0 == 0x00 || condition & 0xF0 == 0x70 || condition & 0xF0 == 0x80 {
        match condition & 0xF {
            // O / NO
            0x0 => {
                gen_getof(ctx.builder);
            },
            0x1 => {
                gen_getof(ctx.builder);
                ctx.builder.eqz_i32();
            },
            // B / NB (carry)
            0x2 => {
                gen_getcf(ctx.builder);
            },
            0x3 => {
                gen_getcf(ctx.builder);
                ctx.builder.eqz_i32();
            },
            // Z / NZ
            0x4 => {
                gen_getzf(ctx.builder);
            },
            0x5 => {
                gen_getzf(ctx.builder);
                ctx.builder.eqz_i32();
            },
            // BE / NBE
            0x6 => {
                gen_test_be(ctx.builder);
            },
            0x7 => {
                gen_test_be(ctx.builder);
                ctx.builder.eqz_i32();
            },
            // S / NS
            0x8 => {
                gen_getsf(ctx.builder);
            },
            0x9 => {
                gen_getsf(ctx.builder);
                ctx.builder.eqz_i32();
            },
            // P / NP: parity is computed by a runtime helper, not lazily inline
            0xA => ctx.builder.call_fn0_ret("test_p"),
            0xB => ctx.builder.call_fn0_ret("test_np"),
            // L / NL
            0xC => {
                gen_test_l(ctx.builder);
            },
            0xD => {
                gen_test_l(ctx.builder);
                ctx.builder.eqz_i32();
            },
            // LE / NLE
            0xE => {
                gen_test_le(ctx.builder);
            },
            0xF => {
                gen_test_le(ctx.builder);
                ctx.builder.eqz_i32();
            },
            _ => dbg_assert!(false),
        }
    }
    else {
        // loop, loopnz, loopz, jcxz
        dbg_assert!(condition & !0x3 == 0xE0);
        if condition == 0xE0 {
            gen_test_loopnz(ctx, ctx.cpu.asize_32());
        }
        else if condition == 0xE1 {
            gen_test_loopz(ctx, ctx.cpu.asize_32());
        }
        else if condition == 0xE2 {
            gen_test_loop(ctx, ctx.cpu.asize_32());
        }
        else if condition == 0xE3 {
            gen_test_jcxz(ctx, ctx.cpu.asize_32());
        }
    }
}
// When true, opstat accounting for register spills/fills is recorded once at
// JIT-compile time; when false (the default), counting code is emitted into the
// generated module and runs every execution.
const RECORD_LOCAL_MEMORY_MOVES_AT_COMPILE_TIME: bool = false;
/// Emit a spill of all 8 general-purpose register wasm locals back into the
/// reg32 memory slots, so that runtime helpers see up-to-date register values.
/// Also records/emits opstat accounting for the current instruction.
pub fn gen_move_registers_from_locals_to_memory(ctx: &mut JitContext) {
    let instruction = memory::read32s(ctx.start_of_current_instruction) as u32;
    if RECORD_LOCAL_MEMORY_MOVES_AT_COMPILE_TIME {
        ::opstats::record_opstat_unguarded_register(instruction);
    }
    else {
        ::opstats::gen_opstat_unguarded_register(ctx.builder, instruction);
    }
    for i in 0..8 {
        ctx.builder
            .const_i32(global_pointers::get_reg32_offset(i as u32) as i32);
        ctx.builder.get_local(&ctx.register_locals[i]);
        ctx.builder.store_aligned_i32(0);
    }
}
/// Emit a reload of all 8 general-purpose registers from their reg32 memory
/// slots into the wasm locals — the inverse of
/// `gen_move_registers_from_locals_to_memory`, used after a runtime helper may
/// have modified register state. Also records/emits opstat accounting.
pub fn gen_move_registers_from_memory_to_locals(ctx: &mut JitContext) {
    let instruction = memory::read32s(ctx.start_of_current_instruction) as u32;
    if RECORD_LOCAL_MEMORY_MOVES_AT_COMPILE_TIME {
        ::opstats::record_opstat_unguarded_register(instruction);
    }
    else {
        ::opstats::gen_opstat_unguarded_register(ctx.builder, instruction);
    }
    for i in 0..8 {
        ctx.builder
            .const_i32(global_pointers::get_reg32_offset(i as u32) as i32);
        ctx.builder.load_aligned_i32(0);
        ctx.builder.set_local(&ctx.register_locals[i]);
    }
}
/// Emit an increment of the given profiler counter; no-op unless the "profiler"
/// feature is enabled.
pub fn gen_profiler_stat_increment(builder: &mut WasmBuilder, stat: profiler::stat) {
    if !cfg!(feature = "profiler") {
        return;
    }
    // address of stat_array[stat], baked into the generated code as a constant
    let addr = unsafe { profiler::stat_array.as_mut_ptr().offset(stat as isize) } as u32;
    builder.increment_fixed_i32(addr, 1)
}
  1571. pub fn gen_debug_track_jit_exit(builder: &mut WasmBuilder, address: u32) {
  1572. if cfg!(feature = "profiler") {
  1573. gen_fn1_const(builder, "track_jit_exit", address);
  1574. }
  1575. }