wasm_builder.rs 38 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034
  1. use std::collections::HashMap;
  2. use leb::{
  3. write_fixed_leb16_at_idx, write_fixed_leb32_at_idx, write_leb_i32, write_leb_i64, write_leb_u32,
  4. };
  5. use std::mem::transmute;
  6. use util::{SafeToU8, SafeToU16};
  7. use wasmgen::wasm_opcodes as op;
  8. #[derive(PartialEq)]
  9. #[allow(non_camel_case_types)]
  10. enum FunctionType {
  11. FN0_TYPE_INDEX,
  12. FN1_TYPE_INDEX,
  13. FN2_TYPE_INDEX,
  14. FN3_TYPE_INDEX,
  15. FN0_RET_TYPE_INDEX,
  16. FN0_RET_I64_TYPE_INDEX,
  17. FN1_RET_TYPE_INDEX,
  18. FN2_RET_TYPE_INDEX,
  19. FN1_RET_I64_TYPE_INDEX,
  20. FN1_F32_RET_TYPE_INDEX,
  21. FN1_F64_RET_TYPE_INDEX,
  22. FN2_I32_I64_TYPE_INDEX,
  23. FN2_I64_I32_TYPE_INDEX,
  24. FN2_I64_I32_RET_TYPE_INDEX,
  25. FN2_I64_I32_RET_I64_TYPE_INDEX,
  26. FN3_RET_TYPE_INDEX,
  27. FN3_I64_I32_I32_TYPE_INDEX,
  28. FN3_I32_I64_I32_TYPE_INDEX,
  29. FN3_I32_I64_I32_RET_TYPE_INDEX,
  30. FN4_I32_I64_I64_I32_RET_TYPE_INDEX,
  31. // When adding at the end, update LAST below
  32. }
impl FunctionType {
    /// Inverse of `to_u8`: recover a variant from its discriminant.
    /// SAFETY: only sound if `x` is a valid discriminant (debug-asserted via
    /// the range check against LAST) and the enum layout is byte-sized.
    pub fn of_u8(x: u8) -> FunctionType {
        dbg_assert!(x <= FunctionType::LAST as u8);
        unsafe { transmute(x) }
    }
    /// Discriminant of the variant, used as the type-section index.
    pub fn to_u8(self: FunctionType) -> u8 { self as u8 }
    // Must always name the final variant of the enum above.
    pub const LAST: FunctionType = FunctionType::FN4_I32_I64_I64_I32_RET_TYPE_INDEX;
}
// Number of arguments of the generated function (its signature is
// FN1_TYPE_INDEX, i.e. one i32 — see write_function_section). Locals are
// numbered after the arguments.
pub const WASM_MODULE_ARGUMENT_COUNT: u8 = 1;
/// Incrementally assembles a wasm module containing a single function whose
/// body is built up in `instruction_body` and spliced into `output` by
/// `finish()`.
pub struct WasmBuilder {
    output: Vec<u8>,           // the module bytes emitted so far
    instruction_body: Vec<u8>, // body of the generated function, moved into output on finish()
    idx_import_table_size: usize, // for rewriting once finished
    idx_import_count: usize,      // for rewriting once finished
    idx_import_entries: usize,    // for searching the imports
    import_table_size: usize, // the current import table size (to avoid reading 2 byte leb)
    import_count: u16,        // same as above
    initial_static_size: usize, // size of module after initialization, rest is drained on reset
    // label for referencing block/if/loop constructs directly via branch instructions
    next_label: Label,
    label_stack: Vec<Label>,
    label_to_depth: HashMap<Label, usize>,
    // pools of locals that were allocated and then freed, for reuse
    free_locals_i32: Vec<WasmLocal>,
    free_locals_i64: Vec<WasmLocalI64>,
    local_count: u8,
    pub arg_local_initial_state: WasmLocal,
}
/// Handle to an i32 local of the generated function (wraps the local's index).
pub struct WasmLocal(u8);
impl WasmLocal {
    /// Index of the local as used by get/set/tee instructions.
    pub fn idx(&self) -> u8 { self.0 }
    /// Unsafe: Can result in multiple free's. Should only be used for locals that are used during
    /// the whole module (for example, registers)
    pub fn unsafe_clone(&self) -> WasmLocal { WasmLocal(self.0) }
}
/// Handle to an i64 local of the generated function (wraps the local's index).
pub struct WasmLocalI64(u8);
impl WasmLocalI64 {
    /// Index of the local as used by get/set/tee instructions.
    pub fn idx(&self) -> u8 { self.0 }
}
/// Identifier of an open block/if/loop construct; mapped to its nesting depth
/// so branch instructions can be emitted with relative depths.
#[derive(Copy, Clone, Eq, Hash, PartialEq)]
pub struct Label(u32);
impl Label {
    const ZERO: Label = Label(0);
    // wrapping_add: ids only need to be unique among simultaneously open
    // blocks, so wrapping after 2^32 allocations is harmless
    fn next(&self) -> Label { Label(self.0.wrapping_add(1)) }
}
  77. impl WasmBuilder {
  78. pub fn new() -> Self {
  79. let mut b = WasmBuilder {
  80. output: Vec::with_capacity(256),
  81. instruction_body: Vec::with_capacity(256),
  82. idx_import_table_size: 0,
  83. idx_import_count: 0,
  84. idx_import_entries: 0,
  85. import_table_size: 2,
  86. import_count: 0,
  87. initial_static_size: 0,
  88. label_to_depth: HashMap::new(),
  89. label_stack: Vec::new(),
  90. next_label: Label::ZERO,
  91. free_locals_i32: Vec::with_capacity(8),
  92. free_locals_i64: Vec::with_capacity(8),
  93. local_count: 0,
  94. arg_local_initial_state: WasmLocal(0),
  95. };
  96. b.init();
  97. b
  98. }
    /// Emit the static module prefix shared by every generated module; its
    /// length is remembered so reset() can truncate back to it.
    fn init(&mut self) {
        self.output.extend("\0asm".as_bytes());
        // wasm version as a 4-byte little-endian u32: op::WASM_VERSION
        // followed by three zero bytes
        self.output.push(op::WASM_VERSION);
        self.output.push(0);
        self.output.push(0);
        self.output.push(0);
        self.write_type_section();
        self.write_import_section_preamble();
        // store state of current pointers etc. so we can reset them later
        self.initial_static_size = self.output.len();
    }
  111. pub fn reset(&mut self) {
  112. self.output.drain(self.initial_static_size..);
  113. self.set_import_table_size(2);
  114. self.set_import_count(0);
  115. self.instruction_body.clear();
  116. self.free_locals_i32.clear();
  117. self.free_locals_i64.clear();
  118. self.local_count = 0;
  119. dbg_assert!(self.label_to_depth.is_empty());
  120. dbg_assert!(self.label_stack.is_empty());
  121. self.next_label = Label::ZERO;
  122. }
    /// Finalize the module: emit the memory import, function and export
    /// sections, then the code section containing the single function body.
    /// Returns the total module length in bytes.
    pub fn finish(&mut self) -> usize {
        dbg_assert!(self.label_to_depth.is_empty());
        dbg_assert!(self.label_stack.is_empty());
        self.write_memory_import();
        self.write_function_section();
        self.write_export_section();
        // write code section preamble
        self.output.push(op::SC_CODE);
        let idx_code_section_size = self.output.len(); // we will write to this location later
        self.output.push(0);
        self.output.push(0); // write temp val for now using 4 bytes
        self.output.push(0);
        self.output.push(0);
        self.output.push(1); // number of function bodies: just 1
        // same as above but for body size of the function
        let idx_fn_body_size = self.output.len();
        self.output.push(0);
        self.output.push(0);
        self.output.push(0);
        self.output.push(0);
        dbg_assert!(
            self.local_count as usize == self.free_locals_i32.len() + self.free_locals_i64.len(),
            "All locals should have been freed"
        );
        let free_locals_i32 = &self.free_locals_i32;
        let free_locals_i64 = &self.free_locals_i64;
        // reconstruct each local's type from the free pools (every local has
        // been freed at this point, per the assertion above)
        let locals = (0..self.local_count).map(|i| {
            let local_index = WASM_MODULE_ARGUMENT_COUNT + i;
            if free_locals_i64.iter().any(|v| v.idx() == local_index) {
                op::TYPE_I64
            }
            else {
                dbg_assert!(free_locals_i32.iter().any(|v| v.idx() == local_index));
                op::TYPE_I32
            }
        });
        // run-length encode runs of same-typed locals, as the wasm binary
        // format's local declarations require (count, type) pairs
        let mut groups = vec![];
        for local_type in locals {
            if let Some(last) = groups.last_mut() {
                let (last_type, last_count) = *last;
                if last_type == local_type {
                    *last = (local_type, last_count + 1);
                    continue;
                }
            }
            groups.push((local_type, 1));
        }
        dbg_assert!(groups.len() < 128);
        self.output.push(groups.len().safe_to_u8());
        for (local_type, count) in groups {
            dbg_assert!(count < 128);
            self.output.push(count);
            self.output.push(local_type);
        }
        self.output.append(&mut self.instruction_body);
        self.output.push(op::OP_END);
        // write the actual sizes to the pointer locations stored above. We subtract 4 from the actual
        // value because the ptr itself points to four bytes
        let fn_body_size = (self.output.len() - idx_fn_body_size - 4) as u32;
        write_fixed_leb32_at_idx(&mut self.output, idx_fn_body_size, fn_body_size);
        let code_section_size = (self.output.len() - idx_code_section_size - 4) as u32;
        write_fixed_leb32_at_idx(&mut self.output, idx_code_section_size, code_section_size);
        self.output.len()
    }
  187. pub fn write_type_section(&mut self) {
  188. self.output.push(op::SC_TYPE);
  189. let idx_section_size = self.output.len();
  190. self.output.push(0);
  191. self.output.push(0);
  192. let nr_of_function_types = FunctionType::to_u8(FunctionType::LAST) + 1;
  193. dbg_assert!(nr_of_function_types < 128);
  194. self.output.push(nr_of_function_types);
  195. for i in 0..(nr_of_function_types) {
  196. match FunctionType::of_u8(i) {
  197. FunctionType::FN0_TYPE_INDEX => {
  198. self.output.push(op::TYPE_FUNC);
  199. self.output.push(0); // no args
  200. self.output.push(0); // no return val
  201. },
  202. FunctionType::FN1_TYPE_INDEX => {
  203. self.output.push(op::TYPE_FUNC);
  204. self.output.push(1);
  205. self.output.push(op::TYPE_I32);
  206. self.output.push(0);
  207. },
  208. FunctionType::FN2_TYPE_INDEX => {
  209. self.output.push(op::TYPE_FUNC);
  210. self.output.push(2);
  211. self.output.push(op::TYPE_I32);
  212. self.output.push(op::TYPE_I32);
  213. self.output.push(0);
  214. },
  215. FunctionType::FN3_TYPE_INDEX => {
  216. self.output.push(op::TYPE_FUNC);
  217. self.output.push(3);
  218. self.output.push(op::TYPE_I32);
  219. self.output.push(op::TYPE_I32);
  220. self.output.push(op::TYPE_I32);
  221. self.output.push(0);
  222. },
  223. FunctionType::FN0_RET_TYPE_INDEX => {
  224. self.output.push(op::TYPE_FUNC);
  225. self.output.push(0);
  226. self.output.push(1);
  227. self.output.push(op::TYPE_I32);
  228. },
  229. FunctionType::FN0_RET_I64_TYPE_INDEX => {
  230. self.output.push(op::TYPE_FUNC);
  231. self.output.push(0);
  232. self.output.push(1);
  233. self.output.push(op::TYPE_I64);
  234. },
  235. FunctionType::FN1_RET_TYPE_INDEX => {
  236. self.output.push(op::TYPE_FUNC);
  237. self.output.push(1);
  238. self.output.push(op::TYPE_I32);
  239. self.output.push(1);
  240. self.output.push(op::TYPE_I32);
  241. },
  242. FunctionType::FN2_RET_TYPE_INDEX => {
  243. self.output.push(op::TYPE_FUNC);
  244. self.output.push(2);
  245. self.output.push(op::TYPE_I32);
  246. self.output.push(op::TYPE_I32);
  247. self.output.push(1);
  248. self.output.push(op::TYPE_I32);
  249. },
  250. FunctionType::FN1_RET_I64_TYPE_INDEX => {
  251. self.output.push(op::TYPE_FUNC);
  252. self.output.push(1);
  253. self.output.push(op::TYPE_I32);
  254. self.output.push(1);
  255. self.output.push(op::TYPE_I64);
  256. },
  257. FunctionType::FN1_F32_RET_TYPE_INDEX => {
  258. self.output.push(op::TYPE_FUNC);
  259. self.output.push(1);
  260. self.output.push(op::TYPE_F32);
  261. self.output.push(1);
  262. self.output.push(op::TYPE_I32);
  263. },
  264. FunctionType::FN1_F64_RET_TYPE_INDEX => {
  265. self.output.push(op::TYPE_FUNC);
  266. self.output.push(1);
  267. self.output.push(op::TYPE_F64);
  268. self.output.push(1);
  269. self.output.push(op::TYPE_I32);
  270. },
  271. FunctionType::FN2_I32_I64_TYPE_INDEX => {
  272. self.output.push(op::TYPE_FUNC);
  273. self.output.push(2);
  274. self.output.push(op::TYPE_I32);
  275. self.output.push(op::TYPE_I64);
  276. self.output.push(0);
  277. },
  278. FunctionType::FN2_I64_I32_TYPE_INDEX => {
  279. self.output.push(op::TYPE_FUNC);
  280. self.output.push(2);
  281. self.output.push(op::TYPE_I64);
  282. self.output.push(op::TYPE_I32);
  283. self.output.push(0);
  284. },
  285. FunctionType::FN2_I64_I32_RET_TYPE_INDEX => {
  286. self.output.push(op::TYPE_FUNC);
  287. self.output.push(2);
  288. self.output.push(op::TYPE_I64);
  289. self.output.push(op::TYPE_I32);
  290. self.output.push(1);
  291. self.output.push(op::TYPE_I32);
  292. },
  293. FunctionType::FN2_I64_I32_RET_I64_TYPE_INDEX => {
  294. self.output.push(op::TYPE_FUNC);
  295. self.output.push(2);
  296. self.output.push(op::TYPE_I64);
  297. self.output.push(op::TYPE_I32);
  298. self.output.push(1);
  299. self.output.push(op::TYPE_I64);
  300. },
  301. FunctionType::FN3_RET_TYPE_INDEX => {
  302. self.output.push(op::TYPE_FUNC);
  303. self.output.push(3);
  304. self.output.push(op::TYPE_I32);
  305. self.output.push(op::TYPE_I32);
  306. self.output.push(op::TYPE_I32);
  307. self.output.push(1);
  308. self.output.push(op::TYPE_I32);
  309. },
  310. FunctionType::FN3_I64_I32_I32_TYPE_INDEX => {
  311. self.output.push(op::TYPE_FUNC);
  312. self.output.push(3);
  313. self.output.push(op::TYPE_I64);
  314. self.output.push(op::TYPE_I32);
  315. self.output.push(op::TYPE_I32);
  316. self.output.push(0);
  317. },
  318. FunctionType::FN3_I32_I64_I32_TYPE_INDEX => {
  319. self.output.push(op::TYPE_FUNC);
  320. self.output.push(3);
  321. self.output.push(op::TYPE_I32);
  322. self.output.push(op::TYPE_I64);
  323. self.output.push(op::TYPE_I32);
  324. self.output.push(0);
  325. },
  326. FunctionType::FN3_I32_I64_I32_RET_TYPE_INDEX => {
  327. self.output.push(op::TYPE_FUNC);
  328. self.output.push(3);
  329. self.output.push(op::TYPE_I32);
  330. self.output.push(op::TYPE_I64);
  331. self.output.push(op::TYPE_I32);
  332. self.output.push(1);
  333. self.output.push(op::TYPE_I32);
  334. },
  335. FunctionType::FN4_I32_I64_I64_I32_RET_TYPE_INDEX => {
  336. self.output.push(op::TYPE_FUNC);
  337. self.output.push(4);
  338. self.output.push(op::TYPE_I32);
  339. self.output.push(op::TYPE_I64);
  340. self.output.push(op::TYPE_I64);
  341. self.output.push(op::TYPE_I32);
  342. self.output.push(1);
  343. self.output.push(op::TYPE_I32);
  344. },
  345. }
  346. }
  347. let new_len = self.output.len();
  348. let size = (new_len - 2) - idx_section_size;
  349. write_fixed_leb16_at_idx(&mut self.output, idx_section_size, size.safe_to_u16());
  350. }
    /// Goes over the import block to find index of an import entry by function name
    /// NOTE(review): this assumes every entry is a function import with a
    /// 1-byte module name and a 1-byte type index, as written by
    /// write_import_entry — the memory import (different layout) is only
    /// appended in finish(), after lookups are done. Confirm callers respect
    /// that ordering.
    pub fn get_import_index(&self, fn_name: &str) -> Option<u16> {
        let mut offset = self.idx_import_entries;
        for i in 0..self.import_count {
            offset += 1; // skip length of module name (always 1)
            offset += 1; // skip module name itself ("e")
            let len = self.output[offset] as usize;
            offset += 1;
            let name = self
                .output
                .get(offset..(offset + len))
                .expect("get function name");
            if name == fn_name.as_bytes() {
                return Some(i);
            }
            offset += len; // skip the string
            offset += 1; // skip import kind
            offset += 1; // skip type index
        }
        None
    }
    /// Update the import count and patch its fixed 2-byte leb128 field in the
    /// already-emitted import section preamble.
    pub fn set_import_count(&mut self, count: u16) {
        dbg_assert!(count < 0x4000); // must fit 14 bits (two leb128 bytes)
        self.import_count = count;
        let idx_import_count = self.idx_import_count;
        write_fixed_leb16_at_idx(&mut self.output, idx_import_count, count);
    }
    /// Update the import section's byte size and patch its fixed 2-byte
    /// leb128 field in the already-emitted preamble.
    pub fn set_import_table_size(&mut self, size: usize) {
        dbg_assert!(size < 0x4000); // must fit 14 bits (two leb128 bytes)
        self.import_table_size = size;
        let idx_import_table_size = self.idx_import_table_size;
        write_fixed_leb16_at_idx(&mut self.output, idx_import_table_size, size.safe_to_u16());
    }
    /// Start the import section, reserving fixed 2-byte leb128 slots for the
    /// section size and import count that are patched as imports are added.
    pub fn write_import_section_preamble(&mut self) {
        self.output.push(op::SC_IMPORT);
        self.idx_import_table_size = self.output.len();
        // placeholder bytes; rewritten by set_import_table_size before the
        // module is used (NOTE(review): the raw placeholder does not itself
        // decode to the initial size of 2 — verify it is always patched)
        self.output.push(1 | 0b10000000);
        self.output.push(2); // 2 in 2 byte leb
        self.idx_import_count = self.output.len();
        // placeholder; rewritten by set_import_count
        self.output.push(1 | 0b10000000);
        self.output.push(0); // 0 in 2 byte leb
        // here after starts the actual list of imports
        self.idx_import_entries = self.output.len();
    }
    /// Import the guest memory as "e"."m". Called from finish() after all
    /// function imports (write_export_section relies on memory being last).
    pub fn write_memory_import(&mut self) {
        self.output.push(1);
        self.output.push('e' as u8);
        self.output.push(1);
        self.output.push('m' as u8);
        self.output.push(op::EXT_MEMORY);
        self.output.push(0); // memory flag, 0 for no maximum memory limit present
        write_leb_u32(&mut self.output, 128); // initial memory length of 128 pages, takes 2 bytes in leb128
        let new_import_count = self.import_count + 1;
        self.set_import_count(new_import_count);
        // entry is 8 bytes: 1+1 module name, 1+1 field name, kind byte,
        // flag byte, 2-byte page count
        let new_table_size = self.import_table_size + 8;
        self.set_import_table_size(new_table_size);
    }
  408. fn write_import_entry(&mut self, fn_name: &str, type_index: FunctionType) -> u16 {
  409. self.output.push(1); // length of module name
  410. self.output.push('e' as u8); // module name
  411. self.output.push(fn_name.len().safe_to_u8());
  412. self.output.extend(fn_name.as_bytes());
  413. self.output.push(op::EXT_FUNCTION);
  414. self.output.push(type_index.to_u8());
  415. let new_import_count = self.import_count + 1;
  416. self.set_import_count(new_import_count);
  417. let new_table_size = self.import_table_size + 1 + 1 + 1 + fn_name.len() + 1 + 1;
  418. self.set_import_table_size(new_table_size);
  419. self.import_count - 1
  420. }
    /// Declare the module's single own function with signature FN1 (one i32
    /// argument, matching WASM_MODULE_ARGUMENT_COUNT, no result).
    pub fn write_function_section(&mut self) {
        self.output.push(op::SC_FUNCTION);
        self.output.push(2); // length of this section
        self.output.push(1); // count of signature indices
        self.output.push(FunctionType::FN1_TYPE_INDEX.to_u8());
    }
    /// Export the generated function under the name "f".
    pub fn write_export_section(&mut self) {
        self.output.push(op::SC_EXPORT);
        self.output.push(1 + 1 + 1 + 1 + 2); // size of this section
        self.output.push(1); // count of table: just one function exported
        self.output.push(1); // length of exported function name
        self.output.push('f' as u8); // function name
        self.output.push(op::EXT_FUNCTION);
        // index of the exported function
        // function space starts with imports. index of last import is import count - 1
        // the last import however is a memory, so we subtract one from that
        let next_op_idx = self.output.len();
        self.output.push(0);
        self.output.push(0); // add 2 bytes for writing 16 byte val
        write_fixed_leb16_at_idx(&mut self.output, next_op_idx, self.import_count - 1);
    }
  442. fn get_fn_idx(&mut self, fn_name: &str, type_index: FunctionType) -> u16 {
  443. match self.get_import_index(fn_name) {
  444. Some(idx) => idx,
  445. None => {
  446. let idx = self.write_import_entry(fn_name, type_index);
  447. idx
  448. },
  449. }
  450. }
    /// Raw pointer to the module bytes (valid until output is next mutated).
    pub fn get_output_ptr(&self) -> *const u8 { self.output.as_ptr() }
    /// Current length of the module bytes.
    pub fn get_output_len(&self) -> u32 { self.output.len() as u32 }
  453. fn open_block(&mut self) -> Label {
  454. let label = self.next_label;
  455. self.next_label = self.next_label.next();
  456. self.label_to_depth
  457. .insert(label, self.label_stack.len() + 1);
  458. self.label_stack.push(label);
  459. label
  460. }
    /// Close the innermost open scope. Panics (unwrap) if no scope is open;
    /// scopes must be closed strictly LIFO, which the assertion checks.
    fn close_block(&mut self) {
        let label = self.label_stack.pop().unwrap();
        let old_depth = self.label_to_depth.remove(&label).unwrap();
        dbg_assert!(self.label_to_depth.len() + 1 == old_depth);
    }
    /// Allocate an i32 local, reusing a previously freed one when available.
    #[must_use = "local allocated but not used"]
    fn alloc_local(&mut self) -> WasmLocal {
        match self.free_locals_i32.pop() {
            Some(local) => local,
            None => {
                // locals are numbered after the module arguments
                let new_idx = self.local_count + WASM_MODULE_ARGUMENT_COUNT;
                // checked: panic rather than wrap if more than 255 locals
                self.local_count = self.local_count.checked_add(1).unwrap();
                WasmLocal(new_idx)
            },
        }
    }
    /// Return an i32 local to the free pool for reuse. Double frees are
    /// possible via WasmLocal::unsafe_clone — see its doc comment.
    pub fn free_local(&mut self, local: WasmLocal) {
        dbg_assert!(
            (WASM_MODULE_ARGUMENT_COUNT..self.local_count + WASM_MODULE_ARGUMENT_COUNT)
                .contains(&local.0)
        );
        self.free_locals_i32.push(local)
    }
    /// Pop the stack top into a freshly allocated i32 local (local.set).
    #[must_use = "local allocated but not used"]
    pub fn set_new_local(&mut self) -> WasmLocal {
        let local = self.alloc_local();
        self.instruction_body.push(op::OP_SETLOCAL);
        self.instruction_body.push(local.idx());
        local
    }
    /// Like set_new_local, but keeps the value on the stack (local.tee).
    #[must_use = "local allocated but not used"]
    pub fn tee_new_local(&mut self) -> WasmLocal {
        let local = self.alloc_local();
        self.instruction_body.push(op::OP_TEELOCAL);
        self.instruction_body.push(local.idx());
        local
    }
    /// local.set: pop the stack top into `local`.
    pub fn set_local(&mut self, local: &WasmLocal) {
        self.instruction_body.push(op::OP_SETLOCAL);
        self.instruction_body.push(local.idx());
    }
    /// local.tee: copy the stack top into `local`, leaving it on the stack.
    pub fn tee_local(&mut self, local: &WasmLocal) {
        self.instruction_body.push(op::OP_TEELOCAL);
        self.instruction_body.push(local.idx());
    }
    /// local.get: push the value of `local` onto the stack.
    pub fn get_local(&mut self, local: &WasmLocal) {
        self.instruction_body.push(op::OP_GETLOCAL);
        self.instruction_body.push(local.idx());
    }
  510. #[must_use = "local allocated but not used"]
  511. fn alloc_local_i64(&mut self) -> WasmLocalI64 {
  512. match self.free_locals_i64.pop() {
  513. Some(local) => local,
  514. None => {
  515. let new_idx = self.local_count + WASM_MODULE_ARGUMENT_COUNT;
  516. self.local_count += 1;
  517. WasmLocalI64(new_idx)
  518. },
  519. }
  520. }
    /// Return an i64 local to the free pool for reuse.
    pub fn free_local_i64(&mut self, local: WasmLocalI64) {
        dbg_assert!(
            (WASM_MODULE_ARGUMENT_COUNT..self.local_count + WASM_MODULE_ARGUMENT_COUNT)
                .contains(&local.0)
        );
        self.free_locals_i64.push(local)
    }
    /// Pop the stack top into a freshly allocated i64 local (local.set).
    #[must_use = "local allocated but not used"]
    pub fn set_new_local_i64(&mut self) -> WasmLocalI64 {
        let local = self.alloc_local_i64();
        self.instruction_body.push(op::OP_SETLOCAL);
        self.instruction_body.push(local.idx());
        local
    }
    /// Like set_new_local_i64, but keeps the value on the stack (local.tee).
    #[must_use = "local allocated but not used"]
    pub fn tee_new_local_i64(&mut self) -> WasmLocalI64 {
        let local = self.alloc_local_i64();
        self.instruction_body.push(op::OP_TEELOCAL);
        self.instruction_body.push(local.idx());
        local
    }
    /// local.get: push the value of the i64 `local` onto the stack.
    pub fn get_local_i64(&mut self, local: &WasmLocalI64) {
        self.instruction_body.push(op::OP_GETLOCAL);
        self.instruction_body.push(local.idx());
    }
    /// i32.const: push the constant `v` (signed leb128 immediate).
    pub fn const_i32(&mut self, v: i32) {
        self.instruction_body.push(op::OP_I32CONST);
        write_leb_i32(&mut self.instruction_body, v);
    }
    /// i64.const: push the constant `v` (signed leb128 immediate).
    pub fn const_i64(&mut self, v: i64) {
        self.instruction_body.push(op::OP_I64CONST);
        write_leb_i64(&mut self.instruction_body, v);
    }
    /// Load the byte at the fixed address `addr` (zero-extended to i32).
    pub fn load_fixed_u8(&mut self, addr: u32) {
        self.const_i32(addr as i32);
        self.load_u8(0);
    }
  558. pub fn load_fixed_u16(&mut self, addr: u32) {
  559. // doesn't cause a failure in the generated code, but it will be much slower
  560. dbg_assert!((addr & 1) == 0);
  561. self.const_i32(addr as i32);
  562. self.instruction_body.push(op::OP_I32LOAD16U);
  563. self.instruction_body.push(op::MEM_ALIGN16);
  564. self.instruction_body.push(0); // immediate offset
  565. }
    /// Load the 32-bit value at the fixed, 4-aligned address `addr`.
    pub fn load_fixed_i32(&mut self, addr: u32) {
        // doesn't cause a failure in the generated code, but it will be much slower
        dbg_assert!((addr & 3) == 0);
        self.const_i32(addr as i32);
        self.load_aligned_i32(0);
    }
    /// Load the 64-bit value at the fixed, 8-aligned address `addr`.
    pub fn load_fixed_i64(&mut self, addr: u32) {
        // doesn't cause a failure in the generated code, but it will be much slower
        dbg_assert!((addr & 7) == 0);
        self.const_i32(addr as i32);
        self.load_aligned_i64(0);
    }
    // Loads taking their address from the stack, with a constant byte offset
    // immediate. The "unaligned" variants use alignment hint 0.
    /// i32.load8_u at stack address + `byte_offset`.
    pub fn load_u8(&mut self, byte_offset: u32) {
        self.instruction_body.push(op::OP_I32LOAD8U);
        self.instruction_body.push(op::MEM_NO_ALIGN);
        write_leb_u32(&mut self.instruction_body, byte_offset);
    }
    /// i64.load with no alignment hint.
    pub fn load_unaligned_i64(&mut self, byte_offset: u32) {
        self.instruction_body.push(op::OP_I64LOAD);
        self.instruction_body.push(op::MEM_NO_ALIGN);
        write_leb_u32(&mut self.instruction_body, byte_offset);
    }
    /// i32.load with no alignment hint.
    pub fn load_unaligned_i32(&mut self, byte_offset: u32) {
        self.instruction_body.push(op::OP_I32LOAD);
        self.instruction_body.push(op::MEM_NO_ALIGN);
        write_leb_u32(&mut self.instruction_body, byte_offset);
    }
    /// i32.load16_u with no alignment hint.
    pub fn load_unaligned_u16(&mut self, byte_offset: u32) {
        self.instruction_body.push(op::OP_I32LOAD16U);
        self.instruction_body.push(op::MEM_NO_ALIGN);
        write_leb_u32(&mut self.instruction_body, byte_offset);
    }
    // Loads with natural-alignment hints; the effective address must actually
    // be aligned for the hint to be valid.
    /// f64.load with 8-byte alignment hint.
    pub fn load_aligned_f64(&mut self, byte_offset: u32) {
        self.instruction_body.push(op::OP_F64LOAD);
        self.instruction_body.push(op::MEM_ALIGN64);
        write_leb_u32(&mut self.instruction_body, byte_offset);
    }
    /// i64.load with 8-byte alignment hint.
    pub fn load_aligned_i64(&mut self, byte_offset: u32) {
        self.instruction_body.push(op::OP_I64LOAD);
        self.instruction_body.push(op::MEM_ALIGN64);
        write_leb_u32(&mut self.instruction_body, byte_offset);
    }
    /// f32.load with 4-byte alignment hint.
    pub fn load_aligned_f32(&mut self, byte_offset: u32) {
        self.instruction_body.push(op::OP_F32LOAD);
        self.instruction_body.push(op::MEM_ALIGN32);
        write_leb_u32(&mut self.instruction_body, byte_offset);
    }
    /// i32.load with 4-byte alignment hint.
    pub fn load_aligned_i32(&mut self, byte_offset: u32) {
        self.instruction_body.push(op::OP_I32LOAD);
        self.instruction_body.push(op::MEM_ALIGN32);
        write_leb_u32(&mut self.instruction_body, byte_offset);
    }
    /// i32.load16_u with 2-byte alignment hint.
    pub fn load_aligned_u16(&mut self, byte_offset: u32) {
        self.instruction_body.push(op::OP_I32LOAD16U);
        self.instruction_body.push(op::MEM_ALIGN16);
        write_leb_u32(&mut self.instruction_body, byte_offset);
    }
    /// i32.store8: store the low byte of the stack top at address + offset.
    pub fn store_u8(&mut self, byte_offset: u32) {
        self.instruction_body.push(op::OP_I32STORE8);
        self.instruction_body.push(op::MEM_NO_ALIGN);
        write_leb_u32(&mut self.instruction_body, byte_offset);
    }
    // currently unused; kept for symmetry with the load helpers
    //pub fn store_aligned_u16(&mut self, byte_offset: u32) {
    //    self.instruction_body.push(op::OP_I32STORE16);
    //    self.instruction_body.push(op::MEM_ALIGN16);
    //    write_leb_u32(&mut self.instruction_body, byte_offset);
    //}
    /// i32.store with 4-byte alignment hint.
    pub fn store_aligned_i32(&mut self, byte_offset: u32) {
        self.instruction_body.push(op::OP_I32STORE);
        self.instruction_body.push(op::MEM_ALIGN32);
        write_leb_u32(&mut self.instruction_body, byte_offset);
    }
    /// i64.store with 8-byte alignment hint.
    pub fn store_aligned_i64(&mut self, byte_offset: u32) {
        self.instruction_body.push(op::OP_I64STORE);
        self.instruction_body.push(op::MEM_ALIGN64);
        write_leb_u32(&mut self.instruction_body, byte_offset);
    }
    /// i32.store16 with no alignment hint.
    pub fn store_unaligned_u16(&mut self, byte_offset: u32) {
        self.instruction_body.push(op::OP_I32STORE16);
        self.instruction_body.push(op::MEM_NO_ALIGN);
        write_leb_u32(&mut self.instruction_body, byte_offset);
    }
    /// i32.store with no alignment hint.
    pub fn store_unaligned_i32(&mut self, byte_offset: u32) {
        self.instruction_body.push(op::OP_I32STORE);
        self.instruction_body.push(op::MEM_NO_ALIGN);
        write_leb_u32(&mut self.instruction_body, byte_offset);
    }
    /// i64.store with no alignment hint.
    pub fn store_unaligned_i64(&mut self, byte_offset: u32) {
        self.instruction_body.push(op::OP_I64STORE);
        self.instruction_body.push(op::MEM_NO_ALIGN);
        write_leb_u32(&mut self.instruction_body, byte_offset);
    }
    /// Emit code that adds `n` to the i32 at the fixed, 4-aligned address
    /// `byte_offset` (read-modify-write).
    pub fn increment_fixed_i32(&mut self, byte_offset: u32, n: i32) {
        self.const_i32(byte_offset as i32); // store address, consumed by the store below
        self.load_fixed_i32(byte_offset); // current value
        self.const_i32(n);
        self.add_i32();
        self.store_aligned_i32(0); // pops value + address
    }
    // Single-opcode emitters for arithmetic, bitwise and comparison
    // instructions. Names without a u/s marker follow the opcode pushed;
    // note div_i64/rem_i64 emit the *unsigned* variants.
    pub fn add_i32(&mut self) { self.instruction_body.push(op::OP_I32ADD); }
    pub fn sub_i32(&mut self) { self.instruction_body.push(op::OP_I32SUB); }
    pub fn and_i32(&mut self) { self.instruction_body.push(op::OP_I32AND); }
    pub fn or_i32(&mut self) { self.instruction_body.push(op::OP_I32OR); }
    pub fn or_i64(&mut self) { self.instruction_body.push(op::OP_I64OR); }
    pub fn xor_i32(&mut self) { self.instruction_body.push(op::OP_I32XOR); }
    pub fn mul_i32(&mut self) { self.instruction_body.push(op::OP_I32MUL); }
    pub fn mul_i64(&mut self) { self.instruction_body.push(op::OP_I64MUL); }
    pub fn div_i64(&mut self) { self.instruction_body.push(op::OP_I64DIVU); } // unsigned
    pub fn rem_i64(&mut self) { self.instruction_body.push(op::OP_I64REMU); } // unsigned
    pub fn rotl_i32(&mut self) { self.instruction_body.push(op::OP_I32ROTL); }
    pub fn shl_i32(&mut self) { self.instruction_body.push(op::OP_I32SHL); }
    pub fn shl_i64(&mut self) { self.instruction_body.push(op::OP_I64SHL); }
    pub fn shr_u_i32(&mut self) { self.instruction_body.push(op::OP_I32SHRU); }
    pub fn shr_u_i64(&mut self) { self.instruction_body.push(op::OP_I64SHRU); }
    pub fn shr_s_i32(&mut self) { self.instruction_body.push(op::OP_I32SHRS); }
    pub fn eq_i32(&mut self) { self.instruction_body.push(op::OP_I32EQ); }
    pub fn eq_i64(&mut self) { self.instruction_body.push(op::OP_I64EQ); }
    pub fn ne_i32(&mut self) { self.instruction_body.push(op::OP_I32NE); }
    pub fn ne_i64(&mut self) { self.instruction_body.push(op::OP_I64NE); }
    pub fn le_i32(&mut self) { self.instruction_body.push(op::OP_I32LES); } // signed
    #[allow(dead_code)]
    pub fn lt_i32(&mut self) { self.instruction_body.push(op::OP_I32LTS); } // signed
    #[allow(dead_code)]
    pub fn ge_i32(&mut self) { self.instruction_body.push(op::OP_I32GES); } // signed
    #[allow(dead_code)]
    pub fn gt_i32(&mut self) { self.instruction_body.push(op::OP_I32GTS); } // signed
    pub fn gtu_i64(&mut self) { self.instruction_body.push(op::OP_I64GTU); }
    pub fn ltu_i32(&mut self) { self.instruction_body.push(op::OP_I32LTU); }
    // Bit-pattern and width conversions.
    /// f32.reinterpret_i32: reuse the i32 bit pattern as an f32.
    pub fn reinterpret_i32_as_f32(&mut self) {
        self.instruction_body.push(op::OP_F32REINTERPRETI32);
    }
    //pub fn reinterpret_f32_as_i32(&mut self) {
    //    self.instruction_body.push(op::OP_I32REINTERPRETF32);
    //}
    /// f64.reinterpret_i64: reuse the i64 bit pattern as an f64.
    pub fn reinterpret_i64_as_f64(&mut self) {
        self.instruction_body.push(op::OP_F64REINTERPRETI64);
    }
    //pub fn reinterpret_f64_as_i64(&mut self) {
    //    self.instruction_body.push(op::OP_I64REINTERPRETF64);
    //}
    //pub fn promote_f32_to_f64(&mut self) { self.instruction_body.push(op::OP_F64PROMOTEF32); }
    //pub fn demote_f64_to_f32(&mut self) { self.instruction_body.push(op::OP_F32DEMOTEF64); }
    //pub fn convert_i32_to_f64(&mut self) { self.instruction_body.push(op::OP_F64CONVERTSI32); }
    //pub fn convert_i64_to_f64(&mut self) { self.instruction_body.push(op::OP_F64CONVERTSI64); }
    /// i64.extend_i32_u: zero-extend i32 to i64.
    pub fn extend_unsigned_i32_to_i64(&mut self) {
        self.instruction_body.push(op::OP_I64EXTENDUI32);
    }
    /// i64.extend_i32_s: sign-extend i32 to i64.
    pub fn extend_signed_i32_to_i64(&mut self) { self.instruction_body.push(op::OP_I64EXTENDSI32); }
    /// i32.wrap_i64: truncate i64 to its low 32 bits.
    pub fn wrap_i64_to_i32(&mut self) { self.instruction_body.push(op::OP_I32WRAPI64); }
    /// i32.eqz: 1 if the stack top is zero, else 0.
    pub fn eqz_i32(&mut self) { self.instruction_body.push(op::OP_I32EQZ); }
  716. pub fn if_i32(&mut self) {
  717. self.open_block();
  718. self.instruction_body.push(op::OP_IF);
  719. self.instruction_body.push(op::TYPE_I32);
  720. }
  721. #[allow(dead_code)]
  722. pub fn if_i64(&mut self) {
  723. self.open_block();
  724. self.instruction_body.push(op::OP_IF);
  725. self.instruction_body.push(op::TYPE_I64);
  726. }
  727. #[allow(dead_code)]
  728. pub fn block_i32(&mut self) {
  729. self.open_block();
  730. self.instruction_body.push(op::OP_BLOCK);
  731. self.instruction_body.push(op::TYPE_I32);
  732. }
  733. pub fn if_void(&mut self) {
  734. self.open_block();
  735. self.instruction_body.push(op::OP_IF);
  736. self.instruction_body.push(op::TYPE_VOID_BLOCK);
  737. }
/// Emit `else`, switching the innermost open `if` to its alternative arm.
pub fn else_(&mut self) {
    // There must be an open block for the `else` to belong to.
    dbg_assert!(!self.label_stack.is_empty());
    self.instruction_body.push(op::OP_ELSE);
}
  742. pub fn loop_void(&mut self) -> Label {
  743. self.instruction_body.push(op::OP_LOOP);
  744. self.instruction_body.push(op::TYPE_VOID_BLOCK);
  745. self.open_block()
  746. }
  747. pub fn block_void(&mut self) -> Label {
  748. self.instruction_body.push(op::OP_BLOCK);
  749. self.instruction_body.push(op::TYPE_VOID_BLOCK);
  750. self.open_block()
  751. }
/// Close the innermost open block/loop/if: pop its label from the block
/// stack and emit `end`.
pub fn block_end(&mut self) {
    self.close_block();
    self.instruction_body.push(op::OP_END);
}
/// Emit `return`, leaving the generated function.
pub fn return_(&mut self) { self.instruction_body.push(op::OP_RETURN); }
#[allow(dead_code)]
/// Emit `drop`, discarding the top of the wasm value stack.
pub fn drop_(&mut self) { self.instruction_body.push(op::OP_DROP); }
/// Emit `br_table`: at runtime pops an i32 index and branches to
/// `cases[index]`, or to `default_case` when the index is out of range.
pub fn brtable(
    &mut self,
    default_case: Label,
    cases: &mut dyn std::iter::ExactSizeIterator<Item = &Label>,
) {
    self.instruction_body.push(op::OP_BRTABLE);
    // Encoding: vector length, then each case's branch depth, then the default.
    write_leb_u32(&mut self.instruction_body, cases.len() as u32);
    for case in cases {
        self.write_label(*case);
    }
    self.write_label(default_case);
}
/// Emit an unconditional branch to `label`.
pub fn br(&mut self, label: Label) {
    self.instruction_body.push(op::OP_BR);
    self.write_label(label);
}
/// Emit a conditional branch to `label`; pops an i32 condition at runtime.
pub fn br_if(&mut self, label: Label) {
    self.instruction_body.push(op::OP_BRIF);
    self.write_label(label);
}
  779. fn write_label(&mut self, label: Label) {
  780. let depth = *self.label_to_depth.get(&label).unwrap();
  781. dbg_assert!(depth <= self.label_stack.len());
  782. write_leb_u32(
  783. &mut self.instruction_body,
  784. (self.label_stack.len() - depth) as u32,
  785. );
  786. }
/// Emit a `call` to the imported function `name` with signature `function`.
/// The import index comes from get_fn_idx, which registers the import on
/// first use, so repeated calls to the same name reuse one index.
fn call_fn(&mut self, name: &str, function: FunctionType) {
    let i = self.get_fn_idx(name, function);
    self.instruction_body.push(op::OP_CALL);
    write_leb_u32(&mut self.instruction_body, i as u32);
}
// Typed convenience wrappers around call_fn, one per imported-function
// signature. Naming scheme: `fnN` takes N i32 arguments unless the suffix
// spells out other types (i64/f32/f64 in argument order); a trailing `_ret`
// marks an i32 result and `_ret_i64` an i64 result.
pub fn call_fn0(&mut self, name: &str) { self.call_fn(name, FunctionType::FN0_TYPE_INDEX) }
pub fn call_fn0_ret(&mut self, name: &str) {
    self.call_fn(name, FunctionType::FN0_RET_TYPE_INDEX)
}
pub fn call_fn0_ret_i64(&mut self, name: &str) {
    self.call_fn(name, FunctionType::FN0_RET_I64_TYPE_INDEX)
}
pub fn call_fn1(&mut self, name: &str) { self.call_fn(name, FunctionType::FN1_TYPE_INDEX) }
pub fn call_fn1_ret(&mut self, name: &str) {
    self.call_fn(name, FunctionType::FN1_RET_TYPE_INDEX)
}
pub fn call_fn1_ret_i64(&mut self, name: &str) {
    self.call_fn(name, FunctionType::FN1_RET_I64_TYPE_INDEX)
}
pub fn call_fn1_f32_ret(&mut self, name: &str) {
    self.call_fn(name, FunctionType::FN1_F32_RET_TYPE_INDEX)
}
pub fn call_fn1_f64_ret(&mut self, name: &str) {
    self.call_fn(name, FunctionType::FN1_F64_RET_TYPE_INDEX)
}
pub fn call_fn2(&mut self, name: &str) { self.call_fn(name, FunctionType::FN2_TYPE_INDEX) }
pub fn call_fn2_i32_i64(&mut self, name: &str) {
    self.call_fn(name, FunctionType::FN2_I32_I64_TYPE_INDEX)
}
pub fn call_fn2_i64_i32(&mut self, name: &str) {
    self.call_fn(name, FunctionType::FN2_I64_I32_TYPE_INDEX)
}
pub fn call_fn2_i64_i32_ret(&mut self, name: &str) {
    self.call_fn(name, FunctionType::FN2_I64_I32_RET_TYPE_INDEX)
}
pub fn call_fn2_i64_i32_ret_i64(&mut self, name: &str) {
    self.call_fn(name, FunctionType::FN2_I64_I32_RET_I64_TYPE_INDEX)
}
pub fn call_fn2_ret(&mut self, name: &str) {
    self.call_fn(name, FunctionType::FN2_RET_TYPE_INDEX)
}
pub fn call_fn3(&mut self, name: &str) { self.call_fn(name, FunctionType::FN3_TYPE_INDEX) }
pub fn call_fn3_ret(&mut self, name: &str) {
    self.call_fn(name, FunctionType::FN3_RET_TYPE_INDEX)
}
pub fn call_fn3_i64_i32_i32(&mut self, name: &str) {
    self.call_fn(name, FunctionType::FN3_I64_I32_I32_TYPE_INDEX)
}
pub fn call_fn3_i32_i64_i32(&mut self, name: &str) {
    self.call_fn(name, FunctionType::FN3_I32_I64_I32_TYPE_INDEX)
}
pub fn call_fn3_i32_i64_i32_ret(&mut self, name: &str) {
    self.call_fn(name, FunctionType::FN3_I32_I64_I32_RET_TYPE_INDEX)
}
pub fn call_fn4_i32_i64_i64_i32_ret(&mut self, name: &str) {
    self.call_fn(name, FunctionType::FN4_I32_I64_I64_I32_RET_TYPE_INDEX)
}
/// Emit `unreachable`, which traps if ever executed.
pub fn unreachable(&mut self) { self.instruction_body.push(op::OP_UNREACHABLE) }
/// Size in bytes of the instruction body generated so far.
pub fn instruction_body_length(&self) -> u32 { self.instruction_body.len() as u32 }
  846. }
#[cfg(test)]
mod tests {
    use std::fs::File;
    use std::io::Write;
    use wasmgen::wasm_builder;
    use wasmgen::wasm_builder::FunctionType;
    // Imported functions must get stable, deduplicated indices in
    // registration order.
    #[test]
    fn import_table_management() {
        let mut w = wasm_builder::WasmBuilder::new();
        assert_eq!(0, w.get_fn_idx("foo", FunctionType::FN0_TYPE_INDEX));
        assert_eq!(1, w.get_fn_idx("bar", FunctionType::FN1_TYPE_INDEX));
        assert_eq!(0, w.get_fn_idx("foo", FunctionType::FN0_TYPE_INDEX));
        assert_eq!(2, w.get_fn_idx("baz", FunctionType::FN2_TYPE_INDEX));
    }
    // End-to-end smoke test: build a small module twice (with a reset in
    // between), exercising calls, locals and control flow, then dump the
    // binary for inspection with external wasm tooling.
    #[test]
    fn builder_test() {
        let mut m = wasm_builder::WasmBuilder::new();
        m.call_fn("foo", FunctionType::FN0_TYPE_INDEX);
        m.call_fn("bar", FunctionType::FN0_TYPE_INDEX);
        let local0 = m.alloc_local(); // for ensuring that reset clears previous locals
        m.free_local(local0);
        m.finish();
        m.reset();
        m.const_i32(2);
        m.call_fn("baz", FunctionType::FN1_RET_TYPE_INDEX);
        m.call_fn("foo", FunctionType::FN1_TYPE_INDEX);
        m.const_i32(10);
        let local1 = m.alloc_local();
        m.tee_local(&local1); // local1 = 10
        m.const_i32(20);
        m.add_i32();
        let local2 = m.alloc_local();
        m.tee_local(&local2); // local2 = 30
        m.free_local(local1);
        let local3 = m.alloc_local();
        // local3 should reuse the slot freed by local1, i.e. the first
        // local index after the module arguments.
        assert_eq!(local3.idx(), wasm_builder::WASM_MODULE_ARGUMENT_COUNT);
        m.free_local(local2);
        m.free_local(local3);
        m.const_i32(30);
        m.ne_i32();
        m.if_void();
        m.unreachable();
        m.block_end();
        m.finish();
        let op_ptr = m.get_output_ptr();
        let op_len = m.get_output_len();
        dbg_log!("op_ptr: {:?}, op_len: {:?}", op_ptr, op_len);
        let mut f = File::create("build/dummy_output.wasm").expect("creating dummy_output.wasm");
        f.write_all(&m.output).expect("write dummy_output.wasm");
    }
}