12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779 |
- "use strict";
- /** @const */
- var ASYNC_SAFE = false;
- (function()
- {
- if(typeof XMLHttpRequest === "undefined")
- {
- v86util.load_file = load_file_nodejs;
- }
- else
- {
- v86util.load_file = load_file;
- }
- v86util.AsyncXHRBuffer = AsyncXHRBuffer;
- v86util.AsyncFileBuffer = AsyncFileBuffer;
- v86util.SyncFileBuffer = SyncFileBuffer;
/**
 * Decode a buffer into an unsigned LEB-128 integer
 * @param {Uint8Array} view Byte-stream of encoded integer
 * @param {number=} max_bits Maximum number of bits that are represented; see
 * https://github.com/WebAssembly/design/blob/master/BinaryEncoding.md#varuintn
 */
v86util.decode_leb128_u = function(view, max_bits=256)
{
    dbg_assert(view instanceof Uint8Array);

    // ceil(max_bits / 7) bytes are enough to carry max_bits payload bits
    const byte_limit = Math.ceil(max_bits / 7);

    let value = 0;
    let index = 0;

    for(let shift = 0; index < view.length && index < byte_limit; shift += 7)
    {
        const current = view[index++];
        value |= (current & 127) << shift;

        // A clear high bit marks the final byte of the encoding
        if((current & 128) === 0)
        {
            break;
        }
    }

    return { value: value, next_index: index };
};
/**
 * Extract memory and table requirements from a module's "dylink" section.
 * Details on dylink section:
 * https://github.com/WebAssembly/tool-conventions/blob/master/DynamicLinking.md
 * @param {WebAssembly.Module} module
 */
v86util.decode_dylink = function(module)
{
    const sections = WebAssembly.Module.customSections(module, "dylink");
    dbg_assert(sections && sections.length === 1);

    const bytes = new Uint8Array(sections[0]);

    // The section starts with two varuint32 fields: memory size, table size
    const memory = v86util.decode_leb128_u(bytes, 32);
    const table = v86util.decode_leb128_u(bytes.subarray(memory.next_index), 32);

    return {
        memory_size: memory.value,
        table_size: table.value,
    };
};
// Reads len characters at offset from Memory object mem as a JS string
v86util.read_sized_string_from_mem = function read_sized_string_from_mem(mem, offset, len)
{
    const chars = new Uint8Array(mem.buffer, offset, len);
    return String.fromCharCode.apply(String, chars);
};
//XXX: figure out a better way to handle dylink issue than duplicating above function
v86util.minimal_load_wasm = function minimal_load_wasm(filename, imports, cb)
{
    v86util.load_file(filename, {
        done: bytes =>
        {
            WebAssembly
                .instantiate(bytes, imports)
                .then(({ instance }) =>
                {
                    cb({
                        memory: imports["env"]["memory"],
                        exports: instance["exports"],
                        instance,
                        imports,
                        filename,
                    });
                });
        },
    });
};
/**
 * Fetches, compiles, and instantiates a wasm file
 * @param {string} filename
 * @param {Object} imports Object used for WebAssembly module's imports
 * @param {number} memory_size Bytes of memory the module wants for itself, excluding the space
 *   the dylink section requests.
 * @param {number} table_size Number of table entries the module wants for itself, excluding
 *   what the dylink section requests.
 * @param {function(Object)} cb Callback function that receives custom object with instance, memory,
 *   exported functions, imports, and the filename.
 */
v86util.load_wasm = function load_wasm(filename, imports, memory_size, table_size, cb)
{
    dbg_assert(memory_size > 0);
    dbg_assert(typeof imports["env"] === "object");

    // Set up env.memory and the function table per the module's dylink
    // section, then instantiate it
    function instantiate(module)
    {
        const dylink = v86util.decode_dylink(module);

        // emscripten seems to require a minimum of 256 pages (16 MB)
        const total_mem_pages = Math.max(
            256,
            Math.ceil((dylink.memory_size + memory_size) / WASM_PAGE_SIZE)
        );

        try
        {
            imports["env"]["memory"] = new WebAssembly.Memory({
                "initial": total_mem_pages,
                "maximum": total_mem_pages,
            });
        }
        catch(e)
        {
            console.error(
                "Failed to allocate WASM memory of %d pages",
                total_mem_pages
            );
            throw e;
        }

        imports["env"]["memoryBase"] = memory_size;

        // XXX: Emscripten forces EMULATED_FUNCTION_POINTERS when
        // using SIDE_MODULE=1, which we use. Newer versions of emscripten add
        // all exported functions to the WebAssembly.Table, so we need extra space
        // here
        const EXTRA_TABLE_SPACE_FOR_EMULATED_FP = 10000;

        imports["env"][WASM_EXPORT_TABLE_NAME] = new WebAssembly.Table({
            "initial": dylink.table_size + table_size + EXTRA_TABLE_SPACE_FOR_EMULATED_FP,
            "element": "anyfunc",
        });
        imports["env"]["tableBase"] = table_size;

        return WebAssembly.instantiate(module, imports);
    }

    v86util.load_file(filename, {
        done: buffer =>
        {
            WebAssembly.compile(buffer)
                .then(instantiate)
                .then(instance =>
                {
                    cb({
                        memory: imports["env"]["memory"],
                        exports: instance["exports"],
                        instance,
                        imports,
                        filename,
                    });
                });
        },
    });
};
/**
 * Download a file via XMLHttpRequest (browser environments).
 * @param {string} filename
 * @param {Object} options
 */
function load_file(filename, options)
{
    const request = new XMLHttpRequest();

    request.open(options.method || "get", filename, true);

    // Binary downloads are the common case; text is opt-in
    if(!options.as_text)
    {
        request.responseType = "arraybuffer";
    }

    if(options.headers)
    {
        for(const name of Object.keys(options.headers))
        {
            request.setRequestHeader(name, options.headers[name]);
        }
    }

    if(options.range)
    {
        const start = options.range.start;
        const end = start + options.range.length - 1;
        request.setRequestHeader("Range", "bytes=" + start + "-" + end);
    }

    request.onload = function(e)
    {
        if(request.readyState === 4)
        {
            // 206 (partial content) is the expected status for range requests
            if(request.status !== 200 && request.status !== 206)
            {
                console.error("Loading the image `" + filename + "` failed (status %d)", request.status);
            }
            else if(request.response)
            {
                options.done && options.done(request.response, request);
            }
        }
    };

    if(options.progress)
    {
        request.onprogress = function(e)
        {
            options.progress(e);
        };
    }

    request.send(null);
}
/**
 * Read a file (whole or a byte range) via node's fs module.
 * Quoted property access on fs keeps Closure Compiler from renaming the calls.
 * @param {string} filename
 * @param {Object} options
 */
function load_file_nodejs(filename, options)
{
    const fs = require("fs");

    if(!options.range)
    {
        const read_options = {
            encoding: options.as_text ? "utf-8" : null,
        };

        fs["readFile"](filename, read_options, function(err, data)
        {
            if(err)
            {
                console.log("Could not read file:", filename, err);
            }
            else
            {
                // Binary reads hand the caller a plain ArrayBuffer
                options.done(options.as_text ? data : new Uint8Array(data).buffer);
            }
        });
        return;
    }

    // Partial read: open, read exactly the requested range, close
    dbg_assert(!options.as_text);

    fs["open"](filename, "r", (err, fd) =>
    {
        if(err) throw err;

        const length = options.range.length;
        const buffer = Buffer.allocUnsafe(length);

        fs["read"](fd, buffer, 0, length, options.range.start, (err, bytes_read) =>
        {
            if(err) throw err;
            dbg_assert(bytes_read === length);
            options.done && options.done(new Uint8Array(buffer));

            fs["close"](fd, (err) =>
            {
                if(err) throw err;
            });
        });
    });
}
// determine_size: environment-specific helper to find a file's size in bytes.
// node: fs.stat; browser: a one-byte range request, inspecting Content-Range.
if(typeof XMLHttpRequest === "undefined")
{
    var determine_size = function(path, cb)
    {
        require("fs")["stat"](path, (err, stats) =>
        {
            err ? cb(err) : cb(null, stats.size);
        });
    };
}
else
{
    var determine_size = function(url, cb)
    {
        v86util.load_file(url, {
            done: (buffer, http) =>
            {
                const header = http.getResponseHeader("Content-Range") || "";
                const match = header.match(/\/(\d+)\s*$/);

                if(match)
                {
                    cb(null, +match[1]);
                }
                else
                {
                    cb("`Range: bytes=...` header not supported (Got `" + header + "`)");
                }
            },
            headers: {
                Range: "bytes=0-0",

                //"Accept-Encoding": "",
                // Added by Chromium, but can cause the whole file to be sent
                // Settings this to empty also causes problems and Chromium
                // doesn't seem to create this header any more
                //"If-Range": "",
            }
        });
    };
}
/**
 * Asynchronous access to ArrayBuffer, loading blocks lazily as needed,
 * using the `Range: bytes=...` header
 *
 * @constructor
 * @param {string} filename Name of the file to download
 * @param {number|undefined} size Total size in bytes, if already known
 */
function AsyncXHRBuffer(filename, size)
{
    this.filename = filename;

    /** @const */
    this.block_size = 256;
    this.byteLength = size;

    // Written blocks, keyed by block number (offset / block_size).
    // Object.create(null) avoids prototype-key collisions.
    this.loaded_blocks = Object.create(null);

    this.onload = undefined;
    this.onprogress = undefined;
}

/**
 * Ensure this.byteLength is known (probing the server if necessary),
 * then invoke this.onload.
 */
AsyncXHRBuffer.prototype.load = function()
{
    if(this.byteLength !== undefined)
    {
        this.onload && this.onload(Object.create(null));
        return;
    }

    // Determine the size using a request
    determine_size(this.filename, (error, size) =>
    {
        if(error)
        {
            console.assert(false, "Cannot use: " + this.filename + ". " + error);
        }
        else
        {
            dbg_assert(size >= 0);
            this.byteLength = size;
            this.onload && this.onload(Object.create(null));
        }
    });
};

/**
 * Return the requested range assembled from cached blocks, or undefined
 * if any block in the range has not been loaded/written yet.
 * @param {number} offset
 * @param {number} len
 * @param {function(!Uint8Array)} fn (unused here; kept for call-site symmetry)
 */
AsyncXHRBuffer.prototype.get_from_cache = function(offset, len, fn)
{
    const number_of_blocks = len / this.block_size;
    const block_index = offset / this.block_size;

    // Every block of the range must be cached, otherwise the caller
    // falls back to a download
    for(let i = 0; i < number_of_blocks; i++)
    {
        if(!this.loaded_blocks[block_index + i])
        {
            return;
        }
    }

    if(number_of_blocks === 1)
    {
        return this.loaded_blocks[block_index];
    }
    else
    {
        const result = new Uint8Array(len);
        for(let i = 0; i < number_of_blocks; i++)
        {
            result.set(this.loaded_blocks[block_index + i], i * this.block_size);
        }
        return result;
    }
};

/**
 * Read len bytes at offset, from the cache if possible, otherwise via a
 * `Range: bytes=...` download.
 * @param {number} offset
 * @param {number} len
 * @param {function(!Uint8Array)} fn
 */
AsyncXHRBuffer.prototype.get = function(offset, len, fn)
{
    console.assert(offset + len <= this.byteLength);
    console.assert(offset % this.block_size === 0);
    console.assert(len % this.block_size === 0);
    console.assert(len);

    const block = this.get_from_cache(offset, len, fn);
    if(block)
    {
        if(ASYNC_SAFE)
        {
            // Deliver asynchronously so callers see uniform callback timing
            setTimeout(fn.bind(this, block), 0);
        }
        else
        {
            fn(block);
        }
        return;
    }

    v86util.load_file(this.filename, {
        done: function done(buffer)
        {
            const block = new Uint8Array(buffer);
            this.handle_read(offset, len, block);
            fn(block);
        }.bind(this),
        range: { start: offset, length: len },
    });
};

/**
 * Write data at start, remembering it block-by-block in loaded_blocks.
 * Relies on this.byteLength, this.loaded_blocks and this.block_size
 *
 * @this {AsyncFileBuffer|AsyncXHRBuffer}
 *
 * @param {number} start
 * @param {!Uint8Array} data
 * @param {function()} fn
 */
AsyncXHRBuffer.prototype.set = function(start, data, fn)
{
    console.assert(start + data.byteLength <= this.byteLength);

    const len = data.length;

    console.assert(start % this.block_size === 0);
    console.assert(len % this.block_size === 0);
    console.assert(len);

    const start_block = start / this.block_size;
    const block_count = len / this.block_size;

    for(let i = 0; i < block_count; i++)
    {
        let block = this.loaded_blocks[start_block + i];

        if(block === undefined)
        {
            block = this.loaded_blocks[start_block + i] = new Uint8Array(this.block_size);
        }

        const data_slice = data.subarray(i * this.block_size, (i + 1) * this.block_size);
        block.set(data_slice);

        console.assert(block.byteLength === data_slice.length);
    }

    fn();
};

/**
 * Overlay previously written blocks onto freshly read data, so writes
 * made since the original source was fetched are not lost.
 *
 * @this {AsyncFileBuffer|AsyncXHRBuffer}
 * @param {number} offset
 * @param {number} len
 * @param {!Uint8Array} block
 */
AsyncXHRBuffer.prototype.handle_read = function(offset, len, block)
{
    // Used by AsyncXHRBuffer and AsyncFileBuffer
    // Overwrites blocks from the original source that have been written since
    const start_block = offset / this.block_size;
    const block_count = len / this.block_size;

    for(let i = 0; i < block_count; i++)
    {
        const written_block = this.loaded_blocks[start_block + i];

        if(written_block)
        {
            block.set(written_block, i * this.block_size);
        }
        //else
        //{
        //    var cached = this.loaded_blocks[start_block + i] = new Uint8Array(this.block_size);
        //    cached.set(block.subarray(i * this.block_size, (i + 1) * this.block_size));
        //}
    }
};

AsyncXHRBuffer.prototype.get_buffer = function(fn)
{
    // We must download all parts, unlikely a good idea for big files
    fn();
};

/**
 * Collect all written blocks into one contiguous buffer, together with
 * their block indices (ascending) and the block size.
 */
AsyncXHRBuffer.prototype.get_written_blocks = function()
{
    let count = 0;
    for(const _ in this.loaded_blocks)
    {
        count++;
    }

    const buffer = new Uint8Array(count * this.block_size);
    const indices = [];

    let i = 0;
    for(let index in this.loaded_blocks)
    {
        const block = this.loaded_blocks[index];
        dbg_assert(block.length === this.block_size);

        index = +index;
        indices.push(index);

        buffer.set(
            block,
            i * this.block_size
        );
        i++;
    }

    return {
        buffer,
        indices,
        block_size: this.block_size,
    };
};

/**
 * Serialise the written blocks as [[block_index, Uint8Array], ...].
 */
AsyncXHRBuffer.prototype.get_state = function()
{
    const state = [];
    const loaded_blocks = [];

    // BUGFIX: this previously iterated `for...in` over Object.values(...),
    // which walks the array's *indices* and destructures the key string,
    // producing [index, undefined] pairs — all block data was dropped from
    // the saved state. Iterate the entries of loaded_blocks instead.
    for(const [index, block] of Object.entries(this.loaded_blocks))
    {
        dbg_assert(isFinite(+index));
        loaded_blocks.push([+index, block]);
    }

    state[0] = loaded_blocks;
    return state;
};

/**
 * Restore written blocks from the format produced by get_state.
 */
AsyncXHRBuffer.prototype.set_state = function(state)
{
    const loaded_blocks = state[0];
    this.loaded_blocks = Object.create(null);

    for(const [index, block] of loaded_blocks)
    {
        this.loaded_blocks[index] = block;
    }
};
/**
 * Synchronous access to File, loading blocks from the input type=file
 * The whole file is loaded into memory during initialisation
 *
 * @constructor
 */
function SyncFileBuffer(file)
{
    this.file = file;
    this.byteLength = file.size;

    if(file.size > (1 << 30))
    {
        console.warn("SyncFileBuffer: Allocating buffer of " + (file.size >> 20) + " MB ...");
    }

    this.buffer = new ArrayBuffer(file.size);
    this.onload = undefined;
    this.onprogress = undefined;
}

SyncFileBuffer.prototype.load = function()
{
    this.load_next(0);
};

/**
 * Copy the file into this.buffer one slice at a time, then fire onload.
 * @param {number} start
 */
SyncFileBuffer.prototype.load_next = function(start)
{
    /** @const */
    var PART_SIZE = 4 << 20;

    var filereader = new FileReader();

    filereader.onload = (function(e)
    {
        var chunk = new Uint8Array(e.target.result);
        new Uint8Array(this.buffer, start).set(chunk);
        this.load_next(start + PART_SIZE);
    }).bind(this);

    if(this.onprogress)
    {
        this.onprogress({
            loaded: start,
            total: this.byteLength,
            lengthComputable: true,
        });
    }

    if(start >= this.byteLength)
    {
        // Finished: drop the File reference and hand out the buffer
        this.file = undefined;
        this.onload && this.onload({ buffer: this.buffer });
        return;
    }

    var end = Math.min(start + PART_SIZE, this.byteLength);
    filereader.readAsArrayBuffer(this.file.slice(start, end));
};

/**
 * @param {number} start
 * @param {number} len
 * @param {function(!Uint8Array)} fn
 */
SyncFileBuffer.prototype.get = function(start, len, fn)
{
    console.assert(start + len <= this.byteLength);
    fn(new Uint8Array(this.buffer, start, len));
};

/**
 * @param {number} offset
 * @param {!Uint8Array} slice
 * @param {function()} fn
 */
SyncFileBuffer.prototype.set = function(offset, slice, fn)
{
    console.assert(offset + slice.byteLength <= this.byteLength);

    new Uint8Array(this.buffer, offset, slice.byteLength).set(slice);
    fn();
};

SyncFileBuffer.prototype.get_buffer = function(fn)
{
    fn(this.buffer);
};
- /**
- * Asynchronous access to File, loading blocks from the input type=file
- *
- * @constructor
- */
- function AsyncFileBuffer(file)
- {
- this.file = file;
- this.byteLength = file.size;
- /** @const */
- this.block_size = 256;
- this.loaded_blocks = Object.create(null);
- this.onload = undefined;
- this.onprogress = undefined;
- }
- AsyncFileBuffer.prototype.load = function()
- {
- this.onload && this.onload(Object.create(null));
- };
- /**
- * @param {number} offset
- * @param {number} len
- * @param {function(!Uint8Array)} fn
- */
- AsyncFileBuffer.prototype.get = function(offset, len, fn)
- {
- console.assert(offset % this.block_size === 0);
- console.assert(len % this.block_size === 0);
- console.assert(len);
- var block = this.get_from_cache(offset, len, fn);
- if(block)
- {
- fn(block);
- return;
- }
- var fr = new FileReader();
- fr.onload = function(e)
- {
- var buffer = e.target.result;
- var block = new Uint8Array(buffer);
- this.handle_read(offset, len, block);
- fn(block);
- }.bind(this);
- fr.readAsArrayBuffer(this.file.slice(offset, offset + len));
- };
- AsyncFileBuffer.prototype.get_from_cache = AsyncXHRBuffer.prototype.get_from_cache;
- AsyncFileBuffer.prototype.set = AsyncXHRBuffer.prototype.set;
- AsyncFileBuffer.prototype.handle_read = AsyncXHRBuffer.prototype.handle_read;
- AsyncFileBuffer.prototype.get_buffer = function(fn)
- {
- // We must load all parts, unlikely a good idea for big files
- fn();
- };
- AsyncFileBuffer.prototype.get_as_file = function(name)
- {
- var parts = [];
- var existing_blocks = Object.keys(this.loaded_blocks)
- .map(Number)
- .sort(function(x, y) { return x - y; });
- var current_offset = 0;
- for(var i = 0; i < existing_blocks.length; i++)
- {
- var block_index = existing_blocks[i];
- var block = this.loaded_blocks[block_index];
- var start = block_index * this.block_size;
- console.assert(start >= current_offset);
- if(start !== current_offset)
- {
- parts.push(this.file.slice(current_offset, start));
- current_offset = start;
- }
- parts.push(block);
- current_offset += block.length;
- }
- if(current_offset !== this.file.size)
- {
- parts.push(this.file.slice(current_offset));
- }
- var file = new File(parts, name);
- console.assert(file.size === this.file.size);
- return file;
- };
- })();
|