Diffstat (limited to 'tools')
-rw-r--r-- | tools/file_packager.py | 62 |
-rw-r--r-- | tools/js_optimizer.py  |  8 |
-rw-r--r-- | tools/shared.py        |  2 |
3 files changed, 46 insertions, 26 deletions
diff --git a/tools/file_packager.py b/tools/file_packager.py
index a2349a57..3ba5b23f 100644
--- a/tools/file_packager.py
+++ b/tools/file_packager.py
@@ -11,7 +11,7 @@ data downloads.
 
 Usage:
 
-  file_packager.py TARGET [--preload A [B..]] [--embed C [D..]] [--compress COMPRESSION_DATA] [--crunch[=X]] [--js-output=OUTPUT.js] [--no-force]
+  file_packager.py TARGET [--preload A [B..]] [--embed C [D..]] [--compress COMPRESSION_DATA] [--crunch[=X]] [--js-output=OUTPUT.js] [--no-force] [--use-preload-cache] [--no-heap-copy]
 
   --crunch=X Will compress dxt files to crn with quality level X. The crunch commandline
              tool must be present and CRUNCH should be defined in ~/.emscripten that points to it. JS crunch decompressing code will
@@ -27,6 +27,10 @@ Usage:
   --use-preload-cache Stores package in IndexedDB so that subsequent loads don't need to do XHR. Checks package version.
 
+  --no-heap-copy If specified, the preloaded filesystem is not copied inside the Emscripten HEAP, but kept in a separate typed array outside it.
+                 The default, if this is not specified, is to embed the VFS inside the HEAP, so that mmap()ing files in it is a no-op.
+                 Passing this flag optimizes for fread() usage, omitting it optimizes for mmap() usage.
+
 Notes:
 
   * The file packager generates unix-style file paths. So if you are on
     windows and a file is accessed at
@@ -43,7 +47,7 @@ from shared import Compression, execute, suffix, unsuffixed
 from subprocess import Popen, PIPE, STDOUT
 
 if len(sys.argv) == 1:
-  print '''Usage: file_packager.py TARGET [--preload A...] [--embed B...] [--compress COMPRESSION_DATA] [--crunch[=X]] [--js-output=OUTPUT.js] [--no-force] [--use-preload-cache]
+  print '''Usage: file_packager.py TARGET [--preload A...] [--embed B...] [--compress COMPRESSION_DATA] [--crunch[=X]] [--js-output=OUTPUT.js] [--no-force] [--use-preload-cache] [--no-heap-copy]
 See the source for more details.'''
   sys.exit(0)
 
@@ -70,7 +74,12 @@ crunch = 0
 plugins = []
 jsoutput = None
 force = True
+# If set to True, IndexedDB (IDBFS in library_idbfs.js) is used to locally cache VFS XHR so that subsequent
+# page loads can read the data from the offline cache instead.
 use_preload_cache = False
+# If set to True, the blob received from XHR is moved to the Emscripten HEAP, optimizing for mmap() performance.
+# If set to False, the XHR blob is kept intact, and fread()s etc. are performed directly to that data. This optimizes for minimal memory usage and fread() performance.
+no_heap_copy = True
 
 for arg in sys.argv[1:]:
   if arg == '--preload':
@@ -91,6 +100,8 @@ for arg in sys.argv[1:]:
     force = False
   elif arg == '--use-preload-cache':
     use_preload_cache = True
+  elif arg == '--no-heap-copy':
+    no_heap_copy = False
   elif arg.startswith('--js-output'):
     jsoutput = arg.split('=')[1] if '=' in arg else None
   elif arg.startswith('--crunch'):
@@ -134,6 +145,11 @@ if (not force) and len(data_files) == 0:
 ret = '''
 var Module;
 if (typeof Module === 'undefined') Module = eval('(function() { try { return Module || {} } catch(e) { return {} } })()');
+if (!Module.expectedDataFileDownloads) {
+  Module.expectedDataFileDownloads = 0;
+  Module.finishedDataFileDownloads = 0;
+}
+Module.expectedDataFileDownloads++;
 (function() {
 '''
@@ -338,18 +354,9 @@ if has_preloaded:
       send: function() {},
       onload: function() {
         var byteArray = this.byteArray.subarray(this.start, this.end);
-        if (this.crunched) {
-          var ddsHeader = byteArray.subarray(0, 128);
-          var that = this;
-          requestDecrunch(this.name, byteArray.subarray(128), function(ddsData) {
-            byteArray = new Uint8Array(ddsHeader.length + ddsData.length);
-            byteArray.set(ddsHeader, 0);
-            byteArray.set(ddsData, 128);
-            that.finish(byteArray);
-          });
-        } else {
+%s
         this.finish(byteArray);
-        }
+%s
       },
       finish: function(byteArray) {
         var that = this;
@@ -365,7 +372,20 @@ if has_preloaded:
         this.requests[this.name] = null;
       },
     };
-  '''
+  ''' % ('' if not crunch else '''
+        if (this.crunched) {
+          var ddsHeader = byteArray.subarray(0, 128);
+          var that = this;
+          requestDecrunch(this.name, byteArray.subarray(128), function(ddsData) {
+            byteArray = new Uint8Array(ddsHeader.length + ddsData.length);
+            byteArray.set(ddsHeader, 0);
+            byteArray.set(ddsData, 128);
+            that.finish(byteArray);
+          });
+        } else {
+''', '' if not crunch else '''
+        }
+''')
 
   counter = 0
   for file_ in data_files:
@@ -405,12 +425,18 @@ for file_ in data_files:
 
 if has_preloaded:
   # Get the big archive and split it up
-  use_data = '''
+  if no_heap_copy:
+    use_data = '''
       // copy the entire loaded file into a spot in the heap. Files will refer to slices in that. They cannot be freed though.
       var ptr = Module['_malloc'](byteArray.length);
       Module['HEAPU8'].set(byteArray, ptr);
      DataRequest.prototype.byteArray = Module['HEAPU8'].subarray(ptr, ptr+byteArray.length);
 '''
+  else:
+    use_data = '''
+      // Reuse the bytearray from the XHR as the source for file reads.
+      DataRequest.prototype.byteArray = byteArray;
+'''
   for file_ in data_files:
     if file_['mode'] == 'preload':
       use_data += '      DataRequest.prototype.requests["%s"].onload();\n' % (file_['dstpath'])
@@ -427,12 +453,6 @@ if has_preloaded:
   package_uuid = uuid.uuid4();
   remote_package_name = os.path.basename(Compression.compressed_name(data_target) if Compression.on else data_target)
   code += r'''
-    if (!Module.expectedDataFileDownloads) {
-      Module.expectedDataFileDownloads = 0;
-      Module.finishedDataFileDownloads = 0;
-    }
-    Module.expectedDataFileDownloads++;
-
     var PACKAGE_PATH = window['encodeURIComponent'](window.location.pathname.toString().substring(0, window.location.pathname.toString().lastIndexOf('/')) + '/');
     var PACKAGE_NAME = '%s';
     var REMOTE_PACKAGE_NAME = '%s';
diff --git a/tools/js_optimizer.py b/tools/js_optimizer.py
index d6f8921c..dcc9cd5f 100644
--- a/tools/js_optimizer.py
+++ b/tools/js_optimizer.py
@@ -29,13 +29,13 @@ class Minifier:
     during registerize perform minification of locals.
   '''
 
-  def __init__(self, js, js_engine):
+  def __init__(self, js, js_engine, MAX_NAMES):
     self.js = js
     self.js_engine = js_engine
+    MAX_NAMES = min(MAX_NAMES, 120000)
 
     # Create list of valid short names
-    MAX_NAMES = 80000
 
     INVALID_2 = set(['do', 'if', 'in'])
     INVALID_3 = set(['for', 'new', 'try', 'var', 'env', 'let'])
@@ -56,7 +56,6 @@ class Minifier:
           if len(self.names) >= MAX_NAMES: break
           curr = a + b + c
           if curr not in INVALID_3: self.names.append(curr)
-    #print >> sys.stderr, self.names
 
   def minify_shell(self, shell, minify_whitespace, source_map=False):
     #print >> sys.stderr, "MINIFY SHELL 1111111111", shell, "\n222222222222222"
@@ -187,7 +186,8 @@ EMSCRIPTEN_FUNCS();
''' + js[end_funcs + len(end_funcs_marker):end_asm + len(end_asm_marker)]
   js = js[start_funcs + len(start_funcs_marker):end_funcs]
 
-  minifier = Minifier(js, js_engine)
+  # we assume there is a maximum of one new name per line
+  minifier = Minifier(js, js_engine, js.count('\n') + asm_shell.count('\n'))
   asm_shell_pre, asm_shell_post = minifier.minify_shell(asm_shell, 'minifyWhitespace' in passes, source_map).split('EMSCRIPTEN_FUNCS();');
   asm_shell_post = asm_shell_post.replace('});', '})');
   pre += asm_shell_pre + '\n' + start_funcs_marker
diff --git a/tools/shared.py b/tools/shared.py
index 6ba6ef50..252e3844 100644
--- a/tools/shared.py
+++ b/tools/shared.py
@@ -284,7 +284,7 @@ def check_node_version():
   try:
     node = listify(NODE_JS)
     actual = Popen(node + ['--version'], stdout=PIPE).communicate()[0].strip()
-    version = tuple(map(int, actual.replace('v', '').split('.')))
+    version = tuple(map(int, actual.replace('v', '').replace('-pre', '').split('.')))
     if version >= EXPECTED_NODE_VERSION:
       return True
     logging.warning('node version appears too old (seeing "%s", expected "%s")' % (actual, 'v' + ('.'.join(map(str, EXPECTED_NODE_VERSION)))))
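
Note on the file_packager.py change: the new variable is named with a double negative, so it is easy to misread. no_heap_copy defaults to True, which means the downloaded package IS copied into the Emscripten HEAP (the mmap()-friendly default); passing --no-heap-copy flips it to False, so the XHR blob is reused directly (the fread()-friendly, lower-memory path). A minimal sketch of that flag handling, with a hypothetical helper name that is not part of the patch:

def vfs_strategy(argv):
  # mirrors the patch's default: True = copy the downloaded package into the HEAP
  no_heap_copy = True
  for arg in argv:
    if arg == '--no-heap-copy':
      no_heap_copy = False  # keep the XHR blob in a separate typed array instead
  return 'copy into HEAP (mmap()-friendly)' if no_heap_copy else 'separate typed array (fread()-friendly)'

assert vfs_strategy(['--preload', 'assets']) == 'copy into HEAP (mmap()-friendly)'
assert vfs_strategy(['--no-heap-copy']) == 'separate typed array (fread()-friendly)'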
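
Note on the js_optimizer.py change: instead of always generating a fixed pool of 80000 short names, the caller now passes js.count('\n') + asm_shell.count('\n') on the assumption of at most one new minified name per line, and the constructor caps that at 120000. A sketch of the sizing rule under that assumption (the helper function is illustrative, not part of the patch):

def estimate_max_names(js, asm_shell, cap=120000):
  # assume at most one new minified name is introduced per line of input
  needed = js.count('\n') + asm_shell.count('\n')
  return min(needed, cap)

assert estimate_max_names('f();\ng();\n', 'shell\n') == 3
assert estimate_max_names('\n' * 200000, '') == 120000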
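
Note on the shared.py change: check_node_version() now tolerates pre-release node builds. A version string like "v0.11.3-pre" previously made int() fail on "3-pre", so the comparison against EXPECTED_NODE_VERSION could never succeed for those builds. A quick illustration (the version string is an example, not taken from the patch):

actual = 'v0.11.3-pre'  # example `node --version` output
version = tuple(map(int, actual.replace('v', '').replace('-pre', '').split('.')))
assert version == (0, 11, 3)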