-rw-r--r--  .gitignore                                    1
-rwxr-xr-x  emscripten.py                               174
-rw-r--r--  src/compiler.js                               2
-rw-r--r--  src/relooper/test.txt                         4
-rw-r--r--  src/relooper/test2.txt                       15
-rw-r--r--  src/relooper/test3.txt                       38
-rw-r--r--  src/relooper/test4.txt                       21
-rw-r--r--  src/relooper/test6.txt                       15
-rw-r--r--  src/relooper/test_debug.txt                  15
-rw-r--r--  src/relooper/test_fuzz1.txt                  13
-rw-r--r--  src/relooper/test_fuzz5.txt                  27
-rw-r--r--  src/relooper/test_inf.txt                   651
-rw-r--r--  src/settings.js                               2
-rwxr-xr-x  tests/runner.py                               2
-rw-r--r--  tools/cache.py                              194
l---------  tools/eliminator/node_modules/.bin/cake       1
l---------  tools/eliminator/node_modules/.bin/coffee     1
-rw-r--r--  tools/file_packager.py                        2
-rw-r--r--  tools/js_optimizer.py                         7
-rw-r--r--  tools/jsrun.py                               27
-rw-r--r--  tools/shared.py                             340
-rw-r--r--  tools/tempfiles.py                           40
22 files changed, 856 insertions, 736 deletions
diff --git a/.gitignore b/.gitignore
index 31814a09..843b21b1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,3 +10,4 @@ src/relooper.js.raw.js
src/relooper/*.o
src/relooper/*.out
+tests/fake/
\ No newline at end of file
diff --git a/emscripten.py b/emscripten.py
index 1fc5f190..0b9244c2 100755
--- a/emscripten.py
+++ b/emscripten.py
@@ -9,21 +9,9 @@ header files (so that the JS compiler can see the constants in those
headers, for the libc implementation in JS).
'''
-import os, sys, json, optparse, subprocess, re, time, multiprocessing
+import os, sys, json, optparse, subprocess, re, time, multiprocessing, functools
-if not os.environ.get('EMSCRIPTEN_SUPPRESS_USAGE_WARNING'):
- print >> sys.stderr, '''
-==============================================================
-WARNING: You should normally never use this! Use emcc instead.
-==============================================================
- '''
-
-from tools import shared
-
-DEBUG = os.environ.get('EMCC_DEBUG')
-if DEBUG == "0":
- DEBUG = None
-DEBUG_CACHE = DEBUG and "cache" in DEBUG
+from tools import jsrun, cache as cache_module, tempfiles
__rootpath__ = os.path.abspath(os.path.dirname(__file__))
def path_from_root(*pathelems):
@@ -32,11 +20,6 @@ def path_from_root(*pathelems):
"""
return os.path.join(__rootpath__, *pathelems)
-temp_files = shared.TempFiles()
-
-compiler_engine = None
-jcache = False
-
def scan(ll, settings):
# blockaddress(@main, %23)
blockaddrs = []
@@ -50,16 +33,20 @@ NUM_CHUNKS_PER_CORE = 1.25
MIN_CHUNK_SIZE = 1024*1024
MAX_CHUNK_SIZE = float(os.environ.get('EMSCRIPT_MAX_CHUNK_SIZE') or 'inf') # configuring this is just for debugging purposes
-def process_funcs(args):
- i, funcs, meta, settings_file, compiler, forwarded_file, libraries = args
+def process_funcs((i, funcs, meta, settings_file, compiler, forwarded_file, libraries, compiler_engine, temp_files)):
ll = ''.join(funcs) + '\n' + meta
funcs_file = temp_files.get('.func_%d.ll' % i).name
open(funcs_file, 'w').write(ll)
- out = shared.run_js(compiler, compiler_engine, [settings_file, funcs_file, 'funcs', forwarded_file] + libraries, stdout=subprocess.PIPE, cwd=path_from_root('src'))
- shared.try_delete(funcs_file)
+ out = jsrun.run_js(
+ compiler,
+ engine=compiler_engine,
+ args=[settings_file, funcs_file, 'funcs', forwarded_file] + libraries,
+ stdout=subprocess.PIPE)
+ tempfiles.try_delete(funcs_file)
return out
-def emscript(infile, settings, outfile, libraries=[]):
+def emscript(infile, settings, outfile, libraries=[], compiler_engine=None,
+ jcache=None, temp_files=None, DEBUG=None, DEBUG_CACHE=None):
"""Runs the emscripten LLVM-to-JS compiler. We parallelize as much as possible
Args:
@@ -78,7 +65,7 @@ def emscript(infile, settings, outfile, libraries=[]):
if DEBUG: print >> sys.stderr, 'emscript: ll=>js'
- if jcache: shared.JCache.ensure()
+ if jcache: jcache.ensure()
# Pre-scan ll and alter settings as necessary
if DEBUG: t = time.time()
@@ -147,13 +134,13 @@ def emscript(infile, settings, outfile, libraries=[]):
out = None
if jcache:
keys = [pre_input, settings_text, ','.join(libraries)]
- shortkey = shared.JCache.get_shortkey(keys)
+ shortkey = jcache.get_shortkey(keys)
if DEBUG_CACHE: print >>sys.stderr, 'shortkey', shortkey
- out = shared.JCache.get(shortkey, keys)
+ out = jcache.get(shortkey, keys)
if DEBUG_CACHE and not out:
- dfpath = os.path.join(shared.TEMP_DIR, "ems_" + shortkey)
+ dfpath = os.path.join(configuration.TEMP_DIR, "ems_" + shortkey)
dfp = open(dfpath, 'w')
dfp.write(pre_input);
dfp.write("\n\n========================== settings_text\n\n");
@@ -166,10 +153,10 @@ def emscript(infile, settings, outfile, libraries=[]):
if out and DEBUG: print >> sys.stderr, ' loading pre from jcache'
if not out:
open(pre_file, 'w').write(pre_input)
- out = shared.run_js(compiler, shared.COMPILER_ENGINE, [settings_file, pre_file, 'pre'] + libraries, stdout=subprocess.PIPE, cwd=path_from_root('src'))
+ out = jsrun.run_js(compiler, compiler_engine, [settings_file, pre_file, 'pre'] + libraries, stdout=subprocess.PIPE)
if jcache:
if DEBUG: print >> sys.stderr, ' saving pre to jcache'
- shared.JCache.set(shortkey, keys, out)
+ jcache.set(shortkey, keys, out)
pre, forwarded_data = out.split('//FORWARDED_DATA:')
forwarded_file = temp_files.get('.json').name
open(forwarded_file, 'w').write(forwarded_data)
@@ -194,15 +181,17 @@ def emscript(infile, settings, outfile, libraries=[]):
settings['EXPORTED_FUNCTIONS'] = forwarded_json['EXPORTED_FUNCTIONS']
save_settings()
- chunks = shared.JCache.chunkify(funcs, chunk_size, 'emscript_files' if jcache else None)
+ chunks = cache_module.chunkify(
+ funcs, chunk_size,
+ jcache.get_cachename('emscript_files') if jcache else None)
if jcache:
# load chunks from cache where we can # TODO: ignore small chunks
cached_outputs = []
def load_from_cache(chunk):
keys = [settings_text, forwarded_data, chunk]
- shortkey = shared.JCache.get_shortkey(keys) # TODO: share shortkeys with later code
- out = shared.JCache.get(shortkey, keys) # this is relatively expensive (pickling?)
+ shortkey = jcache.get_shortkey(keys) # TODO: share shortkeys with later code
+ out = jcache.get(shortkey, keys) # this is relatively expensive (pickling?)
if out:
cached_outputs.append(out)
return False
@@ -215,12 +204,16 @@ def emscript(infile, settings, outfile, libraries=[]):
# TODO: minimize size of forwarded data from funcs to what we actually need
- if cores == 1 and total_ll_size < MAX_CHUNK_SIZE: assert len(chunks) == 1, 'no point in splitting up without multiple cores'
+ if cores == 1 and total_ll_size < MAX_CHUNK_SIZE:
+ assert len(chunks) == 1, 'no point in splitting up without multiple cores'
if len(chunks) > 0:
if DEBUG: print >> sys.stderr, ' emscript: phase 2 working on %d chunks %s (intended chunk size: %.2f MB, meta: %.2f MB, forwarded: %.2f MB, total: %.2f MB)' % (len(chunks), ('using %d cores' % cores) if len(chunks) > 1 else '', chunk_size/(1024*1024.), len(meta)/(1024*1024.), len(forwarded_data)/(1024*1024.), total_ll_size/(1024*1024.))
- commands = [(i, chunks[i], meta, settings_file, compiler, forwarded_file, libraries) for i in range(len(chunks))]
+ commands = [
+ (i, chunk, meta, settings_file, compiler, forwarded_file, libraries, compiler_engine, temp_files)
+ for i, chunk in enumerate(chunks)
+ ]
if len(chunks) > 1:
pool = multiprocessing.Pool(processes=cores)
@@ -235,8 +228,8 @@ def emscript(infile, settings, outfile, libraries=[]):
for i in range(len(chunks)):
chunk = chunks[i]
keys = [settings_text, forwarded_data, chunk]
- shortkey = shared.JCache.get_shortkey(keys)
- shared.JCache.set(shortkey, keys, outputs[i])
+ shortkey = jcache.get_shortkey(keys)
+ jcache.set(shortkey, keys, outputs[i])
if out and DEBUG and len(chunks) > 0: print >> sys.stderr, ' saving %d funcchunks to jcache' % len(chunks)
if jcache: outputs += cached_outputs # TODO: preserve order
@@ -311,7 +304,7 @@ def emscript(infile, settings, outfile, libraries=[]):
if DEBUG: t = time.time()
post_file = temp_files.get('.post.ll').name
open(post_file, 'w').write('\n') # no input, just processing of forwarded data
- out = shared.run_js(compiler, shared.COMPILER_ENGINE, [settings_file, post_file, 'post', forwarded_file] + libraries, stdout=subprocess.PIPE, cwd=path_from_root('src'))
+ out = jsrun.run_js(compiler, compiler_engine, [settings_file, post_file, 'post', forwarded_file] + libraries, stdout=subprocess.PIPE)
post, last_forwarded_data = out.split('//FORWARDED_DATA:') # if this fails, perhaps the process failed prior to printing forwarded data?
last_forwarded_json = json.loads(last_forwarded_data)
@@ -495,8 +488,7 @@ Runtime.stackRestore = function(top) { asm.stackRestore(top) };
outfile.close()
-
-def main(args):
+def main(args, compiler_engine, cache, jcache, relooper, temp_files, DEBUG, DEBUG_CACHE):
# Prepare settings for serialization to JSON.
settings = {}
for setting in args.settings:
@@ -570,16 +562,23 @@ def main(args):
libraries = args.libraries[0].split(',') if len(args.libraries) > 0 else []
# Compile the assembly to Javascript.
- if settings.get('RELOOP'): shared.Building.ensure_relooper()
-
- emscript(args.infile, settings, args.outfile, libraries)
-
-if __name__ == '__main__':
+ if settings.get('RELOOP'):
+ if not relooper:
+ relooper = cache.get_path('relooper.js')
+ settings.setdefault('RELOOPER', relooper)
+ if not os.path.exists(relooper):
+ from tools import shared
+ shared.Building.ensure_relooper(relooper)
+
+ emscript(args.infile, settings, args.outfile, libraries, compiler_engine=compiler_engine,
+ jcache=jcache, temp_files=temp_files, DEBUG=DEBUG, DEBUG_CACHE=DEBUG_CACHE)
+
+def _main(environ):
parser = optparse.OptionParser(
- usage='usage: %prog [-h] [-H HEADERS] [-o OUTFILE] [-c COMPILER_ENGINE] [-s FOO=BAR]* infile',
- description=('You should normally never use this! Use emcc instead. '
- 'This is a wrapper around the JS compiler, converting .ll to .js.'),
- epilog='')
+ usage='usage: %prog [-h] [-H HEADERS] [-o OUTFILE] [-c COMPILER_ENGINE] [-s FOO=BAR]* infile',
+ description=('You should normally never use this! Use emcc instead. '
+ 'This is a wrapper around the JS compiler, converting .ll to .js.'),
+ epilog='')
parser.add_option('-H', '--headers',
default=[],
action='append',
@@ -592,8 +591,11 @@ if __name__ == '__main__':
default=sys.stdout,
help='Where to write the output; defaults to stdout.')
parser.add_option('-c', '--compiler',
- default=shared.COMPILER_ENGINE,
+ default=None,
help='Which JS engine to use to run the compiler; defaults to the one in ~/.emscripten.')
+ parser.add_option('--relooper',
+ default=None,
+ help='Which relooper file to use if RELOOP is enabled.')
parser.add_option('-s', '--setting',
dest='settings',
default=[],
@@ -605,16 +607,82 @@ if __name__ == '__main__':
action='store_true',
default=False,
help=('Enable jcache (ccache-like caching of compilation results, for faster incremental builds).'))
+ parser.add_option('-T', '--temp-dir',
+ default=None,
+ help=('Where to create temporary files.'))
+ parser.add_option('-v', '--verbose',
+ action='store_true',
+ dest='verbose',
+ help='Displays debug output')
+ parser.add_option('-q', '--quiet',
+ action='store_false',
+ dest='verbose',
+ help='Hides debug output')
+ parser.add_option('--suppressUsageWarning',
+ action='store_true',
+ default=environ.get('EMSCRIPTEN_SUPPRESS_USAGE_WARNING'),
+ help=('Suppress usage warning'))
# Convert to the same format that argparse would have produced.
keywords, positional = parser.parse_args()
+
+ if not keywords.suppressUsageWarning:
+ print >> sys.stderr, '''
+==============================================================
+WARNING: You should normally never use this! Use emcc instead.
+==============================================================
+ '''
+
if len(positional) != 1:
raise RuntimeError('Must provide exactly one positional argument.')
keywords.infile = os.path.abspath(positional[0])
if isinstance(keywords.outfile, basestring):
keywords.outfile = open(keywords.outfile, 'w')
- compiler_engine = keywords.compiler
- jcache = keywords.jcache
- temp_files.run_and_clean(lambda: main(keywords))
+ if keywords.relooper:
+ relooper = os.path.abspath(keywords.relooper)
+ else:
+ relooper = None # use the cache
+
+ def get_configuration():
+ if hasattr(get_configuration, 'configuration'):
+ return get_configuration.configuration
+
+ from tools import shared
+ configuration = shared.Configuration(environ=os.environ)
+ get_configuration.configuration = configuration
+ return configuration
+
+ if keywords.temp_dir is None:
+ temp_files = get_configuration().get_temp_files()
+ else:
+ temp_dir = os.path.abspath(keywords.temp_dir)
+ if not os.path.exists(temp_dir):
+ os.makedirs(temp_dir)
+ temp_files = tempfiles.TempFiles(temp_dir)
+
+ if keywords.compiler is None:
+ from tools import shared
+ keywords.compiler = shared.COMPILER_ENGINE
+
+ if keywords.verbose is None:
+ DEBUG = get_configuration().DEBUG
+ DEBUG_CACHE = get_configuration().DEBUG_CACHE
+ else:
+ DEBUG = keywords.verbose
+ DEBUG_CACHE = keywords.verbose
+
+ cache = cache_module.Cache()
+ temp_files.run_and_clean(lambda: main(
+ keywords,
+ compiler_engine=keywords.compiler,
+ cache=cache,
+ jcache=cache_module.JCache(cache) if keywords.jcache else None,
+ relooper=relooper,
+ temp_files=temp_files,
+ DEBUG=DEBUG,
+ DEBUG_CACHE=DEBUG_CACHE,
+ ))
+if __name__ == '__main__':
+ _main(environ=os.environ)
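(Not part of the patch: a minimal sketch of driving the refactored emscript() entry point now that the compiler engine, jcache, temp files and debug flags are injected explicitly instead of read from module globals. The paths and the settings dict below are placeholders.)

    import os
    import emscripten
    from tools import shared, cache as cache_module

    configuration = shared.Configuration(environ=os.environ)
    temp_files = configuration.get_temp_files()
    cache = cache_module.Cache()

    outfile = open('/tmp/out.js', 'w')           # placeholder output path
    emscripten.emscript('/tmp/input.ll',         # placeholder LLVM assembly input
                        {'RELOOP': 0},           # placeholder settings dict
                        outfile,
                        libraries=[],
                        compiler_engine=shared.COMPILER_ENGINE,
                        jcache=cache_module.JCache(cache),
                        temp_files=temp_files,
                        DEBUG=configuration.DEBUG,
                        DEBUG_CACHE=configuration.DEBUG_CACHE)
    outfile.close()
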
diff --git a/src/compiler.js b/src/compiler.js
index 1cd09c30..3047daf1 100644
--- a/src/compiler.js
+++ b/src/compiler.js
@@ -199,7 +199,7 @@ load('parseTools.js');
load('intertyper.js');
load('analyzer.js');
load('jsifier.js');
-if (RELOOP) load('relooper.js')
+if (RELOOP) load(RELOOPER)
globalEval(processMacros(preprocess(read('runtime.js'))));
Runtime.QUANTUM_SIZE = QUANTUM_SIZE;
diff --git a/src/relooper/test.txt b/src/relooper/test.txt
index 12d0ef39..b7c8794d 100644
--- a/src/relooper/test.txt
+++ b/src/relooper/test.txt
@@ -54,7 +54,7 @@ while(1) {
// code 2
if (!($2)) {
var $x_1 = $x_0;
- label = 18;
+ label = 19;
break;
}
// code 3
@@ -64,7 +64,7 @@ while(1) {
var $i_0 = $7;var $x_0 = $5;
}
}
-if (label == 18) {
+if (label == 19) {
// code 7
}
// code 4
diff --git a/src/relooper/test2.txt b/src/relooper/test2.txt
index a847e806..c77ce491 100644
--- a/src/relooper/test2.txt
+++ b/src/relooper/test2.txt
@@ -1,11 +1,12 @@
ep
-L1:
-if (ep -> LBB1) {
- LBB1
- if (!(LBB1 -> LBB2)) {
- break L1;
+do {
+ if (ep -> LBB1) {
+ LBB1
+ if (!(LBB1 -> LBB2)) {
+ break;
+ }
+ LBB2
}
- LBB2
-}
+} while(0);
LBB3
diff --git a/src/relooper/test3.txt b/src/relooper/test3.txt
index 7d06f06a..696542ef 100644
--- a/src/relooper/test3.txt
+++ b/src/relooper/test3.txt
@@ -1,25 +1,27 @@
ep
-L1:
-if (ep -> LBB1) {
- LBB1
- if (!(LBB1 -> LBB2)) {
- break L1;
+do {
+ if (ep -> LBB1) {
+ LBB1
+ if (!(LBB1 -> LBB2)) {
+ break;
+ }
+ LBB2
}
- LBB2
-}
+} while(0);
LBB3
-L5:
-if (LBB3 -> LBB4) {
- LBB4
- if (!(LBB4 -> LBB5)) {
- break L5;
- }
- while(1) {
- LBB5
- if (LBB5 -> LBB6) {
- break L5;
+L5: do {
+ if (LBB3 -> LBB4) {
+ LBB4
+ if (!(LBB4 -> LBB5)) {
+ break;
+ }
+ while(1) {
+ LBB5
+ if (LBB5 -> LBB6) {
+ break L5;
+ }
}
}
-}
+} while(0);
LBB6
diff --git a/src/relooper/test4.txt b/src/relooper/test4.txt
index 2ab3265a..f0bfb972 100644
--- a/src/relooper/test4.txt
+++ b/src/relooper/test4.txt
@@ -1,16 +1,17 @@
//19
-L1:
-if ( 1 ) {
- //20
- if (!( 1 )) {
+do {
+ if ( 1 ) {
+ //20
+ if (!( 1 )) {
+ label = 4;
+ break;
+ }
+ //21
+ break;
+ } else {
label = 4;
- break L1;
}
- //21
- break L1;
-} else {
- label = 4;
-}
+} while(0);
if (label == 4) {
//22
}
diff --git a/src/relooper/test6.txt b/src/relooper/test6.txt
index 0ec7e666..c5effd08 100644
--- a/src/relooper/test6.txt
+++ b/src/relooper/test6.txt
@@ -1,11 +1,12 @@
//0
-L1:
-if (check(0)) {
- //1
- if (!(check(1))) {
- break L1;
+do {
+ if (check(0)) {
+ //1
+ if (!(check(1))) {
+ break;
+ }
+ //2
}
- //2
-}
+} while(0);
//3
diff --git a/src/relooper/test_debug.txt b/src/relooper/test_debug.txt
index 02377fb7..1c7d0508 100644
--- a/src/relooper/test_debug.txt
+++ b/src/relooper/test_debug.txt
@@ -83,13 +83,14 @@ int main() {
// === Optimizing shapes ===
// Fusing Multiple to Simple
ep
-L1:
-if (ep -> LBB1) {
- LBB1
- if (!(LBB1 -> LBB2)) {
- break L1;
+do {
+ if (ep -> LBB1) {
+ LBB1
+ if (!(LBB1 -> LBB2)) {
+ break;
+ }
+ LBB2
}
- LBB2
-}
+} while(0);
LBB3
diff --git a/src/relooper/test_fuzz1.txt b/src/relooper/test_fuzz1.txt
index 09edb594..5122257e 100644
--- a/src/relooper/test_fuzz1.txt
+++ b/src/relooper/test_fuzz1.txt
@@ -3,12 +3,13 @@
print('entry'); var label; var state; var decisions = [4, 1, 7, 2, 6, 6, 8]; var index = 0; function check() { if (index == decisions.length) throw 'HALT'; return decisions[index++] }
print(5); state = check();
print(6); state = check();
-L3:
-if (state == 7) {
- print(7); state = check();
- label = 3;
- break L3;
-}
+do {
+ if (state == 7) {
+ print(7); state = check();
+ label = 3;
+ break;
+ }
+} while(0);
L5: while(1) {
if (label == 3) {
label = 0;
diff --git a/src/relooper/test_fuzz5.txt b/src/relooper/test_fuzz5.txt
index 7c795d53..9548205c 100644
--- a/src/relooper/test_fuzz5.txt
+++ b/src/relooper/test_fuzz5.txt
@@ -3,21 +3,22 @@
print('entry'); var label; var state; var decisions = [133, 98, 134, 143, 162, 187, 130, 87, 91, 49, 102, 47, 9, 132, 179, 176, 157, 25, 64, 161, 57, 107, 16, 167, 185, 45, 191, 180, 23, 131]; var index = 0; function check() { if (index == decisions.length) throw 'HALT'; return decisions[index++] }
L1: while(1) {
print(7); state = check();
- L3:
- if (state % 3 == 1) {
- label = 3;
- } else if (state % 3 == 0) {
- print(8); state = check();
- if (state % 2 == 0) {
- label = 5;
- break L3;
+ do {
+ if (state % 3 == 1) {
+ label = 3;
+ } else if (state % 3 == 0) {
+ print(8); state = check();
+ if (state % 2 == 0) {
+ label = 5;
+ break;
+ } else {
+ label = 7;
+ break;
+ }
} else {
- label = 7;
- break L3;
+ break L1;
}
- } else {
- break L1;
- }
+ } while(0);
while(1) {
if (label == 3) {
label = 0;
diff --git a/src/relooper/test_inf.txt b/src/relooper/test_inf.txt
index 3e292433..379d2083 100644
--- a/src/relooper/test_inf.txt
+++ b/src/relooper/test_inf.txt
@@ -5,34 +5,35 @@ if (uint(i4) >= uint(i5)) {
code 1
}
code 3
-L5:
-if (!(i2 == 0)) {
- code 4
- while(1) {
- code 5
- if (uint(i6) >= uint(i7)) {
- code 7
- } else {
- code 6
- }
- code 8
- if (uint(i6) >= uint(i7)) {
- code 10
- } else {
- code 9
- }
- code 11
- if (uint(i5) >= uint(i6)) {
- code 13
- } else {
- code 12
- }
- code 14
- if (!(i2 != 0)) {
- break L5;
+L5: do {
+ if (!(i2 == 0)) {
+ code 4
+ while(1) {
+ code 5
+ if (uint(i6) >= uint(i7)) {
+ code 7
+ } else {
+ code 6
+ }
+ code 8
+ if (uint(i6) >= uint(i7)) {
+ code 10
+ } else {
+ code 9
+ }
+ code 11
+ if (uint(i5) >= uint(i6)) {
+ code 13
+ } else {
+ code 12
+ }
+ code 14
+ if (!(i2 != 0)) {
+ break L5;
+ }
}
}
-}
+} while(0);
code 15
if (uint(i4) >= uint(i5)) {
code 17
@@ -40,178 +41,179 @@ if (uint(i4) >= uint(i5)) {
code 16
}
code 18
-L26:
-if (!(i2 == 0)) {
- code 19
- while(1) {
- code 20
- if (uint(i5) >= uint(i6)) {
- code 22
- } else {
- code 21
- }
- code 23
- if (uint(i5) >= uint(i6)) {
- code 25
- } else {
- code 24
- }
- code 26
- if (uint(i5) >= uint(i6)) {
- code 28
- } else {
- code 27
- }
- code 29
- if (uint(i5) >= uint(i6)) {
- code 31
- } else {
- code 30
- }
- code 32
- if (uint(i5) >= uint(i6)) {
- code 34
- } else {
- code 33
- }
- code 35
- if (uint(i5) >= uint(i6)) {
- code 37
- } else {
- code 36
- }
- code 38
- if (uint(i5) >= uint(i6)) {
- code 40
- } else {
- code 39
- }
- code 41
- if (uint(i5) >= uint(i6)) {
- code 43
- } else {
- code 42
- }
- code 44
- if (uint(i5) >= uint(i6)) {
- code 46
- } else {
- code 45
- }
- code 47
- if (uint(i5) >= uint(i6)) {
- code 49
- } else {
- code 48
- }
- code 50
- if (uint(i5) >= uint(i6)) {
- code 52
- } else {
- code 51
- }
- code 53
- if (uint(i5) >= uint(i6)) {
- code 55
- } else {
- code 54
- }
- code 56
- if (uint(i5) >= uint(i6)) {
- code 58
- } else {
- code 57
- }
- code 59
- if (uint(i5) >= uint(i6)) {
- code 61
- } else {
- code 60
- }
- code 62
- if (uint(i5) >= uint(i6)) {
- code 64
- } else {
- code 63
- }
- code 65
- if (uint(i5) >= uint(i6)) {
- code 67
- } else {
- code 66
- }
- code 68
- if (uint(i5) >= uint(i6)) {
- code 70
- } else {
- code 69
- }
- code 71
- if (uint(i5) >= uint(i6)) {
- code 73
- } else {
- code 72
- }
- code 74
- if (uint(i5) >= uint(i6)) {
- code 76
- } else {
- code 75
- }
- code 77
- if (uint(i5) >= uint(i6)) {
- code 79
- } else {
- code 78
- }
- code 80
- if (uint(i5) >= uint(i6)) {
- code 82
- } else {
- code 81
- }
- code 83
- if (uint(i5) >= uint(i6)) {
- code 85
- } else {
- code 84
- }
- code 86
- if (uint(i5) >= uint(i6)) {
- code 88
- } else {
- code 87
- }
- code 89
- if (uint(i5) >= uint(i6)) {
- code 91
- } else {
- code 90
- }
- code 92
- if (uint(i5) >= uint(i6)) {
- code 94
- } else {
- code 93
- }
- code 95
- if (uint(i5) >= uint(i6)) {
- code 97
- } else {
- code 96
- }
- code 98
- if (uint(i5) >= uint(i6)) {
- code 100
- } else {
- code 99
- }
- code 101
- if (!(i2 != 0)) {
- break L26;
+L26: do {
+ if (!(i2 == 0)) {
+ code 19
+ while(1) {
+ code 20
+ if (uint(i5) >= uint(i6)) {
+ code 22
+ } else {
+ code 21
+ }
+ code 23
+ if (uint(i5) >= uint(i6)) {
+ code 25
+ } else {
+ code 24
+ }
+ code 26
+ if (uint(i5) >= uint(i6)) {
+ code 28
+ } else {
+ code 27
+ }
+ code 29
+ if (uint(i5) >= uint(i6)) {
+ code 31
+ } else {
+ code 30
+ }
+ code 32
+ if (uint(i5) >= uint(i6)) {
+ code 34
+ } else {
+ code 33
+ }
+ code 35
+ if (uint(i5) >= uint(i6)) {
+ code 37
+ } else {
+ code 36
+ }
+ code 38
+ if (uint(i5) >= uint(i6)) {
+ code 40
+ } else {
+ code 39
+ }
+ code 41
+ if (uint(i5) >= uint(i6)) {
+ code 43
+ } else {
+ code 42
+ }
+ code 44
+ if (uint(i5) >= uint(i6)) {
+ code 46
+ } else {
+ code 45
+ }
+ code 47
+ if (uint(i5) >= uint(i6)) {
+ code 49
+ } else {
+ code 48
+ }
+ code 50
+ if (uint(i5) >= uint(i6)) {
+ code 52
+ } else {
+ code 51
+ }
+ code 53
+ if (uint(i5) >= uint(i6)) {
+ code 55
+ } else {
+ code 54
+ }
+ code 56
+ if (uint(i5) >= uint(i6)) {
+ code 58
+ } else {
+ code 57
+ }
+ code 59
+ if (uint(i5) >= uint(i6)) {
+ code 61
+ } else {
+ code 60
+ }
+ code 62
+ if (uint(i5) >= uint(i6)) {
+ code 64
+ } else {
+ code 63
+ }
+ code 65
+ if (uint(i5) >= uint(i6)) {
+ code 67
+ } else {
+ code 66
+ }
+ code 68
+ if (uint(i5) >= uint(i6)) {
+ code 70
+ } else {
+ code 69
+ }
+ code 71
+ if (uint(i5) >= uint(i6)) {
+ code 73
+ } else {
+ code 72
+ }
+ code 74
+ if (uint(i5) >= uint(i6)) {
+ code 76
+ } else {
+ code 75
+ }
+ code 77
+ if (uint(i5) >= uint(i6)) {
+ code 79
+ } else {
+ code 78
+ }
+ code 80
+ if (uint(i5) >= uint(i6)) {
+ code 82
+ } else {
+ code 81
+ }
+ code 83
+ if (uint(i5) >= uint(i6)) {
+ code 85
+ } else {
+ code 84
+ }
+ code 86
+ if (uint(i5) >= uint(i6)) {
+ code 88
+ } else {
+ code 87
+ }
+ code 89
+ if (uint(i5) >= uint(i6)) {
+ code 91
+ } else {
+ code 90
+ }
+ code 92
+ if (uint(i5) >= uint(i6)) {
+ code 94
+ } else {
+ code 93
+ }
+ code 95
+ if (uint(i5) >= uint(i6)) {
+ code 97
+ } else {
+ code 96
+ }
+ code 98
+ if (uint(i5) >= uint(i6)) {
+ code 100
+ } else {
+ code 99
+ }
+ code 101
+ if (!(i2 != 0)) {
+ break L26;
+ }
}
}
-}
+} while(0);
code 102
if (uint(i4) >= uint(i5)) {
code 104
@@ -219,136 +221,137 @@ if (uint(i4) >= uint(i5)) {
code 103
}
code 105
-L143:
-if (!(i2 == 0)) {
- code 106
- while(1) {
- code 107
- if (uint(i5) >= uint(i6)) {
- code 109
- } else {
- code 108
- }
- code 110
- if (uint(i5) >= uint(i6)) {
- code 112
- } else {
- code 111
- }
- code 113
- if (uint(i5) >= uint(i6)) {
- code 115
- } else {
- code 114
- }
- code 116
- if (uint(i5) >= uint(i6)) {
- code 118
- } else {
- code 117
- }
- code 119
- if (uint(i5) >= uint(i6)) {
- code 121
- } else {
- code 120
- }
- code 122
- if (uint(i5) >= uint(i6)) {
- code 124
- } else {
- code 123
- }
- code 125
- if (uint(i5) >= uint(i6)) {
- code 127
- } else {
- code 126
- }
- code 128
- if (uint(i5) >= uint(i6)) {
- code 130
- } else {
- code 129
- }
- code 131
- if (uint(i5) >= uint(i6)) {
- code 133
- } else {
- code 132
- }
- code 134
- if (uint(i5) >= uint(i6)) {
- code 136
- } else {
- code 135
- }
- code 137
- if (uint(i5) >= uint(i6)) {
- code 139
- } else {
- code 138
- }
- code 140
- if (uint(i5) >= uint(i6)) {
- code 142
- } else {
- code 141
- }
- code 143
- if (uint(i5) >= uint(i6)) {
- code 145
- } else {
- code 144
- }
- code 146
- if (uint(i5) >= uint(i6)) {
- code 148
- } else {
- code 147
- }
- code 149
- if (uint(i5) >= uint(i6)) {
- code 151
- } else {
- code 150
- }
- code 152
- if (uint(i5) >= uint(i6)) {
- code 154
- } else {
- code 153
- }
- code 155
- if (uint(i5) >= uint(i6)) {
- code 157
- } else {
- code 156
- }
- code 158
- if (uint(i5) >= uint(i6)) {
- code 160
- } else {
- code 159
- }
- code 161
- if (uint(i5) >= uint(i6)) {
- code 163
- } else {
- code 162
- }
- code 164
- if (uint(i5) >= uint(i6)) {
- code 166
- } else {
- code 165
- }
- code 167
- if (!(i2 != 0)) {
- break L143;
+L143: do {
+ if (!(i2 == 0)) {
+ code 106
+ while(1) {
+ code 107
+ if (uint(i5) >= uint(i6)) {
+ code 109
+ } else {
+ code 108
+ }
+ code 110
+ if (uint(i5) >= uint(i6)) {
+ code 112
+ } else {
+ code 111
+ }
+ code 113
+ if (uint(i5) >= uint(i6)) {
+ code 115
+ } else {
+ code 114
+ }
+ code 116
+ if (uint(i5) >= uint(i6)) {
+ code 118
+ } else {
+ code 117
+ }
+ code 119
+ if (uint(i5) >= uint(i6)) {
+ code 121
+ } else {
+ code 120
+ }
+ code 122
+ if (uint(i5) >= uint(i6)) {
+ code 124
+ } else {
+ code 123
+ }
+ code 125
+ if (uint(i5) >= uint(i6)) {
+ code 127
+ } else {
+ code 126
+ }
+ code 128
+ if (uint(i5) >= uint(i6)) {
+ code 130
+ } else {
+ code 129
+ }
+ code 131
+ if (uint(i5) >= uint(i6)) {
+ code 133
+ } else {
+ code 132
+ }
+ code 134
+ if (uint(i5) >= uint(i6)) {
+ code 136
+ } else {
+ code 135
+ }
+ code 137
+ if (uint(i5) >= uint(i6)) {
+ code 139
+ } else {
+ code 138
+ }
+ code 140
+ if (uint(i5) >= uint(i6)) {
+ code 142
+ } else {
+ code 141
+ }
+ code 143
+ if (uint(i5) >= uint(i6)) {
+ code 145
+ } else {
+ code 144
+ }
+ code 146
+ if (uint(i5) >= uint(i6)) {
+ code 148
+ } else {
+ code 147
+ }
+ code 149
+ if (uint(i5) >= uint(i6)) {
+ code 151
+ } else {
+ code 150
+ }
+ code 152
+ if (uint(i5) >= uint(i6)) {
+ code 154
+ } else {
+ code 153
+ }
+ code 155
+ if (uint(i5) >= uint(i6)) {
+ code 157
+ } else {
+ code 156
+ }
+ code 158
+ if (uint(i5) >= uint(i6)) {
+ code 160
+ } else {
+ code 159
+ }
+ code 161
+ if (uint(i5) >= uint(i6)) {
+ code 163
+ } else {
+ code 162
+ }
+ code 164
+ if (uint(i5) >= uint(i6)) {
+ code 166
+ } else {
+ code 165
+ }
+ code 167
+ if (!(i2 != 0)) {
+ break L143;
+ }
}
}
-}
+} while(0);
code 168
if (uint(i4) >= uint(i5)) {
code 170
diff --git a/src/settings.js b/src/settings.js
index e260ed2a..1bfcf92a 100644
--- a/src/settings.js
+++ b/src/settings.js
@@ -58,6 +58,8 @@ var ALLOW_MEMORY_GROWTH = 0; // If false, we abort with an error if we try to al
// Code embetterments
var MICRO_OPTS = 1; // Various micro-optimizations, like nativizing variables
var RELOOP = 0; // Recreate js native loops from llvm data
+var RELOOPER = 'relooper.js'; // Loads the relooper from this path relative to compiler.js
+
var USE_TYPED_ARRAYS = 2; // Use typed arrays for the heap. See https://github.com/kripken/emscripten/wiki/Code-Generation-Modes/
// 0 means no typed arrays are used.
// 1 has two heaps, IHEAP (int32) and FHEAP (double),
diff --git a/tests/runner.py b/tests/runner.py
index c9c8ac9e..e631b025 100755
--- a/tests/runner.py
+++ b/tests/runner.py
@@ -11943,6 +11943,8 @@ fi
try_delete(CANONICAL_TEMP_DIR)
def test_relooper(self):
+ RELOOPER = Cache.get_path('relooper.js')
+
restore()
for phase in range(2): # 0: we wipe the relooper dir. 1: we have it, so should just update
if phase == 0: Cache.erase()
diff --git a/tools/cache.py b/tools/cache.py
new file mode 100644
index 00000000..e7908fba
--- /dev/null
+++ b/tools/cache.py
@@ -0,0 +1,194 @@
+import os.path, sys, shutil, hashlib, cPickle, zlib, time
+
+import tempfiles
+
+# Permanent cache for dlmalloc and stdlibc++
+class Cache:
+ def __init__(self, dirname=None, debug=False):
+ if dirname is None:
+ dirname = os.environ.get('EM_CACHE')
+ if not dirname:
+ dirname = os.path.expanduser(os.path.join('~', '.emscripten_cache'))
+ self.dirname = dirname
+ self.debug = debug
+
+ def ensure(self):
+ if not os.path.exists(self.dirname):
+ os.makedirs(self.dirname)
+
+ def erase(self):
+ tempfiles.try_delete(self.dirname)
+ try:
+ open(self.dirname + '__last_clear', 'w').write('last clear: ' + time.asctime() + '\n')
+ except Exception, e:
+ print >> sys.stderr, 'failed to save last clear time: ', e
+
+ def get_path(self, shortname):
+ return os.path.join(self.dirname, shortname)
+
+ # Request a cached file. If it isn't in the cache, it will be created with
+ # the given creator function
+ def get(self, shortname, creator, extension='.bc'):
+ if not shortname.endswith(extension): shortname += extension
+ cachename = os.path.join(self.dirname, shortname)
+ if os.path.exists(cachename):
+ return cachename
+ self.ensure()
+ shutil.copyfile(creator(), cachename)
+ return cachename
+
+# JS-specific cache. We cache the results of compilation and optimization,
+# so that in incremental builds we can just load from cache.
+# We cache reasonably-large-sized chunks
+class JCache:
+ def __init__(self, cache):
+ self.cache = cache
+ self.dirname = os.path.join(cache.dirname, 'jcache')
+ self.debug = cache.debug
+
+ def ensure(self):
+ self.cache.ensure()
+ if not os.path.exists(self.dirname):
+ os.makedirs(self.dirname)
+
+ def get_shortkey(self, keys):
+ if type(keys) not in [list, tuple]:
+ keys = [keys]
+ ret = ''
+ for key in keys:
+ assert type(key) == str
+ ret += hashlib.md5(key).hexdigest()
+ return ret
+
+ def get_cachename(self, shortkey):
+ return os.path.join(self.dirname, shortkey)
+
+ # Returns a cached value, if it exists. Make sure the full key matches
+ def get(self, shortkey, keys):
+ if self.debug: print >> sys.stderr, 'jcache get?', shortkey
+ cachename = self.get_cachename(shortkey)
+ if not os.path.exists(cachename):
+ if self.debug: print >> sys.stderr, 'jcache none at all'
+ return
+ try:
+ data = cPickle.loads(zlib.decompress(open(cachename).read()))
+ except Exception, e:
+ if DEBUG_CACHE: print >> sys.stderr, 'jcache decompress/unpickle error:', e
+ if len(data) != 2:
+ if self.debug: print >> sys.stderr, 'jcache error in get'
+ return
+ oldkeys = data[0]
+ if len(oldkeys) != len(keys):
+ if self.debug: print >> sys.stderr, 'jcache collision (a)'
+ return
+ for i in range(len(oldkeys)):
+ if oldkeys[i] != keys[i]:
+ if self.debug: print >> sys.stderr, 'jcache collision (b)'
+ return
+ if self.debug: print >> sys.stderr, 'jcache win'
+ return data[1]
+
+ # Sets the cached value for a key (from get_key)
+ def set(self, shortkey, keys, value):
+ cachename = self.get_cachename(shortkey)
+ try:
+ f = open(cachename, 'w')
+ f.write(zlib.compress(cPickle.dumps([keys, value])))
+ f.close()
+ except Exception, e:
+ if DEBUG_CACHE: print >> sys.stderr, 'jcache compress/pickle error:', e
+ return
+ # for i in range(len(keys)):
+ # open(cachename + '.key' + str(i), 'w').write(keys[i])
+ # open(cachename + '.value', 'w').write(value)
+
+# Given a set of functions of form (ident, text), and a preferred chunk size,
+# generates a set of chunks for parallel processing and caching.
+# It is very important to generate similar chunks in incremental builds, in
+# order to maximize the chance of cache hits. To achieve that, we save the
+# chunking used in the previous compilation of this phase, and we try to
+# generate the same chunks, barring big differences in function sizes that
+# violate our chunk size guideline. If caching is not used, chunking_file
+# should be None
+def chunkify(funcs, chunk_size, chunking_file, DEBUG=False):
+ previous_mapping = None
+ if chunking_file:
+ chunking_file = chunking_file
+ if os.path.exists(chunking_file):
+ try:
+ previous_mapping = cPickle.Unpickler(open(chunking_file, 'rb')).load() # maps a function identifier to the chunk number it will be in
+ #if DEBUG: print >> sys.stderr, 'jscache previous mapping', previous_mapping
+ except:
+ pass
+ chunks = []
+ if previous_mapping:
+ # initialize with previous chunking
+ news = []
+ for func in funcs:
+ ident, data = func
+ assert ident, 'need names for jcache chunking'
+ if not ident in previous_mapping:
+ news.append(func)
+ else:
+ n = previous_mapping[ident]
+ while n >= len(chunks): chunks.append([])
+ chunks[n].append(func)
+ if DEBUG: print >> sys.stderr, 'jscache not in previous chunking', len(news)
+ # add news and adjust for new sizes
+ spilled = news
+ for i in range(len(chunks)):
+ chunk = chunks[i]
+ size = sum([len(func[1]) for func in chunk])
+ #if DEBUG: print >> sys.stderr, 'need spilling?', i, size, len(chunk), 'vs', chunk_size, 1.5*chunk_size
+ while size > 1.5*chunk_size and len(chunk) > 1:
+ spill = chunk.pop()
+ spilled.append(spill)
+ size -= len(spill[1])
+ #if DEBUG: print >> sys.stderr, 'jscache new + spilled', len(spilled)
+ for chunk in chunks:
+ size = sum([len(func[1]) for func in chunk])
+ while size < 0.66*chunk_size and len(spilled) > 0:
+ spill = spilled.pop()
+ chunk.append(spill)
+ size += len(spill[1])
+ chunks = filter(lambda chunk: len(chunk) > 0, chunks) # might have empty ones, eliminate them
+ funcs = spilled # we will allocate these into chunks as if they were normal inputs
+ #if DEBUG: print >> sys.stderr, 'leftover spills', len(spilled)
+ # initialize reasonably, the rest of the funcs we need to split out
+ curr = []
+ total_size = 0
+ for i in range(len(funcs)):
+ func = funcs[i]
+ curr_size = len(func[1])
+ if total_size + curr_size < chunk_size:
+ curr.append(func)
+ total_size += curr_size
+ else:
+ chunks.append(curr)
+ curr = [func]
+ total_size = curr_size
+ if curr:
+ chunks.append(curr)
+ curr = None
+ if chunking_file:
+ # sort within each chunk, to keep the order identical
+ for chunk in chunks:
+ chunk.sort(key=lambda func: func[0])
+ # save new mapping info
+ new_mapping = {}
+ for i in range(len(chunks)):
+ chunk = chunks[i]
+ for ident, data in chunk:
+ assert ident not in new_mapping, 'cannot have duplicate names in jcache chunking'
+ new_mapping[ident] = i
+ cPickle.Pickler(open(chunking_file, 'wb')).dump(new_mapping)
+ #if DEBUG:
+ # for i in range(len(chunks)):
+ # chunk = chunks[i]
+ # print >> sys.stderr, 'final chunk', i, len(chunk)
+ # print >> sys.stderr, 'new mapping:', new_mapping
+ # if previous_mapping:
+ # for ident in set(previous_mapping.keys() + new_mapping.keys()):
+ # if previous_mapping.get(ident) != new_mapping.get(ident):
+ # print >> sys.stderr, 'mapping inconsistency', ident, previous_mapping.get(ident), new_mapping.get(ident)
+ return [''.join([func[1] for func in chunk]) for chunk in chunks] # remove function names
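(A rough usage sketch of the instance-based cache API added above; compile_chunk() is a hypothetical stand-in for whatever produces the value being cached.)

    from tools import cache as cache_module

    def compile_chunk(chunk):                   # hypothetical: the real work
        return 'compiled: ' + chunk

    cache = cache_module.Cache()                # $EM_CACHE or ~/.emscripten_cache
    jcache = cache_module.JCache(cache)
    jcache.ensure()

    chunk = 'function _main() { ... }'          # placeholder source text
    keys = ['settings text', 'forwarded data', chunk]
    shortkey = jcache.get_shortkey(keys)        # md5 of each key, concatenated
    out = jcache.get(shortkey, keys)            # None on a miss or key mismatch
    if out is None:
        out = compile_chunk(chunk)
        jcache.set(shortkey, keys, out)
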
diff --git a/tools/eliminator/node_modules/.bin/cake b/tools/eliminator/node_modules/.bin/cake
deleted file mode 120000
index d95f32af..00000000
--- a/tools/eliminator/node_modules/.bin/cake
+++ /dev/null
@@ -1 +0,0 @@
-../coffee-script/bin/cake
\ No newline at end of file
diff --git a/tools/eliminator/node_modules/.bin/coffee b/tools/eliminator/node_modules/.bin/coffee
deleted file mode 120000
index b57f275d..00000000
--- a/tools/eliminator/node_modules/.bin/coffee
+++ /dev/null
@@ -1 +0,0 @@
-../coffee-script/bin/coffee
\ No newline at end of file
diff --git a/tools/file_packager.py b/tools/file_packager.py
index bfa8e2f0..73ff4919 100644
--- a/tools/file_packager.py
+++ b/tools/file_packager.py
@@ -35,8 +35,8 @@ TODO: You can also provide .crn files yourself, pre-crunched. With this o
import os, sys, shutil, random
-from shared import Compression, execute, suffix, unsuffixed
import shared
+from shared import Compression, execute, suffix, unsuffixed
from subprocess import Popen, PIPE, STDOUT
data_target = sys.argv[1]
diff --git a/tools/js_optimizer.py b/tools/js_optimizer.py
index 231c6257..2fd2211b 100644
--- a/tools/js_optimizer.py
+++ b/tools/js_optimizer.py
@@ -2,7 +2,8 @@
import os, sys, subprocess, multiprocessing, re
import shared
-temp_files = shared.TempFiles()
+configuration = shared.configuration
+temp_files = configuration.get_temp_files()
__rootpath__ = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def path_from_root(*pathelems):
@@ -30,7 +31,7 @@ def run_on_chunk(command):
return filename
def run_on_js(filename, passes, js_engine, jcache):
-
+ if isinstance(jcache, bool) and jcache: jcache = shared.JCache
if jcache: shared.JCache.ensure()
if type(passes) == str:
@@ -106,7 +107,7 @@ def run_on_js(filename, passes, js_engine, jcache):
intended_num_chunks = int(round(cores * NUM_CHUNKS_PER_CORE))
chunk_size = min(MAX_CHUNK_SIZE, max(MIN_CHUNK_SIZE, total_size / intended_num_chunks))
- chunks = shared.JCache.chunkify(funcs, chunk_size, 'jsopt' if jcache else None)
+ chunks = shared.chunkify(funcs, chunk_size, jcache.get_cachename('jsopt') if jcache else None)
if jcache:
# load chunks from cache where we can # TODO: ignore small chunks
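(For reference, a hedged sketch of calling run_on_js() with the updated jcache parameter; the output path, pass list and engine below are placeholders rather than values from this patch.)

    from tools import js_optimizer, shared

    # jcache may now be a JCache instance; passing True still works and
    # falls back to the module-level shared.JCache for compatibility.
    js_optimizer.run_on_js('/tmp/out.js',                       # placeholder path
                           ['somePassName'],                    # placeholder pass list
                           js_engine=shared.COMPILER_ENGINE,    # placeholder engine
                           jcache=shared.JCache)
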
diff --git a/tools/jsrun.py b/tools/jsrun.py
new file mode 100644
index 00000000..27c55350
--- /dev/null
+++ b/tools/jsrun.py
@@ -0,0 +1,27 @@
+import time
+from subprocess import Popen, PIPE, STDOUT
+
+def timeout_run(proc, timeout, note='unnamed process', full_output=False):
+ start = time.time()
+ if timeout is not None:
+ while time.time() - start < timeout and proc.poll() is None:
+ time.sleep(0.1)
+ if proc.poll() is None:
+ proc.kill() # XXX bug: killing emscripten.py does not kill it's child process!
+ raise Exception("Timed out: " + note)
+ out = proc.communicate()
+ return '\n'.join(out) if full_output else out[0]
+
+def run_js(filename, engine=None, args=[], check_timeout=False, stdout=PIPE, stderr=None, cwd=None, full_output=False):
+ if type(engine) is not list:
+ engine = [engine]
+ command = engine + [filename] + (['--'] if 'd8' in engine[0] else []) + args
+ return timeout_run(
+ Popen(
+ command,
+ stdout=stdout,
+ stderr=stderr,
+ cwd=cwd),
+ 15*60 if check_timeout else None,
+ 'Execution',
+ full_output=full_output)
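(A small sketch of the extracted helper in use; the engine path and the arguments are placeholders.)

    import subprocess
    from tools import jsrun

    # engine may be a string or a [binary, flag, ...] list; for d8, '--' is
    # inserted between the script name and its arguments automatically.
    out = jsrun.run_js('src/compiler.js',
                       engine='/usr/local/bin/node',    # placeholder engine path
                       args=['settings.txt', 'input.ll', 'pre'],
                       stdout=subprocess.PIPE,
                       check_timeout=True)              # kill after 15 minutes
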
diff --git a/tools/shared.py b/tools/shared.py
index 0594f81e..f343d6d6 100644
--- a/tools/shared.py
+++ b/tools/shared.py
@@ -1,6 +1,7 @@
-import shutil, time, os, sys, json, tempfile, copy, shlex, atexit, subprocess, hashlib, cPickle, zlib, re
+import shutil, time, os, sys, json, tempfile, copy, shlex, atexit, subprocess, hashlib, cPickle, re
from subprocess import Popen, PIPE, STDOUT
from tempfile import mkstemp
+import jsrun, cache, tempfiles
def listify(x):
if type(x) is not list: return [x]
@@ -180,7 +181,7 @@ def check_node_version():
# we re-check sanity when the settings are changed)
# We also re-check sanity and clear the cache when the version changes
-EMSCRIPTEN_VERSION = '1.2.4'
+EMSCRIPTEN_VERSION = '1.2.5'
def check_sanity(force=False):
try:
@@ -289,34 +290,50 @@ AUTODEBUGGER = path_from_root('tools', 'autodebugger.py')
BINDINGS_GENERATOR = path_from_root('tools', 'bindings_generator.py')
EXEC_LLVM = path_from_root('tools', 'exec_llvm.py')
FILE_PACKAGER = path_from_root('tools', 'file_packager.py')
-RELOOPER = path_from_root('src', 'relooper.js')
# Temp dir. Create a random one, unless EMCC_DEBUG is set, in which case use TEMP_DIR/emscripten_temp
-try:
- TEMP_DIR
-except:
- print >> sys.stderr, 'TEMP_DIR not defined in ~/.emscripten, using /tmp'
- TEMP_DIR = '/tmp'
+class Configuration:
+ def __init__(self, environ):
+ self.DEBUG = environ.get('EMCC_DEBUG')
+ if self.DEBUG == "0":
+ self.DEBUG = None
+ self.DEBUG_CACHE = self.DEBUG and "cache" in self.DEBUG
+ self.EMSCRIPTEN_TEMP_DIR = None
-CANONICAL_TEMP_DIR = os.path.join(TEMP_DIR, 'emscripten_temp')
-EMSCRIPTEN_TEMP_DIR = None
+ try:
+ self.TEMP_DIR = TEMP_DIR
+ except NameError:
+ print >> sys.stderr, 'TEMP_DIR not defined in ~/.emscripten, using /tmp'
+ self.TEMP_DIR = '/tmp'
-DEBUG = os.environ.get('EMCC_DEBUG')
-if DEBUG == "0":
- DEBUG = None
-DEBUG_CACHE = DEBUG and "cache" in DEBUG
+ self.CANONICAL_TEMP_DIR = os.path.join(self.TEMP_DIR, 'emscripten_temp')
-if DEBUG:
- try:
- EMSCRIPTEN_TEMP_DIR = CANONICAL_TEMP_DIR
- if not os.path.exists(EMSCRIPTEN_TEMP_DIR):
- os.makedirs(EMSCRIPTEN_TEMP_DIR)
- except Exception, e:
- print >> sys.stderr, e, 'Could not create canonical temp dir. Check definition of TEMP_DIR in ~/.emscripten'
+ if self.DEBUG:
+ try:
+ self.EMSCRIPTEN_TEMP_DIR = self.CANONICAL_TEMP_DIR
+ if not os.path.exists(self.EMSCRIPTEN_TEMP_DIR):
+ os.makedirs(self.EMSCRIPTEN_TEMP_DIR)
+ except Exception, e:
+ print >> sys.stderr, e, 'Could not create canonical temp dir. Check definition of TEMP_DIR in ~/.emscripten'
+
+ def get_temp_files(self):
+ return tempfiles.TempFiles(
+ tmp=self.TEMP_DIR if not self.DEBUG else self.EMSCRIPTEN_TEMP_DIR,
+ save_debug_files=os.environ.get('EMCC_DEBUG_SAVE'))
+
+ def debug_log(self, msg):
+ if self.DEBUG:
+ print >> sys.stderr, msg
+
+configuration = Configuration(environ=os.environ)
+DEBUG = configuration.DEBUG
+EMSCRIPTEN_TEMP_DIR = configuration.EMSCRIPTEN_TEMP_DIR
+DEBUG_CACHE = configuration.DEBUG_CACHE
+CANONICAL_TEMP_DIR = configuration.CANONICAL_TEMP_DIR
if not EMSCRIPTEN_TEMP_DIR:
- EMSCRIPTEN_TEMP_DIR = tempfile.mkdtemp(prefix='emscripten_temp_', dir=TEMP_DIR)
+ EMSCRIPTEN_TEMP_DIR = tempfile.mkdtemp(prefix='emscripten_temp_', dir=configuration.TEMP_DIR)
def clean_temp():
try_delete(EMSCRIPTEN_TEMP_DIR)
atexit.register(clean_temp)
@@ -414,42 +431,7 @@ if not WINDOWS:
pass
# Temp file utilities
-
-def try_delete(filename):
- try:
- os.unlink(filename)
- except:
- try:
- shutil.rmtree(filename)
- except:
- pass
-
-class TempFiles:
- def __init__(self):
- self.to_clean = []
-
- def note(self, filename):
- self.to_clean.append(filename)
-
- def get(self, suffix):
- """Returns a named temp file with the given prefix."""
- named_file = tempfile.NamedTemporaryFile(dir=TEMP_DIR if not DEBUG else EMSCRIPTEN_TEMP_DIR, suffix=suffix, delete=False)
- self.note(named_file.name)
- return named_file
-
- def clean(self):
- if os.environ.get('EMCC_DEBUG_SAVE'):
- print >> sys.stderr, 'not cleaning up temp files since in debug-save mode, see them in %s' % EMSCRIPTEN_TEMP_DIR
- return
- for filename in self.to_clean:
- try_delete(filename)
- self.to_clean = []
-
- def run_and_clean(self, func):
- try:
- return func()
- finally:
- self.clean()
+from tempfiles import try_delete
# Utilities
@@ -463,23 +445,10 @@ def check_engine(engine):
print 'Checking JS engine %s failed. Check %s. Details: %s' % (str(engine), EM_CONFIG, str(e))
return False
-def timeout_run(proc, timeout, note='unnamed process', full_output=False):
- start = time.time()
- if timeout is not None:
- while time.time() - start < timeout and proc.poll() is None:
- time.sleep(0.1)
- if proc.poll() is None:
- proc.kill() # XXX bug: killing emscripten.py does not kill it's child process!
- raise Exception("Timed out: " + note)
- out = proc.communicate()
- return '\n'.join(out) if full_output else out[0]
-
-def run_js(filename, engine=None, args=[], check_timeout=False, stdout=PIPE, stderr=None, cwd=None, full_output=False):
- if engine is None: engine = JS_ENGINES[0]
- engine = listify(engine)
- #if not WINDOWS: 'd8' in engine[0] or 'node' in engine[0]: engine += ['--max-stack-size=8192', '--stack_size=8192'] # needed for some big projects
- command = engine + [filename] + (['--'] if 'd8' in engine[0] else []) + args
- return timeout_run(Popen(command, stdout=stdout, stderr=stderr, cwd=cwd), 15*60 if check_timeout else None, 'Execution', full_output=full_output)
+def run_js(filename, engine=None, *args, **kw):
+ if engine is None:
+ engine = JS_ENGINES[0]
+ return jsrun.run_js(filename, engine, *args, **kw)
def to_cc(cxx):
# By default, LLVM_GCC and CLANG are really the C++ versions. This gets an explicit C version
@@ -654,7 +623,7 @@ set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)''' % { 'winfix': '' if not WINDOWS e
.replace('$EMSCRIPTEN_ROOT', path_from_root('').replace('\\', '/')) \
.replace('$CFLAGS', env['CFLAGS']) \
.replace('$CXXFLAGS', env['CFLAGS'])
- toolchainFile = mkstemp(suffix='.cmaketoolchain.txt', dir=TEMP_DIR)[1]
+ toolchainFile = mkstemp(suffix='.cmaketoolchain.txt', dir=configuration.TEMP_DIR)[1]
open(toolchainFile, 'w').write(CMakeToolchain)
args.append('-DCMAKE_TOOLCHAIN_FILE=%s' % os.path.abspath(toolchainFile))
return args
@@ -968,8 +937,9 @@ set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)''' % { 'winfix': '' if not WINDOWS e
os.environ['EMSCRIPTEN_SUPPRESS_USAGE_WARNING'] = '1'
# Run Emscripten
+ Settings.RELOOPER = Cache.get_path('relooper.js')
settings = Settings.serialize()
- compiler_output = timeout_run(Popen([PYTHON, EMSCRIPTEN, filename + ('.o.ll' if append_ext else ''), '-o', filename + '.o.js'] + settings + extra_args, stdout=PIPE), None, 'Compiling')
+ compiler_output = jsrun.timeout_run(Popen([PYTHON, EMSCRIPTEN, filename + ('.o.ll' if append_ext else ''), '-o', filename + '.o.js'] + settings + extra_args, stdout=PIPE), None, 'Compiling')
#print compiler_output
# Detect compilation crashes and errors
@@ -1161,25 +1131,26 @@ set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)''' % { 'winfix': '' if not WINDOWS e
# Make sure the relooper exists. If it does not, check out the relooper code and bootstrap it
@staticmethod
- def ensure_relooper():
- if os.path.exists(RELOOPER): return
+ def ensure_relooper(relooper):
+ if os.path.exists(relooper): return
+ Cache.ensure()
curr = os.getcwd()
try:
ok = False
print >> sys.stderr, '======================================='
print >> sys.stderr, 'bootstrapping relooper...'
- Cache.ensure()
os.chdir(path_from_root('src'))
def make(opt_level):
- raw = RELOOPER + '.raw.js'
+ raw = relooper + '.raw.js'
Building.emcc(os.path.join('relooper', 'Relooper.cpp'), ['-I' + os.path.join('relooper'), '--post-js',
os.path.join('relooper', 'emscripten', 'glue.js'),
'-s', 'TOTAL_MEMORY=52428800',
'-s', 'EXPORTED_FUNCTIONS=["_rl_set_output_buffer","_rl_make_output_buffer","_rl_new_block","_rl_delete_block","_rl_block_add_branch_to","_rl_new_relooper","_rl_delete_relooper","_rl_relooper_add_block","_rl_relooper_calculate","_rl_relooper_render", "_rl_set_asm_js_mode"]',
'-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=["memcpy", "memset", "malloc", "free", "puts"]',
+ '-s', 'RELOOPER="' + relooper + '"',
'-O' + str(opt_level), '--closure', '0'], raw)
- f = open(RELOOPER, 'w')
+ f = open(relooper, 'w')
f.write("// Relooper, (C) 2012 Alon Zakai, MIT license, https://github.com/kripken/Relooper\n")
f.write("var Relooper = (function() {\n");
f.write(open(raw).read())
@@ -1199,7 +1170,7 @@ set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)''' % { 'winfix': '' if not WINDOWS e
finally:
os.chdir(curr)
if not ok:
- print >> sys.stderr, 'bootstrapping relooper failed. You may need to manually create src/relooper.js by compiling it, see src/relooper/emscripten'
+ print >> sys.stderr, 'bootstrapping relooper failed. You may need to manually create relooper.js by compiling it, see src/relooper/emscripten'
1/0
@staticmethod
@@ -1228,205 +1199,10 @@ set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)''' % { 'winfix': '' if not WINDOWS e
open(outfile, 'w').write(src)
return outfile
-# Permanent cache for dlmalloc and stdlibc++
-class Cache:
- dirname = os.environ.get('EM_CACHE')
- if not dirname:
- dirname = os.path.expanduser(os.path.join('~', '.emscripten_cache'))
-
- @staticmethod
- def ensure():
- if not os.path.exists(Cache.dirname):
- os.makedirs(Cache.dirname)
-
- @staticmethod
- def erase():
- try:
- shutil.rmtree(Cache.dirname)
- except:
- pass
- try_delete(RELOOPER)
- try:
- open(Cache.dirname + '__last_clear', 'w').write('last clear: ' + time.asctime() + '\n')
- except:
- print >> sys.stderr, 'failed to save last clear time'
-
- # Request a cached file. If it isn't in the cache, it will be created with
- # the given creator function
- @staticmethod
- def get(shortname, creator):
- if not shortname.endswith('.bc'): shortname += '.bc'
- cachename = os.path.join(Cache.dirname, shortname)
- if os.path.exists(cachename):
- return cachename
- Cache.ensure()
- shutil.copyfile(creator(), cachename)
- return cachename
-
-# JS-specific cache. We cache the results of compilation and optimization,
-# so that in incremental builds we can just load from cache.
-# We cache reasonably-large-sized chunks
-class JCache:
- dirname = os.path.join(Cache.dirname, 'jcache')
-
- @staticmethod
- def ensure():
- Cache.ensure()
- if not os.path.exists(JCache.dirname):
- os.makedirs(JCache.dirname)
-
- @staticmethod
- def get_shortkey(keys):
- if type(keys) not in [list, tuple]:
- keys = [keys]
- ret = ''
- for key in keys:
- assert type(key) == str
- ret += hashlib.md5(key).hexdigest()
- return ret
-
- @staticmethod
- def get_cachename(shortkey):
- return os.path.join(JCache.dirname, shortkey)
-
- # Returns a cached value, if it exists. Make sure the full key matches
- @staticmethod
- def get(shortkey, keys):
- if DEBUG_CACHE: print >> sys.stderr, 'jcache get?', shortkey
- cachename = JCache.get_cachename(shortkey)
- if not os.path.exists(cachename):
- if DEBUG_CACHE: print >> sys.stderr, 'jcache none at all'
- return
- try:
- data = cPickle.loads(zlib.decompress(open(cachename).read()))
- except Exception, e:
- if DEBUG_CACHE: print >> sys.stderr, 'jcache decompress/unpickle error:', e
- return
- if len(data) != 2:
- if DEBUG_CACHE: print >> sys.stderr, 'jcache error in get'
- return
- oldkeys = data[0]
- if len(oldkeys) != len(keys):
- if DEBUG_CACHE: print >> sys.stderr, 'jcache collision (a)'
- return
- for i in range(len(oldkeys)):
- if oldkeys[i] != keys[i]:
- if DEBUG_CACHE: print >> sys.stderr, 'jcache collision (b)'
- return
- if DEBUG_CACHE: print >> sys.stderr, 'jcache win'
- return data[1]
-
- # Sets the cached value for a key (from get_key)
- @staticmethod
- def set(shortkey, keys, value):
- if DEBUG_CACHE: print >> sys.stderr, 'save to cache', shortkey
- cachename = JCache.get_cachename(shortkey)
- try:
- f = open(cachename, 'w')
- f.write(zlib.compress(cPickle.dumps([keys, value])))
- f.close()
- except Exception, e:
- if DEBUG_CACHE: print >> sys.stderr, 'jcache compress/pickle error:', e
- return
- #if DEBUG:
- # for i in range(len(keys)):
- # open(cachename + '.key' + str(i), 'w').write(keys[i])
- # open(cachename + '.value', 'w').write(value)
-
- # Given a set of functions of form (ident, text), and a preferred chunk size,
- # generates a set of chunks for parallel processing and caching.
- # It is very important to generate similar chunks in incremental builds, in
- # order to maximize the chance of cache hits. To achieve that, we save the
- # chunking used in the previous compilation of this phase, and we try to
- # generate the same chunks, barring big differences in function sizes that
- # violate our chunk size guideline. If caching is not used, chunking_file
- # should be None
- @staticmethod
- def chunkify(funcs, chunk_size, chunking_file):
- previous_mapping = None
- if chunking_file:
- chunking_file = JCache.get_cachename(chunking_file)
- if os.path.exists(chunking_file):
- try:
- previous_mapping = cPickle.Unpickler(open(chunking_file, 'rb')).load() # maps a function identifier to the chunk number it will be in
- if DEBUG: print >> sys.stderr, 'jscache previous mapping of size %d loaded from %s' % (len(previous_mapping), chunking_file)
- except Exception, e:
- print >> sys.stderr, 'Failed to load and unpickle previous chunking file at %s: ' % chunking_file, e
- else:
- print >> sys.stderr, 'Previous chunking file not found at %s' % chunking_file
- chunks = []
- if previous_mapping:
- # initialize with previous chunking
- news = []
- for func in funcs:
- ident, data = func
- assert ident, 'need names for jcache chunking'
- if not ident in previous_mapping:
- news.append(func)
- else:
- n = previous_mapping[ident]
- while n >= len(chunks): chunks.append([])
- chunks[n].append(func)
- if DEBUG: print >> sys.stderr, 'jscache not in previous chunking', len(news)
- # add news and adjust for new sizes
- spilled = news
- for i in range(len(chunks)):
- chunk = chunks[i]
- size = sum([len(func[1]) for func in chunk])
- #if DEBUG: print >> sys.stderr, 'need spilling?', i, size, len(chunk), 'vs', chunk_size, 1.5*chunk_size
- while size > 1.5*chunk_size and len(chunk) > 1:
- spill = chunk.pop()
- spilled.append(spill)
- size -= len(spill[1])
- #if DEBUG: print >> sys.stderr, 'jscache new + spilled', len(spilled)
- for chunk in chunks:
- size = sum([len(func[1]) for func in chunk])
- while size < 0.66*chunk_size and len(spilled) > 0:
- spill = spilled.pop()
- chunk.append(spill)
- size += len(spill[1])
- chunks = filter(lambda chunk: len(chunk) > 0, chunks) # might have empty ones, eliminate them
- funcs = spilled # we will allocate these into chunks as if they were normal inputs
- #if DEBUG: print >> sys.stderr, 'leftover spills', len(spilled)
- # initialize reasonably, the rest of the funcs we need to split out
- curr = []
- total_size = 0
- for i in range(len(funcs)):
- func = funcs[i]
- curr_size = len(func[1])
- if total_size + curr_size < chunk_size:
- curr.append(func)
- total_size += curr_size
- else:
- chunks.append(curr)
- curr = [func]
- total_size = curr_size
- if curr:
- chunks.append(curr)
- curr = None
- if chunking_file:
- # sort within each chunk, to keep the order identical
- for chunk in chunks:
- chunk.sort(key=lambda func: func[0])
- # save new mapping info
- new_mapping = {}
- for i in range(len(chunks)):
- chunk = chunks[i]
- for ident, data in chunk:
- assert ident not in new_mapping, 'cannot have duplicate names in jcache chunking'
- new_mapping[ident] = i
- cPickle.Pickler(open(chunking_file, 'wb')).dump(new_mapping)
- if DEBUG: print >> sys.stderr, 'jscache mapping of size %d saved to %s' % (len(new_mapping), chunking_file)
- #if DEBUG:
- # for i in range(len(chunks)):
- # chunk = chunks[i]
- # print >> sys.stderr, 'final chunk', i, len(chunk)
- # print >> sys.stderr, 'new mapping:', new_mapping
- # if previous_mapping:
- # for ident in set(previous_mapping.keys() + new_mapping.keys()):
- # if previous_mapping.get(ident) != new_mapping.get(ident):
- # print >> sys.stderr, 'mapping inconsistency', ident, previous_mapping.get(ident), new_mapping.get(ident)
- return [''.join([func[1] for func in chunk]) for chunk in chunks] # remove function names
+# compatibility with existing emcc, etc. scripts
+Cache = cache.Cache(debug=DEBUG_CACHE)
+JCache = cache.JCache(Cache)
+chunkify = cache.chunkify
class JS:
@staticmethod
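(A brief sketch of the Configuration object introduced above, which gathers what used to be module-level DEBUG/TEMP_DIR state in tools/shared.py; the EMCC_DEBUG override is only illustrative.)

    import os
    from tools import shared

    env = dict(os.environ, EMCC_DEBUG='1')       # illustrative override
    configuration = shared.Configuration(environ=env)

    print configuration.TEMP_DIR                 # from ~/.emscripten, else /tmp
    print configuration.CANONICAL_TEMP_DIR       # TEMP_DIR/emscripten_temp
    temp_files = configuration.get_temp_files()  # a tools.tempfiles.TempFiles
    configuration.debug_log('printed only when DEBUG is set')
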
diff --git a/tools/tempfiles.py b/tools/tempfiles.py
new file mode 100644
index 00000000..1721b2bb
--- /dev/null
+++ b/tools/tempfiles.py
@@ -0,0 +1,40 @@
+import os
+import shutil
+import tempfile
+
+def try_delete(filename):
+ try:
+ os.unlink(filename)
+ except:
+ if os.path.exists(filename):
+ shutil.rmtree(filename, ignore_errors=True)
+
+class TempFiles:
+ def __init__(self, tmp, save_debug_files=False):
+ self.tmp = tmp
+ self.save_debug_files = save_debug_files
+
+ self.to_clean = []
+
+ def note(self, filename):
+ self.to_clean.append(filename)
+
+ def get(self, suffix):
+ """Returns a named temp file with the given prefix."""
+ named_file = tempfile.NamedTemporaryFile(dir=self.tmp, suffix=suffix, delete=False)
+ self.note(named_file.name)
+ return named_file
+
+ def clean(self):
+ if self.save_debug_files:
+ print >> sys.stderr, 'not cleaning up temp files since in debug-save mode, see them in %s' % (self.tmp,)
+ return
+ for filename in self.to_clean:
+ try_delete(filename)
+ self.to_clean = []
+
+ def run_and_clean(self, func):
+ try:
+ return func()
+ finally:
+ self.clean()
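
(And a matching sketch of the extracted TempFiles helper, constructed with an explicit temp directory instead of the old shared.TEMP_DIR global; the directory and file contents are placeholders.)

    from tools import tempfiles

    temp_files = tempfiles.TempFiles('/tmp', save_debug_files=False)

    def work():
        f = temp_files.get('.ll')         # NamedTemporaryFile, noted for cleanup
        f.write('; placeholder LLVM assembly\n')
        f.close()
        return f.name

    # run_and_clean() deletes every file handed out by get()/note() afterwards,
    # unless save_debug_files was set.
    print temp_files.run_and_clean(work)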