about summary refs log tree commit diff
path: root/tests/runner.py
diff options
context:
space:
mode:
Diffstat (limited to 'tests/runner.py')
-rw-r--r--tests/runner.py101
1 files changed, 96 insertions, 5 deletions
diff --git a/tests/runner.py b/tests/runner.py
index 3fd63574..cd87d3d3 100644
--- a/tests/runner.py
+++ b/tests/runner.py
@@ -163,6 +163,13 @@ class RunnerCore(unittest.TestCase):
def run_native(self, filename, args):
Popen([filename+'.native'] + args, stdout=PIPE, stderr=STDOUT).communicate()[0]
+ def assertIdentical(self, x, y):
+ if x != y:
+ raise Exception("Expected to have '%s' == '%s', diff:\n\n%s" % (
+ limit_size(x), limit_size(y),
+ limit_size(''.join([a.rstrip()+'\n' for a in difflib.unified_diff(x.split('\n'), y.split('\n'), fromfile='expected', tofile='actual')]))
+ ))
+
def assertContained(self, value, string):
if type(value) is not str: value = value() # lazy loading
if type(string) is not str: string = string()
@@ -236,6 +243,8 @@ if 'benchmark' not in str(sys.argv):
js_engines = [SPIDERMONKEY_ENGINE, V8_ENGINE]
if Settings.USE_TYPED_ARRAYS == 2:
js_engines = [SPIDERMONKEY_ENGINE] # when oh when will v8 support typed arrays in the console
+ js_engines = filter(lambda engine: os.path.exists(engine[0]), js_engines)
+ assert len(js_engines) > 0, 'No JS engine present to run this test with. Check ~/.emscripten and the paths therein.'
for engine in js_engines:
js_output = self.run_generated_code(engine, filename + '.o.js', args)
if output_nicerizer is not None:
@@ -2910,6 +2919,8 @@ if 'benchmark' not in str(sys.argv):
self.do_run(src, expected)
CORRECT_SIGNS = 0
+ # libc++ tests
+
def test_iostream(self):
src = '''
#include <iostream>
@@ -2923,6 +2934,41 @@ if 'benchmark' not in str(sys.argv):
self.do_run(src, 'hello world')
+ def test_stdvec(self):
+ src = '''
+ #include <vector>
+ #include <stdio.h>
+
+ struct S {
+ int a;
+ float b;
+ };
+
+ void foo(int a, float b)
+ {
+ printf("%d:%.2f\\n", a, b);
+ }
+
+ int main ( int argc, char *argv[] )
+ {
+ std::vector<S> ar;
+ S s;
+
+ s.a = 789;
+ s.b = 123.456f;
+ ar.push_back(s);
+
+ s.a = 0;
+ s.b = 100.1f;
+ ar.push_back(s);
+
+ foo(ar[0].a, ar[0].b);
+ foo(ar[1].a, ar[1].b);
+ }
+ '''
+
+ self.do_run(src, '789:123.46\n0:100.1')
+
### 'Big' tests
def test_fannkuch(self):
@@ -3194,7 +3240,7 @@ if 'benchmark' not in str(sys.argv):
# Combine libraries
- combined = os.path.join(self.get_building_dir(), 'combined.bc')
+ combined = os.path.join(self.get_build_dir(), 'combined.bc')
Building.link([freetype, poppler], combined)
self.do_ll_run(combined,
@@ -3278,7 +3324,7 @@ if 'benchmark' not in str(sys.argv):
includes=[path_from_root('tests', 'openjpeg', 'libopenjpeg'),
path_from_root('tests', 'openjpeg', 'codec'),
path_from_root('tests', 'openjpeg', 'common'),
- os.path.join(self.get_building_dir(), 'openjpeg')],
+ os.path.join(self.get_build_dir(), 'openjpeg')],
force_c=True,
post_build=post,
output_nicerizer=image_compare)# build_ll_hook=self.do_autodebug)
@@ -3381,6 +3427,38 @@ if 'benchmark' not in str(sys.argv):
self.do_run(src, '*hello slim world*', build_ll_hook=hook)
def test_profiling(self):
+ src = '''
+ #include <emscripten.h>
+ #include <unistd.h>
+
+ int main()
+ {
+ EMSCRIPTEN_PROFILE_INIT(3);
+ EMSCRIPTEN_PROFILE_BEGIN(0);
+ usleep(10 * 1000);
+ EMSCRIPTEN_PROFILE_END(0);
+ EMSCRIPTEN_PROFILE_BEGIN(1);
+ usleep(50 * 1000);
+ EMSCRIPTEN_PROFILE_END(1);
+ EMSCRIPTEN_PROFILE_BEGIN(2);
+ usleep(250 * 1000);
+ EMSCRIPTEN_PROFILE_END(2);
+ return 0;
+ }
+ '''
+
+ def post1(filename):
+ src = open(filename, 'a')
+ src.write('''
+ Profiling.dump();
+ ''')
+ src.close()
+
+ self.do_run(src, '''Profiling data:
+Block 0: ''', post_build=post1)
+
+ # Part 2: old JS version
+
Settings.PROFILE = 1
Settings.INVOKE_RUN = 0
@@ -3427,7 +3505,6 @@ if 'benchmark' not in str(sys.argv):
''')
src.close()
- # Using build_ll_hook forces a recompile, which leads to DFE being done even without opts
self.do_run(src, ': __Z6inner1i (5000)\n*ok*', post_build=post)
### Integration tests
@@ -4108,13 +4185,27 @@ TT = %s
input = open(path_from_root('tools', 'eliminator', 'eliminator-test.js')).read()
expected = open(path_from_root('tools', 'eliminator', 'eliminator-test-output.js')).read()
output = Popen([COFFEESCRIPT, VARIABLE_ELIMINATOR], stdin=PIPE, stdout=PIPE, stderr=PIPE).communicate(input)[0]
- self.assertEquals(output, expected)
+ self.assertIdentical(expected, output)
else:
# Benchmarks. Run them with argument |benchmark|. To run a specific test, do
# |benchmark.test_X|.
- print "Running Emscripten benchmarks..."
+ fingerprint = [time.asctime()]
+ try:
+ fingerprint.append('em: ' + Popen(['git', 'show'], stdout=PIPE).communicate()[0].split('\n')[0])
+ except:
+ pass
+ try:
+ d = os.getcwd()
+ os.chdir(os.path.expanduser('~/Dev/mozilla-central'))
+ fingerprint.append('sm: ' + filter(lambda line: 'changeset' in line,
+ Popen(['hg', 'tip'], stdout=PIPE).communicate()[0].split('\n'))[0])
+ except:
+ pass
+ finally:
+ os.chdir(d)
+ print 'Running Emscripten benchmarks... [ %s ]' % ' | '.join(fingerprint)
sys.argv = filter(lambda x: x != 'benchmark', sys.argv)