-rwxr-xr-x emcc | 144
-rw-r--r-- src/jsifier.js | 84
-rw-r--r-- src/parseTools.js | 16
-rw-r--r-- src/settings.js | 9
-rw-r--r-- src/shell.html | 6
-rwxr-xr-x tests/runner.py | 538
-rw-r--r-- tools/eliminator/node_modules/uglify-js/lib/parse-js.js | 32
-rw-r--r-- tools/eliminator/node_modules/uglify-js/lib/process.js | 72
-rw-r--r-- tools/js-optimizer.js | 557
-rw-r--r-- tools/js_optimizer.py | 25
-rw-r--r-- tools/node_modules/source-map/.npmignore | 2
-rw-r--r-- tools/node_modules/source-map/.travis.yml | 4
-rw-r--r-- tools/node_modules/source-map/CHANGELOG.md | 58
-rw-r--r-- tools/node_modules/source-map/LICENSE | 28
-rw-r--r-- tools/node_modules/source-map/Makefile.dryice.js | 166
-rw-r--r-- tools/node_modules/source-map/README.md | 347
-rw-r--r-- tools/node_modules/source-map/build/assert-shim.js | 56
-rw-r--r-- tools/node_modules/source-map/build/mini-require.js | 152
-rw-r--r-- tools/node_modules/source-map/build/prefix-source-map.jsm | 20
-rw-r--r-- tools/node_modules/source-map/build/prefix-utils.jsm | 18
-rw-r--r-- tools/node_modules/source-map/build/suffix-browser.js | 8
-rw-r--r-- tools/node_modules/source-map/build/suffix-source-map.jsm | 6
-rw-r--r-- tools/node_modules/source-map/build/suffix-utils.jsm | 21
-rw-r--r-- tools/node_modules/source-map/build/test-prefix.js | 8
-rw-r--r-- tools/node_modules/source-map/build/test-suffix.js | 3
-rw-r--r-- tools/node_modules/source-map/lib/source-map.js | 8
-rw-r--r-- tools/node_modules/source-map/lib/source-map/array-set.js | 96
-rw-r--r-- tools/node_modules/source-map/lib/source-map/base64-vlq.js | 144
-rw-r--r-- tools/node_modules/source-map/lib/source-map/base64.js | 42
-rw-r--r-- tools/node_modules/source-map/lib/source-map/binary-search.js | 81
-rw-r--r-- tools/node_modules/source-map/lib/source-map/source-map-consumer.js | 430
-rw-r--r-- tools/node_modules/source-map/lib/source-map/source-map-generator.js | 381
-rw-r--r-- tools/node_modules/source-map/lib/source-map/source-node.js | 353
-rw-r--r-- tools/node_modules/source-map/lib/source-map/util.js | 117
-rw-r--r-- tools/node_modules/source-map/node_modules/amdefine/LICENSE | 58
-rw-r--r-- tools/node_modules/source-map/node_modules/amdefine/README.md | 119
-rw-r--r-- tools/node_modules/source-map/node_modules/amdefine/amdefine.js | 299
-rw-r--r-- tools/node_modules/source-map/node_modules/amdefine/package.json | 33
-rw-r--r-- tools/node_modules/source-map/package.json | 74
-rwxr-xr-x tools/node_modules/source-map/test/run-tests.js | 73
-rw-r--r-- tools/node_modules/source-map/test/source-map/test-api.js | 26
-rw-r--r-- tools/node_modules/source-map/test/source-map/test-array-set.js | 71
-rw-r--r-- tools/node_modules/source-map/test/source-map/test-base64-vlq.js | 24
-rw-r--r-- tools/node_modules/source-map/test/source-map/test-base64.js | 35
-rw-r--r-- tools/node_modules/source-map/test/source-map/test-binary-search.js | 54
-rw-r--r-- tools/node_modules/source-map/test/source-map/test-dog-fooding.js | 72
-rw-r--r-- tools/node_modules/source-map/test/source-map/test-source-map-consumer.js | 306
-rw-r--r-- tools/node_modules/source-map/test/source-map/test-source-map-generator.js | 391
-rw-r--r-- tools/node_modules/source-map/test/source-map/test-source-node.js | 282
-rw-r--r-- tools/node_modules/source-map/test/source-map/util.js | 152
-rw-r--r-- tools/shared.py | 33
-rw-r--r-- tools/source-maps/sourcemap2json.js | 15
-rwxr-xr-x tools/source-maps/sourcemapper.js | 177
-rw-r--r-- tools/test-js-optimizer-asm-pre-output.js | 1
-rw-r--r-- tools/test-js-optimizer-asm-pre.js | 1
55 files changed, 5696 insertions, 632 deletions
diff --git a/emcc b/emcc
index 3bc35aa4..705538fc 100755
--- a/emcc
+++ b/emcc
@@ -49,7 +49,7 @@ emcc can be influenced by a few environment variables:
import os, sys, shutil, tempfile, subprocess, shlex, time, re, logging
from subprocess import PIPE, STDOUT
-from tools import shared
+from tools import shared, jsrun
from tools.shared import Compression, execute, suffix, unsuffixed, unsuffixed_basename
from tools.response_file import read_response_file
@@ -127,11 +127,6 @@ Options that are modified or new in %s include:
                           (For details on the effects of different
opt levels, see apply_opt_level() in
tools/shared.py and also src/settings.js.)
- Note: Optimizations are only done when
- compiling to JavaScript, not to intermediate
- bitcode, *unless* you build with
- EMCC_OPTIMIZE_NORMALLY=1 (not recommended
- unless you know what you are doing!)
-O2 As -O1, plus the relooper (loop recreation),
LLVM -O2 optimizations, and
@@ -144,7 +139,7 @@ Options that are modified or new in %s include:
-s DOUBLE_MODE=0
-s PRECISE_I64_MATH=0
--closure 1
- --llvm-lto 1
+ --llvm-lto 3
This is not recommended at all. A better idea
is to try each of these separately on top of
@@ -203,10 +198,13 @@ Options that are modified or new in %s include:
-g2 Preserve function names
-g3 Preserve variable names
-g4 Preserve LLVM debug info (if -g was
- used when compiling the C/C++ sources)
- and show line number debug comments.
- This is the highest level of debuggability.
- (default in -O0)
+ used when compiling the C/C++ sources),
+ show line number debug comments, and
+ generate source maps. This is the highest
+ level of debuggability. Note that this
+ may make -O1 and above significantly
+ slower because JS optimization will be
+ limited to 1 core. (default in -O0)
--typed-arrays <mode> 0: No typed arrays
1: Parallel typed arrays
@@ -217,10 +215,15 @@ Options that are modified or new in %s include:
2: -O2 LLVM optimizations
3: -O3 LLVM optimizations (default in -O2+)
- --llvm-lto <level> 0: No LLVM LTO (default in -O2 and below)
- 1: LLVM LTO (default in -O3)
+ --llvm-lto <level> 0: No LLVM LTO (default)
+ 1: LLVM LTO is performed
+ 2: We combine all the bitcode and run LLVM opt -O3
+ on that (which optimizes across modules, but is
+ not the same as normal LTO), but do not do normal
+ LTO
+ 3: We do both 2 and then 1
Note: If LLVM optimizations are not run
- (see --llvm-opts), setting this to 1 has no
+ (see --llvm-opts), setting this has no
effect.
--closure <on> 0: No closure compiler (default in -O2 and below)
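Illustration (not part of the patch): a minimal Python sketch of how the four --llvm-lto levels documented above translate into work at link time. It mirrors the dispatch added further down in this diff (the llvm_lto >= 2 pre-LTO opt run and the -std-link-opts link step), but only prints what would run; the helper is hypothetical.

    def apply_llvm_lto(bitcode, llvm_lto):
        # levels 2 and 3: run opt -O3 over the combined bitcode, which optimizes
        # across modules but is not normal LTO
        if llvm_lto >= 2:
            print('opt -O3 %s' % bitcode)
        # levels 1 and 3: additionally run the normal link-time optimization passes
        if llvm_lto in (1, 3):
            print('opt -std-link-opts (with manual internalize) %s' % bitcode)

    apply_llvm_lto('a.out.bc', 3)  # level 3 = both steps above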
@@ -731,6 +734,14 @@ try:
settings_changes = []
+ def validate_arg_level(level_string, max_level, err_msg):
+ try:
+ level = int(level_string)
+ assert 0 <= level <= max_level
+ except:
+ raise Exception(err_msg)
+ return level
+
for i in range(len(newargs)):
newargs[i] = newargs[i].strip() # On Windows Vista (and possibly others), excessive spaces in the command line leak into the items in this array, so trim e.g. 'foo.cpp ' -> 'foo.cpp'
if newargs[i].startswith('-O'):
@@ -739,11 +750,7 @@ try:
if requested_level == 's':
requested_level = 2
settings_changes.append('INLINING_LIMIT=50')
- try:
- opt_level = int(requested_level)
- assert 0 <= opt_level <= 3
- except:
- raise Exception('Invalid optimization level: ' + newargs[i])
+ opt_level = validate_arg_level(requested_level, 3, 'Invalid optimization level: ' + newargs[i])
newargs[i] = ''
elif newargs[i].startswith('--llvm-opts'):
check_bad_eq(newargs[i])
@@ -787,12 +794,8 @@ try:
newargs[i+1] = ''
elif newargs[i].startswith('-g'):
requested_level = newargs[i][2:] or '3'
- try:
- debug_level = int(requested_level)
- assert 0 <= debug_level <= 4
- except:
- raise Exception('Invalid debug level: ' + newargs[i])
- newargs[i] = '-g' # discard level for clang args
+ debug_level = validate_arg_level(requested_level, 4, 'Invalid debug level: ' + newargs[i])
+ newargs[i] = '-g' # we'll need this to get LLVM debug info
elif newargs[i] == '--bind':
bind = True
newargs[i] = ''
@@ -881,10 +884,19 @@ try:
newargs = newargs + [default_cxx_std]
if llvm_opts is None: llvm_opts = LLVM_OPT_LEVEL[opt_level]
- if llvm_lto is None: llvm_lto = opt_level >= 3
+ if llvm_lto is None and opt_level >= 3: llvm_lto = 3
if opt_level == 0: debug_level = 4
if closure is None and opt_level == 3: closure = True
+ if llvm_lto is None and bind:
+ logging.debug('running lto for embind') # XXX this is a workaround for a pointer issue
+ llvm_lto = 1
+
+ # TODO: support source maps with js_transform
+ if js_transform and debug_level >= 4:
+ logging.warning('disabling source maps because a js transform is being done')
+ debug_level = 3
+
if DEBUG: start_time = time.time() # done after parsing arguments, which might affect debug state
if closure:
@@ -1050,6 +1062,10 @@ try:
else:
raise Exception('unknown llvm target: ' + str(shared.LLVM_TARGET))
+ if shared.Settings.USE_TYPED_ARRAYS != 2 and llvm_opts > 0:
+ logging.warning('disabling LLVM optimizations, need typed arrays mode 2 for them')
+ llvm_opts = 0
+
## Compile source code to bitcode
logging.debug('compiling to bitcode')
@@ -1089,20 +1105,20 @@ try:
shared.Building.llvm_as(input_file, temp_file)
temp_files.append(temp_file)
- if not LEAVE_INPUTS_RAW: assert len(temp_files) == len(input_files)
+ if not LEAVE_INPUTS_RAW:
+ assert len(temp_files) == len(input_files)
+
+ # Optimize source files
+ if llvm_opts > 0:
+ for i in range(len(input_files)):
+ input_file = input_files[i]
+ if input_files[i].endswith(SOURCE_SUFFIXES):
+ temp_file = temp_files[i]
+ logging.debug('optimizing %s with -O%d' % (input_file, llvm_opts))
+ shared.Building.llvm_opt(temp_file, llvm_opts)
# If we were just asked to generate bitcode, stop there
if final_suffix not in JS_CONTAINING_SUFFIXES:
- if llvm_opts > 0:
- if not os.environ.get('EMCC_OPTIMIZE_NORMALLY'):
- logging.warning('-Ox flags ignored, since not generating JavaScript')
- else:
- for input_file in input_files:
- if input_file.endswith(SOURCE_SUFFIXES):
- logging.debug('optimizing %s with -O%d since EMCC_OPTIMIZE_NORMALLY defined' % (input_file, llvm_opts))
- shared.Building.llvm_opt(in_temp(unsuffixed(uniquename(input_file)) + '.o'), llvm_opts)
- else:
- logging.debug('not optimizing %s despite EMCC_OPTIMIZE_NORMALLY since not source code' % (input_file))
if not specified_target:
for input_file in input_files:
shutil.move(in_temp(unsuffixed(uniquename(input_file)) + '.o'), unsuffixed_basename(input_file) + '.' + final_suffix)
@@ -1134,6 +1150,8 @@ try:
symbols = filter(lambda symbol: symbol not in exclude, symbols)
return set(symbols)
+ lib_opts = ['-O2']
+
# XXX We also need to add libc symbols that use malloc, for example strdup. It's very rare to use just them and not
# a normal malloc symbol (like free, after calling strdup), so we haven't hit this yet, but it is possible.
libc_symbols = read_symbols(shared.path_from_root('system', 'lib', 'libc.symbols'))
@@ -1142,7 +1160,7 @@ try:
libcxx_symbols = read_symbols(shared.path_from_root('system', 'lib', 'libcxx', 'symbols'), exclude=libc_symbols)
libcxxabi_symbols = read_symbols(shared.path_from_root('system', 'lib', 'libcxxabi', 'symbols'), exclude=libc_symbols)
- # XXX we should disable EMCC_DEBUG (and EMCC_OPTIMIZE_NORMALLY?) when building libs, just like in the relooper
+ # XXX we should disable EMCC_DEBUG when building libs, just like in the relooper
def build_libc(lib_filename, files):
o_s = []
@@ -1151,7 +1169,7 @@ try:
musl_internal_includes = shared.path_from_root('system', 'lib', 'libc', 'musl', 'src', 'internal')
for src in files:
o = in_temp(os.path.basename(src) + '.o')
- execute([shared.PYTHON, shared.EMCC, shared.path_from_root('system', 'lib', src), '-o', o, '-I', musl_internal_includes], stdout=stdout, stderr=stderr)
+ execute([shared.PYTHON, shared.EMCC, shared.path_from_root('system', 'lib', src), '-o', o, '-I', musl_internal_includes] + lib_opts, stdout=stdout, stderr=stderr)
o_s.append(o)
if prev_cxx: os.environ['EMMAKEN_CXX'] = prev_cxx
shared.Building.link(o_s, in_temp(lib_filename))
@@ -1162,7 +1180,7 @@ try:
for src in files:
o = in_temp(src + '.o')
srcfile = shared.path_from_root(src_dirname, src)
- execute([shared.PYTHON, shared.EMXX, srcfile, '-o', o, '-std=c++11'], stdout=stdout, stderr=stderr)
+ execute([shared.PYTHON, shared.EMXX, srcfile, '-o', o, '-std=c++11'] + lib_opts, stdout=stdout, stderr=stderr)
o_s.append(o)
shared.Building.link(o_s, in_temp(lib_filename))
return in_temp(lib_filename)
@@ -1409,16 +1427,15 @@ try:
# Optimize, if asked to
if not LEAVE_INPUTS_RAW:
link_opts = [] if debug_level >= 4 else ['-strip-debug'] # remove LLVM debug if we are not asked for it
- if llvm_opts > 0:
- if not os.environ.get('EMCC_OPTIMIZE_NORMALLY'):
- shared.Building.llvm_opt(in_temp(target_basename + '.bc'), llvm_opts)
- if DEBUG: save_intermediate('opt', 'bc')
- # Do LTO in a separate pass to work around LLVM bug XXX (see failure e.g. in cubescript)
- else:
- logging.debug('not running opt because EMCC_OPTIMIZE_NORMALLY was specified, opt should have been run before')
+
+ if llvm_lto >= 2:
+ logging.debug('running LLVM opt -O3 as pre-LTO')
+ shared.Building.llvm_opt(in_temp(target_basename + '.bc'), ['-O3'])
+ if DEBUG: save_intermediate('opt', 'bc')
+
if shared.Building.can_build_standalone():
# If we can LTO, do it before dce, since it opens up dce opportunities
- if llvm_lto and shared.Building.can_use_unsafe_opts():
+ if llvm_lto and llvm_lto != 2 and shared.Building.can_use_unsafe_opts():
if not shared.Building.can_inline(): link_opts.append('-disable-inlining')
# do not internalize in std-link-opts - it ignores internalize-public-api-list - and add a manual internalize
link_opts += ['-disable-internalize'] + shared.Building.get_safe_internalize() + ['-std-link-opts']
@@ -1496,9 +1513,11 @@ try:
final += '.tr.js'
posix = True if not shared.WINDOWS else False
logging.debug('applying transform: %s' % js_transform)
- execute(shlex.split(js_transform, posix=posix) + [os.path.abspath(final)])
+ subprocess.check_call(shlex.split(js_transform, posix=posix) + [os.path.abspath(final)])
if DEBUG: save_intermediate('transformed')
+ js_transform_tempfiles = [final]
+
# It is useful to run several js optimizer passes together, to save on unneeded unparsing/reparsing
js_optimizer_queue = []
def flush_js_optimizer_queue():
@@ -1508,7 +1527,8 @@ try:
if shared.Settings.ASM_JS:
js_optimizer_queue = ['asm'] + js_optimizer_queue
logging.debug('applying js optimization passes: %s', js_optimizer_queue)
- final = shared.Building.js_optimizer(final, js_optimizer_queue, jcache)
+ final = shared.Building.js_optimizer(final, js_optimizer_queue, jcache, debug_level >= 4)
+ js_transform_tempfiles.append(final)
if DEBUG: save_intermediate('js_opts')
else:
for name in js_optimizer_queue:
@@ -1516,7 +1536,8 @@ try:
if shared.Settings.ASM_JS:
passes = ['asm'] + passes
logging.debug('applying js optimization pass: %s', passes)
- final = shared.Building.js_optimizer(final, passes, jcache)
+ final = shared.Building.js_optimizer(final, passes, jcache, debug_level >= 4)
+ js_transform_tempfiles.append(final)
save_intermediate(name)
js_optimizer_queue = []
@@ -1525,7 +1546,8 @@ try:
if DEBUG == '2':
# Clean up the syntax a bit
- final = shared.Building.js_optimizer(final, [], jcache)
+ final = shared.Building.js_optimizer(final, [], jcache, debug_level >= 4)
+ js_transform_tempfiles.append(final)
if DEBUG: save_intermediate('pretty')
def get_eliminate():
@@ -1543,6 +1565,8 @@ try:
flush_js_optimizer_queue()
logging.debug('running closure')
+ # no need to add this to js_transform_tempfiles, because closure and
+ # debug_level > 0 are never simultaneously true
final = shared.Building.closure_compiler(final)
if DEBUG: save_intermediate('closure')
@@ -1590,6 +1614,7 @@ try:
src = re.sub('/\* memory initializer \*/ allocate\(([\d,\.concat\(\)\[\]\\n ]+)"i8", ALLOC_NONE, Runtime\.GLOBAL_BASE\)', repl, src, count=1)
open(final + '.mem.js', 'w').write(src)
final += '.mem.js'
+ js_transform_tempfiles[-1] = final # simple text substitution preserves comment line number mappings
if DEBUG:
if os.path.exists(memfile):
save_intermediate('meminit')
@@ -1597,12 +1622,25 @@ try:
else:
logging.debug('did not see memory initialization')
+ def generate_source_map(map_file_base_name, offset=0):
+ jsrun.run_js(shared.path_from_root('tools', 'source-maps', 'sourcemapper.js'),
+ shared.NODE_JS, js_transform_tempfiles +
+ ['--sourceRoot', os.getcwd(),
+ '--mapFileBaseName', map_file_base_name,
+ '--offset', str(offset)])
+
# If we were asked to also generate HTML, do that
if final_suffix == 'html':
logging.debug('generating HTML')
shell = open(shell_path).read()
html = open(target, 'w')
if not Compression.on:
+ if debug_level >= 4:
+ match = re.match('.*?<script[^>]*>{{{ SCRIPT_CODE }}}</script>', shell,
+ re.DOTALL)
+ if match is None:
+ raise RuntimeError('Could not find script insertion point')
+ generate_source_map(target, match.group().count('\n'))
html.write(shell.replace('{{{ SCRIPT_CODE }}}', open(final).read()))
else:
# Compress the main code
@@ -1673,6 +1711,8 @@ try:
from tools.split import split_javascript_file
split_javascript_file(final, unsuffixed(target), split_js_file)
else:
+ if debug_level >= 4: generate_source_map(target)
+
# copy final JS to output
shutil.move(final, target)
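Illustration (not part of the patch): roughly what the generate_source_map() helper added above amounts to when run by hand. The flags mirror the call in the hunk; the intermediate JS file names are hypothetical, and the exact sourcemapper.js CLI is assumed to match that call.

    import os, subprocess

    # hypothetical intermediate JS files; emcc collects the real ones in
    # js_transform_tempfiles so line numbers can be correlated across every stage
    js_stages = ['a.out.tr.js', 'a.out.tr.js.jo.js']

    subprocess.check_call(['node', 'tools/source-maps/sourcemapper.js'] + js_stages + [
        '--sourceRoot', os.getcwd(),      # root against which the original sources are resolved
        '--mapFileBaseName', 'a.out.js',  # per the tests below, this should yield a.out.js.map
        '--offset', '0'])                 # for HTML output, emcc passes the number of newlines
                                          # preceding the {{{ SCRIPT_CODE }}} insertion point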
diff --git a/src/jsifier.js b/src/jsifier.js
index ac6c259b..14c9cfbe 100644
--- a/src/jsifier.js
+++ b/src/jsifier.js
@@ -285,42 +285,60 @@ function JSify(data, functionsOnly, givenFunctions) {
index = makeGlobalUse(item.ident); // index !== null indicates we are indexing this
allocator = 'ALLOC_NONE';
}
- if (item.external) {
- if (Runtime.isNumberType(item.type) || isPointerType(item.type)) {
- constant = zeros(Runtime.getNativeFieldSize(item.type));
- } else {
- constant = makeEmptyStruct(item.type);
+
+ if (isBSS(item)) {
+ var length = calcAllocatedSize(item.type);
+ length = Runtime.alignMemory(length);
+
+ // If using indexed globals, go ahead and early out (no need to explicitly
+ // initialize).
+ if (!NAMED_GLOBALS) {
+ return ret;
+ }
+ // If using named globals, we can at least shorten the call to allocate by
+ // passing an integer representing the size of memory to alloc instead of
+ // an array of 0s of size length.
+ else {
+ constant = length;
}
} else {
- constant = parseConst(item.value, item.type, item.ident);
- }
- assert(typeof constant === 'object');//, [typeof constant, JSON.stringify(constant), item.external]);
-
- // This is a flattened object. We need to find its idents, so they can be assigned to later
- constant.forEach(function(value, i) {
- if (needsPostSet(value)) { // ident, or expression containing an ident
- ret.push({
- intertype: 'GlobalVariablePostSet',
- JS: makeSetValue(makeGlobalUse(item.ident), i, value, 'i32', false, true) + ';' // ignore=true, since e.g. rtti and statics cause lots of safe_heap errors
- });
- constant[i] = '0';
+ if (item.external) {
+ if (Runtime.isNumberType(item.type) || isPointerType(item.type)) {
+ constant = zeros(Runtime.getNativeFieldSize(item.type));
+ } else {
+ constant = makeEmptyStruct(item.type);
+ }
+ } else {
+ constant = parseConst(item.value, item.type, item.ident);
}
- });
+ assert(typeof constant === 'object');//, [typeof constant, JSON.stringify(constant), item.external]);
+
+ // This is a flattened object. We need to find its idents, so they can be assigned to later
+ constant.forEach(function(value, i) {
+ if (needsPostSet(value)) { // ident, or expression containing an ident
+ ret.push({
+ intertype: 'GlobalVariablePostSet',
+ JS: makeSetValue(makeGlobalUse(item.ident), i, value, 'i32', false, true) + ';' // ignore=true, since e.g. rtti and statics cause lots of safe_heap errors
+ });
+ constant[i] = '0';
+ }
+ });
- if (item.external) {
- // External variables in shared libraries should not be declared as
- // they would shadow similarly-named globals in the parent, so do nothing here.
- if (BUILD_AS_SHARED_LIB) return ret;
- // Library items need us to emit something, but everything else requires nothing.
- if (!LibraryManager.library[item.ident.slice(1)]) return ret;
- }
+ if (item.external) {
+ // External variables in shared libraries should not be declared as
+ // they would shadow similarly-named globals in the parent, so do nothing here.
+ if (BUILD_AS_SHARED_LIB) return ret;
+ // Library items need us to emit something, but everything else requires nothing.
+ if (!LibraryManager.library[item.ident.slice(1)]) return ret;
+ }
- // ensure alignment
- constant = constant.concat(zeros(Runtime.alignMemory(constant.length) - constant.length));
+ // ensure alignment
+ constant = constant.concat(zeros(Runtime.alignMemory(constant.length) - constant.length));
- // Special case: class vtables. We make sure they are null-terminated, to allow easy runtime operations
- if (item.ident.substr(0, 5) == '__ZTV') {
- constant = constant.concat(zeros(Runtime.alignMemory(QUANTUM_SIZE)));
+ // Special case: class vtables. We make sure they are null-terminated, to allow easy runtime operations
+ if (item.ident.substr(0, 5) == '__ZTV') {
+ constant = constant.concat(zeros(Runtime.alignMemory(QUANTUM_SIZE)));
+ }
}
// NOTE: This is the only place that could potentially create static
@@ -695,7 +713,9 @@ function JSify(data, functionsOnly, givenFunctions) {
}
}
i++;
- return JS + (Debugging.on ? Debugging.getComment(line.lineNum) : '');
+ // invoke instructions span two lines, and the debug info is located
+ // on the second line, hence the +1
+ return JS + (Debugging.on ? Debugging.getComment(line.lineNum + (line.intertype === 'invoke' ? 1 : 0)) : '');
})
.join('\n')
.split('\n') // some lines include line breaks
@@ -1566,7 +1586,7 @@ function JSify(data, functionsOnly, givenFunctions) {
print('STATICTOP = STATIC_BASE + ' + Runtime.alignMemory(Variables.nextIndexedOffset) + ';\n');
}
var generated = itemsDict.function.concat(itemsDict.type).concat(itemsDict.GlobalVariableStub).concat(itemsDict.GlobalVariable);
- print(generated.map(function(item) { return item.JS }).join('\n'));
+ print(generated.map(function(item) { return item.JS; }).join('\n'));
if (phase == 'pre') {
if (memoryInitialization.length > 0) {
diff --git a/src/parseTools.js b/src/parseTools.js
index 0b83a12b..babb2692 100644
--- a/src/parseTools.js
+++ b/src/parseTools.js
@@ -467,6 +467,17 @@ function isIndexableGlobal(ident) {
return !data.alias && !data.external;
}
+function isBSS(item) {
+ if (!USE_BSS) {
+ return false;
+ }
+
+ // return true if a global is uninitialized or initialized to 0
+ return item.external ||
+ (item.value && item.value.intertype === 'emptystruct') ||
+ (item.value && item.value.value !== undefined && item.value.value === '0');
+}
+
function makeGlobalDef(ident) {
if (!NAMED_GLOBALS && isIndexableGlobal(ident)) return '';
return 'var ' + ident + ';';
@@ -490,7 +501,10 @@ function sortGlobals(globals) {
ks.sort();
var inv = invertArray(ks);
return values(globals).sort(function(a, b) {
- return inv[b.ident] - inv[a.ident];
+ // sort globals based on if they need to be explicitly initialized or not (moving
+ // values that don't need to be to the end of the array). if equal, sort by name.
+ return (Number(isBSS(a)) - Number(isBSS(b))) ||
+ (inv[b.ident] - inv[a.ident]);
});
}
diff --git a/src/settings.js b/src/settings.js
index 3a91b488..4b6f033b 100644
--- a/src/settings.js
+++ b/src/settings.js
@@ -67,9 +67,9 @@ var RELOOP = 0; // Recreate js native loops from llvm data
var RELOOPER = 'relooper.js'; // Loads the relooper from this path relative to compiler.js
var USE_TYPED_ARRAYS = 2; // Use typed arrays for the heap. See https://github.com/kripken/emscripten/wiki/Code-Generation-Modes/
- // 0 means no typed arrays are used.
+ // 0 means no typed arrays are used. This mode disallows LLVM optimizations
// 1 has two heaps, IHEAP (int32) and FHEAP (double),
- // and addresses there are a match for normal addresses. This is deprecated.
+ // and addresses there are a match for normal addresses. This mode disallows LLVM optimizations.
// 2 is a single heap, accessible through views as int8, int32, etc. This is
// the recommended mode both for performance and for compatibility.
var USE_FHEAP = 1; // Relevant in USE_TYPED_ARRAYS == 1. If this is disabled, only IHEAP will be used, and FHEAP
@@ -236,6 +236,11 @@ var FS_LOG = 0; // Log all FS operations. This is especially helpful when you'r
// a new project and want to see a list of file system operations happening
// so that you can create a virtual file system with all of the required files.
+var USE_BSS = 1; // https://en.wikipedia.org/wiki/.bss
+ // When enabled, 0-initialized globals are sorted to the end of the globals list,
+ // enabling us to not explicitly store the initialization value for each 0 byte.
+ // This significantly lowers the memory initialization array size.
+
var NAMED_GLOBALS = 0; // If 1, we use global variables for globals. Otherwise
// they are referred to by a base plus an offset (called an indexed global),
// saving global variables but adding runtime overhead.
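Illustration (not part of the patch): a toy Python version of the USE_BSS idea described above and implemented by isBSS()/sortGlobals() in src/parseTools.js. Zero-initialized globals are sorted to the end of the layout, so the explicit memory initializer only has to cover the leading, non-zero data; the globals below are made up.

    globals_ = [('a', [5]), ('b', [0, 0]), ('c', [7, 7]), ('d', [0])]

    def is_bss(init):
        return all(v == 0 for v in init)  # uninitialized / all-zero globals

    # stable sort: non-BSS globals keep their order, BSS globals move to the end
    ordered = sorted(globals_, key=lambda g: is_bss(g[1]))
    meminit = [v for _, init in ordered if not is_bss(init) for v in init]

    print([name for name, _ in ordered])  # ['a', 'c', 'b', 'd']
    print(meminit)                        # [5, 7, 7] -- nothing stored for b and d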
diff --git a/src/shell.html b/src/shell.html
index f7eb9e1f..00765271 100644
--- a/src/shell.html
+++ b/src/shell.html
@@ -87,10 +87,6 @@
};
Module.setStatus('Downloading...');
</script>
- <script type='text/javascript'>
-
- {{{ SCRIPT_CODE }}}
-
- </script>
+ <script type='text/javascript'>{{{ SCRIPT_CODE }}}</script>
</body>
</html>
diff --git a/tests/runner.py b/tests/runner.py
index 2c459f6f..563e6782 100755
--- a/tests/runner.py
+++ b/tests/runner.py
@@ -170,16 +170,13 @@ class RunnerCore(unittest.TestCase):
post1 = post_build
post2 = None
- def run_post(post):
- if not post: return
- exec post in locals()
- shutil.copyfile(filename + '.o.js', filename + '.o.js.prepost.js')
- process(filename + '.o.js')
-
if self.emcc_args is None:
Building.emscripten(filename, append_ext=True, extra_args=extra_emscripten_args)
- run_post(post1)
- run_post(post2)
+ if post1:
+ exec post1 in locals()
+ shutil.copyfile(filename + '.o.js', filename + '.o.js.prepost.js')
+ process(filename + '.o.js')
+ if post2: post2(filename + '.o.js')
else:
transform_args = []
if post1:
@@ -196,7 +193,7 @@ process(sys.argv[1])
transform.close()
transform_args = ['--js-transform', "%s %s" % (PYTHON, transform_filename)]
Building.emcc(filename + '.o.ll', Settings.serialize() + self.emcc_args + transform_args + Building.COMPILER_TEST_OPTS, filename + '.o.js')
- run_post(post2)
+ if post2: post2(filename + '.o.js')
# Build JavaScript code from source code
def build(self, src, dirname, filename, output_processor=None, main_file=None, additional_files=[], libraries=[], includes=[], build_ll_hook=None, extra_emscripten_args=[], post_build=None):
@@ -233,11 +230,11 @@ process(sys.argv[1])
os.remove(f + '.o')
except:
pass
- args = [PYTHON, EMCC] + Building.COMPILER_TEST_OPTS + \
+ args = [PYTHON, EMCC] + Building.COMPILER_TEST_OPTS + Settings.serialize() + \
['-I', dirname, '-I', os.path.join(dirname, 'include')] + \
map(lambda include: '-I' + include, includes) + \
['-c', f, '-o', f + '.o']
- output = Popen(args, stdout=PIPE, stderr=self.stderr_redirect).communicate()[0]
+ output = Popen(args, stdout=PIPE, stderr=self.stderr_redirect if not DEBUG else None).communicate()[0]
assert os.path.exists(f + '.o'), 'Source compilation error: ' + output
# Link all files
@@ -438,7 +435,7 @@ process(sys.argv[1])
sys.argv = map(lambda arg: arg if not arg.startswith('test_') else 'default.' + arg, sys.argv)
-test_modes = ['default', 'o1', 'o2', 'asm1', 'asm2', 'asm2g', 'asm2x86', 's_0_0', 's_0_1', 's_1_0', 's_1_1']
+test_modes = ['default', 'o1', 'o2', 'asm1', 'asm2', 'asm2g', 'asm2x86', 's_0_0', 's_0_1']
test_index = 0
@@ -2107,12 +2104,12 @@ Succeeded!
}
return int(&x); // both for the number, and forces x to not be nativized
}
- int main()
+ int main(int argc, char **argv)
{
// We should get the same value for the first and last - stack has unwound
- int x1 = test(0);
+ int x1 = test(argc - 2);
int x2 = test(100);
- int x3 = test(0);
+ int x3 = test((argc - 2) / 4);
printf("*%d,%d*\\n", x3-x1, x2 != x1);
return 0;
}
@@ -4082,7 +4079,7 @@ def process(filename):
#include <assert.h>
#include "emscripten.h"
- int main()
+ int main(int argc, char **argv)
{
char *buf1 = (char*)malloc(100);
char *data1 = "hello";
@@ -4096,6 +4093,8 @@ def process(filename):
int totalMemory = emscripten_run_script_int("TOTAL_MEMORY");
char *buf3 = (char*)malloc(totalMemory+1);
+ buf3[argc] = (int)buf2;
+ if (argc % 7 == 6) printf("%d\n", memcpy(buf3, buf1, argc));
char *buf4 = (char*)malloc(100);
float *buf5 = (float*)malloc(100);
//printf("totalMemory: %d bufs: %d,%d,%d,%d,%d\n", totalMemory, buf1, buf2, buf3, buf4, buf5);
@@ -4233,6 +4232,8 @@ def process(filename):
''', args=['34962', '26214', '35040'])
def test_indirectbr(self):
+ Building.COMPILER_TEST_OPTS = filter(lambda x: x != '-g', Building.COMPILER_TEST_OPTS)
+
src = '''
#include <stdio.h>
int main(void) {
@@ -8220,7 +8221,7 @@ void*:16
if self.run_name == 'o2':
self.emcc_args += ['--closure', '1'] # Use closure here for some additional coverage
- Building.COMPILER_TEST_OPTS = [] # remove -g, so we have one test without it by default
+ Building.COMPILER_TEST_OPTS = filter(lambda x: x != '-g', Building.COMPILER_TEST_OPTS) # remove -g, so we have one test without it by default
if self.emcc_args is None: Settings.SAFE_HEAP = 0 # Has some actual loads of unwritten-to places, in the C++ code...
# Overflows happen in hash loop
@@ -8252,7 +8253,7 @@ void*:16
def test_gcc_unmangler(self):
Settings.NAMED_GLOBALS = 1 # test coverage for this
- Building.COMPILER_TEST_OPTS = ['-I' + path_from_root('third_party')]
+ Building.COMPILER_TEST_OPTS += ['-I' + path_from_root('third_party')]
self.do_run(open(path_from_root('third_party', 'gcc_demangler.c')).read(), '*d_demangle(char const*, int, unsigned int*)*', args=['_ZL10d_demanglePKciPj'])
@@ -8367,6 +8368,8 @@ def process(filename):
force_c=True)
def test_zlib(self):
+ if not Settings.USE_TYPED_ARRAYS == 2: return self.skip('works in general, but cached build will be optimized and fail, so disable this')
+
if Settings.ASM_JS:
self.banned_js_engines = [NODE_JS] # TODO investigate
@@ -8475,6 +8478,8 @@ def process(filename):
def test_openjpeg(self):
if self.emcc_args is None: return self.skip('needs libc for getopt')
+ Building.COMPILER_TEST_OPTS = filter(lambda x: x != '-g', Building.COMPILER_TEST_OPTS) # remove -g, so we have one test without it by default
+
if Settings.USE_TYPED_ARRAYS == 2:
Settings.CORRECT_SIGNS = 1
else:
@@ -8596,6 +8601,7 @@ def process(filename):
def test_python(self):
if self.emcc_args is None: return self.skip('requires emcc')
if Settings.QUANTUM_SIZE == 1: return self.skip('TODO: make this work')
+ if not self.is_le32(): return self.skip('fails on non-le32') # FIXME
#Settings.EXPORTED_FUNCTIONS += ['_PyRun_SimpleStringFlags'] # for the demo
@@ -8695,6 +8701,7 @@ def process(filename):
def test_autodebug(self):
if Building.LLVM_OPTS: return self.skip('LLVM opts mess us up')
+ Building.COMPILER_TEST_OPTS += ['--llvm-opts', '0']
# Run a test that should work, generating some code
self.test_structs()
@@ -8973,13 +8980,11 @@ def process(filename):
self.do_run(src, output)
shutil.move(self.in_dir('src.cpp.o.js'), self.in_dir('normal.js'))
- self.emcc_args.append('-s')
- self.emcc_args.append('ASM_JS=0')
+ Settings.ASM_JS = 0
Settings.PGO = 1
self.do_run(src, output)
+ Settings.ASM_JS = 1
Settings.PGO = 0
- self.emcc_args.append('-s')
- self.emcc_args.append('ASM_JS=1')
shutil.move(self.in_dir('src.cpp.o.js'), self.in_dir('pgo.js'))
pgo_output = run_js(self.in_dir('pgo.js')).split('\n')[1]
@@ -9215,97 +9220,92 @@ def process(filename):
src.close()
'''
- post3 = '''
-def process(filename):
- script_src_2 = \'\'\'
- var sme = new Module.Parent(42);
- sme.mulVal(2);
- Module.print('*')
- Module.print(sme.getVal());
-
- Module.print('c1');
-
- var c1 = new Module.Child1();
- Module.print(c1.getVal());
- c1.mulVal(2);
- Module.print(c1.getVal());
- Module.print(c1.getValSqr());
- Module.print(c1.getValSqr(3));
- Module.print(c1.getValTimes()); // default argument should be 1
- Module.print(c1.getValTimes(2));
-
- Module.print('c1 v2');
-
- c1 = new Module.Child1(8); // now with a parameter, we should handle the overloading automatically and properly and use constructor #2
- Module.print(c1.getVal());
- c1.mulVal(2);
- Module.print(c1.getVal());
- Module.print(c1.getValSqr());
- Module.print(c1.getValSqr(3));
-
- Module.print('c2')
-
- var c2 = new Module.Child2();
- Module.print(c2.getVal());
- c2.mulVal(2);
- Module.print(c2.getVal());
- Module.print(c2.getValCube());
- var succeeded;
- try {
- succeeded = 0;
- Module.print(c2.doSomethingSecret()); // should fail since private
- succeeded = 1;
- } catch(e) {}
- Module.print(succeeded);
- try {
- succeeded = 0;
- Module.print(c2.getValSqr()); // function from the other class
- succeeded = 1;
- } catch(e) {}
- Module.print(succeeded);
- try {
- succeeded = 0;
- c2.getValCube(); // sanity
- succeeded = 1;
- } catch(e) {}
- Module.print(succeeded);
-
- Module.Child2.prototype.printStatic(); // static calls go through the prototype
-
- // virtual function
- c2.virtualFunc();
- Module.Child2.prototype.runVirtualFunc(c2);
- c2.virtualFunc2();
-
-''' + ('''
- // extend the class from JS
- var c3 = new Module.Child2;
- Module.customizeVTable(c3, [{
- original: Module.Child2.prototype.virtualFunc,
- replacement: function() {
- Module.print('*js virtualf replacement*');
- }
- }, {
- original: Module.Child2.prototype.virtualFunc2,
- replacement: function() {
- Module.print('*js virtualf2 replacement*');
- }
- }]);
- c3.virtualFunc();
- Module.Child2.prototype.runVirtualFunc(c3);
- c3.virtualFunc2();
-
- c2.virtualFunc(); // original should remain the same
- Module.Child2.prototype.runVirtualFunc(c2);
- c2.virtualFunc2();
-''') + '''
-
- Module.print('*ok*');
- \'\'\'
- src = open(filename, 'a')
- src.write(script_src_2 + '\\n')
- src.close()
-'''
+ def post3(filename):
+ script_src_2 = '''
+ var sme = new Module.Parent(42);
+ sme.mulVal(2);
+ Module.print('*')
+ Module.print(sme.getVal());
+
+ Module.print('c1');
+
+ var c1 = new Module.Child1();
+ Module.print(c1.getVal());
+ c1.mulVal(2);
+ Module.print(c1.getVal());
+ Module.print(c1.getValSqr());
+ Module.print(c1.getValSqr(3));
+ Module.print(c1.getValTimes()); // default argument should be 1
+ Module.print(c1.getValTimes(2));
+
+ Module.print('c1 v2');
+
+ c1 = new Module.Child1(8); // now with a parameter, we should handle the overloading automatically and properly and use constructor #2
+ Module.print(c1.getVal());
+ c1.mulVal(2);
+ Module.print(c1.getVal());
+ Module.print(c1.getValSqr());
+ Module.print(c1.getValSqr(3));
+
+ Module.print('c2')
+
+ var c2 = new Module.Child2();
+ Module.print(c2.getVal());
+ c2.mulVal(2);
+ Module.print(c2.getVal());
+ Module.print(c2.getValCube());
+ var succeeded;
+ try {
+ succeeded = 0;
+ Module.print(c2.doSomethingSecret()); // should fail since private
+ succeeded = 1;
+ } catch(e) {}
+ Module.print(succeeded);
+ try {
+ succeeded = 0;
+ Module.print(c2.getValSqr()); // function from the other class
+ succeeded = 1;
+ } catch(e) {}
+ Module.print(succeeded);
+ try {
+ succeeded = 0;
+ c2.getValCube(); // sanity
+ succeeded = 1;
+ } catch(e) {}
+ Module.print(succeeded);
+
+ Module.Child2.prototype.printStatic(); // static calls go through the prototype
+
+ // virtual function
+ c2.virtualFunc();
+ Module.Child2.prototype.runVirtualFunc(c2);
+ c2.virtualFunc2();
+
+ // extend the class from JS
+ var c3 = new Module.Child2;
+ Module.customizeVTable(c3, [{
+ original: Module.Child2.prototype.virtualFunc,
+ replacement: function() {
+ Module.print('*js virtualf replacement*');
+ }
+ }, {
+ original: Module.Child2.prototype.virtualFunc2,
+ replacement: function() {
+ Module.print('*js virtualf2 replacement*');
+ }
+ }]);
+ c3.virtualFunc();
+ Module.Child2.prototype.runVirtualFunc(c3);
+ c3.virtualFunc2();
+
+ c2.virtualFunc(); // original should remain the same
+ Module.Child2.prototype.runVirtualFunc(c2);
+ c2.virtualFunc2();
+ Module.print('*ok*');
+ '''
+ src = open(filename, 'a')
+ src.write(script_src_2 + '\n')
+ src.close()
Settings.RESERVED_FUNCTION_POINTERS = 20
@@ -9349,7 +9349,7 @@ Child2:9
*virtualf*
*virtualf2*''') + '''
*ok*
-''', post_build=[post2, post3])
+''', post_build=(post2, post3))
def test_scriptaclass_2(self):
if self.emcc_args is None: return self.skip('requires emcc')
@@ -9578,25 +9578,138 @@ def process(filename):
}
'''
try:
- post = r'''
-def process(filename):
- lines = open(filename, 'r').readlines()
- lines = filter(lambda line: '___assert_fail(' in line or '___assert_func(' in line, lines)
- found_line_num = any(('//@line 7 "' in line) for line in lines)
- found_filename = any(('src.cpp"\n' in line) for line in lines)
- assert found_line_num, 'Must have debug info with the line number'
- assert found_filename, 'Must have debug info with the filename'
-'''
- self.do_run(src, '*nothingatall*', post_build=post)
+ self.do_run(src, '*nothingatall*')
except Exception, e:
# This test *should* fail
- assert 'Assertion failed' in str(e), str(e)
+ assert 'Assertion failed: x < 15' in str(e), str(e)
+
+ lines = open('src.cpp.o.js', 'r').readlines()
+ lines = filter(lambda line: '___assert_fail(' in line or '___assert_func(' in line, lines)
+ found_line_num = any(('//@line 7 "' in line) for line in lines)
+ found_filename = any(('src.cpp"\n' in line) for line in lines)
+ assert found_line_num, 'Must have debug info with the line number'
+ assert found_filename, 'Must have debug info with the filename'
+
+ def test_source_map(self):
+ if Settings.USE_TYPED_ARRAYS != 2: return self.skip("doesn't pass without typed arrays")
+ if NODE_JS not in JS_ENGINES: return self.skip('sourcemapper requires Node to run')
+ if '-g' not in Building.COMPILER_TEST_OPTS: Building.COMPILER_TEST_OPTS.append('-g')
+
+ src = '''
+ #include <stdio.h>
+ #include <assert.h>
+
+ __attribute__((noinline)) int foo() {
+ printf("hi"); // line 6
+ return 1; // line 7
+ }
+
+ int main() {
+ printf("%d", foo()); // line 11
+ return 0; // line 12
+ }
+ '''
+
+ dirname = self.get_dir()
+ src_filename = os.path.join(dirname, 'src.cpp')
+ out_filename = os.path.join(dirname, 'a.out.js')
+ no_maps_filename = os.path.join(dirname, 'no-maps.out.js')
+
+ with open(src_filename, 'w') as f: f.write(src)
+ assert '-g4' not in Building.COMPILER_TEST_OPTS
+ Building.emcc(src_filename, Settings.serialize() + self.emcc_args +
+ Building.COMPILER_TEST_OPTS, out_filename)
+ # the file name may find its way into the generated code, so make sure we
+ # can do an apples-to-apples comparison by compiling with the same file name
+ shutil.move(out_filename, no_maps_filename)
+ with open(no_maps_filename) as f: no_maps_file = f.read()
+ no_maps_file = re.sub(' *//@.*$', '', no_maps_file, flags=re.MULTILINE)
+ Building.COMPILER_TEST_OPTS.append('-g4')
+
+ def build_and_check():
+ import json
+ Building.emcc(src_filename, Settings.serialize() + self.emcc_args +
+ Building.COMPILER_TEST_OPTS, out_filename, stderr=PIPE)
+ with open(out_filename) as f: out_file = f.read()
+ # after removing the @line and @sourceMappingURL comments, the build
+ # result should be identical to the non-source-mapped debug version.
+ # this is worth checking because the parser AST swaps strings for token
+ # objects when generating source maps, so we want to make sure the
+ # optimizer can deal with both types.
+ out_file = re.sub(' *//@.*$', '', out_file, flags=re.MULTILINE)
+ self.assertIdentical(no_maps_file, out_file)
+ map_filename = out_filename + '.map'
+ data = json.load(open(map_filename, 'r'))
+ self.assertIdentical(out_filename, data['file'])
+ self.assertIdentical(src_filename, data['sources'][0])
+ self.assertIdentical(src, data['sourcesContent'][0])
+ mappings = json.loads(jsrun.run_js(
+ path_from_root('tools', 'source-maps', 'sourcemap2json.js'),
+ tools.shared.NODE_JS, [map_filename]))
+ seen_lines = set()
+ for m in mappings:
+ self.assertIdentical(src_filename, m['source'])
+ seen_lines.add(m['originalLine'])
+ # ensure that all the 'meaningful' lines in the original code get mapped
+ assert seen_lines.issuperset([6, 7, 11, 12])
+
+ # EMCC_DEBUG=2 causes lots of intermediate files to be written, and so
+ # serves as a stress test for source maps because it needs to correlate
+ # line numbers across all those files.
+ old_emcc_debug = os.environ.get('EMCC_DEBUG', None)
+ os.environ.pop('EMCC_DEBUG', None)
+ try:
+ build_and_check()
+ os.environ['EMCC_DEBUG'] = '2'
+ build_and_check()
+ finally:
+ if old_emcc_debug is not None:
+ os.environ['EMCC_DEBUG'] = old_emcc_debug
+ else:
+ os.environ.pop('EMCC_DEBUG', None)
+
+ def test_exception_source_map(self):
+ if Settings.USE_TYPED_ARRAYS != 2: return self.skip("doesn't pass without typed arrays")
+ if '-g4' not in Building.COMPILER_TEST_OPTS: Building.COMPILER_TEST_OPTS.append('-g4')
+ if NODE_JS not in JS_ENGINES: return self.skip('sourcemapper requires Node to run')
+
+ src = '''
+ #include <stdio.h>
+
+ __attribute__((noinline)) void foo(int i) {
+ if (i < 10) throw i; // line 5
+ }
+
+ int main() {
+ int i;
+ scanf("%d", &i);
+ foo(i);
+ return 0;
+ }
+ '''
+
+ def post(filename):
+ import json
+ map_filename = filename + '.map'
+ mappings = json.loads(jsrun.run_js(
+ path_from_root('tools', 'source-maps', 'sourcemap2json.js'),
+ tools.shared.NODE_JS, [map_filename]))
+ with open(filename) as f: lines = f.readlines()
+ for m in mappings:
+ if m['originalLine'] == 5 and '__cxa_throw' in lines[m['generatedLine']]:
+ return
+ assert False, 'Must label throw statements with line numbers'
+
+ dirname = self.get_dir()
+ self.build(src, dirname, os.path.join(dirname, 'src.cpp'), post_build=(None, post))
def test_linespecific(self):
if Settings.ASM_JS: return self.skip('asm always has corrections on')
if '-g' not in Building.COMPILER_TEST_OPTS: Building.COMPILER_TEST_OPTS.append('-g')
- if self.emcc_args: self.emcc_args += ['--llvm-opts', '0'] # llvm full opts make the expected failures here not happen
+ if self.emcc_args:
+ self.emcc_args += ['--llvm-opts', '0'] # llvm full opts make the expected failures here not happen
+ Building.COMPILER_TEST_OPTS += ['--llvm-opts', '0']
Settings.CHECK_SIGNS = 0
Settings.CHECK_OVERFLOWS = 0
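Illustration (not part of the patch): the shape of the data sourcemap2json.js hands back to the tests above. The field names are inferred from how test_source_map and test_exception_source_map use them (they look like the source-map library's eachMapping records), so treat the exact keys as an assumption; the file names here are hypothetical.

    import json, subprocess

    raw = subprocess.check_output(['node', 'tools/source-maps/sourcemap2json.js', 'a.out.js.map'])
    mappings = json.loads(raw)
    # each record is expected to look roughly like:
    #   {'source': '/abs/path/src.cpp', 'originalLine': 7, 'originalColumn': 2,
    #    'generatedLine': 1234, 'generatedColumn': 0}
    hit = set(m['originalLine'] for m in mappings if m['source'].endswith('src.cpp'))
    assert hit.issuperset([6, 7, 11, 12])  # the 'meaningful' lines of the test program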
@@ -9902,7 +10015,7 @@ finalizing 3 (global == 0)
''')
# Generate tests for everything
- def make_run(fullname, name=-1, compiler=-1, llvm_opts=0, embetter=0, quantum_size=0, typed_arrays=0, emcc_args=None, env='{}'):
+ def make_run(fullname, name=-1, compiler=-1, embetter=0, quantum_size=0, typed_arrays=0, emcc_args=None, env='{}'):
exec('''
class %s(T):
run_name = '%s'
@@ -9937,9 +10050,18 @@ class %s(T):
Building.LLVM_OPTS = 0
if '-O2' in self.emcc_args:
Building.COMPILER_TEST_OPTS = [] # remove -g in -O2 tests, for more coverage
+ #Building.COMPILER_TEST_OPTS += self.emcc_args
+ for arg in self.emcc_args:
+ if arg.startswith('-O'):
+ Building.COMPILER_TEST_OPTS.append(arg) # so bitcode is optimized too, this is for cpp to ll
+ else:
+ try:
+ key, value = arg.split('=')
+ Settings[key] = value # forward -s K=V
+ except:
+ pass
return
- llvm_opts = %d # 1 is yes, 2 is yes and unsafe
embetter = %d
quantum_size = %d
# TODO: Move much of these to a init() function in shared.py, and reuse that
@@ -9949,14 +10071,13 @@ class %s(T):
Settings.MICRO_OPTS = embetter
Settings.QUANTUM_SIZE = quantum_size
Settings.ASSERTIONS = 1-embetter
- Settings.SAFE_HEAP = 1-(embetter and llvm_opts)
- Building.LLVM_OPTS = llvm_opts
- Settings.CHECK_OVERFLOWS = 1-(embetter or llvm_opts)
- Settings.CORRECT_OVERFLOWS = 1-(embetter and llvm_opts)
+ Settings.SAFE_HEAP = 1-embetter
+ Settings.CHECK_OVERFLOWS = 1-embetter
+ Settings.CORRECT_OVERFLOWS = 1-embetter
Settings.CORRECT_SIGNS = 0
Settings.CORRECT_ROUNDINGS = 0
Settings.CORRECT_OVERFLOWS_LINES = CORRECT_SIGNS_LINES = CORRECT_ROUNDINGS_LINES = SAFE_HEAP_LINES = []
- Settings.CHECK_SIGNS = 0 #1-(embetter or llvm_opts)
+ Settings.CHECK_SIGNS = 0 #1-embetter
Settings.RUNTIME_TYPE_INFO = 0
Settings.DISABLE_EXCEPTION_CATCHING = 0
Settings.INCLUDE_FULL_LIBRARY = 0
@@ -9965,12 +10086,10 @@ class %s(T):
Settings.EMULATE_UNALIGNED_ACCESSES = int(Settings.USE_TYPED_ARRAYS == 2 and Building.LLVM_OPTS == 2)
Settings.DOUBLE_MODE = 1 if Settings.USE_TYPED_ARRAYS and Building.LLVM_OPTS == 0 else 0
Settings.PRECISE_I64_MATH = 0
- Settings.NAMED_GLOBALS = 0 if not (embetter and llvm_opts) else 1
-
- Building.pick_llvm_opts(3)
+ Settings.NAMED_GLOBALS = 0 if not embetter else 1
TT = %s
-''' % (fullname, fullname, env, fullname, fullname, compiler, str(emcc_args), llvm_opts, embetter, quantum_size, typed_arrays, fullname))
+''' % (fullname, fullname, env, fullname, fullname, compiler, str(emcc_args), embetter, quantum_size, typed_arrays, fullname))
return TT
# Make one run with the defaults
@@ -9989,16 +10108,14 @@ TT = %s
exec('''asm2x86 = make_run("asm2x86", compiler=CLANG, emcc_args=["-O2", "-g", "-s", "CHECK_HEAP_ALIGN=1"], env='{"EMCC_LLVM_TARGET": "i386-pc-linux-gnu"}')''')
# Make custom runs with various options
- for compiler, quantum, embetter, typed_arrays, llvm_opts in [
- (CLANG, 4, 0, 0, 0),
- (CLANG, 4, 0, 0, 1),
- (CLANG, 4, 1, 1, 0),
- (CLANG, 4, 1, 1, 1),
+ for compiler, quantum, embetter, typed_arrays in [
+ (CLANG, 4, 0, 0),
+ (CLANG, 4, 1, 1),
]:
- fullname = 's_%d_%d%s%s' % (
- llvm_opts, embetter, '' if quantum == 4 else '_q' + str(quantum), '' if typed_arrays in [0, 1] else '_t' + str(typed_arrays)
+ fullname = 's_0_%d%s%s' % (
+ embetter, '' if quantum == 4 else '_q' + str(quantum), '' if typed_arrays in [0, 1] else '_t' + str(typed_arrays)
)
- exec('%s = make_run(fullname, %r,%r,%d,%d,%d,%d)' % (fullname, fullname, compiler, llvm_opts, embetter, quantum, typed_arrays))
+ exec('%s = make_run(fullname, %r,%r,%d,%d,%d)' % (fullname, fullname, compiler, embetter, quantum, typed_arrays))
del T # T is just a shape for the specific subclasses, we don't test it itself
@@ -10132,23 +10249,25 @@ Options that are modified or new in %s include:
(['-o', 'something.js', '-O3', '-s', 'ASM_JS=0'], 3, None, 1, 1),
# and, test compiling to bitcode first
(['-o', 'something.bc'], 0, [], 0, 0),
- (['-o', 'something.bc'], 0, ['-O0'], 0, 0),
- (['-o', 'something.bc'], 1, ['-O1'], 0, 0),
- (['-o', 'something.bc'], 2, ['-O2'], 0, 0),
- (['-o', 'something.bc'], 3, ['-O3', '-s', 'ASM_JS=0'], 1, 0),
- (['-O1', '-o', 'something.bc'], 0, [], 0, 0), # -Ox is ignored and warned about
+ (['-o', 'something.bc', '-O0'], 0, [], 0, 0),
+ (['-o', 'something.bc', '-O1'], 1, ['-O1'], 0, 0),
+ (['-o', 'something.bc', '-O2'], 2, ['-O2'], 0, 0),
+ (['-o', 'something.bc', '-O3'], 3, ['-O3', '-s', 'ASM_JS=0'], 1, 0),
+ (['-O1', '-o', 'something.bc'], 1, [], 0, 0),
]:
print params, opt_level, bc_params, closure, has_malloc
self.clear()
keep_debug = '-g' in params
- output = Popen([PYTHON, compiler, path_from_root('tests', 'hello_world_loop' + ('_malloc' if has_malloc else '') + '.cpp')] + params,
+ args = [PYTHON, compiler, path_from_root('tests', 'hello_world_loop' + ('_malloc' if has_malloc else '') + '.cpp')] + params
+ print '..', args
+ output = Popen(args,
stdout=PIPE, stderr=PIPE).communicate()
assert len(output[0]) == 0, output[0]
if bc_params is not None:
- if '-O1' in params and 'something.bc' in params:
- assert '-Ox flags ignored, since not generating JavaScript' in output[1]
assert os.path.exists('something.bc'), output[1]
- output = Popen([PYTHON, compiler, 'something.bc', '-o', 'something.js'] + bc_params, stdout=PIPE, stderr=PIPE).communicate()
+ bc_args = [PYTHON, compiler, 'something.bc', '-o', 'something.js'] + bc_params
+ print '....', bc_args
+ output = Popen(bc_args, stdout=PIPE, stderr=PIPE).communicate()
assert os.path.exists('something.js'), output[1]
assert ('Applying some potentially unsafe optimizations!' in output[1]) == (opt_level >= 3), 'unsafe warning should appear in opt >= 3'
self.assertContained('hello, world!', run_js('something.js'))
@@ -11397,64 +11516,6 @@ seeked= file.
code = open('a.out.js').read()
assert 'SAFE_HEAP' in code, 'valid -s option had an effect'
- def test_optimize_normally(self):
- assert not os.environ.get('EMCC_OPTIMIZE_NORMALLY')
- assert not os.environ.get('EMCC_DEBUG')
-
- for optimize_normally in [0, 1]:
- print optimize_normally
- try:
- if optimize_normally: os.environ['EMCC_OPTIMIZE_NORMALLY'] = '1'
- os.environ['EMCC_DEBUG'] = '1'
-
- open(self.in_dir('main.cpp'), 'w').write(r'''
- extern "C" {
- void something();
- }
-
- int main() {
- something();
- return 0;
- }
- ''')
- open(self.in_dir('supp.cpp'), 'w').write(r'''
- #include <stdio.h>
-
- extern "C" {
- void something() {
- printf("yello\n");
- }
- }
- ''')
- out, err = Popen([PYTHON, EMCC, self.in_dir('main.cpp'), '-O2', '-o', 'main.o'], stdout=PIPE, stderr=PIPE).communicate()
- assert ("emcc: LLVM opts: ['-O3']" in err) == optimize_normally
- assert (' with -O3 since EMCC_OPTIMIZE_NORMALLY defined' in err) == optimize_normally
-
- out, err = Popen([PYTHON, EMCC, self.in_dir('supp.cpp'), '-O2', '-o', 'supp.o'], stdout=PIPE, stderr=PIPE).communicate()
- assert ("emcc: LLVM opts: ['-O3']" in err) == optimize_normally
- assert (' with -O3 since EMCC_OPTIMIZE_NORMALLY defined' in err) == optimize_normally
-
- out, err = Popen([PYTHON, EMCC, self.in_dir('main.o'), self.in_dir('supp.o'), '-O2', '-o', 'both.o'], stdout=PIPE, stderr=PIPE).communicate()
- assert "emcc: LLVM opts: ['-O3']" not in err
- assert ' with -O3 since EMCC_OPTIMIZE_NORMALLY defined' not in err
- assert ('despite EMCC_OPTIMIZE_NORMALLY since not source code' in err) == optimize_normally
-
- out, err = Popen([PYTHON, EMCC, self.in_dir('main.cpp'), self.in_dir('supp.cpp'), '-O2', '-o', 'both2.o'], stdout=PIPE, stderr=PIPE).communicate()
- assert ("emcc: LLVM opts: ['-O3']" in err) == optimize_normally
- assert (' with -O3 since EMCC_OPTIMIZE_NORMALLY defined' in err) == optimize_normally
-
- for last in ['both.o', 'both2.o']:
- out, err = Popen([PYTHON, EMCC, self.in_dir('both.o'), '-O2', '-o', last + '.js', '--memory-init-file', '0'], stdout=PIPE, stderr=PIPE).communicate()
- assert ("emcc: LLVM opts: ['-O3']" not in err) == optimize_normally
- assert ' with -O3 since EMCC_OPTIMIZE_NORMALLY defined' not in err
- output = run_js(last + '.js')
- assert 'yello' in output, 'code works ' + err
- assert open('both.o.js').read() == open('both2.o.js').read()
-
- finally:
- if optimize_normally: del os.environ['EMCC_OPTIMIZE_NORMALLY']
- del os.environ['EMCC_DEBUG']
-
def test_jcache_printf(self):
open(self.in_dir('src.cpp'), 'w').write(r'''
#include <stdio.h>
@@ -11705,6 +11766,42 @@ elif 'browser' in str(sys.argv):
self.btest('hello_world_sdl.cpp', reference='htmltest.png',
message='You should see "hello, world!" and a colored cube.')
+ def test_html_source_map(self):
+ if 'test_html_source_map' not in str(sys.argv): return self.skip('''This test
+ requires manual intervention; will not be run unless explicitly requested''')
+ cpp_file = os.path.join(self.get_dir(), 'src.cpp')
+ html_file = os.path.join(self.get_dir(), 'src.html')
+ # browsers will try to 'guess' the corresponding original line if a
+ # generated line is unmapped, so if we want to make sure that our
+ # numbering is correct, we need to provide a couple of 'possible wrong
+ # answers'. thus, we add some printf calls so that the cpp file gets
+ # multiple mapped lines. in other words, if the program consists of a
+ # single 'throw' statement, browsers may just map any thrown exception to
+ # that line, because it will be the only mapped line.
+ with open(cpp_file, 'w') as f:
+ f.write(r'''
+ #include <cstdio>
+
+ int main() {
+ printf("Starting test\n");
+ try {
+ throw 42; // line 8
+ } catch (int e) { }
+ printf("done\n");
+ return 0;
+ }
+ ''')
+ # use relative paths when calling emcc, because file:// URIs can only load
+ # sourceContent when the maps are relative paths
+ Popen([PYTHON, EMCC, 'src.cpp', '-o', 'src.html', '-g4'],
+ cwd=self.get_dir()).communicate()
+ webbrowser.open_new('file://' + html_file)
+ print '''
+Set the debugger to pause on exceptions
+You should see an exception thrown at src.cpp:7.
+Press any key to continue.'''
+ raw_input()
+
def build_native_lzma(self):
lzma_native = path_from_root('third_party', 'lzma.js', 'lzma-native')
if os.path.isfile(lzma_native) and os.access(lzma_native, os.X_OK): return
@@ -13342,9 +13439,10 @@ process(sys.argv[1])
try_delete(final_filename)
output = Popen([PYTHON, EMCC, filename, #'-O3',
'-O2', '-s', 'DOUBLE_MODE=0', '-s', 'PRECISE_I64_MATH=0',
- '--llvm-lto', '1', '--memory-init-file', '0', '--js-transform', 'python hardcode.py',
+ '--llvm-lto', '3', '--memory-init-file', '0', '--js-transform', 'python hardcode.py',
'-s', 'TOTAL_MEMORY=128*1024*1024',
'--closure', '1',
+ #'-g',
'-o', final_filename] + shared_args + emcc_args, stdout=PIPE, stderr=self.stderr_redirect).communicate()
assert os.path.exists(final_filename), 'Failed to compile file: ' + output[0]
@@ -13659,7 +13757,7 @@ process(sys.argv[1])
args = [path_from_root('tests', 'nbody-java', x) for x in os.listdir(path_from_root('tests', 'nbody-java')) if x.endswith('.c')] + \
['-I' + path_from_root('tests', 'nbody-java')]
self.do_benchmark('nbody_java', '', '''Time(s)''',
- force_c=True, emcc_args=args + ['-s', 'PRECISE_I64_MATH=1', '--llvm-lto', '0'], native_args=args + ['-lgc', '-std=c99', '-target', 'x86_64-pc-linux-gnu', '-lm'])
+ force_c=True, emcc_args=args + ['-s', 'PRECISE_I64_MATH=1', '--llvm-lto', '2'], native_args=args + ['-lgc', '-std=c99', '-target', 'x86_64-pc-linux-gnu', '-lm'])
def lua(self, benchmark, expected, output_parser=None, args_processor=None):
shutil.copyfile(path_from_root('tests', 'lua', benchmark + '.lua'), benchmark + '.lua')
@@ -14051,8 +14149,7 @@ fi
try_delete('a.out.js')
basebc_name = os.path.join(TEMP_DIR, 'emscripten_temp', 'emcc-0-basebc.bc')
- dcebc_name1 = os.path.join(TEMP_DIR, 'emscripten_temp', 'emcc-1-linktime.bc')
- dcebc_name2 = os.path.join(TEMP_DIR, 'emscripten_temp', 'emcc-2-linktime.bc')
+ dcebc_name = os.path.join(TEMP_DIR, 'emscripten_temp', 'emcc-1-linktime.bc')
ll_names = [os.path.join(TEMP_DIR, 'emscripten_temp', 'emcc-X-ll.ll').replace('X', str(x)) for x in range(2,5)]
# Building a file that *does* need dlmalloc *should* trigger cache generation, but only the first time
@@ -14060,7 +14157,6 @@ fi
for i in range(3):
print filename, libname, i
self.clear()
- dcebc_name = dcebc_name1 if i == 0 else dcebc_name2
try_delete(basebc_name) # we might need to check this file later
try_delete(dcebc_name) # we might need to check this file later
for ll_name in ll_names: try_delete(ll_name)
@@ -14077,9 +14173,9 @@ fi
assert os.path.exists(os.path.join(EMCC_CACHE, libname + '.bc'))
if libname == 'libcxx':
print os.stat(os.path.join(EMCC_CACHE, libname + '.bc')).st_size, os.stat(basebc_name).st_size, os.stat(dcebc_name).st_size
- assert os.stat(os.path.join(EMCC_CACHE, libname + '.bc')).st_size > 1800000, 'libc++ is big'
- assert os.stat(basebc_name).st_size > 1800000, 'libc++ is indeed big'
- assert os.stat(dcebc_name).st_size < 750000, 'Dead code elimination must remove most of libc++'
+ assert os.stat(os.path.join(EMCC_CACHE, libname + '.bc')).st_size > 1000000, 'libc++ is big'
+ assert os.stat(basebc_name).st_size > 1000000, 'libc++ is indeed big'
+ assert os.stat(dcebc_name).st_size < 500000, 'Dead code elimination must remove most of libc++'
# should only have metadata in -O0, not 1 and 2
if i > 0:
for ll_name in ll_names:
@@ -14166,7 +14262,7 @@ fi
(['--jcache'], 'hello_malloc.cpp', False, True, False, True, False, True, []),
([], 'hello_malloc.cpp', False, False, False, False, False, False, []),
# new, huge file
- ([], 'hello_libcxx.cpp', False, False, False, False, False, False, ('4 chunks',)),
+ ([], 'hello_libcxx.cpp', False, False, False, False, False, False, ('3 chunks',)),
(['--jcache'], 'hello_libcxx.cpp', True, False, True, False, True, False, []),
(['--jcache'], 'hello_libcxx.cpp', False, True, False, True, False, True, []),
([], 'hello_libcxx.cpp', False, False, False, False, False, False, []),
diff --git a/tools/eliminator/node_modules/uglify-js/lib/parse-js.js b/tools/eliminator/node_modules/uglify-js/lib/parse-js.js
index a89163c6..c7c2025f 100644
--- a/tools/eliminator/node_modules/uglify-js/lib/parse-js.js
+++ b/tools/eliminator/node_modules/uglify-js/lib/parse-js.js
@@ -679,7 +679,28 @@ function NodeWithToken(str, start, end) {
this.end = end;
};
-NodeWithToken.prototype.toString = function() { return this.name; };
+NodeWithToken.prototype = {
+ get length() {
+ return this.name.length;
+ },
+ set length(v) {
+ return this.name.length = v;
+ },
+ replace: function() { return this.name.replace.apply(this.name, arguments); },
+ concat: function() { return this.name.concat.apply(this.name, arguments); },
+ indexOf: function() { return this.name.indexOf.apply(this.name, arguments); },
+ lastIndexOf: function() { return this.name.lastIndexOf.apply(this.name, arguments); },
+ lastIndexOf: function() { return this.name.lastIndexOf.apply(this.name, arguments); },
+ match: function() { return this.name.match.apply(this.name, arguments); },
+ search: function() { return this.name.search.apply(this.name, arguments); },
+ slice: function() { return this.name.slice.apply(this.name, arguments); },
+ split: function() { return this.name.split.apply(this.name, arguments); },
+ substr: function() { return this.name.substr.apply(this.name, arguments); },
+ substring: function() { return this.name.substring.apply(this.name, arguments); },
+ toString: function() { return this.name; },
+ toJSON: function() { return this.name; },
+ valueOf: function() { return this.name; },
+};
function parse($TEXT, exigent_mode, embed_tokens) {
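
The hunk above replaces NodeWithToken's lone toString with a full set of String-delegating methods, so code that treats a token node as a plain string keeps working while the node can still carry position data. A minimal standalone sketch of that delegation pattern (FakeNode and its single name field are hypothetical, not part of the patch):

    function FakeNode(name) { this.name = name; }
    FakeNode.prototype = {
      // forward string-ish operations to the wrapped name
      get length() { return this.name.length; },
      indexOf: function() { return this.name.indexOf.apply(this.name, arguments); },
      toString: function() { return this.name; },
      valueOf: function() { return this.name; },
    };
    var n = new FakeNode('label');
    console.log(n.length);        // 5
    console.log(n.indexOf('b'));  // 2
    console.log(n + '!');         // 'label!' -- valueOf makes concatenation work
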
@@ -765,15 +786,12 @@ function parse($TEXT, exigent_mode, embed_tokens) {
return ex;
};
- function add_tokens(str, start, end) {
- return str instanceof NodeWithToken ? str : new NodeWithToken(str, start, end);
- };
-
function maybe_embed_tokens(parser) {
if (embed_tokens) return function() {
var start = S.token;
var ast = parser.apply(this, arguments);
- ast[0] = add_tokens(ast[0], start, prev());
+ ast.start = start;
+ ast.end = prev;
return ast;
};
else return parser;
@@ -1314,7 +1332,7 @@ function characters(str) {
function member(name, array) {
for (var i = array.length; --i >= 0;)
- if (array[i] === name)
+ if (array[i] == name)
return true;
return false;
};
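
With the change to maybe_embed_tokens above, position info is no longer wrapped into ast[0]; instead the AST node (a plain array) gets .start and .end expando properties, and the debug code generation later reads this.start.line. A rough illustration (the token object shape here is assumed for the example):

    var node = ['name', 'x'];              // uglify-style AST node
    node.start = { line: 42, col: 7 };     // assumed token fields
    console.log(node[0]);                  // 'name' -- array behaviour is unchanged
    console.log(node.start.line);          // 42    -- what debug code generation consumes
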
diff --git a/tools/eliminator/node_modules/uglify-js/lib/process.js b/tools/eliminator/node_modules/uglify-js/lib/process.js
index c3abb6f8..e1d692b2 100644
--- a/tools/eliminator/node_modules/uglify-js/lib/process.js
+++ b/tools/eliminator/node_modules/uglify-js/lib/process.js
@@ -64,6 +64,19 @@ var jsp = require("./parse-js"),
PRECEDENCE = jsp.PRECEDENCE,
OPERATORS = jsp.OPERATORS;
+function NodeWithLine(str, line) {
+ this.str = str;
+ this.line = line;
+}
+
+NodeWithLine.prototype = new String();
+NodeWithLine.prototype.toString = function() { return this.str; }
+NodeWithLine.prototype.valueOf = function() { return this.str; }
+NodeWithLine.prototype.lineComment = function() { return " //@line " + this.line; }
+
+// XXX ugly hack
+String.prototype.lineComment = function() { return ""; }
+
/* -----[ helper for AST traversal ]----- */
function ast_walker() {
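
The NodeWithLine wrapper added above lets generated code fragments remember the original line they came from, while the String.prototype.lineComment hack gives plain strings a no-op so callers never need to check which kind of value they hold. Restating the two definitions so the usage lines below run on their own (sketch only):

    function NodeWithLine(str, line) { this.str = str; this.line = line; }
    NodeWithLine.prototype = new String();
    NodeWithLine.prototype.toString = function() { return this.str; };
    NodeWithLine.prototype.valueOf = function() { return this.str; };
    NodeWithLine.prototype.lineComment = function() { return " //@line " + this.line; };
    String.prototype.lineComment = function() { return ""; };  // same hack as the patch

    var plain  = "x = 1;";
    var tagged = new NodeWithLine("y = f(x);", 17);
    console.log(plain + plain.lineComment());    // x = 1;
    console.log(tagged + tagged.lineComment());  // y = f(x); //@line 17
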
@@ -1363,6 +1376,7 @@ var SPLICE_NEEDS_BRACKETS = jsp.array_to_hash([ "if", "while", "do", "for", "for
function gen_code(ast, options) {
options = defaults(options, {
+ debug: false,
indent_start : 0,
indent_level : 4,
quote_keys : false,
@@ -1422,7 +1436,19 @@ function gen_code(ast, options) {
};
function add_commas(a) {
- return a.join("," + space);
+ var str = a.join("," + space);
+ if (options.debug) {
+ // if a line contains more than one comma-separated segment, assign it the
+ // original line number of the first NodeWithLine segment
+ for (var i = 0, l = a.length; i < l; i ++) {
+ var v = a[i];
+ if (v instanceof NodeWithLine) {
+ v.str = str;
+ return v
+ }
+ }
+ }
+ return str;
};
function parenthesize(expr) {
@@ -1484,7 +1510,10 @@ function gen_code(ast, options) {
a.push(m[2] + "e-" + (m[1].length + m[2].length),
str.substr(str.indexOf(".")));
}
- return best_of(a);
+ var best = best_of(a);
+ if (options.debug && this.start)
+ return new NodeWithLine(best, this.start.line);
+ return best;
};
var w = ast_walker();
@@ -1512,7 +1541,15 @@ function gen_code(ast, options) {
},
"block": make_block,
"var": function(defs) {
- return "var " + add_commas(MAP(defs, make_1vardef)) + ";";
+ var s = "var " + add_commas(MAP(defs, make_1vardef)) + ";";
+ if (options.debug) {
+ // hack: we don't support mapping one optimized line to more than one
+ // generated line, so in case of multiple comma-separated var definitions,
+ // just take the first
+ if (defs[0][1] && defs[0][1].start)
+ return s + (new NodeWithLine(s, defs[0][1].start.line)).lineComment();
+ }
+ return s;
},
"const": function(defs) {
return "const " + add_commas(MAP(defs, make_1vardef)) + ";";
@@ -1567,7 +1604,10 @@ function gen_code(ast, options) {
"assign": function(op, lvalue, rvalue) {
if (op && op !== true) op += "=";
else op = "=";
- return add_spaces([ make(lvalue), op, parenthesize(rvalue, "seq") ]);
+ var s = add_spaces([ make(lvalue), op, parenthesize(rvalue, "seq") ]);
+ if (options.debug && this.start)
+ return new NodeWithLine(s, this.start.line);
+ return s;
},
"dot": function(expr) {
var out = make(expr), i = 1;
@@ -1584,9 +1624,12 @@ function gen_code(ast, options) {
var f = make(func);
if (needs_parens(func))
f = "(" + f + ")";
- return f + "(" + add_commas(MAP(args, function(expr){
+ var str = f + "(" + add_commas(MAP(args, function(expr){
return parenthesize(expr, "seq");
})) + ")";
+ if (options.debug && this.start)
+ return new NodeWithLine(str, this.start.line)
+ return str;
},
"function": make_function,
"defun": make_function,
@@ -1621,8 +1664,9 @@ function gen_code(ast, options) {
},
"return": function(expr) {
var out = [ "return" ];
- if (expr != null) out.push(make(expr));
- return add_spaces(out) + ";";
+ var str = make(expr);
+ if (expr != null) out.push(str);
+ return add_spaces(out) + ";" + (str ? str.lineComment() : '');
},
"binary": function(operator, lvalue, rvalue) {
var left = make(lvalue), right = make(rvalue);
@@ -1642,7 +1686,16 @@ function gen_code(ast, options) {
&& rvalue[0] == "regexp" && /^script/i.test(rvalue[1])) {
right = " " + right;
}
- return add_spaces([ left, operator, right ]);
+ var str = add_spaces([ left, operator, right ]);
+ if (options.debug) {
+ if (this.start)
+ return new NodeWithLine(str, this.start.line);
+ else if (lvalue.start)
+ return new NodeWithLine(str, lvalue.start.line);
+ else if (rvalue.start)
+ return new NodeWithLine(str, rvalue.start.line);
+ }
+ return str;
},
"unary-prefix": function(operator, expr) {
var val = make(expr);
@@ -1698,7 +1751,8 @@ function gen_code(ast, options) {
})), "]" ]);
},
"stat": function(stmt) {
- return make(stmt).replace(/;*\s*$/, ";");
+ var str = make(stmt);
+ return str.replace(/;*\s*$/, ";") + str.lineComment();
},
"seq": function() {
return add_commas(MAP(slice(arguments), make));
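
Taken together, the gen_code changes above mean that with debug:true the generator annotates statements ('stat', 'var', 'return') with trailing //@line comments recording where the value originated, which a later source-mapping step can presumably consume. Roughly like the following (illustrative output only; the exact choice of annotated statements and spacing depends on the generator and input):

    var $x = foo() | 0; //@line 1021
    HEAP32[$p >> 2] = $x; //@line 1022
    return $x | 0; //@line 1024
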
diff --git a/tools/js-optimizer.js b/tools/js-optimizer.js
index 7561abc8..3cd1b229 100644
--- a/tools/js-optimizer.js
+++ b/tools/js-optimizer.js
@@ -11,6 +11,7 @@
// *** Environment setup code ***
var arguments_ = [];
+var debug = false;
var ENVIRONMENT_IS_NODE = typeof process === 'object';
var ENVIRONMENT_IS_WEB = typeof window === 'object';
@@ -102,7 +103,7 @@ function globalEval(x) {
eval.call(null, x);
}
-if (typeof load == 'undefined' && typeof read != 'undefined') {
+if (typeof load === 'undefined' && typeof read != 'undefined') {
this['load'] = function(f) {
globalEval(read(f));
};
@@ -146,11 +147,12 @@ var generatedFunctions = false; // whether we have received only generated funct
var extraInfo = null;
function srcToAst(src) {
- return uglify.parser.parse(src);
+ return uglify.parser.parse(src, false, debug);
}
function astToSrc(ast, minifyWhitespace) {
return uglify.uglify.gen_code(ast, {
+ debug: debug,
ascii_only: true,
beautify: !minifyWhitespace,
indent_level: 1
@@ -162,10 +164,10 @@ function astToSrc(ast, minifyWhitespace) {
function traverseChildren(node, traverse, pre, post, stack) {
for (var i = 0; i < node.length; i++) {
var subnode = node[i];
- if (typeof subnode == 'object' && subnode && subnode.length) {
+ if (Array.isArray(subnode)) {
var subresult = traverse(subnode, pre, post, stack);
- if (subresult == true) return true;
- if (subresult !== null && typeof subresult == 'object') node[i] = subresult;
+ if (subresult === true) return true;
+ if (subresult !== null && typeof subresult === 'object') node[i] = subresult;
}
}
}
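
The traversal fixes above all operate on uglify's nested-array AST, where a node is an array whose first element is a type string and whose children may be nodes or lists of nodes. A simplified standalone walker in the same spirit (not the traverse() from this file):

    function walk(node, visit) {
      if (!Array.isArray(node) || typeof node[0] !== 'string') return;
      visit(node, node[0]);
      for (var i = 1; i < node.length; i++) {
        var c = node[i];
        if (Array.isArray(c)) {
          if (typeof c[0] === 'string') walk(c, visit);
          else c.forEach(function(s) { walk(s, visit); }); // lists of nodes, e.g. block bodies
        }
      }
    }
    var ast = ['binary', '+', ['num', 1], ['binary', '*', ['name', 'x'], ['num', 2]]];
    walk(ast, function(node, type) { console.log(type); });
    // binary, num, binary, name, num
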
@@ -186,16 +188,16 @@ function traverseChildren(node, traverse, pre, post, stack) {
// was stopped, true. Otherwise undefined.
function traverse(node, pre, post, stack) {
var type = node[0], result, len;
- var relevant = typeof type == 'string';
+ var relevant = typeof type === 'string';
if (relevant) {
if (stack) len = stack.length;
var result = pre(node, type, stack);
- if (result == true) return true;
- if (result !== null && typeof result == 'object') node = result; // Continue processing on this node
- if (stack && len == stack.length) stack.push(0);
+ if (result === true) return true;
+ if (result && result !== null) node = result; // Continue processing on this node
+ if (stack && len === stack.length) stack.push(0);
}
if (result !== null) {
- if (traverseChildren(node, traverse, pre, post, stack) == true) return true;
+ if (traverseChildren(node, traverse, pre, post, stack) === true) return true;
}
if (relevant) {
if (post) {
@@ -211,7 +213,7 @@ function traverse(node, pre, post, stack) {
function traverseGenerated(ast, pre, post, stack) {
assert(generatedFunctions);
traverse(ast, function(node) {
- if (node[0] == 'defun') {
+ if (node[0] === 'defun') {
traverse(node, pre, post, stack);
return null;
}
@@ -220,13 +222,13 @@ function traverseGenerated(ast, pre, post, stack) {
function traverseGeneratedFunctions(ast, callback) {
assert(generatedFunctions);
- if (ast[0] == 'toplevel') {
+ if (ast[0] === 'toplevel') {
var stats = ast[1];
for (var i = 0; i < stats.length; i++) {
var curr = stats[i];
- if (curr[0] == 'defun') callback(curr);
+ if (curr[0] === 'defun') callback(curr);
}
- } else if (ast[0] == 'defun') {
+ } else if (ast[0] === 'defun') {
callback(ast);
}
}
@@ -236,7 +238,7 @@ function traverseWithVariables(ast, callback) {
traverse(ast, function(node, type, stack) {
if (type in FUNCTION) {
stack.push({ type: 'function', vars: node[2] });
- } else if (type == 'var') {
+ } else if (type === 'var') {
// Find our function, add our vars
var func = stack[stack.length-1];
if (func) {
@@ -244,12 +246,12 @@ function traverseWithVariables(ast, callback) {
}
}
}, function(node, type, stack) {
- if (type == 'toplevel' || type in FUNCTION) {
+ if (type === 'toplevel' || type in FUNCTION) {
// We know all of the variables that are seen here, proceed to do relevant replacements
var allVars = stack.map(function(item) { return item ? item.vars : [] }).reduce(concatenator, []); // FIXME dictionary for speed?
traverse(node, function(node2, type2, stack2) {
// Be careful not to look into our inner functions. They have already been processed.
- if (sum(stack2) > 1 || (type == 'toplevel' && sum(stack2) == 1)) return;
+ if (sum(stack2) > 1 || (type === 'toplevel' && sum(stack2) === 1)) return;
if (type2 in FUNCTION) stack2.push(1);
return callback(node2, type2, allVars);
}, null, []);
@@ -262,7 +264,7 @@ function emptyNode() { // XXX do we need to create new nodes here? can't we reus
}
function isEmptyNode(node) {
- return node.length == 2 && node[0] == 'toplevel' && node[1].length == 0;
+ return node.length === 2 && node[0] === 'toplevel' && node[1].length === 0;
}
// Passes
@@ -284,7 +286,7 @@ function unGlobalize(ast) {
throw 'this is deprecated!'; // and does not work with parallel compilation
- assert(ast[0] == 'toplevel');
+ assert(ast[0] === 'toplevel');
var values = {};
// Find global renamings of the relevant values
ast[1].forEach(function(node, i) {
@@ -305,7 +307,7 @@ function unGlobalize(ast) {
ast[1][i][1][j] = emptyNode();
var assigned = false;
traverseWithVariables(ast, function(node, type, allVars) {
- if (type == 'assign' && node[2][0] == 'name' && node[2][1] == ident) assigned = true;
+ if (type === 'assign' && node[2][0] === 'name' && node[2][1] === ident) assigned = true;
});
ast[1][i][1][j] = [ident, value];
if (!assigned) {
@@ -315,12 +317,12 @@ function unGlobalize(ast) {
return true;
});
- if (node[1].length == 0) {
+ if (node[1].length === 0) {
ast[1][i] = emptyNode();
}
});
traverseWithVariables(ast, function(node, type, allVars) {
- if (type == 'name') {
+ if (type === 'name') {
var ident = node[1];
if (ident in values && allVars.indexOf(ident) < 0) {
return copy(values[ident]);
@@ -343,9 +345,9 @@ function unGlobalize(ast) {
// is now explicit.
function removeAssignsToUndefined(ast) {
traverse(ast, function(node, type) {
- if (type == 'assign' && jsonCompare(node[3], ['unary-prefix', 'void', ['num', 0]])) {
+ if (type === 'assign' && jsonCompare(node[3], ['unary-prefix', 'void', ['num', 0]])) {
return emptyNode();
- } else if (type == 'var') {
+ } else if (type === 'var') {
node[1] = node[1].map(function(varItem, j) {
var ident = varItem[0];
var value = varItem[1];
@@ -359,10 +361,10 @@ function removeAssignsToUndefined(ast) {
while (modified) {
modified = false;
traverse(ast, function(node, type) {
- if (type == 'assign' && jsonCompare(node[3], emptyNode())) {
+ if (type === 'assign' && jsonCompare(node[3], emptyNode())) {
modified = true;
return emptyNode();
- } else if (type == 'var') {
+ } else if (type === 'var') {
node[1] = node[1].map(function(varItem, j) {
var ident = varItem[0];
var value = varItem[1];
@@ -380,19 +382,19 @@ function removeAssignsToUndefined(ast) {
// are actually necessary. It's easy to clean those up now.
function removeUnneededLabelSettings(ast) {
traverse(ast, function(node, type) {
- if (type == 'defun') { // all of our compiled code is in defun nodes
+ if (type === 'defun') { // all of our compiled code is in defun nodes
// Find all checks
var checked = {};
traverse(node, function(node, type) {
- if (type == 'binary' && node[1] == '==' && node[2][0] == 'name' && node[2][1] == 'label') {
- assert(node[3][0] == 'num');
+ if (type === 'binary' && node[1] === '==' && node[2][0] === 'name' && node[2][1] === 'label') {
+ assert(node[3][0] === 'num');
checked[node[3][1]] = 1;
}
});
// Remove unneeded sets
traverse(node, function(node, type) {
- if (type == 'assign' && node[2][0] == 'name' && node[2][1] == 'label') {
- assert(node[3][0] == 'num');
+ if (type === 'assign' && node[2][0] === 'name' && node[2][1] === 'label') {
+ assert(node[3][0] === 'num');
if (!(node[3][1] in checked)) return emptyNode();
}
});
@@ -408,13 +410,13 @@ function simplifyExpressionsPre(ast) {
// Look for (x&A)<<B>>B and replace it with X&A if possible.
function simplifySignExtends(ast) {
traverse(ast, function(node, type) {
- if (type == 'binary' && node[1] == '>>' && node[3][0] == 'num' &&
- node[2][0] == 'binary' && node[2][1] == '<<' && node[2][3][0] == 'num' && node[3][1] == node[2][3][1]) {
+ if (type === 'binary' && node[1] === '>>' && node[3][0] === 'num' &&
+ node[2][0] === 'binary' && node[2][1] === '<<' && node[2][3][0] === 'num' && node[3][1] === node[2][3][1]) {
var innerNode = node[2][2];
var shifts = node[3][1];
- if (innerNode[0] == 'binary' && innerNode[1] == '&' && innerNode[3][0] == 'num') {
+ if (innerNode[0] === 'binary' && innerNode[1] === '&' && innerNode[3][0] === 'num') {
var mask = innerNode[3][1];
- if (mask << shifts >> shifts == mask) {
+ if (mask << shifts >> shifts === mask) {
return innerNode;
}
}
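
simplifySignExtends above targets the (x & mask) << B >> B pattern and drops the shift pair when it cannot change the value, i.e. when mask << B >> B still equals mask. For example (sketch):

    // (x & 127) << 24 >> 24  can become  x & 127: the masked value always fits in
    // the low 7 bits and survives the sign-extending shift pair.
    console.log(127 << 24 >> 24 === 127); // true  -> rewrite applies
    // (x & 255) << 24 >> 24 is left alone: values >= 128 would get sign-extended.
    console.log(255 << 24 >> 24 === 255); // false -> rewrite skipped
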
@@ -444,9 +446,10 @@ function simplifyExpressionsPre(ast) {
while (rerun) {
rerun = false;
traverse(ast, function process(node, type, stack) {
- if (type == 'binary' && node[1] == '|') {
- if (node[2][0] == 'num' && node[3][0] == 'num') {
- return ['num', node[2][1] | node[3][1]];
+ if (type === 'binary' && node[1] === '|') {
+ if (node[2][0] === 'num' && node[3][0] === 'num') {
+ node[2][1] |= node[3][1];
+ return node[2];
}
var go = false;
if (jsonCompare(node[2], ZERO)) {
@@ -466,9 +469,9 @@ function simplifyExpressionsPre(ast) {
for (var i = stack.length-1; i >= 0; i--) {
if (stack[i] >= 1) {
if (asm) {
- if (stack[stack.length-1] < 2 && node[2][0] == 'call') break; // we can only remove multiple |0s on these
+ if (stack[stack.length-1] < 2 && node[2][0] === 'call') break; // we can only remove multiple |0s on these
if (stack[stack.length-1] < 1 && (node[2][0] in COERCION_REQUIRING_OPS ||
- (node[2][0] == 'binary' && node[2][1] in COERCION_REQUIRING_BINARIES))) break; // we can remove |0 or >>2
+ (node[2][0] === 'binary' && node[2][1] in COERCION_REQUIRING_BINARIES))) break; // we can remove |0 or >>2
}
// we will replace ourselves with the non-zero side. Recursively process that node.
var result = jsonCompare(node[2], ZERO) ? node[3] : node[2], other;
@@ -479,15 +482,15 @@ function simplifyExpressionsPre(ast) {
}
rerun = true;
return process(result, result[0], stack);
- } else if (stack[i] == -1) {
+ } else if (stack[i] === -1) {
break; // Too bad, we can't
}
}
stack.push(2); // From here on up, no need for this kind of correction, it's done at the top
// (Add this at the end, so it is only added if we did not remove it)
- } else if (type == 'binary' && node[1] in USEFUL_BINARY_OPS) {
+ } else if (type === 'binary' && node[1] in USEFUL_BINARY_OPS) {
stack.push(1);
- } else if ((type == 'binary' && node[1] in SAFE_BINARY_OPS) || type == 'num' || type == 'name') {
+ } else if ((type === 'binary' && node[1] in SAFE_BINARY_OPS) || type === 'num' || type === 'name') {
stack.push(0); // This node is safe in that it does not interfere with this optimization
} else {
stack.push(-1); // This node is dangerous! Give up if you see this before you see '1'
@@ -503,7 +506,7 @@ function simplifyExpressionsPre(ast) {
var heapBits, heapUnsigned;
function parseHeap(name) {
if (name.substr(0, 4) != 'HEAP') return false;
- heapUnsigned = name[4] == 'U';
+ heapUnsigned = name[4] === 'U';
heapBits = parseInt(name.substr(heapUnsigned ? 5 : 4));
return true;
}
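
parseHeap above decodes a typed-array view name into a bit width and signedness (stored in the module-level heapBits/heapUnsigned). The naming convention it relies on, sketched here as a pure function for clarity:

    function parseHeapName(name) {
      if (name.substr(0, 4) !== 'HEAP') return null;
      var unsigned = name[4] === 'U';
      return { bits: parseInt(name.substr(unsigned ? 5 : 4), 10), unsigned: unsigned };
    }
    console.log(parseHeapName('HEAPU8'));  // { bits: 8, unsigned: true }
    console.log(parseHeapName('HEAP32'));  // { bits: 32, unsigned: false }
    console.log(parseHeapName('HEAPF64')); // bits is NaN -- float views need separate checks
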
@@ -511,21 +514,21 @@ function simplifyExpressionsPre(ast) {
var hasTempDoublePtr = false, rerunOrZeroPass = false;
traverse(ast, function(node, type) {
- if (type == 'name') {
- if (node[1] == 'tempDoublePtr') hasTempDoublePtr = true;
- } else if (type == 'binary' && node[1] == '&' && node[3][0] == 'num') {
- if (node[2][0] == 'num') return ['num', node[2][1] & node[3][1]];
+ if (type === 'name') {
+ if (node[1] === 'tempDoublePtr') hasTempDoublePtr = true;
+ } else if (type === 'binary' && node[1] === '&' && node[3][0] === 'num') {
+ if (node[2][0] === 'num') return ['num', node[2][1] & node[3][1]];
var input = node[2];
var amount = node[3][1];
- if (input[0] == 'binary' && input[1] == '&' && input[3][0] == 'num') {
+ if (input[0] === 'binary' && input[1] === '&' && input[3][0] === 'num') {
// Collapse X & 255 & 1
node[3][1] = amount & input[3][1];
node[2] = input[2];
- } else if (input[0] == 'sub' && input[1][0] == 'name') {
+ } else if (input[0] === 'sub' && input[1][0] === 'name') {
// HEAP8[..] & 255 => HEAPU8[..]
var name = input[1][1];
if (parseHeap(name)) {
- if (amount == Math.pow(2, heapBits)-1) {
+ if (amount === Math.pow(2, heapBits)-1) {
if (!heapUnsigned) {
input[1][1] = 'HEAPU' + heapBits; // make unsigned
}
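
The branch above handles a full-width mask on a signed heap view: per its comment, HEAP8[..] & 255 becomes a read from HEAPU8[..], since both produce the same bits. The guard it checks, sketched:

    var heapBits = 8;
    console.log(255 === Math.pow(2, heapBits) - 1); // true  -> mask covers the whole view, rewrite may fire
    console.log(127 === Math.pow(2, heapBits) - 1); // false -> HEAP8[p] & 127 is left alone here
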
@@ -539,14 +542,14 @@ function simplifyExpressionsPre(ast) {
}
}
}
- } else if (type == 'binary' && node[1] == '>>' && node[3][0] == 'num' &&
- node[2][0] == 'binary' && node[2][1] == '<<' && node[2][3][0] == 'num' &&
- node[2][2][0] == 'sub' && node[2][2][1][0] == 'name') {
+ } else if (type === 'binary' && node[1] === '>>' && node[3][0] === 'num' &&
+ node[2][0] === 'binary' && node[2][1] === '<<' && node[2][3][0] === 'num' &&
+ node[2][2][0] === 'sub' && node[2][2][1][0] === 'name') {
// collapse HEAPU?8[..] << 24 >> 24 etc. into HEAP8[..] | 0
var amount = node[3][1];
var name = node[2][2][1][1];
- if (amount == node[2][3][1] && parseHeap(name)) {
- if (heapBits == 32 - amount) {
+ if (amount === node[2][3][1] && parseHeap(name)) {
+ if (heapBits === 32 - amount) {
node[2][2][1][1] = 'HEAP' + heapBits;
node[1] = '|';
node[2] = node[2][2];
@@ -555,32 +558,34 @@ function simplifyExpressionsPre(ast) {
return node;
}
}
- } else if (type == 'assign') {
+ } else if (type === 'assign') {
// optimizations for assigning into HEAP32 specifically
- if (node[1] === true && node[2][0] == 'sub' && node[2][1][0] == 'name' && node[2][1][1] == 'HEAP32') {
+ if (node[1] === true && node[2][0] === 'sub' && node[2][1][0] === 'name' && node[2][1][1] === 'HEAP32') {
// HEAP32[..] = x | 0 does not need the | 0 (unless it is a mandatory |0 of a call)
- if (node[3][0] == 'binary' && node[3][1] == '|') {
- if (node[3][2][0] == 'num' && node[3][2][1] == 0 && node[3][3][0] != 'call') {
+ if (node[3][0] === 'binary' && node[3][1] === '|') {
+ if (node[3][2][0] === 'num' && node[3][2][1] === 0 && node[3][3][0] != 'call') {
node[3] = node[3][3];
- } else if (node[3][3][0] == 'num' && node[3][3][1] == 0 && node[3][2][0] != 'call') {
+ } else if (node[3][3][0] === 'num' && node[3][3][1] === 0 && node[3][2][0] != 'call') {
node[3] = node[3][2];
}
}
}
var value = node[3];
- if (value[0] == 'binary' && value[1] == '|') {
+ if (value[0] === 'binary' && value[1] === '|') {
// canonicalize order of |0 to end
- if (value[2][0] == 'num' && value[2][1] == 0) {
+ if (value[2][0] === 'num' && value[2][1] === 0) {
var temp = value[2];
value[2] = value[3];
value[3] = temp;
}
// if a seq ends in an |0, remove an external |0
// note that it is only safe to do this in assigns, like we are doing here (return (x, y|0); is not valid)
- if (value[2][0] == 'seq' && value[2][2][0] == 'binary' && value[2][2][1] in USEFUL_BINARY_OPS) {
+ if (value[2][0] === 'seq' && value[2][2][0] === 'binary' && value[2][2][1] in USEFUL_BINARY_OPS) {
node[3] = value[2];
}
}
+ } else if (type === 'sub' && node[1][0] === 'name' && /^FUNCTION_TABLE.*/.exec(node[1][1])) {
+ return null; // do not traverse subchildren here, we should not collapse 55 & 126. TODO: optimize this into a nonvirtual call (also because we lose some other opts here)!
}
});
@@ -589,27 +594,27 @@ function simplifyExpressionsPre(ast) {
if (asm) {
if (hasTempDoublePtr) {
traverse(ast, function(node, type) {
- if (type == 'assign') {
- if (node[1] === true && node[2][0] == 'sub' && node[2][1][0] == 'name' && node[2][1][1] == 'HEAP32') {
+ if (type === 'assign') {
+ if (node[1] === true && node[2][0] === 'sub' && node[2][1][0] === 'name' && node[2][1][1] === 'HEAP32') {
// remove bitcasts that are now obviously pointless, e.g.
// HEAP32[$45 >> 2] = HEAPF32[tempDoublePtr >> 2] = ($14 < $28 ? $14 : $28) - $42, HEAP32[tempDoublePtr >> 2] | 0;
var value = node[3];
- if (value[0] == 'seq' && value[1][0] == 'assign' && value[1][2][0] == 'sub' && value[1][2][1][0] == 'name' && value[1][2][1][1] == 'HEAPF32' &&
- value[1][2][2][0] == 'binary' && value[1][2][2][2][0] == 'name' && value[1][2][2][2][1] == 'tempDoublePtr') {
+ if (value[0] === 'seq' && value[1][0] === 'assign' && value[1][2][0] === 'sub' && value[1][2][1][0] === 'name' && value[1][2][1][1] === 'HEAPF32' &&
+ value[1][2][2][0] === 'binary' && value[1][2][2][2][0] === 'name' && value[1][2][2][2][1] === 'tempDoublePtr') {
// transform to HEAPF32[$45 >> 2] = ($14 < $28 ? $14 : $28) - $42;
node[2][1][1] = 'HEAPF32';
node[3] = value[1][3];
}
}
- } else if (type == 'seq') {
+ } else if (type === 'seq') {
// (HEAP32[tempDoublePtr >> 2] = HEAP32[$37 >> 2], +HEAPF32[tempDoublePtr >> 2])
// ==>
// +HEAPF32[$37 >> 2]
- if (node[0] == 'seq' && node[1][0] == 'assign' && node[1][2][0] == 'sub' && node[1][2][1][0] == 'name' &&
- (node[1][2][1][1] == 'HEAP32' || node[1][2][1][1] == 'HEAPF32') &&
- node[1][2][2][0] == 'binary' && node[1][2][2][2][0] == 'name' && node[1][2][2][2][1] == 'tempDoublePtr' &&
- node[1][3][0] == 'sub' && node[1][3][1][0] == 'name' && (node[1][3][1][1] == 'HEAP32' || node[1][3][1][1] == 'HEAPF32')) {
- if (node[1][2][1][1] == 'HEAP32') {
+ if (node[0] === 'seq' && node[1][0] === 'assign' && node[1][2][0] === 'sub' && node[1][2][1][0] === 'name' &&
+ (node[1][2][1][1] === 'HEAP32' || node[1][2][1][1] === 'HEAPF32') &&
+ node[1][2][2][0] === 'binary' && node[1][2][2][2][0] === 'name' && node[1][2][2][2][1] === 'tempDoublePtr' &&
+ node[1][3][0] === 'sub' && node[1][3][1][0] === 'name' && (node[1][3][1][1] === 'HEAP32' || node[1][3][1][1] === 'HEAPF32')) {
+ if (node[1][2][1][1] === 'HEAP32') {
node[1][3][1][1] = 'HEAPF32';
return ['unary-prefix', '+', node[1][3]];
} else {
@@ -624,11 +629,11 @@ function simplifyExpressionsPre(ast) {
// the other heap type, then eliminate the bitcast
var bitcastVars = {};
traverse(ast, function(node, type) {
- if (type == 'assign' && node[1] === true && node[2][0] == 'name') {
+ if (type === 'assign' && node[1] === true && node[2][0] === 'name') {
var value = node[3];
- if (value[0] == 'seq' && value[1][0] == 'assign' && value[1][2][0] == 'sub' && value[1][2][1][0] == 'name' &&
- (value[1][2][1][1] == 'HEAP32' || value[1][2][1][1] == 'HEAPF32') &&
- value[1][2][2][0] == 'binary' && value[1][2][2][2][0] == 'name' && value[1][2][2][2][1] == 'tempDoublePtr') {
+ if (value[0] === 'seq' && value[1][0] === 'assign' && value[1][2][0] === 'sub' && value[1][2][1][0] === 'name' &&
+ (value[1][2][1][1] === 'HEAP32' || value[1][2][1][1] === 'HEAPF32') &&
+ value[1][2][2][0] === 'binary' && value[1][2][2][2][0] === 'name' && value[1][2][2][2][1] === 'tempDoublePtr') {
var name = node[2][1];
if (!bitcastVars[name]) bitcastVars[name] = {
define_HEAP32: 0, define_HEAPF32: 0, use_HEAP32: 0, use_HEAPF32: 0, bad: false, namings: 0, defines: [], uses: []
@@ -639,15 +644,15 @@ function simplifyExpressionsPre(ast) {
}
});
traverse(ast, function(node, type) {
- if (type == 'name' && bitcastVars[node[1]]) {
+ if (type === 'name' && bitcastVars[node[1]]) {
bitcastVars[node[1]].namings++;
- } else if (type == 'assign' && node[1] === true) {
+ } else if (type === 'assign' && node[1] === true) {
var value = node[3];
- if (value[0] == 'name') {
+ if (value[0] === 'name') {
var name = value[1];
if (bitcastVars[name]) {
var target = node[2];
- if (target[0] == 'sub' && target[1][0] == 'name' && (target[1][1] == 'HEAP32' || target[1][1] == 'HEAPF32')) {
+ if (target[0] === 'sub' && target[1][0] === 'name' && (target[1][1] === 'HEAP32' || target[1][1] === 'HEAPF32')) {
bitcastVars[name]['use_' + target[1][1]]++;
bitcastVars[name].uses.push(node);
}
@@ -659,14 +664,14 @@ function simplifyExpressionsPre(ast) {
for (var v in bitcastVars) {
var info = bitcastVars[v];
// good variables define only one type, use only one type, have definitions and uses, and define as a different type than they use
- if (info.define_HEAP32*info.define_HEAPF32 == 0 && info.use_HEAP32*info.use_HEAPF32 == 0 &&
+ if (info.define_HEAP32*info.define_HEAPF32 === 0 && info.use_HEAP32*info.use_HEAPF32 === 0 &&
info.define_HEAP32+info.define_HEAPF32 > 0 && info.use_HEAP32+info.use_HEAPF32 > 0 &&
- info.define_HEAP32*info.use_HEAP32 == 0 && info.define_HEAPF32*info.use_HEAPF32 == 0 &&
- v in asmData.vars && info.namings == info.define_HEAP32+info.define_HEAPF32+info.use_HEAP32+info.use_HEAPF32) {
+ info.define_HEAP32*info.use_HEAP32 === 0 && info.define_HEAPF32*info.use_HEAPF32 === 0 &&
+ v in asmData.vars && info.namings === info.define_HEAP32+info.define_HEAPF32+info.use_HEAP32+info.use_HEAPF32) {
var correct = info.use_HEAP32 ? 'HEAPF32' : 'HEAP32';
info.defines.forEach(function(define) {
define[3] = define[3][1][3];
- if (correct == 'HEAP32') {
+ if (correct === 'HEAP32') {
define[3] = ['binary', '|', define[3], ['num', 0]];
} else {
define[3] = ['unary-prefix', '+', define[3]];
@@ -684,7 +689,7 @@ function simplifyExpressionsPre(ast) {
// optimize num >> num, in asm we need this here since we do not run optimizeShifts
traverse(ast, function(node, type) {
- if (type == 'binary' && node[1] == '>>' && node[2][0] == 'num' && node[3][0] == 'num') {
+ if (type === 'binary' && node[1] === '>>' && node[2][0] === 'num' && node[3][0] === 'num') {
node[0] = 'num';
node[1] = node[2][1] >> node[3][1];
node.length = 2;
@@ -700,15 +705,16 @@ function simplifyExpressionsPre(ast) {
while (rerun) {
rerun = false;
traverse(ast, function(node, type) {
- if (type == 'binary' && node[1] == '+') {
- if (node[2][0] == 'num' && node[3][0] == 'num') {
+ if (type === 'binary' && node[1] === '+') {
+ if (node[2][0] === 'num' && node[3][0] === 'num') {
rerun = true;
- return ['num', node[2][1] + node[3][1]];
+ node[2][1] += node[3][1];
+ return node[2];
}
for (var i = 2; i <= 3; i++) {
var ii = 5-i;
for (var j = 2; j <= 3; j++) {
- if (node[i][0] == 'num' && node[ii][0] == 'binary' && node[ii][1] == '+' && node[ii][j][0] == 'num') {
+ if (node[i][0] === 'num' && node[ii][0] === 'binary' && node[ii][1] === '+' && node[ii][j][0] === 'num') {
rerun = true;
node[ii][j][1] += node[i][1];
return node[ii];
@@ -720,15 +726,15 @@ function simplifyExpressionsPre(ast) {
}
}
- // if (x == 0) can be if (!x), etc.
+ // if (x === 0) can be if (!x), etc.
function simplifyZeroComp(ast) {
traverse(ast, function(node, type) {
var binary;
- if (type == 'if' && (binary = node[1])[0] == 'binary') {
- if ((binary[1] == '!=' || binary[1] == '!==') && binary[3][0] == 'num' && binary[3][1] == 0) {
+ if (type === 'if' && (binary = node[1])[0] === 'binary') {
+ if ((binary[1] === '!=' || binary[1] === '!==') && binary[3][0] === 'num' && binary[3][1] === 0) {
node[1] = binary[2];
return node;
- } else if ((binary[1] == '==' || binary[1] == '===') && binary[3][0] == 'num' && binary[3][1] == 0) {
+ } else if ((binary[1] === '==' || binary[1] === '===') && binary[3][0] === 'num' && binary[3][1] === 0) {
node[1] = ['unary-prefix', '!', binary[2]];
return node;
}
@@ -740,7 +746,7 @@ function simplifyExpressionsPre(ast) {
// Add final returns when necessary
var returnType = null;
traverse(fun, function(node, type) {
- if (type == 'return' && node[1]) {
+ if (type === 'return' && node[1]) {
returnType = detectAsmCoercion(node[1]);
}
});
@@ -750,7 +756,7 @@ function simplifyExpressionsPre(ast) {
var last = stats[stats.length-1];
if (last[0] != 'return') {
var returnValue = ['num', 0];
- if (returnType == ASM_DOUBLE) returnValue = ['unary-prefix', '+', returnValue];
+ if (returnType === ASM_DOUBLE) returnValue = ['unary-prefix', '+', returnValue];
stats.push(['return', returnValue]);
}
}
@@ -800,11 +806,11 @@ function optimizeShiftsInternal(ast, conservative) {
// vars
// XXX if var has >>=, ignore it here? That means a previous pass already optimized it
var hasSwitch = traverse(fun, function(node, type) {
- if (type == 'var') {
+ if (type === 'var') {
node[1].forEach(function(arg) {
newVar(arg[0], false, arg[1]);
});
- } else if (type == 'switch') {
+ } else if (type === 'switch') {
// The relooper can't always optimize functions, and we currently don't work with
// switch statements when optimizing shifts. Bail.
return true;
@@ -817,25 +823,25 @@ function optimizeShiftsInternal(ast, conservative) {
// optimize for code size, not speed.
traverse(fun, function(node, type, stack) {
stack.push(node);
- if (type == 'name' && vars[node[1]] && stack[stack.length-2][0] != 'assign') {
+ if (type === 'name' && vars[node[1]] && stack[stack.length-2][0] != 'assign') {
vars[node[1]].uses++;
- } else if (type == 'assign' && node[2][0] == 'name' && vars[node[2][1]]) {
+ } else if (type === 'assign' && node[2][0] === 'name' && vars[node[2][1]]) {
vars[node[2][1]].defs++;
}
}, null, []);
// First, break up elements inside a shift. This lets us see clearly what to do next.
traverse(fun, function(node, type) {
- if (type == 'binary' && node[1] == '>>' && node[3][0] == 'num') {
+ if (type === 'binary' && node[1] === '>>' && node[3][0] === 'num') {
var shifts = node[3][1];
if (shifts <= MAX_SHIFTS) {
// Push the >> inside the value elements
function addShift(subNode) {
- if (subNode[0] == 'binary' && subNode[1] == '+') {
+ if (subNode[0] === 'binary' && subNode[1] === '+') {
subNode[2] = addShift(subNode[2]);
subNode[3] = addShift(subNode[3]);
return subNode;
}
- if (subNode[0] == 'name' && !subNode[2]) { // names are returned with a shift, but we also note their being shifted
+ if (subNode[0] === 'name' && !subNode[2]) { // names are returned with a shift, but we also note their being shifted
var name = subNode[1];
if (vars[name]) {
vars[name].timesShifted[shifts]++;
@@ -849,7 +855,7 @@ function optimizeShiftsInternal(ast, conservative) {
}
});
traverse(fun, function(node, type) {
- if (node[0] == 'name' && node[2]) {
+ if (node[0] === 'name' && node[2]) {
return node.slice(0, 2); // clean up our notes
}
});
@@ -858,7 +864,7 @@ function optimizeShiftsInternal(ast, conservative) {
for (var name in vars) {
var data = vars[name];
var totalTimesShifted = sum(data.timesShifted);
- if (totalTimesShifted == 0) {
+ if (totalTimesShifted === 0) {
continue;
}
if (totalTimesShifted != Math.max.apply(null, data.timesShifted)) {
@@ -868,7 +874,7 @@ function optimizeShiftsInternal(ast, conservative) {
if (funFinished[name]) continue;
// We have one shift size (and possible unshifted uses). Consider replacing this variable with a shifted clone. If
// the estimated benefit is >0, we will do it
- if (data.defs == 1) {
+ if (data.defs === 1) {
data.benefit = totalTimesShifted - 2*(data.defs + (data.param ? 1 : 0));
}
if (conservative) data.benefit = 0;
@@ -884,7 +890,7 @@ function optimizeShiftsInternal(ast, conservative) {
//printErr(JSON.stringify(vars));
function cleanNotes() { // We need to mark 'name' nodes as 'processed' in some passes here; this cleans the notes up
traverse(fun, function(node, type) {
- if (node[0] == 'name' && node[2]) {
+ if (node[0] === 'name' && node[2]) {
return node.slice(0, 2);
}
});
@@ -906,7 +912,7 @@ function optimizeShiftsInternal(ast, conservative) {
}
traverse(fun, function(node, type, stack) { // add shift to assignments
stack.push(node);
- if (node[0] == 'assign' && node[1] === true && node[2][0] == 'name' && needsShift(node[2][1]) && !node[2][2]) {
+ if (node[0] === 'assign' && node[1] === true && node[2][0] === 'name' && needsShift(node[2][1]) && !node[2][2]) {
var name = node[2][1];
var data = vars[name];
var parent = stack[stack.length-3];
@@ -914,7 +920,7 @@ function optimizeShiftsInternal(ast, conservative) {
assert(statements, 'Invalid parent for assign-shift: ' + dump(parent));
var i = statements.indexOf(stack[stack.length-2]);
statements.splice(i+1, 0, ['stat', ['assign', true, ['name', name + '$s' + data.primaryShift], ['binary', '>>', ['name', name, true], ['num', data.primaryShift]]]]);
- } else if (node[0] == 'var') {
+ } else if (node[0] === 'var') {
var args = node[1];
for (var i = 0; i < args.length; i++) {
var arg = args[i];
@@ -930,14 +936,14 @@ function optimizeShiftsInternal(ast, conservative) {
cleanNotes();
traverse(fun, function(node, type, stack) { // replace shifted name with new variable
stack.push(node);
- if (node[0] == 'binary' && node[1] == '>>' && node[2][0] == 'name' && needsShift(node[2][1]) && node[3][0] == 'num') {
+ if (node[0] === 'binary' && node[1] === '>>' && node[2][0] === 'name' && needsShift(node[2][1]) && node[3][0] === 'num') {
var name = node[2][1];
var data = vars[name];
var parent = stack[stack.length-2];
// Don't modify in |x$sN = x >> 2|, in normal assigns and in var assigns
- if (parent[0] == 'assign' && parent[2][0] == 'name' && parent[2][1] == name + '$s' + data.primaryShift) return;
- if (parent[0] == name + '$s' + data.primaryShift) return;
- if (node[3][1] == data.primaryShift) {
+ if (parent[0] === 'assign' && parent[2][0] === 'name' && parent[2][1] === name + '$s' + data.primaryShift) return;
+ if (parent[0] === name + '$s' + data.primaryShift) return;
+ if (node[3][1] === data.primaryShift) {
return ['name', name + '$s' + data.primaryShift];
}
}
@@ -948,8 +954,8 @@ function optimizeShiftsInternal(ast, conservative) {
while (more) { // combine shifts in the same direction as an optimization
more = false;
traverse(fun, function(node, type) {
- if (node[0] == 'binary' && node[1] in SIMPLE_SHIFTS && node[2][0] == 'binary' && node[2][1] == node[1] &&
- node[3][0] == 'num' && node[2][3][0] == 'num') { // do not turn a << b << c into a << b + c; while logically identical, it is slower
+ if (node[0] === 'binary' && node[1] in SIMPLE_SHIFTS && node[2][0] === 'binary' && node[2][1] === node[1] &&
+ node[3][0] === 'num' && node[2][3][0] === 'num') { // do not turn a << b << c into a << b + c; while logically identical, it is slower
more = true;
return ['binary', node[1], node[2][2], ['num', node[3][1] + node[2][3][1]]];
}
@@ -958,32 +964,32 @@ function optimizeShiftsInternal(ast, conservative) {
// Before recombining, do some additional optimizations
traverse(fun, function(node, type) {
// Apply constant shifts onto constants
- if (type == 'binary' && node[1] == '>>' && node[2][0] == 'num' && node[3][0] == 'num' && node[3][1] <= MAX_SHIFTS) {
+ if (type === 'binary' && node[1] === '>>' && node[2][0] === 'num' && node[3][0] === 'num' && node[3][1] <= MAX_SHIFTS) {
var subNode = node[2];
var shifts = node[3][1];
var result = subNode[1] / Math.pow(2, shifts);
- if (result % 1 == 0) {
+ if (result % 1 === 0) {
subNode[1] = result;
return subNode;
}
}
// Optimize the case of ($a*80)>>2 into ($a*20)|0
- if (type == 'binary' && node[1] in SIMPLE_SHIFTS &&
- node[2][0] == 'binary' && node[2][1] == '*') {
+ if (type === 'binary' && node[1] in SIMPLE_SHIFTS &&
+ node[2][0] === 'binary' && node[2][1] === '*') {
var mulNode = node[2];
- if (mulNode[2][0] == 'num') {
+ if (mulNode[2][0] === 'num') {
var temp = mulNode[2];
mulNode[2] = mulNode[3];
mulNode[3] = temp;
}
- if (mulNode[3][0] == 'num') {
- if (node[1] == '<<') {
+ if (mulNode[3][0] === 'num') {
+ if (node[1] === '<<') {
mulNode[3][1] *= Math.pow(2, node[3][1]);
node[1] = '|';
node[3][1] = 0;
return node;
} else {
- if (mulNode[3][1] % Math.pow(2, node[3][1]) == 0) {
+ if (mulNode[3][1] % Math.pow(2, node[3][1]) === 0) {
mulNode[3][1] /= Math.pow(2, node[3][1]);
node[1] = '|';
node[3][1] = 0;
@@ -994,8 +1000,8 @@ function optimizeShiftsInternal(ast, conservative) {
}
/* XXX - theoretically useful optimization(s), but commented out as not helpful in practice
// Transform (x << 2) >> 2 into x & mask or something even simpler
- if (type == 'binary' && node[1] == '>>' && node[3][0] == 'num' &&
- node[2][0] == 'binary' && node[2][1] == '<<' && node[2][3][0] == 'num' && node[3][1] == node[2][3][1]) {
+ if (type === 'binary' && node[1] === '>>' && node[3][0] === 'num' &&
+ node[2][0] === 'binary' && node[2][1] === '<<' && node[2][3][0] === 'num' && node[3][1] === node[2][3][1]) {
var subNode = node[2];
var shifts = node[3][1];
var mask = ((0xffffffff << shifts) >>> shifts) | 0;
@@ -1008,11 +1014,11 @@ function optimizeShiftsInternal(ast, conservative) {
// Re-combine remaining shifts, to undo the breaking up we did before. may require reordering inside +'s
traverse(fun, function(node, type, stack) {
stack.push(node);
- if (type == 'binary' && node[1] == '+' && (stack[stack.length-2][0] != 'binary' || stack[stack.length-2][1] != '+')) {
+ if (type === 'binary' && node[1] === '+' && (stack[stack.length-2][0] != 'binary' || stack[stack.length-2][1] !== '+')) {
// 'Flatten' added items
var addedItems = [];
function flatten(node) {
- if (node[0] == 'binary' && node[1] == '+') {
+ if (node[0] === 'binary' && node[1] === '+') {
flatten(node[2]);
flatten(node[3]);
} else {
@@ -1025,15 +1031,15 @@ function optimizeShiftsInternal(ast, conservative) {
function originalOrderKey(item) {
return -originalOrder.indexOf(item);
}
- if (node[0] == 'binary' && node[1] in SIMPLE_SHIFTS) {
- if (node[3][0] == 'num' && node[3][1] <= MAX_SHIFTS) return 2*node[3][1] + (node[1] == '>>' ? 100 : 0); // 0-106
- return (node[1] == '>>' ? 20000 : 10000) + originalOrderKey(node);
+ if (node[0] === 'binary' && node[1] in SIMPLE_SHIFTS) {
+ if (node[3][0] === 'num' && node[3][1] <= MAX_SHIFTS) return 2*node[3][1] + (node[1] === '>>' ? 100 : 0); // 0-106
+ return (node[1] === '>>' ? 20000 : 10000) + originalOrderKey(node);
}
- if (node[0] == 'num') return -20000 + node[1];
+ if (node[0] === 'num') return -20000 + node[1];
return -10000 + originalOrderKey(node); // Don't modify the original order if we don't modify anything
}
for (var i = 0; i < addedItems.length; i++) {
- if (addedItems[i][0] == 'string') return; // this node is not relevant for us
+ if (addedItems[i][0] === 'string') return; // this node is not relevant for us
}
addedItems.sort(function(node1, node2) {
return key(node1) - key(node2);
@@ -1042,7 +1048,7 @@ function optimizeShiftsInternal(ast, conservative) {
var i = 0;
while (i < addedItems.length-1) { // re-combine inside addedItems
var k = key(addedItems[i]), k1 = key(addedItems[i+1]);
- if (k == k1 && k >= 0 && k1 <= 106) {
+ if (k === k1 && k >= 0 && k1 <= 106) {
addedItems[i] = ['binary', addedItems[i][1], ['binary', '+', addedItems[i][2], addedItems[i+1][2]], addedItems[i][3]];
addedItems.splice(i+1, 1);
} else {
@@ -1051,7 +1057,7 @@ function optimizeShiftsInternal(ast, conservative) {
}
var num = 0;
for (i = 0; i < addedItems.length; i++) { // combine all numbers into one
- if (addedItems[i][0] == 'num') {
+ if (addedItems[i][0] === 'num') {
num += addedItems[i][1];
addedItems.splice(i, 1);
i--;
@@ -1063,7 +1069,7 @@ function optimizeShiftsInternal(ast, conservative) {
// so it might take more space, but normally at most one more digit).
var added = false;
for (i = 0; i < addedItems.length; i++) {
- if (addedItems[i][0] == 'binary' && addedItems[i][1] == '>>' && addedItems[i][3][0] == 'num' && addedItems[i][3][1] <= MAX_SHIFTS) {
+ if (addedItems[i][0] === 'binary' && addedItems[i][1] === '>>' && addedItems[i][3][0] === 'num' && addedItems[i][3][1] <= MAX_SHIFTS) {
addedItems[i] = ['binary', '>>', ['binary', '+', addedItems[i][2], ['num', num << addedItems[i][3][1]]], addedItems[i][3]];
added = true;
}
@@ -1100,8 +1106,8 @@ function optimizeShiftsAggressive(ast) {
// if (!(x < 5))
// or such. Simplifying these saves space and time.
function simplifyNotCompsDirect(node) {
- if (node[0] == 'unary-prefix' && node[1] == '!') {
- if (node[2][0] == 'binary') {
+ if (node[0] === 'unary-prefix' && node[1] === '!') {
+ if (node[2][0] === 'binary') {
switch(node[2][1]) {
case '<': return ['binary', '>=', node[2][2], node[2][3]];
case '>': return ['binary', '<=', node[2][2], node[2][3]];
@@ -1112,7 +1118,7 @@ function simplifyNotCompsDirect(node) {
case '===': return ['binary', '!==', node[2][2], node[2][3]];
case '!==': return ['binary', '===', node[2][2], node[2][3]];
}
- } else if (node[2][0] == 'unary-prefix' && node[2][1] == '!') {
+ } else if (node[2][0] === 'unary-prefix' && node[2][1] === '!') {
return node[2][2];
}
}
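
simplifyNotCompsDirect above flips a negated comparison into the opposite comparison and collapses double negation. At the source level (sketch), with a caveat that is an assumption about the inputs this pass sees rather than something stated in the patch:

    //   !(x < 5)  ==>  x >= 5      !(a == b)  ==>  a != b      !!y  ==>  y
    // Caveat: the two forms differ when a comparison operand is NaN; the compiled
    // integer-heavy code this pass runs on is presumably never in that situation.
    console.log(!(NaN < 5), NaN >= 5); // true false -- the one case where they diverge
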
@@ -1135,8 +1141,8 @@ var NO_SIDE_EFFECTS = set('num', 'name');
function hasSideEffects(node) { // this is 99% incomplete!
if (node[0] in NO_SIDE_EFFECTS) return false;
- if (node[0] == 'unary-prefix') return hasSideEffects(node[2]);
- if (node[0] == 'binary') return hasSideEffects(node[2]) || hasSideEffects(node[3]);
+ if (node[0] === 'unary-prefix') return hasSideEffects(node[2]);
+ if (node[0] === 'binary') return hasSideEffects(node[2]) || hasSideEffects(node[3]);
return true;
}
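
hasSideEffects above is deliberately conservative (its own comment calls it 99% incomplete): only numbers, names, and unary/binary trees built purely from them are reported as side-effect free; everything else is assumed to have effects. Restated with an approximated NO_SIDE_EFFECTS set so the checks below run standalone:

    var NO_SIDE_EFFECTS = { num: 0, name: 0 };   // stands in for set('num', 'name')
    function hasSideEffects(node) {
      if (node[0] in NO_SIDE_EFFECTS) return false;
      if (node[0] === 'unary-prefix') return hasSideEffects(node[2]);
      if (node[0] === 'binary') return hasSideEffects(node[2]) || hasSideEffects(node[3]);
      return true;
    }
    console.log(hasSideEffects(['num', 5]));                                  // false
    console.log(hasSideEffects(['binary', '+', ['name', 'x'], ['num', 1]]));  // false
    console.log(hasSideEffects(['call', ['name', 'f'], []]));                 // true -- unknown, assume effects
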
@@ -1145,8 +1151,8 @@ function hasSideEffects(node) { // this is 99% incomplete!
function vacuum(ast) {
function isEmpty(node) {
if (!node) return true;
- if (node[0] == 'toplevel' && (!node[1] || node[1].length == 0)) return true;
- if (node[0] == 'block' && (!node[1] || (typeof node[1] != 'object') || node[1].length == 0 || (node[1].length == 1 && isEmpty(node[1])))) return true;
+ if (node[0] === 'toplevel' && (!node[1] || node[1].length === 0)) return true;
+ if (node[0] === 'block' && (!node[1] || (typeof node[1] != 'object') || node[1].length === 0 || (node[1].length === 1 && isEmpty(node[1])))) return true;
return false;
}
function simplifyList(node, si) {
@@ -1156,7 +1162,7 @@ function vacuum(ast) {
var i = 0;
while (i < statements.length) {
var subNode = statements[i];
- if (subNode[0] == 'block') {
+ if (subNode[0] === 'block') {
statements.splice.apply(statements, [i, 1].concat(subNode[1] || []));
changed = true;
} else {
@@ -1176,20 +1182,20 @@ function vacuum(ast) {
var ret;
switch(node[0]) {
case 'block': {
- if (node[1] && node[1].length == 1 && node[1][0][0] == 'block') {
+ if (node[1] && node[1].length === 1 && node[1][0][0] === 'block') {
return node[1][0];
- } else if (typeof node[1] == 'object') {
+ } else if (typeof node[1] === 'object') {
ret = simplifyList(node, 1);
if (ret) return ret;
}
} break;
case 'stat': {
- if (node[1][0] == 'block') {
+ if (node[1][0] === 'block') {
return node[1];
}
} break;
case 'defun': {
- if (node[3].length == 1 && node[3][0][0] == 'block') {
+ if (node[3].length === 1 && node[3][0][0] === 'block') {
node[3] = node[3][0][1];
return node;
} else {
@@ -1198,19 +1204,19 @@ function vacuum(ast) {
}
} break;
case 'do': {
- if (node[1][0] == 'num' && node[2][0] == 'toplevel' && (!node[2][1] || node[2][1].length == 0)) {
+ if (node[1][0] === 'num' && node[2][0] === 'toplevel' && (!node[2][1] || node[2][1].length === 0)) {
return emptyNode();
} else if (isEmpty(node[2]) && !hasSideEffects(node[1])) {
return emptyNode();
}
} break;
case 'label': {
- if (node[2][0] == 'toplevel' && (!node[2][1] || node[2][1].length == 0)) {
+ if (node[2][0] === 'toplevel' && (!node[2][1] || node[2][1].length === 0)) {
return emptyNode();
}
} break;
case 'if': {
- var empty2 = isEmpty(node[2]), empty3 = isEmpty(node[3]), has3 = node.length == 4;
+ var empty2 = isEmpty(node[2]), empty3 = isEmpty(node[3]), has3 = node.length === 4;
if (!empty2 && empty3 && has3) { // empty else clauses
return node.slice(0, 3);
} else if (empty2 && !empty3) { // empty if blocks
@@ -1232,9 +1238,9 @@ function vacuum(ast) {
}
function getStatements(node) {
- if (node[0] == 'defun') {
+ if (node[0] === 'defun') {
return node[3];
- } else if (node[0] == 'block') {
+ } else if (node[0] === 'block') {
return node[1];
} else {
return null;
@@ -1242,9 +1248,9 @@ function getStatements(node) {
}
// Multiple blocks from the relooper are, in general, implemented by
-// if (label == x) { } else if ..
+// if (label === x) { } else if ..
// and branching into them by
-// if (condition) { label == x } else ..
+// if (condition) { label === x } else ..
// We can hoist the multiple block into the condition, thus removing code and one 'if' check
function hoistMultiples(ast) {
traverseGeneratedFunctions(ast, function(node) {
@@ -1261,10 +1267,10 @@ function hoistMultiples(ast) {
var postInner = post;
var shellLabel = false, shellDo = false;
while (true) {
- if (postInner[0] == 'label') {
+ if (postInner[0] === 'label') {
shellLabel = postInner[1];
postInner = postInner[2];
- } else if (postInner[0] == 'do') {
+ } else if (postInner[0] === 'do') {
shellDo = postInner[1];
postInner = postInner[2][1][0];
} else {
@@ -1273,19 +1279,19 @@ function hoistMultiples(ast) {
}
if (postInner[0] != 'if') continue;
// Look into this if, and its elseifs
- while (postInner && postInner[0] == 'if') {
+ while (postInner && postInner[0] === 'if') {
var cond = postInner[1];
- if (cond[0] == 'binary' && cond[1] == '==' && cond[2][0] == 'name' && cond[2][1] == 'label') {
- assert(cond[3][0] == 'num');
+ if (cond[0] === 'binary' && cond[1] === '==' && cond[2][0] === 'name' && cond[2][1] === 'label') {
+ assert(cond[3][0] === 'num');
// We have a valid Multiple check here. Try to hoist it, look for the source in |pre| and its else's
var labelNum = cond[3][1];
var labelBlock = postInner[2];
- assert(labelBlock[0] == 'block');
+ assert(labelBlock[0] === 'block');
var found = false;
traverse(pre, function(preNode, preType) {
- if (!found && preType == 'assign' && preNode[2][0] == 'name' && preNode[2][1] == 'label') {
- assert(preNode[3][0] == 'num');
- if (preNode[3][1] == labelNum) {
+ if (!found && preType === 'assign' && preNode[2][0] === 'name' && preNode[2][1] === 'label') {
+ assert(preNode[3][0] === 'num');
+ if (preNode[3][1] === labelNum) {
// That's it! Hoist away. We can also throw away the label setting as its goal has already been achieved
found = true;
modifiedI = true;
@@ -1315,16 +1321,16 @@ function hoistMultiples(ast) {
// situation is if the code after us is a multiple, in which case we might be checking for
// this label inside it (or in a later multiple, even)
function tryEliminate(node) {
- if (node[0] == 'if') {
+ if (node[0] === 'if') {
var replaced;
if (replaced = tryEliminate(node[2])) node[2] = replaced;
if (node[3] && (replaced = tryEliminate(node[3]))) node[3] = replaced;
} else {
- if (node[0] == 'block' && node[1] && node[1].length > 0) {
+ if (node[0] === 'block' && node[1] && node[1].length > 0) {
var subNode = node[1][node[1].length-1];
- if (subNode[0] == 'stat' && subNode[1][0] == 'assign' && subNode[1][2][0] == 'name' &&
- subNode[1][2][1] == 'label' && subNode[1][3][0] == 'num') {
- if (node[1].length == 1) {
+ if (subNode[0] === 'stat' && subNode[1][0] === 'assign' && subNode[1][2][0] === 'name' &&
+ subNode[1][2][1] === 'label' && subNode[1][3][0] === 'num') {
+ if (node[1].length === 1) {
return emptyNode();
} else {
node[1].splice(node[1].length-1, 1);
@@ -1336,9 +1342,9 @@ function hoistMultiples(ast) {
return false;
}
function getActualStatement(node) { // find the actual active statement, ignoring a label and one-time do loop
- if (node[0] == 'label') node = node[2];
- if (node[0] == 'do') node = node[2];
- if (node[0] == 'block' && node[1].length == 1) node = node[1][0];
+ if (node[0] === 'label') node = node[2];
+ if (node[0] === 'do') node = node[2];
+ if (node[0] === 'block' && node[1].length === 1) node = node[1][0];
return node;
}
vacuum(node);
@@ -1348,7 +1354,7 @@ function hoistMultiples(ast) {
for (var i = 0; i < statements.length-1; i++) {
var curr = getActualStatement(statements[i]);
var next = statements[i+1];
- if (curr[0] == 'if' && next[0] != 'if' && next[0] != 'label' && next[0] != 'do' && next[0] != 'while') {
+ if (curr[0] === 'if' && next[0] != 'if' && next[0] != 'label' && next[0] != 'do' && next[0] != 'while') {
tryEliminate(curr);
}
}
@@ -1366,7 +1372,7 @@ function hoistMultiples(ast) {
if (!statements) return;
for (var i = 0; i < statements.length; i++) {
var node = statements[i];
- if (node[0] == 'if' && node[2][0] == 'block' && node[3] && node[3][0] == 'block') {
+ if (node[0] === 'if' && node[2][0] === 'block' && node[3] && node[3][0] === 'block') {
var stat1 = node[2][1], stat2 = node[3][1];
// If break|continue in the latter and not the former, reverse them
if (!(stat1[stat1.length-1][0] in LOOP_FLOW) && (stat2[stat2.length-1][0] in LOOP_FLOW)) {
@@ -1394,7 +1400,7 @@ function loopOptimizer(ast) {
var neededDos = [];
// Find unneeded labels
traverseGenerated(ast, function(node, type, stack) {
- if (type == 'label' && node[2][0] in LOOP) {
+ if (type === 'label' && node[2][0] in LOOP) {
// this is a labelled loop. we don't know if it's needed yet. Mark its label for removal for now.
stack.push(node);
node[1] = '+' + node[1];
@@ -1410,12 +1416,12 @@ function loopOptimizer(ast) {
assert(lastLoop, 'Cannot break/continue without a Label');
while (i >= 0 && !lastLabel) {
if (stack[i][0] in LOOP) break; // another loop in the middle - no label for lastLoop
- if (stack[i][0] == 'label') lastLabel = stack[i];
+ if (stack[i][0] === 'label') lastLabel = stack[i];
i--;
}
var ident = node[1]; // there may not be a label ident if this is a simple break; or continue;
var plus = '+' + ident;
- if (lastLabel && ident && (ident == lastLabel[1] || plus == lastLabel[1])) {
+ if (lastLabel && ident && (ident === lastLabel[1] || plus === lastLabel[1])) {
// If this is a 'do' loop, this break means we actually need it.
neededDos.push(lastLoop);
// We don't need the control flow command to have a label - it's referring to the current loop
@@ -1428,7 +1434,7 @@ function loopOptimizer(ast) {
// Find the label node that needs to stay alive
stack.forEach(function(label) {
if (!label) return;
- if (label[1] == plus) label[1] = label[1].substr(1); // Remove '+', marking it as needed
+ if (label[1] === plus) label[1] = label[1].substr(1); // Remove '+', marking it as needed
});
}
}
@@ -1438,12 +1444,12 @@ function loopOptimizer(ast) {
var more = false;
// Remove unneeded labels
traverseGenerated(ast, function(node, type) {
- if (type == 'label' && node[1][0] == '+') {
+ if (type === 'label' && node[1][0] === '+') {
more = true;
var ident = node[1].substr(1);
// Remove label from loop flow commands
traverse(node[2], function(node2, type) {
- if (type in LOOP_FLOW && node2[1] == ident) {
+ if (type in LOOP_FLOW && node2[1] === ident) {
return [node2[0]];
}
});
@@ -1453,13 +1459,13 @@ function loopOptimizer(ast) {
// Remove unneeded one-time loops. We need such loops if (1) they have a label, or (2) they have a direct break so they are in neededDos.
// First, add all labeled loops of this nature to neededDos
traverseGenerated(ast, function(node, type) {
- if (type == 'label' && node[2][0] == 'do') {
+ if (type === 'label' && node[2][0] === 'do') {
neededDos.push(node[2]);
}
});
// Remove unneeded dos, we know who they are now
traverseGenerated(ast, function(node, type) {
- if (type == 'do' && neededDos.indexOf(node) < 0) {
+ if (type === 'do' && neededDos.indexOf(node) < 0) {
assert(jsonCompare(node[1], ['num', 0]), 'Trying to remove a one-time do loop that is not one of our generated ones.');
more = true;
return node[2];
@@ -1485,7 +1491,7 @@ function loopOptimizer(ast) {
function unVarify(vars, ret) { // transform var x=1, y=2 etc. into (x=1, y=2), i.e., the same assigns, but without a var definition
ret = ret || [];
ret[0] = 'stat';
- if (vars.length == 1) {
+ if (vars.length === 1) {
ret[1] = ['assign', true, ['name', vars[0][0]], vars[0][1]];
} else {
ret[1] = [];
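
unVarify above rewrites a var statement into plain assignments, so the statement can later be merged or moved where a declaration could not be. The single-definition case, which is the part fully visible in this hunk, at the AST level (sketch):

    function unVarifyOne(def) {            // sketch of the vars.length === 1 case only
      return ['stat', ['assign', true, ['name', def[0]], def[1]]];
    }
    console.log(JSON.stringify(unVarifyOne(['x', ['num', 1]])));
    // ["stat",["assign",true,["name","x"],["num",1]]]  -- i.e. the source-level  x = 1;
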
@@ -1507,16 +1513,16 @@ var ASM_DOUBLE = 1;
function detectAsmCoercion(node) {
// for params, +x vs x|0, for vars, 0.0 vs 0
- if (node[0] == 'num' && node[1].toString().indexOf('.') >= 0) return ASM_DOUBLE;
- return node[0] == 'unary-prefix' ? ASM_DOUBLE : ASM_INT;
+ if (node[0] === 'num' && node[1].toString().indexOf('.') >= 0) return ASM_DOUBLE;
+ return node[0] === 'unary-prefix' ? ASM_DOUBLE : ASM_INT;
}
function makeAsmParamCoercion(param, type) {
- return type == ASM_INT ? ['binary', '|', ['name', param], ['num', 0]] : ['unary-prefix', '+', ['name', param]];
+ return type === ASM_INT ? ['binary', '|', ['name', param], ['num', 0]] : ['unary-prefix', '+', ['name', param]];
}
function makeAsmVarDef(v, type) {
- return [v, type == ASM_INT ? ['num', 0] : ['unary-prefix', '+', ['num', 0]]];
+ return [v, type === ASM_INT ? ['num', 0] : ['unary-prefix', '+', ['num', 0]]];
}
function normalizeAsm(func) {
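
detectAsmCoercion and its helpers above classify asm.js-style values by their coercion syntax: a leading + (or a numeric literal containing a '.') means double, otherwise int, and parameters/vars get the matching x|0 or +x no-op coercion. Restated so the checks below run standalone (ASM_DOUBLE = 1 is shown above; ASM_INT = 0 is assumed, as its definition sits outside this hunk):

    var ASM_INT = 0, ASM_DOUBLE = 1;
    function detectAsmCoercion(node) {
      if (node[0] === 'num' && node[1].toString().indexOf('.') >= 0) return ASM_DOUBLE;
      return node[0] === 'unary-prefix' ? ASM_DOUBLE : ASM_INT;
    }
    console.log(detectAsmCoercion(['num', 5]));                            // 0 (int)
    console.log(detectAsmCoercion(['num', 0.5]));                          // 1 (double)
    console.log(detectAsmCoercion(['unary-prefix', '+', ['name', 'x']]));  // 1 (double)
    // makeAsmParamCoercion('p', ASM_INT) builds the AST for  p | 0,
    // makeAsmParamCoercion('p', ASM_DOUBLE) builds the AST for  +p  (see above).
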
@@ -1548,7 +1554,7 @@ function normalizeAsm(func) {
var name = v[0];
var value = v[1];
if (!(name in data.vars)) {
- assert(value[0] == 'num' || (value[0] == 'unary-prefix' && value[2][0] == 'num')); // must be valid coercion no-op
+ assert(value[0] === 'num' || (value[0] === 'unary-prefix' && value[2][0] === 'num')); // must be valid coercion no-op
data.vars[name] = detectAsmCoercion(value);
v.length = 1; // make an un-assigning var
} else {
@@ -1560,7 +1566,7 @@ function normalizeAsm(func) {
// finally, look for other var definitions and collect them
while (i < stats.length) {
traverse(stats[i], function(node, type) {
- if (type == 'var') {
+ if (type === 'var') {
for (var j = 0; j < node[1].length; j++) {
var v = node[1][j];
var name = v[0];
@@ -1575,8 +1581,8 @@ function normalizeAsm(func) {
}
}
unVarify(node[1], node);
- } else if (type == 'dot') {
- if (node[1][0] == 'name' && node[1][1] == 'Math') {
+ } else if (type === 'dot') {
+ if (node[1][0] === 'name' && node[1][1] === 'Math') {
// transform Math.max to Math_max; we forward in the latter version
node[0] = 'name';
node[1] = 'Math_' + node[2];
@@ -1594,7 +1600,7 @@ function denormalizeAsm(func, data) {
var stats = func[3];
// Remove var definitions, if any
for (var i = 0; i < stats.length; i++) {
- if (stats[i][0] == 'var') {
+ if (stats[i][0] === 'var') {
stats[i] = emptyNode();
} else {
if (!isEmptyNode(stats[i])) break;
@@ -1663,7 +1669,7 @@ function registerize(ast) {
var localVars = {};
var hasSwitch = false; // we cannot optimize variables if there is a switch, unless in asm mode
traverse(fun, function(node, type) {
- if (type == 'var') {
+ if (type === 'var') {
node[1].forEach(function(defined) { localVars[defined[0]] = 1 });
var vars = node[1].filter(function(varr) { return varr[1] });
if (vars.length >= 1) {
@@ -1671,7 +1677,7 @@ function registerize(ast) {
} else {
return emptyNode();
}
- } else if (type == 'switch') {
+ } else if (type === 'switch') {
hasSwitch = true;
}
});
@@ -1682,7 +1688,7 @@ function registerize(ast) {
var nextLocal = 0;
// Minify globals using the mapping we were given
traverse(fun, function(node, type) {
- if (type == 'name') {
+ if (type === 'name') {
var name = node[1];
var minified = extraInfo.globals[name];
if (minified) {
@@ -1702,13 +1708,13 @@ function registerize(ast) {
asmData.params[newName] = asmData.params[minified];
delete asmData.params[minified];
traverse(fun, function(node, type) {
- if (type == 'name' && node[1] == minified) {
+ if (type === 'name' && node[1] === minified) {
node[1] = newName;
}
});
if (fun[2]) {
for (var i = 0; i < fun[2].length; i++) {
- if (fun[2][i] == minified) fun[2][i] = newName;
+ if (fun[2][i] === minified) fun[2][i] = newName;
}
}
}
@@ -1761,15 +1767,15 @@ function registerize(ast) {
level--;
}
traverse(fun, function possibilifier(node, type) {
- if (type == 'name') {
+ if (type === 'name') {
var name = node[1];
if (localVars[name]) {
if (!varUses[name]) varUses[name] = 0;
varUses[name]++;
if (possibles[name] && !varLevels[name]) unoptimizables[name] = 1; // used outside of simple domination
}
- } else if (type == 'assign' && typeof node[1] != 'string') {
- if (node[2] && node[2][0] == 'name') {
+ } else if (type === 'assign' && typeof node[1] != 'string') {
+ if (node[2] && node[2][0] === 'name') {
var name = node[2][1];
// if local and not yet used, this might be optimizable if we dominate
// all other uses
@@ -1877,11 +1883,11 @@ function registerize(ast) {
}
varUses[name]--;
assert(varUses[name] >= 0);
- if (varUses[name] == 0) {
+ if (varUses[name] === 0) {
if (optimizables[name]) delete activeOptimizables[name];
// If we are not in a loop, or we are optimizable and not bound to a loop
// (we might have been in one but left it), we can free the register now.
- if (loops == 0 || (optimizables[name] && !optimizableLoops[name])) {
+ if (loops === 0 || (optimizables[name] && !optimizableLoops[name])) {
// free register
freeRegs.push(reg);
} else {
@@ -1894,7 +1900,7 @@ function registerize(ast) {
return true;
}
traverse(fun, function(node, type) { // XXX we rely on traversal order being the same as execution order here
- if (type == 'name') {
+ if (type === 'name') {
var name = node[1];
if (decUse(name)) {
node[1] = fullNames[varRegs[name]];
@@ -2001,10 +2007,10 @@ var IGNORABLE_ELIMINATOR_SCAN_NODES = set('num', 'toplevel', 'string', 'break',
var ABORTING_ELIMINATOR_SCAN_NODES = set('new', 'object', 'function', 'defun', 'for', 'while', 'array', 'throw'); // we could handle some of these, TODO, but nontrivial (e.g. for while, the condition is hit multiple times after the body)
function isTempDoublePtrAccess(node) { // these are used in bitcasts; they are not really affecting memory, and should cause no invalidation
- assert(node[0] == 'sub');
- return (node[2][0] == 'name' && node[2][1] == 'tempDoublePtr') ||
- (node[2][0] == 'binary' && ((node[2][2][0] == 'name' && node[2][2][1] == 'tempDoublePtr') ||
- (node[2][3][0] == 'name' && node[2][3][1] == 'tempDoublePtr')));
+ assert(node[0] === 'sub');
+ return (node[2][0] === 'name' && node[2][1] === 'tempDoublePtr') ||
+ (node[2][0] === 'binary' && ((node[2][2][0] === 'name' && node[2][2][1] === 'tempDoublePtr') ||
+ (node[2][3][0] === 'name' && node[2][3][1] === 'tempDoublePtr')));
}
function eliminate(ast, memSafe) {
@@ -2049,9 +2055,9 @@ function eliminate(ast, memSafe) {
var name = node[1];
if (!uses[name]) uses[name] = 0;
uses[name]++;
- } else if (type == 'assign') {
+ } else if (type === 'assign') {
var target = node[2];
- if (target[0] == 'name') {
+ if (target[0] === 'name') {
var name = target[1];
if (!(name in definitions)) definitions[name] = 0;
definitions[name]++;
@@ -2063,7 +2069,7 @@ function eliminate(ast, memSafe) {
namings[name]++; // offset it here, this tracks the total times we are named
}
}
- } else if (type == 'switch') {
+ } else if (type === 'switch') {
hasSwitch = true;
}
});
@@ -2085,9 +2091,9 @@ function eliminate(ast, memSafe) {
if (name in varsToTryToRemove) delete varsToTryToRemove[name];
}
function processVariable(name) {
- if (definitions[name] == 1 && uses[name] == 1) {
+ if (definitions[name] === 1 && uses[name] === 1) {
potentials[name] = 1;
- } else if (uses[name] == 0 && (!definitions[name] || definitions[name] <= 1)) { // no uses, no def or 1 def (cannot operate on phis, and the llvm optimizer will remove unneeded phis anyhow)
+ } else if (uses[name] === 0 && (!definitions[name] || definitions[name] <= 1)) { // no uses, no def or 1 def (cannot operate on phis, and the llvm optimizer will remove unneeded phis anyhow)
var hasSideEffects = false;
var value = values[name];
if (value) {
@@ -2095,8 +2101,8 @@ function eliminate(ast, memSafe) {
// First, pattern-match
// (HEAP32[((tempDoublePtr)>>2)]=((HEAP32[(($_sroa_0_0__idx1)>>2)])|0),HEAP32[(((tempDoublePtr)+(4))>>2)]=((HEAP32[((($_sroa_0_0__idx1)+(4))>>2)])|0),(+(HEAPF64[(tempDoublePtr)>>3])))
// which has no side effects and is the special form of converting double to i64.
- if (!(value[0] == 'seq' && value[1][0] == 'assign' && value[1][2][0] == 'sub' && value[1][2][2][0] == 'binary' && value[1][2][2][1] == '>>' &&
- value[1][2][2][2][0] == 'name' && value[1][2][2][2][1] == 'tempDoublePtr')) {
+ if (!(value[0] === 'seq' && value[1][0] === 'assign' && value[1][2][0] === 'sub' && value[1][2][2][0] === 'binary' && value[1][2][2][1] === '>>' &&
+ value[1][2][2][2][0] === 'name' && value[1][2][2][2][1] === 'tempDoublePtr')) {
// If not that, then traverse and scan normally.
traverse(value, function(node, type) {
if (!(type in NODES_WITHOUT_ELIMINATION_SIDE_EFFECTS)) {
@@ -2114,7 +2120,7 @@ function eliminate(ast, memSafe) {
// appearing in it, and possibly eliminate again
if (value) {
traverse(value, function(node, type) {
- if (type == 'name') {
+ if (type === 'name') {
var name = node[1];
node[1] = ''; // we can remove this - it will never be shown, and should not be left to confuse us as we traverse
if (name in locals) {
@@ -2152,7 +2158,7 @@ function eliminate(ast, memSafe) {
var usesGlobals = false, usesMemory = false, deps = {}, doesCall = false;
var ignoreName = false; // one-time ignorings of names, as first op in sub and call
traverse(value, function(node, type) {
- if (type == 'name') {
+ if (type === 'name') {
if (!ignoreName) {
var name = node[1];
if (!(name in locals)) {
@@ -2164,10 +2170,10 @@ function eliminate(ast, memSafe) {
} else {
ignoreName = false;
}
- } else if (type == 'sub') {
+ } else if (type === 'sub') {
usesMemory = true;
ignoreName = true;
- } else if (type == 'call') {
+ } else if (type === 'call') {
usesGlobals = true;
usesMemory = true;
doesCall = true;
@@ -2255,10 +2261,10 @@ function eliminate(ast, memSafe) {
//nesting++; // printErr-related
//printErr(spaces(2*(nesting+1)) + 'trav: ' + JSON.stringify(node).substr(0, 50) + ' : ' + keys(tracked) + ' : ' + [allowTracking, ignoreSub, ignoreName]);
var type = node[0];
- if (type == 'assign') {
+ if (type === 'assign') {
var target = node[2];
var value = node[3];
- var nameTarget = target[0] == 'name';
+ var nameTarget = target[0] === 'name';
traverseInOrder(target, true, nameTarget); // evaluate left
traverseInOrder(value); // evaluate right
// do the actual assignment
@@ -2275,7 +2281,7 @@ function eliminate(ast, memSafe) {
}
// if we can track this name (that we assign into), and it has 0 uses and we want to remove its 'var'
// definition - then remove it right now, there is no later chance
- if (allowTracking && (name in varsToRemove) && uses[name] == 0) {
+ if (allowTracking && (name in varsToRemove) && uses[name] === 0) {
track(name, node[3], node);
doEliminate(name, node);
}
@@ -2290,13 +2296,13 @@ function eliminate(ast, memSafe) {
} else {
if (allowTracking) track(name, node[3], node);
}
- } else if (target[0] == 'sub') {
+ } else if (target[0] === 'sub') {
if (!isTempDoublePtrAccess(target) && !memoryInvalidated) {
invalidateMemory();
memoryInvalidated = true;
}
}
- } else if (type == 'sub') {
+ } else if (type === 'sub') {
traverseInOrder(node[1], false, !memSafe); // evaluate inner
traverseInOrder(node[2]); // evaluate outer
// ignoreSub means we are a write (happening later), not a read
@@ -2307,7 +2313,7 @@ function eliminate(ast, memSafe) {
callsInvalidated = true;
}
}
- } else if (type == 'var') {
+ } else if (type === 'var') {
var vars = node[1];
for (var i = 0; i < vars.length; i++) {
var name = vars[i][0];
@@ -2319,7 +2325,7 @@ function eliminate(ast, memSafe) {
} else {
invalidateByDep(name);
}
- if (vars.length == 1 && name in varsToTryToRemove && value) {
+ if (vars.length === 1 && name in varsToTryToRemove && value) {
// replace it in-place
value = ['stat', value];
node.length = value.length;
@@ -2330,7 +2336,7 @@ function eliminate(ast, memSafe) {
}
}
}
- } else if (type == 'binary') {
+ } else if (type === 'binary') {
var flipped = false;
if (node[1] in ASSOCIATIVE_BINARIES && !(node[2][0] in NAME_OR_NUM) && node[3][0] in NAME_OR_NUM) { // TODO recurse here?
// associatives like + and * can be reordered in the simple case of one of the sides being a name, since we assume they are all just numbers
@@ -2346,7 +2352,7 @@ function eliminate(ast, memSafe) {
node[2] = node[3];
node[3] = temp;
}
- } else if (type == 'name') {
+ } else if (type === 'name') {
if (!ignoreName) { // ignoreName means we are the name of something like a call or a sub - irrelevant for us
var name = node[1];
if (name in tracked) {
@@ -2356,10 +2362,10 @@ function eliminate(ast, memSafe) {
callsInvalidated = true;
}
}
- } else if (type == 'unary-prefix' || type == 'unary-postfix') {
+ } else if (type === 'unary-prefix' || type === 'unary-postfix') {
traverseInOrder(node[2]);
} else if (type in IGNORABLE_ELIMINATOR_SCAN_NODES) {
- } else if (type == 'call') {
+ } else if (type === 'call') {
traverseInOrder(node[1], false, true);
var args = node[2];
for (var i = 0; i < args.length; i++) {
@@ -2374,7 +2380,7 @@ function eliminate(ast, memSafe) {
invalidateMemory();
memoryInvalidated = true;
}
- } else if (type == 'if') {
+ } else if (type === 'if') {
if (allowTracking) {
traverseInOrder(node[1]); // can eliminate into condition, but nowhere else
if (!callsInvalidated) { // invalidate calls, since we cannot eliminate them into an if that may not execute!
@@ -2390,38 +2396,38 @@ function eliminate(ast, memSafe) {
} else {
tracked = {};
}
- } else if (type == 'block') {
+ } else if (type === 'block') {
var stats = node[1];
if (stats) {
for (var i = 0; i < stats.length; i++) {
traverseInOrder(stats[i]);
}
}
- } else if (type == 'stat') {
+ } else if (type === 'stat') {
traverseInOrder(node[1]);
- } else if (type == 'label') {
+ } else if (type === 'label') {
traverseInOrder(node[2]);
- } else if (type == 'seq') {
+ } else if (type === 'seq') {
traverseInOrder(node[1]);
traverseInOrder(node[2]);
- } else if (type == 'do') {
- if (node[1][0] == 'num' && node[1][1] == 0) { // one-time loop
+ } else if (type === 'do') {
+ if (node[1][0] === 'num' && node[1][1] === 0) { // one-time loop
traverseInOrder(node[2]);
} else {
tracked = {};
}
- } else if (type == 'return') {
+ } else if (type === 'return') {
if (node[1]) traverseInOrder(node[1]);
- } else if (type == 'conditional') {
+ } else if (type === 'conditional') {
traverseInOrder(node[1]);
traverseInOrder(node[2]);
traverseInOrder(node[3]);
- } else if (type == 'switch') {
+ } else if (type === 'switch') {
traverseInOrder(node[1]);
var cases = node[2];
for (var i = 0; i < cases.length; i++) {
var c = cases[i];
- assert(c[0] === null || c[0][0] == 'num' || (c[0][0] == 'unary-prefix' && c[0][2][0] == 'num'));
+ assert(c[0] === null || c[0][0] === 'num' || (c[0][0] === 'unary-prefix' && c[0][2][0] === 'num'));
var stats = c[1];
for (var j = 0; j < stats.length; j++) {
traverseInOrder(stats[j]);
@@ -2440,7 +2446,7 @@ function eliminate(ast, memSafe) {
}
//var eliminationLimit = 0; // used for debugging purposes
function doEliminate(name, node) {
- //if (eliminationLimit == 0) return;
+ //if (eliminationLimit === 0) return;
//eliminationLimit--;
//printErr('elim!!!!! ' + name);
// yes, eliminate!
@@ -2449,9 +2455,9 @@ function eliminate(ast, memSafe) {
delete tracked[name];
var defNode = info.defNode;
if (!sideEffectFree[name]) {
- if (defNode[0] == 'var') {
+ if (defNode[0] === 'var') {
defNode[1].forEach(function(pair) {
- if (pair[0] == name) {
+ if (pair[0] === name) {
value = pair[1];
}
});
@@ -2477,7 +2483,7 @@ function eliminate(ast, memSafe) {
}
traverse(func, function(block) {
// Look for statements, including while-switch pattern
- var stats = getStatements(block) || (block[0] == 'while' && block[2][0] == 'switch' ? [block[2]] : stats);
+ var stats = getStatements(block) || (block[0] === 'while' && block[2][0] === 'switch' ? [block[2]] : stats);
if (!stats) return;
//printErr('Stats: ' + JSON.stringify(stats).substr(0,100));
tracked = {};
@@ -2486,7 +2492,7 @@ function eliminate(ast, memSafe) {
var node = stats[i];
//printErr('StatBlock[' + i + '] => ' + JSON.stringify(node).substr(0,100));
var type = node[0];
- if (type == 'stat') {
+ if (type === 'stat') {
node = node[1];
type = node[0];
}
@@ -2507,7 +2513,7 @@ function eliminate(ast, memSafe) {
// pre
if (type === 'var') {
node[1] = node[1].filter(function(pair) { return !varsToRemove[pair[0]] });
- if (node[1].length == 0) {
+ if (node[1].length === 0) {
// wipe out an empty |var;|
node[0] = 'toplevel';
node[1] = [];
@@ -2515,7 +2521,7 @@ function eliminate(ast, memSafe) {
}
}, function(node, type) {
// post
- if (type == 'name') {
+ if (type === 'name') {
var name = node[1];
if (name in helperReplacements) {
node[1] = helperReplacements[name];
@@ -2527,30 +2533,30 @@ function eliminate(ast, memSafe) {
} else {
seenUses[name]++;
}
- } else if (type == 'while') {
+ } else if (type === 'while') {
// try to remove loop helper variables specifically
var stats = node[2][1];
var last = stats[stats.length-1];
- if (last && last[0] == 'if' && last[2][0] == 'block' && last[3] && last[3][0] == 'block') {
+ if (last && last[0] === 'if' && last[2][0] === 'block' && last[3] && last[3][0] === 'block') {
var ifTrue = last[2];
var ifFalse = last[3];
var flip = false;
- if (ifFalse[1][0] && ifFalse[1][0][0] == 'break') { // canonicalize break in the if
+ if (ifFalse[1][0] && ifFalse[1][0][0] === 'break') { // canonicalize break in the if
var temp = ifFalse;
ifFalse = ifTrue;
ifTrue = temp;
flip = true;
}
- if (ifTrue[1][0] && ifTrue[1][0][0] == 'break') {
+ if (ifTrue[1][0] && ifTrue[1][0][0] === 'break') {
var assigns = ifFalse[1];
var loopers = [], helpers = [];
for (var i = 0; i < assigns.length; i++) {
- if (assigns[i][0] == 'stat' && assigns[i][1][0] == 'assign') {
+ if (assigns[i][0] === 'stat' && assigns[i][1][0] === 'assign') {
var assign = assigns[i][1];
- if (assign[1] === true && assign[2][0] == 'name' && assign[3][0] == 'name') {
+ if (assign[1] === true && assign[2][0] === 'name' && assign[3][0] === 'name') {
var looper = assign[2][1];
var helper = assign[3][1];
- if (definitions[helper] == 1 && seenUses[looper] == namings[looper] &&
+ if (definitions[helper] === 1 && seenUses[looper] === namings[looper] &&
!helperReplacements[helper] && !helperReplacements[looper]) {
loopers.push(looper);
helpers.push(helper);
@@ -2566,11 +2572,11 @@ function eliminate(ast, memSafe) {
var found = -1;
for (var i = stats.length-2; i >= 0; i--) {
var curr = stats[i];
- if (curr[0] == 'stat' && curr[1][0] == 'assign') {
+ if (curr[0] === 'stat' && curr[1][0] === 'assign') {
var currAssign = curr[1];
- if (currAssign[1] === true && currAssign[2][0] == 'name') {
+ if (currAssign[1] === true && currAssign[2][0] === 'name') {
var to = currAssign[2][1];
- if (to == helper) {
+ if (to === helper) {
found = i;
break;
}
@@ -2582,7 +2588,7 @@ function eliminate(ast, memSafe) {
for (var i = found+1; i < stats.length && !looperUsed; i++) {
var curr = i < stats.length-1 ? stats[i] : last[1]; // on the last line, just look in the condition
traverse(curr, function(node, type) {
- if (type == 'name' && node[1] == looper) {
+ if (type === 'name' && node[1] === looper) {
looperUsed = true;
return true;
}
@@ -2592,7 +2598,7 @@ function eliminate(ast, memSafe) {
}
for (var l = 0; l < helpers.length; l++) {
for (var k = 0; k < helpers.length; k++) {
- if (l != k && helpers[l] == helpers[k]) return; // it is complicated to handle a shared helper, abort
+ if (l != k && helpers[l] === helpers[k]) return; // it is complicated to handle a shared helper, abort
}
}
// hurrah! this is safe to do
@@ -2602,7 +2608,7 @@ function eliminate(ast, memSafe) {
var helper = helpers[l];
varsToRemove[helper] = 2;
traverse(node, function(node, type) { // replace all appearances of helper with looper
- if (type == 'name' && node[1] == helper) node[1] = looper;
+ if (type === 'name' && node[1] === helper) node[1] = looper;
});
helperReplacements[helper] = looper; // replace all future appearances of helper with looper
helperReplacements[looper] = looper; // avoid any further attempts to optimize looper in this manner (seenUses is wrong anyhow, too)
@@ -2620,7 +2626,7 @@ function eliminate(ast, memSafe) {
if (asm) {
for (var v in varsToRemove) {
- if (varsToRemove[v] == 2) delete asmData.vars[v];
+ if (varsToRemove[v] === 2) delete asmData.vars[v];
}
denormalizeAsm(func, asmData);
}
@@ -2634,14 +2640,14 @@ function eliminate(ast, memSafe) {
this.run = function() {
traverse(this.node, function(node, type) {
- if (type === 'binary' && node[1] == '+') {
+ if (type === 'binary' && node[1] === '+') {
var names = [];
var num = 0;
var has_num = false;
var fail = false;
traverse(node, function(subNode, subType) {
if (subType === 'binary') {
- if (subNode[1] != '+') {
+ if (subNode[1] !== '+') {
fail = true;
return false;
}
@@ -2682,7 +2688,7 @@ function minifyGlobals(ast) {
var first = true; // do not minify initial 'var asm ='
// find the globals
traverse(ast, function(node, type) {
- if (type == 'var') {
+ if (type === 'var') {
if (first) {
first = false;
return;
@@ -2703,7 +2709,7 @@ function minifyGlobals(ast) {
}
// apply minification
traverse(ast, function(node, type) {
- if (type == 'name') {
+ if (type === 'name') {
var name = node[1];
if (name in minified) {
node[1] = minified[name];
@@ -2771,9 +2777,9 @@ function relocate(ast) {
// Change +5 to DOT$ZERO(5). We then textually change 5 to 5.0 (uglify's ast cannot differentiate between 5 and 5.0 directly)
function prepDotZero(ast) {
traverse(ast, function(node, type) {
- if (type == 'unary-prefix' && node[1] == '+') {
- if (node[2][0] == 'num' ||
- (node[2][0] == 'unary-prefix' && node[2][1] == '-' && node[2][2][0] == 'num')) {
+ if (type === 'unary-prefix' && node[1] === '+') {
+ if (node[2][0] === 'num' ||
+ (node[2][0] === 'unary-prefix' && node[2][1] === '-' && node[2][2][0] === 'num')) {
return ['call', ['name', 'DOT$ZERO'], [node[2]]];
}
}
@@ -2781,7 +2787,7 @@ function prepDotZero(ast) {
}
function fixDotZero(js) {
return js.replace(/DOT\$ZERO\(([-+]?(0x)?[0-9a-f]*\.?[0-9]+([eE][-+]?[0-9]+)?)\)/g, function(m, num) {
- if (num.substr(0, 2) == '0x' || num.substr(0, 3) == '-0x') {
+ if (num.substr(0, 2) === '0x' || num.substr(0, 3) === '-0x') {
return eval(num) + '.0';
}
if (num.indexOf('.') >= 0) return num;
@@ -2796,11 +2802,11 @@ function asmLoopOptimizer(ast) {
// This is at the end of the pipeline, we can assume all other optimizations are done, and we modify loops
// into shapes that might confuse other passes
traverse(fun, function(node, type) {
- if (type == 'while' && node[1][0] == 'num' && node[1][1] == 1 && node[2][0] == 'block') {
+ if (type === 'while' && node[1][0] === 'num' && node[1][1] === 1 && node[2][0] === 'block') {
// while (1) { .. if (..) { break } } ==> do { .. } while(..)
var stats = node[2][1];
var last = stats[stats.length-1];
- if (last && last[0] == 'if' && !last[3] && last[2][0] == 'block' && last[2][1][0][0] == 'break' && !last[2][1][0][1]) {
+ if (last && last[0] === 'if' && !last[3] && last[2][0] === 'block' && last[2][1][0] && last[2][1][0][0] === 'break' && !last[2][1][0][1]) {
var conditionToBreak = last[1];
stats.pop();
node[0] = 'do';
@@ -2843,6 +2849,14 @@ var passes = {
var suffix = '';
+arguments_ = arguments_.filter(function (arg) {
+ if (!/^--/.test(arg)) return true;
+
+ if (arg === '--debug') debug = true;
+ else throw new Error('Unrecognized flag: ' + arg);
+});
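+// Illustrative note (not in the original source): given
+// arguments_ = ['input.js', 'registerize', '--debug'], the filter above sets
+// debug = true and leaves arguments_ = ['input.js', 'registerize'].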
+
+
var src = read(arguments_[0]);
var ast = srcToAst(src);
//printErr(JSON.stringify(ast)); throw 1;
@@ -2851,6 +2865,7 @@ var extraInfoStart = src.indexOf('// EXTRA_INFO:')
if (extraInfoStart > 0) extraInfo = JSON.parse(src.substr(extraInfoStart + 14));
//printErr(JSON.stringify(extraInfo));
+
arguments_.slice(1).forEach(function(arg) {
passes[arg](ast);
});
diff --git a/tools/js_optimizer.py b/tools/js_optimizer.py
index 2ecf3cfb..905ae835 100644
--- a/tools/js_optimizer.py
+++ b/tools/js_optimizer.py
@@ -57,7 +57,7 @@ class Minifier:
if curr not in INVALID_3: self.names.append(curr)
#print >> sys.stderr, self.names
- def minify_shell(self, shell, minify_whitespace):
+ def minify_shell(self, shell, minify_whitespace, source_map=False):
#print >> sys.stderr, "MINIFY SHELL 1111111111", shell, "\n222222222222222"
# Run through js-optimizer.js to find and minify the global symbols
# We send it the globals, which it parses at the proper time. JS decides how
@@ -76,7 +76,12 @@ class Minifier:
f.write('// EXTRA_INFO:' + self.serialize())
f.close()
- output = subprocess.Popen(self.js_engine + [JS_OPTIMIZER, temp_file, 'minifyGlobals', 'noPrintMetadata'] + (['minifyWhitespace'] if minify_whitespace else []), stdout=subprocess.PIPE).communicate()[0]
+ output = subprocess.Popen(self.js_engine +
+ [JS_OPTIMIZER, temp_file, 'minifyGlobals', 'noPrintMetadata'] +
+ (['minifyWhitespace'] if minify_whitespace else []) +
+ (['--debug'] if source_map else []),
+ stdout=subprocess.PIPE).communicate()[0]
+
assert len(output) > 0 and not output.startswith('Assertion failed'), 'Error in js optimizer: ' + output
#print >> sys.stderr, "minified SHELL 3333333333333333", output, "\n44444444444444444444"
code, metadata = output.split('// EXTRA_INFO:')
@@ -102,7 +107,7 @@ def run_on_chunk(command):
if DEBUG and not shared.WINDOWS: print >> sys.stderr, '.' # Skip debug progress indicator on Windows, since it doesn't buffer well with multiple threads printing to console.
return filename
-def run_on_js(filename, passes, js_engine, jcache):
+def run_on_js(filename, passes, js_engine, jcache, source_map=False):
if isinstance(jcache, bool) and jcache: jcache = shared.JCache
if jcache: shared.JCache.ensure()
@@ -176,7 +181,7 @@ EMSCRIPTEN_FUNCS();
js = js[start_funcs + len(start_funcs_marker):end_funcs]
minifier = Minifier(js, js_engine)
- asm_shell_pre, asm_shell_post = minifier.minify_shell(asm_shell, 'minifyWhitespace' in passes).split('EMSCRIPTEN_FUNCS();');
+ asm_shell_pre, asm_shell_post = minifier.minify_shell(asm_shell, 'minifyWhitespace' in passes, source_map).split('EMSCRIPTEN_FUNCS();');
asm_shell_post = asm_shell_post.replace('});', '})');
pre += asm_shell_pre + '\n' + start_funcs_marker
post = end_funcs_marker + asm_shell_post + post
@@ -212,7 +217,9 @@ EMSCRIPTEN_FUNCS();
total_size = len(js)
js = None
- cores = int(os.environ.get('EMCC_CORES') or multiprocessing.cpu_count())
+ # if we are making source maps, we want our debug numbering to start from the
+ # top of the file, so avoid breaking the JS into chunks
+ cores = 1 if source_map else int(os.environ.get('EMCC_CORES') or multiprocessing.cpu_count())
intended_num_chunks = int(round(cores * NUM_CHUNKS_PER_CORE))
chunk_size = min(MAX_CHUNK_SIZE, max(MIN_CHUNK_SIZE, total_size / intended_num_chunks))
@@ -252,7 +259,9 @@ EMSCRIPTEN_FUNCS();
if len(filenames) > 0:
# XXX Use '--nocrankshaft' to disable crankshaft to work around v8 bug 1895, needed for older v8/node (node 0.6.8+ should be ok)
- commands = map(lambda filename: js_engine + [JS_OPTIMIZER, filename, 'noPrintMetadata'] + passes, filenames)
+ commands = map(lambda filename: js_engine +
+ [JS_OPTIMIZER, filename, 'noPrintMetadata'] +
+ (['--debug'] if source_map else []) + passes, filenames)
#print [' '.join(command) for command in commands]
cores = min(cores, filenames)
@@ -320,6 +329,6 @@ EMSCRIPTEN_FUNCS();
return filename
-def run(filename, passes, js_engine, jcache):
- return temp_files.run_and_clean(lambda: run_on_js(filename, passes, js_engine, jcache))
+def run(filename, passes, js_engine, jcache, source_map=False):
+ return temp_files.run_and_clean(lambda: run_on_js(filename, passes, js_engine, jcache, source_map))
diff --git a/tools/node_modules/source-map/.npmignore b/tools/node_modules/source-map/.npmignore
new file mode 100644
index 00000000..3dddf3f6
--- /dev/null
+++ b/tools/node_modules/source-map/.npmignore
@@ -0,0 +1,2 @@
+dist/*
+node_modules/*
diff --git a/tools/node_modules/source-map/.travis.yml b/tools/node_modules/source-map/.travis.yml
new file mode 100644
index 00000000..ddc9c4f9
--- /dev/null
+++ b/tools/node_modules/source-map/.travis.yml
@@ -0,0 +1,4 @@
+language: node_js
+node_js:
+ - 0.8
+ - "0.10" \ No newline at end of file
diff --git a/tools/node_modules/source-map/CHANGELOG.md b/tools/node_modules/source-map/CHANGELOG.md
new file mode 100644
index 00000000..140fe0cf
--- /dev/null
+++ b/tools/node_modules/source-map/CHANGELOG.md
@@ -0,0 +1,58 @@
+# Change Log
+
+## 0.1.22
+
+* Ignore duplicate mappings in SourceMapGenerator. Fixes github issue 21.
+
+## 0.1.21
+
+* Fixed handling of sources that start with a slash so that they are relative to
+ the source root's host.
+
+## 0.1.20
+
+* Fixed github issue #43: absolute URLs aren't joined with the source root
+ anymore.
+
+## 0.1.19
+
+* Using Travis CI to run tests.
+
+## 0.1.18
+
+* Fixed a bug in the handling of sourceRoot.
+
+## 0.1.17
+
+* Added SourceNode.fromStringWithSourceMap.
+
+## 0.1.16
+
+* Added missing documentation.
+
+* Fixed the generating of empty mappings in SourceNode.
+
+## 0.1.15
+
+* Added SourceMapGenerator.applySourceMap.
+
+## 0.1.14
+
+* The sourceRoot is now handled consistently.
+
+## 0.1.13
+
+* Added SourceMapGenerator.fromSourceMap.
+
+## 0.1.12
+
+* SourceNode now generates empty mappings too.
+
+## 0.1.11
+
+* Added name support to SourceNode.
+
+## 0.1.10
+
+* Added sourcesContent support to the consumer and generator.
+
diff --git a/tools/node_modules/source-map/LICENSE b/tools/node_modules/source-map/LICENSE
new file mode 100644
index 00000000..ed1b7cf2
--- /dev/null
+++ b/tools/node_modules/source-map/LICENSE
@@ -0,0 +1,28 @@
+
+Copyright (c) 2009-2011, Mozilla Foundation and contributors
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+* Neither the names of the Mozilla Foundation nor the names of project
+ contributors may be used to endorse or promote products derived from this
+ software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/tools/node_modules/source-map/Makefile.dryice.js b/tools/node_modules/source-map/Makefile.dryice.js
new file mode 100644
index 00000000..8973ac2c
--- /dev/null
+++ b/tools/node_modules/source-map/Makefile.dryice.js
@@ -0,0 +1,166 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+var path = require('path');
+var fs = require('fs');
+var copy = require('dryice').copy;
+
+function removeAmdefine(src) {
+ src = String(src).replace(
+ /if\s*\(typeof\s*define\s*!==\s*'function'\)\s*{\s*var\s*define\s*=\s*require\('amdefine'\)\(module\);\s*}\s*/g,
+ '');
+ src = src.replace(
+ /\b(define\(.*)('amdefine',?)/gm,
+ '$1');
+ return src;
+}
+removeAmdefine.onRead = true;
+
+function makeNonRelative(src) {
+ return src
+ .replace(/require\('.\//g, 'require(\'source-map/')
+ .replace(/\.\.\/\.\.\/lib\//g, '');
+}
+makeNonRelative.onRead = true;
+
+function buildBrowser() {
+ console.log('\nCreating dist/source-map.js');
+
+ var project = copy.createCommonJsProject({
+ roots: [ path.join(__dirname, 'lib') ]
+ });
+
+ copy({
+ source: [
+ 'build/mini-require.js',
+ {
+ project: project,
+ require: [ 'source-map/source-map-generator',
+ 'source-map/source-map-consumer',
+ 'source-map/source-node']
+ },
+ 'build/suffix-browser.js'
+ ],
+ filter: [
+ copy.filter.moduleDefines,
+ removeAmdefine
+ ],
+ dest: 'dist/source-map.js'
+ });
+}
+
+function buildBrowserMin() {
+ console.log('\nCreating dist/source-map.min.js');
+
+ copy({
+ source: 'dist/source-map.js',
+ filter: copy.filter.uglifyjs,
+ dest: 'dist/source-map.min.js'
+ });
+}
+
+function buildFirefox() {
+ console.log('\nCreating dist/SourceMap.jsm');
+
+ var project = copy.createCommonJsProject({
+ roots: [ path.join(__dirname, 'lib') ]
+ });
+
+ copy({
+ source: [
+ 'build/prefix-source-map.jsm',
+ {
+ project: project,
+ require: [ 'source-map/source-map-consumer',
+ 'source-map/source-map-generator',
+ 'source-map/source-node' ]
+ },
+ 'build/suffix-source-map.jsm'
+ ],
+ filter: [
+ copy.filter.moduleDefines,
+ removeAmdefine,
+ makeNonRelative
+ ],
+ dest: 'dist/SourceMap.jsm'
+ });
+
+ // Create dist/test/Utils.jsm
+ console.log('\nCreating dist/test/Utils.jsm');
+
+ project = copy.createCommonJsProject({
+ roots: [ __dirname, path.join(__dirname, 'lib') ]
+ });
+
+ copy({
+ source: [
+ 'build/prefix-utils.jsm',
+ 'build/assert-shim.js',
+ {
+ project: project,
+ require: [ 'test/source-map/util' ]
+ },
+ 'build/suffix-utils.jsm'
+ ],
+ filter: [
+ copy.filter.moduleDefines,
+ removeAmdefine,
+ makeNonRelative
+ ],
+ dest: 'dist/test/Utils.jsm'
+ });
+
+ function isTestFile(f) {
+ return /^test\-.*?\.js/.test(f);
+ }
+
+ var testFiles = fs.readdirSync(path.join(__dirname, 'test', 'source-map')).filter(isTestFile);
+
+ testFiles.forEach(function (testFile) {
+ console.log('\nCreating', path.join('dist', 'test', testFile.replace(/\-/g, '_')));
+
+ copy({
+ source: [
+ 'build/test-prefix.js',
+ path.join('test', 'source-map', testFile),
+ 'build/test-suffix.js'
+ ],
+ filter: [
+ removeAmdefine,
+ makeNonRelative,
+ function (input, source) {
+ return input.replace('define(',
+ 'define("'
+ + path.join('test', 'source-map', testFile.replace(/\.js$/, ''))
+ + '", ["require", "exports", "module"], ');
+ },
+ function (input, source) {
+ return input.replace('{THIS_MODULE}', function () {
+ return "test/source-map/" + testFile.replace(/\.js$/, '');
+ });
+ }
+ ],
+ dest: path.join('dist', 'test', testFile.replace(/\-/g, '_'))
+ });
+ });
+}
+
+function ensureDir(name) {
+ var dirExists = false;
+ try {
+ dirExists = fs.statSync(name).isDirectory();
+ } catch (err) {}
+
+ if (!dirExists) {
+ fs.mkdirSync(name, 0777);
+ }
+}
+
+ensureDir("dist");
+ensureDir("dist/test");
+buildFirefox();
+buildBrowser();
+buildBrowserMin();
diff --git a/tools/node_modules/source-map/README.md b/tools/node_modules/source-map/README.md
new file mode 100644
index 00000000..95e6787a
--- /dev/null
+++ b/tools/node_modules/source-map/README.md
@@ -0,0 +1,347 @@
+# Source Map
+
+This is a library to generate and consume the source map format
+[described here][format].
+
+[Learn more here][feature].
+
+This library was written in the Asynchronous Module Definition
+format. It should work in the following environments:
+
+* Modern Browsers (either after the build, or with an AMD loader such as
+ RequireJS)
+
+* Inside Firefox (as a JSM file, after the build)
+
+* With NodeJS versions 0.8.X and higher
+
+## Installing with NPM (for use with NodeJS)
+
+Simply
+
+ $ npm install source-map
+
+Or, if you'd like to hack on this library and have it installed via npm so you
+can try out your changes:
+
+ $ git clone https://fitzgen@github.com/mozilla/source-map.git
+ $ cd source-map
+ $ npm link .
+
+## Building from Source (for everywhere else)
+
+Install Node and then run
+
+ $ git clone https://fitzgen@github.com/mozilla/source-map.git
+ $ cd source-map
+ $ npm link .
+
+Next, run
+
+ $ node Makefile.dryice.js
+
+This should create the following files:
+
+* `dist/source-map.js` - The unminified browser version.
+
+* `dist/source-map.min.js` - The minified browser version.
+
+* `dist/SourceMap.jsm` - The JavaScript Module for inclusion in Firefox
+ source.
+
+## API
+
+Get a reference to the module:
+
+ // NodeJS
+ var sourceMap = require('source-map');
+
+ // Browser builds
+ var sourceMap = window.sourceMap;
+
+ // Inside Firefox
+ let sourceMap = {};
+ Components.utils.import('resource:///modules/devtools/SourceMap.jsm', sourceMap);
+
+### SourceMapConsumer
+
+A SourceMapConsumer instance represents a parsed source map which we can query
+for information about the original file positions by giving it a file position
+in the generated source.
+
+#### new SourceMapConsumer(rawSourceMap)
+
+The only parameter is the raw source map (either as a string which can be
+`JSON.parse`'d, or an object). According to the spec, source maps have the
+following attributes:
+
+* `version`: Which version of the source map spec this map is following.
+
+* `sources`: An array of URLs to the original source files.
+
+* `names`: An array of identifiers which can be referenced by individual
+ mappings.
+
+* `sourceRoot`: Optional. The URL root from which all sources are relative.
+
+* `sourcesContent`: Optional. An array of contents of the original source files.
+
+* `mappings`: A string of base64 VLQs which contain the actual mappings.
+
+* `file`: The generated filename this source map is associated with.
+
+#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)
+
+Returns the original source, line, and column information for the generated
+source's line and column positions provided. The only argument is an object with
+the following properties:
+
+* `line`: The line number in the generated source.
+
+* `column`: The column number in the generated source.
+
+and an object is returned with the following properties:
+
+* `source`: The original source file, or null if this information is not
+ available.
+
+* `line`: The line number in the original source, or null if this information is
+ not available.
+
+* `column`: The column number in the original source, or null if this
+ information is not available.
+
+* `name`: The original identifier, or null if this information is not available.
+
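+For example, a minimal illustrative sketch (not from the upstream docs; the
+`rawSourceMap` value is assumed to come from your own build):
+
+    var sourceMap = require('source-map');  // NodeJS, as above
+    var smc = new sourceMap.SourceMapConsumer(rawSourceMap);
+    var pos = smc.originalPositionFor({ line: 2, column: 10 });
+    // pos is of the form { source: ..., line: ..., column: ..., name: ... }
+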
+#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)
+
+Returns the generated line and column information for the original source,
+line, and column positions provided. The only argument is an object with
+the following properties:
+
+* `source`: The filename of the original source.
+
+* `line`: The line number in the original source.
+
+* `column`: The column number in the original source.
+
+and an object is returned with the following properties:
+
+* `line`: The line number in the generated source, or null.
+
+* `column`: The column number in the generated source, or null.
+
+#### SourceMapConsumer.prototype.sourceContentFor(source)
+
+Returns the original source content for the source provided. The only
+argument is the URL of the original source file.
+
+#### SourceMapConsumer.prototype.eachMapping(callback, context, order)
+
+Iterate over each mapping between an original source/line/column and a
+generated line/column in this source map.
+
+* `callback`: The function that is called with each mapping.
+
+* `context`: Optional. If specified, this object will be the value of `this`
+ every time that `callback` is called.
+
+* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or
+ `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to iterate over
+ the mappings sorted by the generated file's line/column order or the
+ original's source/line/column order, respectively. Defaults to
+ `SourceMapConsumer.GENERATED_ORDER`.
+
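+A minimal sketch of iterating over all mappings, continuing the `smc` consumer
+from the sketch above (each `mapping` describes one generated/original pair):
+
+    smc.eachMapping(function (mapping) {
+      // inspect or collect each mapping here
+    }, null, sourceMap.SourceMapConsumer.GENERATED_ORDER);
+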
+### SourceMapGenerator
+
+An instance of the SourceMapGenerator represents a source map which is being
+built incrementally.
+
+#### new SourceMapGenerator(startOfSourceMap)
+
+To create a new one, you must pass an object with the following properties:
+
+* `file`: The filename of the generated source that this source map is
+ associated with.
+
+* `sourceRoot`: An optional root for all relative URLs in this source map.
+
+#### SourceMapGenerator.fromSourceMap(sourceMapConsumer)
+
+Creates a new SourceMapGenerator based on a SourceMapConsumer
+
+* `sourceMapConsumer` The SourceMap.
+
+#### SourceMapGenerator.prototype.addMapping(mapping)
+
+Add a single mapping from original source line and column to the generated
+source's line and column for this source map being created. The mapping object
+should have the following properties:
+
+* `generated`: An object with the generated line and column positions.
+
+* `original`: An object with the original line and column positions.
+
+* `source`: The original source file (relative to the sourceRoot).
+
+* `name`: An optional original token name for this mapping.
+
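+A minimal sketch (the file names and positions here are placeholders):
+
+    var generator = new sourceMap.SourceMapGenerator({ file: 'out.js' });
+    generator.addMapping({
+      generated: { line: 10, column: 35 },
+      original:  { line: 2,  column: 10 },
+      source: 'foo.js',
+      name: 'myName'
+    });
+    var json = generator.toString();
+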
+#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)
+
+Set the source content for an original source file.
+
+* `sourceFile` the URL of the original source file.
+
+* `sourceContent` the content of the source file.
+
+#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile])
+
+Applies a SourceMap for a source file to the SourceMap.
+Each mapping to the supplied source file is rewritten using the
+supplied SourceMap. Note: The resolution for the resulting mappings
+is the minimum of this map and the supplied map.
+
+* `sourceMapConsumer`: The SourceMap to be applied.
+
+* `sourceFile`: Optional. The filename of the source file.
+ If omitted, sourceMapConsumer.file will be used.
+
+#### SourceMapGenerator.prototype.toString()
+
+Renders the source map being generated to a string.
+
+### SourceNode
+
+SourceNodes provide a way to abstract over interpolating and/or concatenating
+snippets of generated JavaScript source code, while maintaining the line and
+column information associated between those snippets and the original source
+code. This is useful as the final intermediate representation a compiler might
+use before outputting the generated JS and source map.
+
+#### new SourceNode(line, column, source[, chunk[, name]])
+
+* `line`: The original line number associated with this source node, or null if
+ it isn't associated with an original line.
+
+* `column`: The original column number associated with this source node, or null
+ if it isn't associated with an original column.
+
+* `source`: The original source's filename.
+
+* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see
+ below.
+
+* `name`: Optional. The original identifier.
+
+#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer)
+
+Creates a SourceNode from generated code and a SourceMapConsumer.
+
+* `code`: The generated code
+
+* `sourceMapConsumer` The SourceMap for the generated code
+
+#### SourceNode.prototype.add(chunk)
+
+Add a chunk of generated JS to this source node.
+
+* `chunk`: A string snippet of generated JS code, another instance of
+ `SourceNode`, or an array where each member is one of those things.
+
+#### SourceNode.prototype.prepend(chunk)
+
+Prepend a chunk of generated JS to this source node.
+
+* `chunk`: A string snippet of generated JS code, another instance of
+ `SourceNode`, or an array where each member is one of those things.
+
+#### SourceNode.prototype.setSourceContent(sourceFile, sourceContent)
+
+Set the source content for a source file. This will be added to the
+`SourceMap` in the `sourcesContent` field.
+
+* `sourceFile`: The filename of the source file
+
+* `sourceContent`: The content of the source file
+
+#### SourceNode.prototype.walk(fn)
+
+Walk over the tree of JS snippets in this node and its children. The walking
+function is called once for each snippet of JS and is passed that snippet and
+its original associated source's line/column location.
+
+* `fn`: The traversal function.
+
+#### SourceNode.prototype.walkSourceContents(fn)
+
+Walk over the tree of SourceNodes. The walking function is called for each
+source file content and is passed the filename and source content.
+
+* `fn`: The traversal function.
+
+#### SourceNode.prototype.join(sep)
+
+Like `Array.prototype.join` except for SourceNodes. Inserts the separator
+between each of this source node's children.
+
+* `sep`: The separator.
+
+#### SourceNode.prototype.replaceRight(pattern, replacement)
+
+Call `String.prototype.replace` on the very right-most source snippet. Useful
+for trimming whitespace from the end of a source node, etc.
+
+* `pattern`: The pattern to replace.
+
+* `replacement`: The thing to replace the pattern with.
+
+#### SourceNode.prototype.toString()
+
+Return the string representation of this source node. Walks over the tree and
+concatenates all the various snippets together to one string.
+
+### SourceNode.prototype.toStringWithSourceMap(startOfSourceMap)
+
+Returns the string representation of this tree of source nodes, plus a
+SourceMapGenerator which contains all the mappings between the generated and
+original sources.
+
+The arguments are the same as those to `new SourceMapGenerator`.
+
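+A minimal sketch of building output with SourceNode (positions and file names
+are placeholders; the returned object pairs the generated code with its
+SourceMapGenerator):
+
+    var SourceNode = sourceMap.SourceNode;
+    var node = new SourceNode(1, 0, 'foo.js', 'first();\n');
+    node.add(new SourceNode(2, 0, 'foo.js', 'second();\n'));
+    var output = node.toStringWithSourceMap({ file: 'out.js' });
+    // output.code is the generated JS, output.map the SourceMapGenerator
+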
+## Tests
+
+[![Build Status](https://travis-ci.org/mozilla/source-map.png?branch=master)](https://travis-ci.org/mozilla/source-map)
+
+Install NodeJS version 0.8.0 or greater, then run `node test/run-tests.js`.
+
+To add new tests, create a new file named `test/test-<your new test name>.js`
+and export your test functions with names that start with "test", for example
+
+ exports["test doing the foo bar"] = function (assert, util) {
+ ...
+ };
+
+The new test will be located automatically when you run the suite.
+
+The `util` argument is the test utility module located at `test/source-map/util`.
+
+The `assert` argument is a cut down version of node's assert module. You have
+access to the following assertion functions:
+
+* `doesNotThrow`
+
+* `equal`
+
+* `ok`
+
+* `strictEqual`
+
+* `throws`
+
+(The reason for the restricted set of test functions is that the tests need to
+run inside Firefox's test suite as well, and so the assert module is
+shimmed in that environment. See `build/assert-shim.js`.)
+
+[format]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit
+[feature]: https://wiki.mozilla.org/DevTools/Features/SourceMap
+[Dryice]: https://github.com/mozilla/dryice
diff --git a/tools/node_modules/source-map/build/assert-shim.js b/tools/node_modules/source-map/build/assert-shim.js
new file mode 100644
index 00000000..daa1a623
--- /dev/null
+++ b/tools/node_modules/source-map/build/assert-shim.js
@@ -0,0 +1,56 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+define('test/source-map/assert', ['exports'], function (exports) {
+
+ let do_throw = function (msg) {
+ throw new Error(msg);
+ };
+
+ exports.init = function (throw_fn) {
+ do_throw = throw_fn;
+ };
+
+ exports.doesNotThrow = function (fn) {
+ try {
+ fn();
+ }
+ catch (e) {
+ do_throw(e.message);
+ }
+ };
+
+ exports.equal = function (actual, expected, msg) {
+ msg = msg || String(actual) + ' != ' + String(expected);
+ if (actual != expected) {
+ do_throw(msg);
+ }
+ };
+
+ exports.ok = function (val, msg) {
+ msg = msg || String(val) + ' is falsey';
+ if (!Boolean(val)) {
+ do_throw(msg);
+ }
+ };
+
+ exports.strictEqual = function (actual, expected, msg) {
+ msg = msg || String(actual) + ' !== ' + String(expected);
+ if (actual !== expected) {
+ do_throw(msg);
+ }
+ };
+
+ exports.throws = function (fn) {
+ try {
+ fn();
+ do_throw('Expected an error to be thrown, but it wasn\'t.');
+ }
+ catch (e) {
+ }
+ };
+
+});
diff --git a/tools/node_modules/source-map/build/mini-require.js b/tools/node_modules/source-map/build/mini-require.js
new file mode 100644
index 00000000..0daf4537
--- /dev/null
+++ b/tools/node_modules/source-map/build/mini-require.js
@@ -0,0 +1,152 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+
+/**
+ * Define a module along with a payload.
+ * @param {string} moduleName Name for the payload
+ * @param {ignored} deps Ignored. For compatibility with CommonJS AMD Spec
+ * @param {function} payload Function with (require, exports, module) params
+ */
+function define(moduleName, deps, payload) {
+ if (typeof moduleName != "string") {
+ throw new TypeError('Expected string, got: ' + moduleName);
+ }
+
+ if (arguments.length == 2) {
+ payload = deps;
+ }
+
+ if (moduleName in define.modules) {
+ throw new Error("Module already defined: " + moduleName);
+ }
+ define.modules[moduleName] = payload;
+};
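+// Illustrative usage (not part of the original file):
+//   define('my/module', ['require', 'exports', 'module'],
+//          function (require, exports, module) { exports.foo = 42; });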
+
+/**
+ * The global store of un-instantiated modules
+ */
+define.modules = {};
+
+
+/**
+ * We invoke require() in the context of a Domain so we can have multiple
+ * sets of modules running separate from each other.
+ * This contrasts with JSMs, which are singletons; Domains allow us to
+ * optionally load a CommonJS module twice with separate data each time.
+ * Perhaps you want 2 command lines with a different set of commands in each,
+ * for example.
+ */
+function Domain() {
+ this.modules = {};
+ this._currentModule = null;
+}
+
+(function () {
+
+ /**
+ * Lookup module names and resolve them by calling the definition function if
+ * needed.
+ * There are 2 ways to call this, either with an array of dependencies and a
+ * callback to call when the dependencies are found (which can happen
+ * asynchronously in an in-page context) or with a single string and no callback
+ * where the dependency is resolved synchronously and returned.
+ * The API is designed to be compatible with the CommonJS AMD spec and
+ * RequireJS.
+ * @param {string[]|string} deps A name, or names for the payload
+ * @param {function|undefined} callback Function to call when the dependencies
+ * are resolved
+ * @return {undefined|object} The module required or undefined for
+ * array/callback method
+ */
+ Domain.prototype.require = function(deps, callback) {
+ if (Array.isArray(deps)) {
+ var params = deps.map(function(dep) {
+ return this.lookup(dep);
+ }, this);
+ if (callback) {
+ callback.apply(null, params);
+ }
+ return undefined;
+ }
+ else {
+ return this.lookup(deps);
+ }
+ };
+
+ function normalize(path) {
+ var bits = path.split('/');
+ var i = 1;
+ while (i < bits.length) {
+ if (bits[i] === '..') {
+ bits.splice(i-1, 1);
+ } else if (bits[i] === '.') {
+ bits.splice(i, 1);
+ } else {
+ i++;
+ }
+ }
+ return bits.join('/');
+ }
+
+ function join(a, b) {
+ a = a.trim();
+ b = b.trim();
+ if (/^\//.test(b)) {
+ return b;
+ } else {
+ return a.replace(/\/*$/, '/') + b;
+ }
+ }
+
+ function dirname(path) {
+ var bits = path.split('/');
+ bits.pop();
+ return bits.join('/');
+ }
+
+ /**
+ * Lookup module names and resolve them by calling the definition function if
+ * needed.
+ * @param {string} moduleName A name for the payload to lookup
+ * @return {object} The module specified by moduleName or null if not found.
+ */
+ Domain.prototype.lookup = function(moduleName) {
+ if (/^\./.test(moduleName)) {
+ moduleName = normalize(join(dirname(this._currentModule), moduleName));
+ }
+
+ if (moduleName in this.modules) {
+ var module = this.modules[moduleName];
+ return module;
+ }
+
+ if (!(moduleName in define.modules)) {
+ throw new Error("Module not defined: " + moduleName);
+ }
+
+ var module = define.modules[moduleName];
+
+ if (typeof module == "function") {
+ var exports = {};
+ var previousModule = this._currentModule;
+ this._currentModule = moduleName;
+ module(this.require.bind(this), exports, { id: moduleName, uri: "" });
+ this._currentModule = previousModule;
+ module = exports;
+ }
+
+ // cache the resulting module object for next time
+ this.modules[moduleName] = module;
+
+ return module;
+ };
+
+}());
+
+define.Domain = Domain;
+define.globalDomain = new Domain();
+var require = define.globalDomain.require.bind(define.globalDomain);
diff --git a/tools/node_modules/source-map/build/prefix-source-map.jsm b/tools/node_modules/source-map/build/prefix-source-map.jsm
new file mode 100644
index 00000000..ee2539d8
--- /dev/null
+++ b/tools/node_modules/source-map/build/prefix-source-map.jsm
@@ -0,0 +1,20 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+
+/*
+ * WARNING!
+ *
+ * Do not edit this file directly, it is built from the sources at
+ * https://github.com/mozilla/source-map/
+ */
+
+///////////////////////////////////////////////////////////////////////////////
+
+
+this.EXPORTED_SYMBOLS = [ "SourceMapConsumer", "SourceMapGenerator", "SourceNode" ];
+
+Components.utils.import('resource://gre/modules/devtools/Require.jsm');
diff --git a/tools/node_modules/source-map/build/prefix-utils.jsm b/tools/node_modules/source-map/build/prefix-utils.jsm
new file mode 100644
index 00000000..80341d45
--- /dev/null
+++ b/tools/node_modules/source-map/build/prefix-utils.jsm
@@ -0,0 +1,18 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+
+/*
+ * WARNING!
+ *
+ * Do not edit this file directly, it is built from the sources at
+ * https://github.com/mozilla/source-map/
+ */
+
+Components.utils.import('resource://gre/modules/devtools/Require.jsm');
+Components.utils.import('resource://gre/modules/devtools/SourceMap.jsm');
+
+this.EXPORTED_SYMBOLS = [ "define", "runSourceMapTests" ];
diff --git a/tools/node_modules/source-map/build/suffix-browser.js b/tools/node_modules/source-map/build/suffix-browser.js
new file mode 100644
index 00000000..cf6cde7c
--- /dev/null
+++ b/tools/node_modules/source-map/build/suffix-browser.js
@@ -0,0 +1,8 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+///////////////////////////////////////////////////////////////////////////////
+
+window.sourceMap = {
+ SourceMapConsumer: require('source-map/source-map-consumer').SourceMapConsumer,
+ SourceMapGenerator: require('source-map/source-map-generator').SourceMapGenerator,
+ SourceNode: require('source-map/source-node').SourceNode
+};
diff --git a/tools/node_modules/source-map/build/suffix-source-map.jsm b/tools/node_modules/source-map/build/suffix-source-map.jsm
new file mode 100644
index 00000000..cf3c2d8d
--- /dev/null
+++ b/tools/node_modules/source-map/build/suffix-source-map.jsm
@@ -0,0 +1,6 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+///////////////////////////////////////////////////////////////////////////////
+
+this.SourceMapConsumer = require('source-map/source-map-consumer').SourceMapConsumer;
+this.SourceMapGenerator = require('source-map/source-map-generator').SourceMapGenerator;
+this.SourceNode = require('source-map/source-node').SourceNode;
diff --git a/tools/node_modules/source-map/build/suffix-utils.jsm b/tools/node_modules/source-map/build/suffix-utils.jsm
new file mode 100644
index 00000000..b31b84cb
--- /dev/null
+++ b/tools/node_modules/source-map/build/suffix-utils.jsm
@@ -0,0 +1,21 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+function runSourceMapTests(modName, do_throw) {
+ let mod = require(modName);
+ let assert = require('test/source-map/assert');
+ let util = require('test/source-map/util');
+
+ assert.init(do_throw);
+
+ for (let k in mod) {
+ if (/^test/.test(k)) {
+ mod[k](assert, util);
+ }
+ }
+
+}
+this.runSourceMapTests = runSourceMapTests;
diff --git a/tools/node_modules/source-map/build/test-prefix.js b/tools/node_modules/source-map/build/test-prefix.js
new file mode 100644
index 00000000..1b13f300
--- /dev/null
+++ b/tools/node_modules/source-map/build/test-prefix.js
@@ -0,0 +1,8 @@
+/*
+ * WARNING!
+ *
+ * Do not edit this file directly, it is built from the sources at
+ * https://github.com/mozilla/source-map/
+ */
+
+Components.utils.import('resource://test/Utils.jsm');
diff --git a/tools/node_modules/source-map/build/test-suffix.js b/tools/node_modules/source-map/build/test-suffix.js
new file mode 100644
index 00000000..bec2de3f
--- /dev/null
+++ b/tools/node_modules/source-map/build/test-suffix.js
@@ -0,0 +1,3 @@
+function run_test() {
+ runSourceMapTests('{THIS_MODULE}', do_throw);
+}
diff --git a/tools/node_modules/source-map/lib/source-map.js b/tools/node_modules/source-map/lib/source-map.js
new file mode 100644
index 00000000..121ad241
--- /dev/null
+++ b/tools/node_modules/source-map/lib/source-map.js
@@ -0,0 +1,8 @@
+/*
+ * Copyright 2009-2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE.txt or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+exports.SourceMapGenerator = require('./source-map/source-map-generator').SourceMapGenerator;
+exports.SourceMapConsumer = require('./source-map/source-map-consumer').SourceMapConsumer;
+exports.SourceNode = require('./source-map/source-node').SourceNode;
diff --git a/tools/node_modules/source-map/lib/source-map/array-set.js b/tools/node_modules/source-map/lib/source-map/array-set.js
new file mode 100644
index 00000000..f138ae7c
--- /dev/null
+++ b/tools/node_modules/source-map/lib/source-map/array-set.js
@@ -0,0 +1,96 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ var util = require('./util');
+
+ /**
+ * A data structure which is a combination of an array and a set. Adding a new
+ * member is O(1), testing for membership is O(1), and finding the index of an
+ * element is O(1). Removing elements from the set is not supported. Only
+ * strings are supported for membership.
+ */
+ function ArraySet() {
+ this._array = [];
+ this._set = {};
+ }
+
+ /**
+ * Static method for creating ArraySet instances from an existing array.
+ */
+ ArraySet.fromArray = function ArraySet_fromArray(aArray) {
+ var set = new ArraySet();
+ for (var i = 0, len = aArray.length; i < len; i++) {
+ set.add(aArray[i]);
+ }
+ return set;
+ };
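+  // Illustrative: ArraySet.fromArray(['a', 'b']).indexOf('b') === 1, and
+  // ArraySet.fromArray(['a', 'b']).has('c') === false.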
+
+ /**
+ * Add the given string to this set.
+ *
+ * @param String aStr
+ */
+ ArraySet.prototype.add = function ArraySet_add(aStr) {
+ if (this.has(aStr)) {
+ // Already a member; nothing to do.
+ return;
+ }
+ var idx = this._array.length;
+ this._array.push(aStr);
+ this._set[util.toSetString(aStr)] = idx;
+ };
+
+ /**
+ * Is the given string a member of this set?
+ *
+ * @param String aStr
+ */
+ ArraySet.prototype.has = function ArraySet_has(aStr) {
+ return Object.prototype.hasOwnProperty.call(this._set,
+ util.toSetString(aStr));
+ };
+
+ /**
+ * What is the index of the given string in the array?
+ *
+ * @param String aStr
+ */
+ ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) {
+ if (this.has(aStr)) {
+ return this._set[util.toSetString(aStr)];
+ }
+ throw new Error('"' + aStr + '" is not in the set.');
+ };
+
+ /**
+ * What is the element at the given index?
+ *
+ * @param Number aIdx
+ */
+ ArraySet.prototype.at = function ArraySet_at(aIdx) {
+ if (aIdx >= 0 && aIdx < this._array.length) {
+ return this._array[aIdx];
+ }
+ throw new Error('No element indexed by ' + aIdx);
+ };
+
+ /**
+ * Returns the array representation of this set (which has the proper indices
+ * indicated by indexOf). Note that this is a copy of the internal array used
+ * for storing the members so that no one can mess with internal state.
+ */
+ ArraySet.prototype.toArray = function ArraySet_toArray() {
+ return this._array.slice();
+ };
+
+ exports.ArraySet = ArraySet;
+
+});
diff --git a/tools/node_modules/source-map/lib/source-map/base64-vlq.js b/tools/node_modules/source-map/lib/source-map/base64-vlq.js
new file mode 100644
index 00000000..a9dd758e
--- /dev/null
+++ b/tools/node_modules/source-map/lib/source-map/base64-vlq.js
@@ -0,0 +1,144 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ *
+ * Based on the Base 64 VLQ implementation in Closure Compiler:
+ * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java
+ *
+ * Copyright 2011 The Closure Compiler Authors. All rights reserved.
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ var base64 = require('./base64');
+
+ // A single base 64 digit can contain 6 bits of data. For the base 64 variable
+ // length quantities we use in the source map spec, the first bit is the sign,
+ // the next four bits are the actual value, and the 6th bit is the
+ // continuation bit. The continuation bit tells us whether there are more
+ // digits in this value following this digit.
+ //
+ // Continuation
+ // | Sign
+ // | |
+ // V V
+ // 101011
+
+ var VLQ_BASE_SHIFT = 5;
+
+ // binary: 100000
+ var VLQ_BASE = 1 << VLQ_BASE_SHIFT;
+
+ // binary: 011111
+ var VLQ_BASE_MASK = VLQ_BASE - 1;
+
+ // binary: 100000
+ var VLQ_CONTINUATION_BIT = VLQ_BASE;
+
+ /**
+   * Converts from a two's-complement value to a value where the sign bit is
+   * placed in the least significant bit. For example, as decimals:
+ * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary)
+ * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary)
+ */
+ function toVLQSigned(aValue) {
+ return aValue < 0
+ ? ((-aValue) << 1) + 1
+ : (aValue << 1) + 0;
+ }
+
+ /**
+   * Converts to a two's-complement value from a value where the sign bit is
+   * placed in the least significant bit. For example, as decimals:
+ * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1
+ * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2
+ */
+ function fromVLQSigned(aValue) {
+ var isNegative = (aValue & 1) === 1;
+ var shifted = aValue >> 1;
+ return isNegative
+ ? -shifted
+ : shifted;
+ }
+
+ /**
+ * Returns the base 64 VLQ encoded value.
+ */
+ exports.encode = function base64VLQ_encode(aValue) {
+ var encoded = "";
+ var digit;
+
+ var vlq = toVLQSigned(aValue);
+
+ do {
+ digit = vlq & VLQ_BASE_MASK;
+ vlq >>>= VLQ_BASE_SHIFT;
+ if (vlq > 0) {
+ // There are still more digits in this value, so we must make sure the
+ // continuation bit is marked.
+ digit |= VLQ_CONTINUATION_BIT;
+ }
+ encoded += base64.encode(digit);
+ } while (vlq > 0);
+
+ return encoded;
+ };
+
+ /**
+ * Decodes the next base 64 VLQ value from the given string and returns the
+ * value and the rest of the string.
+ */
+ exports.decode = function base64VLQ_decode(aStr) {
+ var i = 0;
+ var strLen = aStr.length;
+ var result = 0;
+ var shift = 0;
+ var continuation, digit;
+
+ do {
+ if (i >= strLen) {
+ throw new Error("Expected more digits in base 64 VLQ value.");
+ }
+ digit = base64.decode(aStr.charAt(i++));
+ continuation = !!(digit & VLQ_CONTINUATION_BIT);
+ digit &= VLQ_BASE_MASK;
+ result = result + (digit << shift);
+ shift += VLQ_BASE_SHIFT;
+ } while (continuation);
+
+ return {
+ value: fromVLQSigned(result),
+ rest: aStr.slice(i)
+ };
+ };
+
+});
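A quick round-trip sketch of the VLQ codec above (require path assumed; the expected outputs follow from the encode/decode logic in this file):

```javascript
// Base 64 VLQ round trip; the require path is an assumption for illustration.
var base64VLQ = require('./lib/source-map/base64-vlq');

console.log(base64VLQ.encode(16));   // 'gB' -- two digits, continuation bit set on the first
console.log(base64VLQ.encode(-1));   // 'D'  -- sign stored in the least significant bit
console.log(base64VLQ.decode('gBC'));
// { value: 16, rest: 'C' } -- decoding stops after one VLQ value and hands
// back the remainder of the string for the caller to keep parsing.
```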
diff --git a/tools/node_modules/source-map/lib/source-map/base64.js b/tools/node_modules/source-map/lib/source-map/base64.js
new file mode 100644
index 00000000..397a754b
--- /dev/null
+++ b/tools/node_modules/source-map/lib/source-map/base64.js
@@ -0,0 +1,42 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ var charToIntMap = {};
+ var intToCharMap = {};
+
+ 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
+ .split('')
+ .forEach(function (ch, index) {
+ charToIntMap[ch] = index;
+ intToCharMap[index] = ch;
+ });
+
+ /**
+ * Encode an integer in the range of 0 to 63 to a single base 64 digit.
+ */
+ exports.encode = function base64_encode(aNumber) {
+ if (aNumber in intToCharMap) {
+ return intToCharMap[aNumber];
+ }
+ throw new TypeError("Must be between 0 and 63: " + aNumber);
+ };
+
+ /**
+ * Decode a single base 64 digit to an integer.
+ */
+ exports.decode = function base64_decode(aChar) {
+ if (aChar in charToIntMap) {
+ return charToIntMap[aChar];
+ }
+ throw new TypeError("Not a valid base 64 digit: " + aChar);
+ };
+
+});
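The alphabet is the standard base 64 one, applied a single digit at a time; a small sketch (require path assumed):

```javascript
// Single-digit base 64 helpers used by the VLQ codec.
var base64 = require('./lib/source-map/base64');

console.log(base64.encode(0));   // 'A'
console.log(base64.encode(26));  // 'a'
console.log(base64.encode(63));  // '/'
console.log(base64.decode('g')); // 32
```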
diff --git a/tools/node_modules/source-map/lib/source-map/binary-search.js b/tools/node_modules/source-map/lib/source-map/binary-search.js
new file mode 100644
index 00000000..c6cb517c
--- /dev/null
+++ b/tools/node_modules/source-map/lib/source-map/binary-search.js
@@ -0,0 +1,81 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ /**
+ * Recursive implementation of binary search.
+ *
+ * @param aLow Indices here and lower do not contain the needle.
+ * @param aHigh Indices here and higher do not contain the needle.
+ * @param aNeedle The element being searched for.
+ * @param aHaystack The non-empty array being searched.
+ * @param aCompare Function which takes two elements and returns -1, 0, or 1.
+ */
+ function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare) {
+ // This function terminates when one of the following is true:
+ //
+ // 1. We find the exact element we are looking for.
+ //
+ // 2. We did not find the exact element, but we can return the next
+ // closest element that is less than that element.
+ //
+ // 3. We did not find the exact element, and there is no next-closest
+ // element which is less than the one we are searching for, so we
+ // return null.
+ var mid = Math.floor((aHigh - aLow) / 2) + aLow;
+ var cmp = aCompare(aNeedle, aHaystack[mid]);
+ if (cmp === 0) {
+ // Found the element we are looking for.
+ return aHaystack[mid];
+ }
+ else if (cmp > 0) {
+      // aHaystack[mid] is less than our needle.
+ if (aHigh - mid > 1) {
+ // The element is in the upper half.
+ return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare);
+ }
+ // We did not find an exact match, return the next closest one
+ // (termination case 2).
+ return aHaystack[mid];
+ }
+ else {
+      // aHaystack[mid] is greater than our needle.
+ if (mid - aLow > 1) {
+ // The element is in the lower half.
+ return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare);
+ }
+ // The exact needle element was not found in this haystack. Determine if
+ // we are in termination case (2) or (3) and return the appropriate thing.
+ return aLow < 0
+ ? null
+ : aHaystack[aLow];
+ }
+ }
+
+ /**
+ * This is an implementation of binary search which will always try and return
+ * the next lowest value checked if there is no exact hit. This is because
+ * mappings between original and generated line/col pairs are single points,
+ * and there is an implicit region between each of them, so a miss just means
+ * that you aren't on the very start of a region.
+ *
+ * @param aNeedle The element you are looking for.
+ * @param aHaystack The array that is being searched.
+ * @param aCompare A function which takes the needle and an element in the
+ * array and returns -1, 0, or 1 depending on whether the needle is less
+ * than, equal to, or greater than the element, respectively.
+ */
+ exports.search = function search(aNeedle, aHaystack, aCompare) {
+ return aHaystack.length > 0
+ ? recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, aCompare)
+ : null;
+ };
+
+});
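A sketch of the "closest lower bound" behaviour described above, using plain numbers and a numeric comparator (require path assumed):

```javascript
// Demonstrates that a miss returns the next-lowest element, not null,
// unless nothing lower exists.
var binarySearch = require('./lib/source-map/binary-search');

function cmp(a, b) { return a - b; }
var haystack = [2, 4, 6, 8];

console.log(binarySearch.search(6, haystack, cmp)); // 6    (exact hit)
console.log(binarySearch.search(5, haystack, cmp)); // 4    (next lowest value)
console.log(binarySearch.search(1, haystack, cmp)); // null (nothing lower exists)
```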
diff --git a/tools/node_modules/source-map/lib/source-map/source-map-consumer.js b/tools/node_modules/source-map/lib/source-map/source-map-consumer.js
new file mode 100644
index 00000000..e7b3538e
--- /dev/null
+++ b/tools/node_modules/source-map/lib/source-map/source-map-consumer.js
@@ -0,0 +1,430 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ var util = require('./util');
+ var binarySearch = require('./binary-search');
+ var ArraySet = require('./array-set').ArraySet;
+ var base64VLQ = require('./base64-vlq');
+
+ /**
+ * A SourceMapConsumer instance represents a parsed source map which we can
+ * query for information about the original file positions by giving it a file
+ * position in the generated source.
+ *
+ * The only parameter is the raw source map (either as a JSON string, or
+ * already parsed to an object). According to the spec, source maps have the
+ * following attributes:
+ *
+ * - version: Which version of the source map spec this map is following.
+ * - sources: An array of URLs to the original source files.
+   * - names: An array of identifiers which can be referenced by individual mappings.
+ * - sourceRoot: Optional. The URL root from which all sources are relative.
+ * - sourcesContent: Optional. An array of contents of the original source files.
+ * - mappings: A string of base64 VLQs which contain the actual mappings.
+ * - file: The generated file this source map is associated with.
+ *
+ * Here is an example source map, taken from the source map spec[0]:
+ *
+ * {
+ * version : 3,
+ * file: "out.js",
+ * sourceRoot : "",
+ * sources: ["foo.js", "bar.js"],
+ * names: ["src", "maps", "are", "fun"],
+ * mappings: "AA,AB;;ABCDE;"
+ * }
+ *
+ * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1#
+ */
+ function SourceMapConsumer(aSourceMap) {
+ var sourceMap = aSourceMap;
+ if (typeof aSourceMap === 'string') {
+ sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, ''));
+ }
+
+ var version = util.getArg(sourceMap, 'version');
+ var sources = util.getArg(sourceMap, 'sources');
+ var names = util.getArg(sourceMap, 'names');
+ var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null);
+ var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null);
+ var mappings = util.getArg(sourceMap, 'mappings');
+ var file = util.getArg(sourceMap, 'file');
+
+ if (version !== this._version) {
+ throw new Error('Unsupported version: ' + version);
+ }
+
+ this._names = ArraySet.fromArray(names);
+ this._sources = ArraySet.fromArray(sources);
+ this.sourceRoot = sourceRoot;
+ this.sourcesContent = sourcesContent;
+ this.file = file;
+
+ // `this._generatedMappings` and `this._originalMappings` hold the parsed
+ // mapping coordinates from the source map's "mappings" attribute. Each
+ // object in the array is of the form
+ //
+ // {
+ // generatedLine: The line number in the generated code,
+ // generatedColumn: The column number in the generated code,
+ // source: The path to the original source file that generated this
+ // chunk of code,
+ // originalLine: The line number in the original source that
+ // corresponds to this chunk of generated code,
+ // originalColumn: The column number in the original source that
+ // corresponds to this chunk of generated code,
+ // name: The name of the original symbol which generated this chunk of
+ // code.
+ // }
+ //
+ // All properties except for `generatedLine` and `generatedColumn` can be
+ // `null`.
+ //
+ // `this._generatedMappings` is ordered by the generated positions.
+ //
+ // `this._originalMappings` is ordered by the original positions.
+ this._generatedMappings = [];
+ this._originalMappings = [];
+ this._parseMappings(mappings, sourceRoot);
+ }
+
+ /**
+ * The version of the source mapping spec that we are consuming.
+ */
+ SourceMapConsumer.prototype._version = 3;
+
+ /**
+ * The list of original sources.
+ */
+ Object.defineProperty(SourceMapConsumer.prototype, 'sources', {
+ get: function () {
+ return this._sources.toArray().map(function (s) {
+ return this.sourceRoot ? util.join(this.sourceRoot, s) : s;
+ }, this);
+ }
+ });
+
+ /**
+ * Parse the mappings in a string in to a data structure which we can easily
+ * query (an ordered list in this._generatedMappings).
+ */
+ SourceMapConsumer.prototype._parseMappings =
+ function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {
+ var generatedLine = 1;
+ var previousGeneratedColumn = 0;
+ var previousOriginalLine = 0;
+ var previousOriginalColumn = 0;
+ var previousSource = 0;
+ var previousName = 0;
+ var mappingSeparator = /^[,;]/;
+ var str = aStr;
+ var mapping;
+ var temp;
+
+ while (str.length > 0) {
+ if (str.charAt(0) === ';') {
+ generatedLine++;
+ str = str.slice(1);
+ previousGeneratedColumn = 0;
+ }
+ else if (str.charAt(0) === ',') {
+ str = str.slice(1);
+ }
+ else {
+ mapping = {};
+ mapping.generatedLine = generatedLine;
+
+ // Generated column.
+ temp = base64VLQ.decode(str);
+ mapping.generatedColumn = previousGeneratedColumn + temp.value;
+ previousGeneratedColumn = mapping.generatedColumn;
+ str = temp.rest;
+
+ if (str.length > 0 && !mappingSeparator.test(str.charAt(0))) {
+ // Original source.
+ temp = base64VLQ.decode(str);
+ mapping.source = this._sources.at(previousSource + temp.value);
+ previousSource += temp.value;
+ str = temp.rest;
+ if (str.length === 0 || mappingSeparator.test(str.charAt(0))) {
+ throw new Error('Found a source, but no line and column');
+ }
+
+ // Original line.
+ temp = base64VLQ.decode(str);
+ mapping.originalLine = previousOriginalLine + temp.value;
+ previousOriginalLine = mapping.originalLine;
+ // Lines are stored 0-based
+ mapping.originalLine += 1;
+ str = temp.rest;
+ if (str.length === 0 || mappingSeparator.test(str.charAt(0))) {
+ throw new Error('Found a source and line, but no column');
+ }
+
+ // Original column.
+ temp = base64VLQ.decode(str);
+ mapping.originalColumn = previousOriginalColumn + temp.value;
+ previousOriginalColumn = mapping.originalColumn;
+ str = temp.rest;
+
+ if (str.length > 0 && !mappingSeparator.test(str.charAt(0))) {
+ // Original name.
+ temp = base64VLQ.decode(str);
+ mapping.name = this._names.at(previousName + temp.value);
+ previousName += temp.value;
+ str = temp.rest;
+ }
+ }
+
+ this._generatedMappings.push(mapping);
+ if (typeof mapping.originalLine === 'number') {
+ this._originalMappings.push(mapping);
+ }
+ }
+ }
+
+ this._originalMappings.sort(this._compareOriginalPositions);
+ };
+
+ /**
+ * Comparator between two mappings where the original positions are compared.
+ */
+ SourceMapConsumer.prototype._compareOriginalPositions =
+ function SourceMapConsumer_compareOriginalPositions(mappingA, mappingB) {
+ if (mappingA.source > mappingB.source) {
+ return 1;
+ }
+ else if (mappingA.source < mappingB.source) {
+ return -1;
+ }
+ else {
+ var cmp = mappingA.originalLine - mappingB.originalLine;
+ return cmp === 0
+ ? mappingA.originalColumn - mappingB.originalColumn
+ : cmp;
+ }
+ };
+
+ /**
+ * Comparator between two mappings where the generated positions are compared.
+ */
+ SourceMapConsumer.prototype._compareGeneratedPositions =
+ function SourceMapConsumer_compareGeneratedPositions(mappingA, mappingB) {
+ var cmp = mappingA.generatedLine - mappingB.generatedLine;
+ return cmp === 0
+ ? mappingA.generatedColumn - mappingB.generatedColumn
+ : cmp;
+ };
+
+ /**
+ * Find the mapping that best matches the hypothetical "needle" mapping that
+ * we are searching for in the given "haystack" of mappings.
+ */
+ SourceMapConsumer.prototype._findMapping =
+ function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName,
+ aColumnName, aComparator) {
+ // To return the position we are searching for, we must first find the
+ // mapping for the given position and then return the opposite position it
+ // points to. Because the mappings are sorted, we can use binary search to
+ // find the best mapping.
+
+ if (aNeedle[aLineName] <= 0) {
+ throw new TypeError('Line must be greater than or equal to 1, got '
+ + aNeedle[aLineName]);
+ }
+ if (aNeedle[aColumnName] < 0) {
+ throw new TypeError('Column must be greater than or equal to 0, got '
+ + aNeedle[aColumnName]);
+ }
+
+ return binarySearch.search(aNeedle, aMappings, aComparator);
+ };
+
+ /**
+ * Returns the original source, line, and column information for the generated
+ * source's line and column positions provided. The only argument is an object
+ * with the following properties:
+ *
+ * - line: The line number in the generated source.
+ * - column: The column number in the generated source.
+ *
+ * and an object is returned with the following properties:
+ *
+ * - source: The original source file, or null.
+ * - line: The line number in the original source, or null.
+ * - column: The column number in the original source, or null.
+ * - name: The original identifier, or null.
+ */
+ SourceMapConsumer.prototype.originalPositionFor =
+ function SourceMapConsumer_originalPositionFor(aArgs) {
+ var needle = {
+ generatedLine: util.getArg(aArgs, 'line'),
+ generatedColumn: util.getArg(aArgs, 'column')
+ };
+
+ var mapping = this._findMapping(needle,
+ this._generatedMappings,
+ "generatedLine",
+ "generatedColumn",
+ this._compareGeneratedPositions);
+
+ if (mapping) {
+ var source = util.getArg(mapping, 'source', null);
+ if (source && this.sourceRoot) {
+ source = util.join(this.sourceRoot, source);
+ }
+ return {
+ source: source,
+ line: util.getArg(mapping, 'originalLine', null),
+ column: util.getArg(mapping, 'originalColumn', null),
+ name: util.getArg(mapping, 'name', null)
+ };
+ }
+
+ return {
+ source: null,
+ line: null,
+ column: null,
+ name: null
+ };
+ };
+
+ /**
+ * Returns the original source content. The only argument is
+ * the url of the original source file. Returns null if no
+   * original source content is available.
+ */
+ SourceMapConsumer.prototype.sourceContentFor =
+ function SourceMapConsumer_sourceContentFor(aSource) {
+ if (!this.sourcesContent) {
+ return null;
+ }
+
+ if (this.sourceRoot) {
+ aSource = util.relative(this.sourceRoot, aSource);
+ }
+
+ if (this._sources.has(aSource)) {
+ return this.sourcesContent[this._sources.indexOf(aSource)];
+ }
+
+ var url;
+ if (this.sourceRoot
+ && (url = util.urlParse(this.sourceRoot))
+ && (!url.path || url.path == "/")
+ && this._sources.has("/" + aSource)) {
+ return this.sourcesContent[this._sources.indexOf("/" + aSource)];
+ }
+
+ throw new Error('"' + aSource + '" is not in the SourceMap.');
+ };
+
+ /**
+ * Returns the generated line and column information for the original source,
+ * line, and column positions provided. The only argument is an object with
+ * the following properties:
+ *
+ * - source: The filename of the original source.
+ * - line: The line number in the original source.
+ * - column: The column number in the original source.
+ *
+ * and an object is returned with the following properties:
+ *
+ * - line: The line number in the generated source, or null.
+ * - column: The column number in the generated source, or null.
+ */
+ SourceMapConsumer.prototype.generatedPositionFor =
+ function SourceMapConsumer_generatedPositionFor(aArgs) {
+ var needle = {
+ source: util.getArg(aArgs, 'source'),
+ originalLine: util.getArg(aArgs, 'line'),
+ originalColumn: util.getArg(aArgs, 'column')
+ };
+
+ if (this.sourceRoot) {
+ needle.source = util.relative(this.sourceRoot, needle.source);
+ }
+
+ var mapping = this._findMapping(needle,
+ this._originalMappings,
+ "originalLine",
+ "originalColumn",
+ this._compareOriginalPositions);
+
+ if (mapping) {
+ return {
+ line: util.getArg(mapping, 'generatedLine', null),
+ column: util.getArg(mapping, 'generatedColumn', null)
+ };
+ }
+
+ return {
+ line: null,
+ column: null
+ };
+ };
+
+ SourceMapConsumer.GENERATED_ORDER = 1;
+ SourceMapConsumer.ORIGINAL_ORDER = 2;
+
+ /**
+ * Iterate over each mapping between an original source/line/column and a
+ * generated line/column in this source map.
+ *
+ * @param Function aCallback
+ * The function that is called with each mapping.
+ * @param Object aContext
+ * Optional. If specified, this object will be the value of `this` every
+ * time that `aCallback` is called.
+ * @param aOrder
+ * Either `SourceMapConsumer.GENERATED_ORDER` or
+ * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to
+ * iterate over the mappings sorted by the generated file's line/column
+ * order or the original's source/line/column order, respectively. Defaults to
+ * `SourceMapConsumer.GENERATED_ORDER`.
+ */
+ SourceMapConsumer.prototype.eachMapping =
+ function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) {
+ var context = aContext || null;
+ var order = aOrder || SourceMapConsumer.GENERATED_ORDER;
+
+ var mappings;
+ switch (order) {
+ case SourceMapConsumer.GENERATED_ORDER:
+ mappings = this._generatedMappings;
+ break;
+ case SourceMapConsumer.ORIGINAL_ORDER:
+ mappings = this._originalMappings;
+ break;
+ default:
+ throw new Error("Unknown order of iteration.");
+ }
+
+ var sourceRoot = this.sourceRoot;
+ mappings.map(function (mapping) {
+ var source = mapping.source;
+ if (source && sourceRoot) {
+ source = util.join(sourceRoot, source);
+ }
+ return {
+ source: source,
+ generatedLine: mapping.generatedLine,
+ generatedColumn: mapping.generatedColumn,
+ originalLine: mapping.originalLine,
+ originalColumn: mapping.originalColumn,
+ name: mapping.name
+ };
+ }).forEach(aCallback, context);
+ };
+
+ exports.SourceMapConsumer = SourceMapConsumer;
+
+});
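A minimal consumer sketch (require path assumed). The hand-written mappings string "AAAA" is a single segment whose four VLQ fields are all zero deltas, i.e. generated line 1, column 0 maps back to line 1, column 0 of the first source:

```javascript
// Consuming a tiny hand-written source map.
var SourceMapConsumer =
  require('./lib/source-map/source-map-consumer').SourceMapConsumer;

var consumer = new SourceMapConsumer({
  version: 3,
  file: 'out.js',
  sources: ['foo.js'],
  names: [],
  mappings: 'AAAA'
});

console.log(consumer.originalPositionFor({ line: 1, column: 0 }));
// { source: 'foo.js', line: 1, column: 0, name: null }

consumer.eachMapping(function (m) {
  console.log(m.generatedLine, m.generatedColumn, '->', m.source, m.originalLine);
});
```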
diff --git a/tools/node_modules/source-map/lib/source-map/source-map-generator.js b/tools/node_modules/source-map/lib/source-map/source-map-generator.js
new file mode 100644
index 00000000..b3f62f51
--- /dev/null
+++ b/tools/node_modules/source-map/lib/source-map/source-map-generator.js
@@ -0,0 +1,381 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ var base64VLQ = require('./base64-vlq');
+ var util = require('./util');
+ var ArraySet = require('./array-set').ArraySet;
+
+ /**
+ * An instance of the SourceMapGenerator represents a source map which is
+ * being built incrementally. To create a new one, you must pass an object
+ * with the following properties:
+ *
+ * - file: The filename of the generated source.
+ * - sourceRoot: An optional root for all URLs in this source map.
+ */
+ function SourceMapGenerator(aArgs) {
+ this._file = util.getArg(aArgs, 'file');
+ this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null);
+ this._sources = new ArraySet();
+ this._names = new ArraySet();
+ this._mappings = [];
+ this._sourcesContents = null;
+ }
+
+ SourceMapGenerator.prototype._version = 3;
+
+ /**
+ * Creates a new SourceMapGenerator based on a SourceMapConsumer
+ *
+ * @param aSourceMapConsumer The SourceMap.
+ */
+ SourceMapGenerator.fromSourceMap =
+ function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) {
+ var sourceRoot = aSourceMapConsumer.sourceRoot;
+ var generator = new SourceMapGenerator({
+ file: aSourceMapConsumer.file,
+ sourceRoot: sourceRoot
+ });
+ aSourceMapConsumer.eachMapping(function (mapping) {
+ var newMapping = {
+ generated: {
+ line: mapping.generatedLine,
+ column: mapping.generatedColumn
+ }
+ };
+
+ if (mapping.source) {
+ newMapping.source = mapping.source;
+ if (sourceRoot) {
+ newMapping.source = util.relative(sourceRoot, newMapping.source);
+ }
+
+ newMapping.original = {
+ line: mapping.originalLine,
+ column: mapping.originalColumn
+ };
+
+ if (mapping.name) {
+ newMapping.name = mapping.name;
+ }
+ }
+
+ generator.addMapping(newMapping);
+ });
+ aSourceMapConsumer.sources.forEach(function (sourceFile) {
+ var content = aSourceMapConsumer.sourceContentFor(sourceFile);
+ if (content) {
+ generator.setSourceContent(sourceFile, content);
+ }
+ });
+ return generator;
+ };
+
+ /**
+ * Add a single mapping from original source line and column to the generated
+ * source's line and column for this source map being created. The mapping
+ * object should have the following properties:
+ *
+ * - generated: An object with the generated line and column positions.
+ * - original: An object with the original line and column positions.
+ * - source: The original source file (relative to the sourceRoot).
+ * - name: An optional original token name for this mapping.
+ */
+ SourceMapGenerator.prototype.addMapping =
+ function SourceMapGenerator_addMapping(aArgs) {
+ var generated = util.getArg(aArgs, 'generated');
+ var original = util.getArg(aArgs, 'original', null);
+ var source = util.getArg(aArgs, 'source', null);
+ var name = util.getArg(aArgs, 'name', null);
+
+ this._validateMapping(generated, original, source, name);
+
+ if (source && !this._sources.has(source)) {
+ this._sources.add(source);
+ }
+
+ if (name && !this._names.has(name)) {
+ this._names.add(name);
+ }
+
+ this._mappings.push({
+ generated: generated,
+ original: original,
+ source: source,
+ name: name
+ });
+ };
+
+ /**
+ * Set the source content for a source file.
+ */
+ SourceMapGenerator.prototype.setSourceContent =
+ function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) {
+ var source = aSourceFile;
+ if (this._sourceRoot) {
+ source = util.relative(this._sourceRoot, source);
+ }
+
+ if (aSourceContent !== null) {
+ // Add the source content to the _sourcesContents map.
+ // Create a new _sourcesContents map if the property is null.
+ if (!this._sourcesContents) {
+ this._sourcesContents = {};
+ }
+ this._sourcesContents[util.toSetString(source)] = aSourceContent;
+ } else {
+ // Remove the source file from the _sourcesContents map.
+ // If the _sourcesContents map is empty, set the property to null.
+ delete this._sourcesContents[util.toSetString(source)];
+ if (Object.keys(this._sourcesContents).length === 0) {
+ this._sourcesContents = null;
+ }
+ }
+ };
+
+ /**
+ * Applies the mappings of a sub-source-map for a specific source file to the
+ * source map being generated. Each mapping to the supplied source file is
+ * rewritten using the supplied source map. Note: The resolution for the
+   * resulting mappings is the minimum of this map and the supplied map.
+ *
+ * @param aSourceMapConsumer The source map to be applied.
+ * @param aSourceFile Optional. The filename of the source file.
+ * If omitted, SourceMapConsumer's file property will be used.
+ */
+ SourceMapGenerator.prototype.applySourceMap =
+ function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile) {
+ // If aSourceFile is omitted, we will use the file property of the SourceMap
+ if (!aSourceFile) {
+ aSourceFile = aSourceMapConsumer.file;
+ }
+ var sourceRoot = this._sourceRoot;
+ // Make "aSourceFile" relative if an absolute Url is passed.
+ if (sourceRoot) {
+ aSourceFile = util.relative(sourceRoot, aSourceFile);
+ }
+ // Applying the SourceMap can add and remove items from the sources and
+ // the names array.
+ var newSources = new ArraySet();
+ var newNames = new ArraySet();
+
+ // Find mappings for the "aSourceFile"
+ this._mappings.forEach(function (mapping) {
+ if (mapping.source === aSourceFile && mapping.original) {
+ // Check if it can be mapped by the source map, then update the mapping.
+ var original = aSourceMapConsumer.originalPositionFor({
+ line: mapping.original.line,
+ column: mapping.original.column
+ });
+ if (original.source !== null) {
+ // Copy mapping
+ if (sourceRoot) {
+ mapping.source = util.relative(sourceRoot, original.source);
+ } else {
+ mapping.source = original.source;
+ }
+ mapping.original.line = original.line;
+ mapping.original.column = original.column;
+ if (original.name !== null && mapping.name !== null) {
+ // Only use the identifier name if it's an identifier
+ // in both SourceMaps
+ mapping.name = original.name;
+ }
+ }
+ }
+
+ var source = mapping.source;
+ if (source && !newSources.has(source)) {
+ newSources.add(source);
+ }
+
+ var name = mapping.name;
+ if (name && !newNames.has(name)) {
+ newNames.add(name);
+ }
+
+ }, this);
+ this._sources = newSources;
+ this._names = newNames;
+
+ // Copy sourcesContents of applied map.
+ aSourceMapConsumer.sources.forEach(function (sourceFile) {
+ var content = aSourceMapConsumer.sourceContentFor(sourceFile);
+ if (content) {
+ if (sourceRoot) {
+ sourceFile = util.relative(sourceRoot, sourceFile);
+ }
+ this.setSourceContent(sourceFile, content);
+ }
+ }, this);
+ };
+
+ /**
+ * A mapping can have one of the three levels of data:
+ *
+ * 1. Just the generated position.
+ * 2. The Generated position, original position, and original source.
+ * 3. Generated and original position, original source, as well as a name
+ * token.
+ *
+ * To maintain consistency, we validate that any new mapping being added falls
+ * in to one of these categories.
+ */
+ SourceMapGenerator.prototype._validateMapping =
+ function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource,
+ aName) {
+ if (aGenerated && 'line' in aGenerated && 'column' in aGenerated
+ && aGenerated.line > 0 && aGenerated.column >= 0
+ && !aOriginal && !aSource && !aName) {
+ // Case 1.
+ return;
+ }
+ else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated
+ && aOriginal && 'line' in aOriginal && 'column' in aOriginal
+ && aGenerated.line > 0 && aGenerated.column >= 0
+ && aOriginal.line > 0 && aOriginal.column >= 0
+ && aSource) {
+ // Cases 2 and 3.
+ return;
+ }
+ else {
+ throw new Error('Invalid mapping.');
+ }
+ };
+
+ function cmpLocation(loc1, loc2) {
+ var cmp = (loc1 && loc1.line) - (loc2 && loc2.line);
+ return cmp ? cmp : (loc1 && loc1.column) - (loc2 && loc2.column);
+ }
+
+ function strcmp(str1, str2) {
+ str1 = str1 || '';
+ str2 = str2 || '';
+ return (str1 > str2) - (str1 < str2);
+ }
+
+ function cmpMapping(mappingA, mappingB) {
+ return cmpLocation(mappingA.generated, mappingB.generated) ||
+ cmpLocation(mappingA.original, mappingB.original) ||
+ strcmp(mappingA.source, mappingB.source) ||
+ strcmp(mappingA.name, mappingB.name);
+ }
+
+ /**
+ * Serialize the accumulated mappings in to the stream of base 64 VLQs
+ * specified by the source map format.
+ */
+ SourceMapGenerator.prototype._serializeMappings =
+ function SourceMapGenerator_serializeMappings() {
+ var previousGeneratedColumn = 0;
+ var previousGeneratedLine = 1;
+ var previousOriginalColumn = 0;
+ var previousOriginalLine = 0;
+ var previousName = 0;
+ var previousSource = 0;
+ var result = '';
+ var mapping;
+
+      // The mappings must be guaranteed to be in sorted order before we start
+ // serializing them or else the generated line numbers (which are defined
+ // via the ';' separators) will be all messed up. Note: it might be more
+ // performant to maintain the sorting as we insert them, rather than as we
+ // serialize them, but the big O is the same either way.
+ this._mappings.sort(cmpMapping);
+
+ for (var i = 0, len = this._mappings.length; i < len; i++) {
+ mapping = this._mappings[i];
+
+ if (mapping.generated.line !== previousGeneratedLine) {
+ previousGeneratedColumn = 0;
+ while (mapping.generated.line !== previousGeneratedLine) {
+ result += ';';
+ previousGeneratedLine++;
+ }
+ }
+ else {
+ if (i > 0) {
+ if (!cmpMapping(mapping, this._mappings[i - 1])) {
+ continue;
+ }
+ result += ',';
+ }
+ }
+
+ result += base64VLQ.encode(mapping.generated.column
+ - previousGeneratedColumn);
+ previousGeneratedColumn = mapping.generated.column;
+
+ if (mapping.source && mapping.original) {
+ result += base64VLQ.encode(this._sources.indexOf(mapping.source)
+ - previousSource);
+ previousSource = this._sources.indexOf(mapping.source);
+
+ // lines are stored 0-based in SourceMap spec version 3
+ result += base64VLQ.encode(mapping.original.line - 1
+ - previousOriginalLine);
+ previousOriginalLine = mapping.original.line - 1;
+
+ result += base64VLQ.encode(mapping.original.column
+ - previousOriginalColumn);
+ previousOriginalColumn = mapping.original.column;
+
+ if (mapping.name) {
+ result += base64VLQ.encode(this._names.indexOf(mapping.name)
+ - previousName);
+ previousName = this._names.indexOf(mapping.name);
+ }
+ }
+ }
+
+ return result;
+ };
+
+ /**
+ * Externalize the source map.
+ */
+ SourceMapGenerator.prototype.toJSON =
+ function SourceMapGenerator_toJSON() {
+ var map = {
+ version: this._version,
+ file: this._file,
+ sources: this._sources.toArray(),
+ names: this._names.toArray(),
+ mappings: this._serializeMappings()
+ };
+ if (this._sourceRoot) {
+ map.sourceRoot = this._sourceRoot;
+ }
+ if (this._sourcesContents) {
+ map.sourcesContent = map.sources.map(function (source) {
+ if (map.sourceRoot) {
+ source = util.relative(map.sourceRoot, source);
+ }
+ return Object.prototype.hasOwnProperty.call(
+ this._sourcesContents, util.toSetString(source))
+ ? this._sourcesContents[util.toSetString(source)]
+ : null;
+ }, this);
+ }
+ return map;
+ };
+
+ /**
+ * Render the source map being generated to a string.
+ */
+ SourceMapGenerator.prototype.toString =
+ function SourceMapGenerator_toString() {
+ return JSON.stringify(this);
+ };
+
+ exports.SourceMapGenerator = SourceMapGenerator;
+
+});
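The generator is the other half of the round trip; a sketch that builds the same tiny map programmatically (require path assumed, expected output follows from _serializeMappings and toJSON above):

```javascript
// Building and serializing a one-segment source map.
var SourceMapGenerator =
  require('./lib/source-map/source-map-generator').SourceMapGenerator;

var generator = new SourceMapGenerator({ file: 'out.js' });
generator.addMapping({
  generated: { line: 1, column: 0 },
  original:  { line: 1, column: 0 },
  source: 'foo.js'
});

console.log(generator.toString());
// {"version":3,"file":"out.js","sources":["foo.js"],"names":[],"mappings":"AAAA"}
```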
diff --git a/tools/node_modules/source-map/lib/source-map/source-node.js b/tools/node_modules/source-map/lib/source-map/source-node.js
new file mode 100644
index 00000000..4c3314a8
--- /dev/null
+++ b/tools/node_modules/source-map/lib/source-map/source-node.js
@@ -0,0 +1,353 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ var SourceMapGenerator = require('./source-map-generator').SourceMapGenerator;
+ var util = require('./util');
+
+ /**
+ * SourceNodes provide a way to abstract over interpolating/concatenating
+ * snippets of generated JavaScript source code while maintaining the line and
+ * column information associated with the original source code.
+ *
+ * @param aLine The original line number.
+ * @param aColumn The original column number.
+ * @param aSource The original source's filename.
+ * @param aChunks Optional. An array of strings which are snippets of
+ * generated JS, or other SourceNodes.
+ * @param aName The original identifier.
+ */
+ function SourceNode(aLine, aColumn, aSource, aChunks, aName) {
+ this.children = [];
+ this.sourceContents = {};
+ this.line = aLine === undefined ? null : aLine;
+ this.column = aColumn === undefined ? null : aColumn;
+ this.source = aSource === undefined ? null : aSource;
+ this.name = aName === undefined ? null : aName;
+ if (aChunks != null) this.add(aChunks);
+ }
+
+ /**
+ * Creates a SourceNode from generated code and a SourceMapConsumer.
+ *
+ * @param aGeneratedCode The generated code
+ * @param aSourceMapConsumer The SourceMap for the generated code
+ */
+ SourceNode.fromStringWithSourceMap =
+ function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer) {
+ // The SourceNode we want to fill with the generated code
+ // and the SourceMap
+ var node = new SourceNode();
+
+ // The generated code
+ // Processed fragments are removed from this array.
+ var remainingLines = aGeneratedCode.split('\n');
+
+ // We need to remember the position of "remainingLines"
+ var lastGeneratedLine = 1, lastGeneratedColumn = 0;
+
+      // To generate SourceNodes we need a code range.
+      // To extract it, the current and the last mapping are used.
+      // Here we store the last mapping.
+ var lastMapping = null;
+
+ aSourceMapConsumer.eachMapping(function (mapping) {
+ if (lastMapping === null) {
+ // We add the generated code until the first mapping
+ // to the SourceNode without any mapping.
+ // Each line is added as separate string.
+ while (lastGeneratedLine < mapping.generatedLine) {
+ node.add(remainingLines.shift() + "\n");
+ lastGeneratedLine++;
+ }
+ if (lastGeneratedColumn < mapping.generatedColumn) {
+ var nextLine = remainingLines[0];
+ node.add(nextLine.substr(0, mapping.generatedColumn));
+ remainingLines[0] = nextLine.substr(mapping.generatedColumn);
+ lastGeneratedColumn = mapping.generatedColumn;
+ }
+ } else {
+ // We add the code from "lastMapping" to "mapping":
+ // First check if there is a new line in between.
+ if (lastGeneratedLine < mapping.generatedLine) {
+ var code = "";
+ // Associate full lines with "lastMapping"
+ do {
+ code += remainingLines.shift() + "\n";
+ lastGeneratedLine++;
+ lastGeneratedColumn = 0;
+ } while (lastGeneratedLine < mapping.generatedLine);
+ // When we reached the correct line, we add code until we
+ // reach the correct column too.
+ if (lastGeneratedColumn < mapping.generatedColumn) {
+ var nextLine = remainingLines[0];
+ code += nextLine.substr(0, mapping.generatedColumn);
+ remainingLines[0] = nextLine.substr(mapping.generatedColumn);
+ lastGeneratedColumn = mapping.generatedColumn;
+ }
+ // Create the SourceNode.
+ addMappingWithCode(lastMapping, code);
+ } else {
+ // There is no new line in between.
+ // Associate the code between "lastGeneratedColumn" and
+ // "mapping.generatedColumn" with "lastMapping"
+ var nextLine = remainingLines[0];
+ var code = nextLine.substr(0, mapping.generatedColumn -
+ lastGeneratedColumn);
+ remainingLines[0] = nextLine.substr(mapping.generatedColumn -
+ lastGeneratedColumn);
+ lastGeneratedColumn = mapping.generatedColumn;
+ addMappingWithCode(lastMapping, code);
+ }
+ }
+ lastMapping = mapping;
+ }, this);
+ // We have processed all mappings.
+ // Associate the remaining code in the current line with "lastMapping"
+ // and add the remaining lines without any mapping
+ addMappingWithCode(lastMapping, remainingLines.join("\n"));
+
+ // Copy sourcesContent into SourceNode
+ aSourceMapConsumer.sources.forEach(function (sourceFile) {
+ var content = aSourceMapConsumer.sourceContentFor(sourceFile);
+ if (content) {
+ node.setSourceContent(sourceFile, content);
+ }
+ });
+
+ return node;
+
+ function addMappingWithCode(mapping, code) {
+ if (mapping.source === undefined) {
+ node.add(code);
+ } else {
+ node.add(new SourceNode(mapping.originalLine,
+ mapping.originalColumn,
+ mapping.source,
+ code,
+ mapping.name));
+ }
+ }
+ };
+
+ /**
+ * Add a chunk of generated JS to this source node.
+ *
+ * @param aChunk A string snippet of generated JS code, another instance of
+ * SourceNode, or an array where each member is one of those things.
+ */
+ SourceNode.prototype.add = function SourceNode_add(aChunk) {
+ if (Array.isArray(aChunk)) {
+ aChunk.forEach(function (chunk) {
+ this.add(chunk);
+ }, this);
+ }
+ else if (aChunk instanceof SourceNode || typeof aChunk === "string") {
+ if (aChunk) {
+ this.children.push(aChunk);
+ }
+ }
+ else {
+ throw new TypeError(
+ "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
+ );
+ }
+ return this;
+ };
+
+ /**
+ * Add a chunk of generated JS to the beginning of this source node.
+ *
+ * @param aChunk A string snippet of generated JS code, another instance of
+ * SourceNode, or an array where each member is one of those things.
+ */
+ SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) {
+ if (Array.isArray(aChunk)) {
+ for (var i = aChunk.length-1; i >= 0; i--) {
+ this.prepend(aChunk[i]);
+ }
+ }
+ else if (aChunk instanceof SourceNode || typeof aChunk === "string") {
+ this.children.unshift(aChunk);
+ }
+ else {
+ throw new TypeError(
+ "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
+ );
+ }
+ return this;
+ };
+
+ /**
+ * Walk over the tree of JS snippets in this node and its children. The
+ * walking function is called once for each snippet of JS and is passed that
+   * snippet and its original associated source's line/column location.
+ *
+ * @param aFn The traversal function.
+ */
+ SourceNode.prototype.walk = function SourceNode_walk(aFn) {
+ this.children.forEach(function (chunk) {
+ if (chunk instanceof SourceNode) {
+ chunk.walk(aFn);
+ }
+ else {
+ if (chunk !== '') {
+ aFn(chunk, { source: this.source,
+ line: this.line,
+ column: this.column,
+ name: this.name });
+ }
+ }
+ }, this);
+ };
+
+ /**
+ * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between
+ * each of `this.children`.
+ *
+ * @param aSep The separator.
+ */
+ SourceNode.prototype.join = function SourceNode_join(aSep) {
+ var newChildren;
+ var i;
+ var len = this.children.length;
+ if (len > 0) {
+ newChildren = [];
+ for (i = 0; i < len-1; i++) {
+ newChildren.push(this.children[i]);
+ newChildren.push(aSep);
+ }
+ newChildren.push(this.children[i]);
+ this.children = newChildren;
+ }
+ return this;
+ };
+
+ /**
+ * Call String.prototype.replace on the very right-most source snippet. Useful
+ * for trimming whitespace from the end of a source node, etc.
+ *
+ * @param aPattern The pattern to replace.
+ * @param aReplacement The thing to replace the pattern with.
+ */
+ SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) {
+ var lastChild = this.children[this.children.length - 1];
+ if (lastChild instanceof SourceNode) {
+ lastChild.replaceRight(aPattern, aReplacement);
+ }
+ else if (typeof lastChild === 'string') {
+ this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);
+ }
+ else {
+ this.children.push(''.replace(aPattern, aReplacement));
+ }
+ return this;
+ };
+
+ /**
+ * Set the source content for a source file. This will be added to the SourceMapGenerator
+ * in the sourcesContent field.
+ *
+ * @param aSourceFile The filename of the source file
+ * @param aSourceContent The content of the source file
+ */
+ SourceNode.prototype.setSourceContent =
+ function SourceNode_setSourceContent(aSourceFile, aSourceContent) {
+ this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;
+ };
+
+ /**
+ * Walk over the tree of SourceNodes. The walking function is called for each
+ * source file content and is passed the filename and source content.
+ *
+ * @param aFn The traversal function.
+ */
+ SourceNode.prototype.walkSourceContents =
+ function SourceNode_walkSourceContents(aFn) {
+ this.children.forEach(function (chunk) {
+ if (chunk instanceof SourceNode) {
+ chunk.walkSourceContents(aFn);
+ }
+ }, this);
+ Object.keys(this.sourceContents).forEach(function (sourceFileKey) {
+ aFn(util.fromSetString(sourceFileKey), this.sourceContents[sourceFileKey]);
+ }, this);
+ };
+
+ /**
+ * Return the string representation of this source node. Walks over the tree
+ * and concatenates all the various snippets together to one string.
+ */
+ SourceNode.prototype.toString = function SourceNode_toString() {
+ var str = "";
+ this.walk(function (chunk) {
+ str += chunk;
+ });
+ return str;
+ };
+
+ /**
+ * Returns the string representation of this source node along with a source
+ * map.
+ */
+ SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) {
+ var generated = {
+ code: "",
+ line: 1,
+ column: 0
+ };
+ var map = new SourceMapGenerator(aArgs);
+ var sourceMappingActive = false;
+ this.walk(function (chunk, original) {
+ generated.code += chunk;
+ if (original.source !== null
+ && original.line !== null
+ && original.column !== null) {
+ map.addMapping({
+ source: original.source,
+ original: {
+ line: original.line,
+ column: original.column
+ },
+ generated: {
+ line: generated.line,
+ column: generated.column
+ },
+ name: original.name
+ });
+ sourceMappingActive = true;
+ } else if (sourceMappingActive) {
+ map.addMapping({
+ generated: {
+ line: generated.line,
+ column: generated.column
+ }
+ });
+ sourceMappingActive = false;
+ }
+ chunk.split('').forEach(function (ch) {
+ if (ch === '\n') {
+ generated.line++;
+ generated.column = 0;
+ } else {
+ generated.column++;
+ }
+ });
+ });
+ this.walkSourceContents(function (sourceFile, sourceContent) {
+ map.setSourceContent(sourceFile, sourceContent);
+ });
+
+ return { code: generated.code, map: map };
+ };
+
+ exports.SourceNode = SourceNode;
+
+});
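A sketch of how SourceNodes are typically assembled when concatenating generated snippets (require path and file names are illustrative assumptions):

```javascript
// Concatenate snippets while tracking their original positions, then emit
// both the combined code and its source map in one pass.
var SourceNode = require('./lib/source-map/source-node').SourceNode;

var node = new SourceNode(null, null, null, [
  new SourceNode(1, 0, 'a.js', 'var a = 1;\n'),
  new SourceNode(1, 0, 'b.js', 'var b = 2;\n')
]);

var output = node.toStringWithSourceMap({ file: 'bundle.js' });
console.log(output.code);            // the concatenated snippets
console.log(output.map.toString());  // the corresponding source map JSON
```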
diff --git a/tools/node_modules/source-map/lib/source-map/util.js b/tools/node_modules/source-map/lib/source-map/util.js
new file mode 100644
index 00000000..af5c58b4
--- /dev/null
+++ b/tools/node_modules/source-map/lib/source-map/util.js
@@ -0,0 +1,117 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ /**
+ * This is a helper function for getting values from parameter/options
+ * objects.
+ *
+ * @param args The object we are extracting values from
+ * @param name The name of the property we are getting.
+ * @param defaultValue An optional value to return if the property is missing
+ * from the object. If this is not specified and the property is missing, an
+ * error will be thrown.
+ */
+ function getArg(aArgs, aName, aDefaultValue) {
+ if (aName in aArgs) {
+ return aArgs[aName];
+ } else if (arguments.length === 3) {
+ return aDefaultValue;
+ } else {
+ throw new Error('"' + aName + '" is a required argument.');
+ }
+ }
+ exports.getArg = getArg;
+
+ var urlRegexp = /([\w+\-.]+):\/\/((\w+:\w+)@)?([\w.]+)?(:(\d+))?(\S+)?/;
+
+ function urlParse(aUrl) {
+ var match = aUrl.match(urlRegexp);
+ if (!match) {
+ return null;
+ }
+ return {
+ scheme: match[1],
+ auth: match[3],
+ host: match[4],
+ port: match[6],
+ path: match[7]
+ };
+ }
+ exports.urlParse = urlParse;
+
+ function urlGenerate(aParsedUrl) {
+ var url = aParsedUrl.scheme + "://";
+ if (aParsedUrl.auth) {
+      url += aParsedUrl.auth + "@";
+ }
+ if (aParsedUrl.host) {
+ url += aParsedUrl.host;
+ }
+ if (aParsedUrl.port) {
+      url += ":" + aParsedUrl.port;
+ }
+ if (aParsedUrl.path) {
+ url += aParsedUrl.path;
+ }
+ return url;
+ }
+ exports.urlGenerate = urlGenerate;
+
+ function join(aRoot, aPath) {
+ var url;
+
+ if (aPath.match(urlRegexp)) {
+ return aPath;
+ }
+
+ if (aPath.charAt(0) === '/' && (url = urlParse(aRoot))) {
+ url.path = aPath;
+ return urlGenerate(url);
+ }
+
+ return aRoot.replace(/\/$/, '') + '/' + aPath;
+ }
+ exports.join = join;
+
+ /**
+ * Because behavior goes wacky when you set `__proto__` on objects, we
+ * have to prefix all the strings in our set with an arbitrary character.
+ *
+ * See https://github.com/mozilla/source-map/pull/31 and
+ * https://github.com/mozilla/source-map/issues/30
+ *
+ * @param String aStr
+ */
+ function toSetString(aStr) {
+ return '$' + aStr;
+ }
+ exports.toSetString = toSetString;
+
+ function fromSetString(aStr) {
+ return aStr.substr(1);
+ }
+ exports.fromSetString = fromSetString;
+
+ function relative(aRoot, aPath) {
+ aRoot = aRoot.replace(/\/$/, '');
+
+ var url = urlParse(aRoot);
+ if (aPath.charAt(0) == "/" && url && url.path == "/") {
+ return aPath.slice(1);
+ }
+
+ return aPath.indexOf(aRoot + '/') === 0
+ ? aPath.substr(aRoot.length + 1)
+ : aPath;
+ }
+ exports.relative = relative;
+
+});
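These URL helpers drive how sourceRoot gets combined with source paths elsewhere in the library; a short sketch (require path assumed):

```javascript
// Joining and relativizing paths the way the consumer/generator do.
var util = require('./lib/source-map/util');

console.log(util.join('http://example.com/www', 'one.js'));
// 'http://example.com/www/one.js'
console.log(util.relative('http://example.com/www', 'http://example.com/www/one.js'));
// 'one.js'
console.log(util.getArg({ line: 7 }, 'line'));       // 7
console.log(util.getArg({ line: 7 }, 'column', 0));  // 0 (falls back to the default)
console.log(util.toSetString('__proto__'));          // '$__proto__'
```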
diff --git a/tools/node_modules/source-map/node_modules/amdefine/LICENSE b/tools/node_modules/source-map/node_modules/amdefine/LICENSE
new file mode 100644
index 00000000..f33d665d
--- /dev/null
+++ b/tools/node_modules/source-map/node_modules/amdefine/LICENSE
@@ -0,0 +1,58 @@
+amdefine is released under two licenses: new BSD, and MIT. You may pick the
+license that best suits your development needs. The text of both licenses are
+provided below.
+
+
+The "New" BSD License:
+----------------------
+
+Copyright (c) 2011, The Dojo Foundation
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ * Neither the name of the Dojo Foundation nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+MIT License
+-----------
+
+Copyright (c) 2011, The Dojo Foundation
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/tools/node_modules/source-map/node_modules/amdefine/README.md b/tools/node_modules/source-map/node_modules/amdefine/README.md
new file mode 100644
index 00000000..8ebbc739
--- /dev/null
+++ b/tools/node_modules/source-map/node_modules/amdefine/README.md
@@ -0,0 +1,119 @@
+# amdefine
+
+A module that can be used to implement AMD's define() in Node. This allows you
+to code to the AMD API and have the module work in node programs without
+requiring those other programs to use AMD.
+
+## Usage
+
+**1)** Update your package.json to indicate amdefine as a dependency:
+
+```javascript
+ "dependencies": {
+ "amdefine": ">=0.0.5"
+ }
+```
+
+Then run `npm install` to get amdefine into your project.
+
+**2)** At the top of each module that uses define(), place this code:
+
+```javascript
+if (typeof define !== 'function') { var define = require('amdefine')(module) }
+```
+
+**Only use these snippets** when loading amdefine. If you preserve the basic structure,
+with the braces, it will be stripped out when using the [RequireJS optimizer](#optimizer).
+
+You can add spaces, line breaks and even require amdefine with a local path, but
+keep the rest of the structure to get the stripping behavior.
+
+As you may know, because `if` statements in JavaScript don't have their own scope, the var
+declaration in the above snippet is made whether the `if` expression is truthy or not. If
+RequireJS is loaded then the declaration is superfluous because `define` is already
+declared in the same scope in RequireJS. Fortunately JavaScript handles multiple `var`
+declarations of the same variable in the same scope gracefully.
+
+If you want to deliver amdefine.js with your code rather than specifying it as a dependency
+with npm, then just download the latest release and refer to it using a relative path:
+
+[Latest Version](https://github.com/jrburke/amdefine/raw/latest/amdefine.js)
+
+## define() usage
+
+It is best if you use the anonymous forms of define() in your module:
+
+```javascript
+define(function (require) {
+ var dependency = require('dependency');
+});
+```
+
+or
+
+```javascript
+define(['dependency'], function (dependency) {
+
+});
+```
+
+## RequireJS optimizer integration <a name="optimizer"></a>
+
+Version 1.0.3 of the [RequireJS optimizer](http://requirejs.org/docs/optimization.html)
+will have support for stripping the `if (typeof define !== 'function')` check
+mentioned above, so you can include this snippet for code that runs in the
+browser, but avoid taking the cost of the if() statement once the code is
+optimized for deployment.
+
+## Node 0.4 Support
+
+If you want to support Node 0.4, then add `require` as the second parameter to amdefine:
+
+```javascript
+//Only if you want Node 0.4. If using 0.5 or later, use the above snippet.
+if (typeof define !== 'function') { var define = require('amdefine')(module, require) }
+```
+
+## Limitations
+
+### Synchronous vs Asynchronous
+
+amdefine creates a define() function that is callable by your code. It will
+execute and trace dependencies and call the factory function *synchronously*,
+to keep the behavior in line with Node's synchronous dependency tracing.
+
+The exception: calling AMD's callback-style require() from inside a factory
+function. The require callback is called on process.nextTick():
+
+```javascript
+define(function (require) {
+ require(['a'], function(a) {
+ //'a' is loaded synchronously, but
+ //this callback is called on process.nextTick().
+ });
+});
+```
+
+### Loader Plugins
+
+Loader plugins are supported as long as they call their load() callbacks
+synchronously. So ones that do network requests will not work. However plugins
+like [text](http://requirejs.org/docs/api.html#text) can load text files locally.
+
+The plugin API's `load.fromText()` is **not supported** in amdefine, so this means
+transpiler plugins like the [CoffeeScript loader plugin](https://github.com/jrburke/require-cs)
+will not work. This may be fixable, but it is a bit complex, and I do not have
+enough node-fu to figure it out yet. See the source for amdefine.js if you want
+to get an idea of the issues involved.
+
+## Tests
+
+To run the tests, cd to **tests** and run:
+
+```
+node all.js
+```
+
+## License
+
+New BSD and MIT. Check the LICENSE file for all the details.
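A sketch of the pattern this README describes, with made-up file names; the first module follows the boilerplate line verbatim and the consumer is plain Node:

```javascript
// math.js -- an AMD-style module that also loads under plain Node thanks to
// the amdefine boilerplate (file names here are hypothetical).
if (typeof define !== 'function') { var define = require('amdefine')(module) }

define(function (require, exports, module) {
  exports.square = function (n) { return n * n; };
});
```

```javascript
// main.js -- a regular Node consumer; nothing AMD-specific is needed here.
var math = require('./math');
console.log(math.square(4)); // 16
```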
diff --git a/tools/node_modules/source-map/node_modules/amdefine/amdefine.js b/tools/node_modules/source-map/node_modules/amdefine/amdefine.js
new file mode 100644
index 00000000..d074c94c
--- /dev/null
+++ b/tools/node_modules/source-map/node_modules/amdefine/amdefine.js
@@ -0,0 +1,299 @@
+/** vim: et:ts=4:sw=4:sts=4
+ * @license amdefine 0.0.5 Copyright (c) 2011, The Dojo Foundation All Rights Reserved.
+ * Available via the MIT or new BSD license.
+ * see: http://github.com/jrburke/amdefine for details
+ */
+
+/*jslint node: true */
+/*global module, process */
+'use strict';
+
+var path = require('path');
+
+/**
+ * Creates a define for node.
+ * @param {Object} module the "module" object that is defined by Node for the
+ * current module.
+ * @param {Function} [require]. Node's require function for the current module.
+ * It only needs to be passed in Node versions before 0.5, when module.require
+ * did not exist.
+ * @returns {Function} a define function that is usable for the current node
+ * module.
+ */
+function amdefine(module, require) {
+ var defineCache = {},
+ loaderCache = {},
+ alreadyCalled = false,
+ makeRequire, stringRequire;
+
+ /**
+ * Trims the . and .. from an array of path segments.
+ * It will keep a leading path segment if a .. will become
+ * the first path segment, to help with module name lookups,
+     * which act like paths, but can be remapped. But the end result is that
+     * all paths that use this function should look normalized.
+ * NOTE: this method MODIFIES the input array.
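+     * For example (illustrative), trimDots(['a', 'b', '..', 'c']) leaves the
+     * array as ['a', 'c'].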
+ * @param {Array} ary the array of path segments.
+ */
+ function trimDots(ary) {
+ var i, part;
+ for (i = 0; ary[i]; i+= 1) {
+ part = ary[i];
+ if (part === '.') {
+ ary.splice(i, 1);
+ i -= 1;
+ } else if (part === '..') {
+ if (i === 1 && (ary[2] === '..' || ary[0] === '..')) {
+ //End of the line. Keep at least one non-dot
+ //path segment at the front so it can be mapped
+ //correctly to disk. Otherwise, there is likely
+ //no path mapping for a path starting with '..'.
+ //This can still fail, but catches the most reasonable
+ //uses of ..
+ break;
+ } else if (i > 0) {
+ ary.splice(i - 1, 2);
+ i -= 2;
+ }
+ }
+ }
+ }
+
+ function normalize(name, baseName) {
+ var baseParts;
+
+ //Adjust any relative paths.
+ if (name && name.charAt(0) === '.') {
+            //If we have a base name, try to normalize against it,
+ //otherwise, assume it is a top-level require that will
+ //be relative to baseUrl in the end.
+ if (baseName) {
+ baseParts = baseName.split('/');
+ baseParts = baseParts.slice(0, baseParts.length - 1);
+ baseParts = baseParts.concat(name.split('/'));
+ trimDots(baseParts);
+ name = baseParts.join('/');
+ }
+ }
+
+ return name;
+ }
+
+ /**
+ * Create the normalize() function passed to a loader plugin's
+ * normalize method.
+ */
+ function makeNormalize(relName) {
+ return function (name) {
+ return normalize(name, relName);
+ };
+ }
+
+ function makeLoad(id) {
+ function load(value) {
+ loaderCache[id] = value;
+ }
+
+ load.fromText = function (id, text) {
+            //This one is difficult because the text probably uses define, and
+            //any relative paths and requires should be relative to that id as
+            //it would be found on disk. But this would require
+ //bootstrapping a module/require fairly deeply from node core.
+ //Not sure how best to go about that yet.
+ throw new Error('amdefine does not implement load.fromText');
+ };
+
+ return load;
+ }
+
+ makeRequire = function (systemRequire, exports, module, relId) {
+ function amdRequire(deps, callback) {
+ if (typeof deps === 'string') {
+ //Synchronous, single module require('')
+ return stringRequire(systemRequire, exports, module, deps, relId);
+ } else {
+ //Array of dependencies with a callback.
+
+ //Convert the dependencies to modules.
+ deps = deps.map(function (depName) {
+ return stringRequire(systemRequire, exports, module, depName, relId);
+ });
+
+ //Wait for next tick to call back the require call.
+ process.nextTick(function () {
+ callback.apply(null, deps);
+ });
+ }
+ }
+
+ amdRequire.toUrl = function (filePath) {
+ if (filePath.indexOf('.') === 0) {
+ return normalize(filePath, path.dirname(module.filename));
+ } else {
+ return filePath;
+ }
+ };
+
+ return amdRequire;
+ };
+
+ //Favor explicit value, passed in if the module wants to support Node 0.4.
+ require = require || function req() {
+ return module.require.apply(module, arguments);
+ };
+
+ function runFactory(id, deps, factory) {
+ var r, e, m, result;
+
+ if (id) {
+ e = loaderCache[id] = {};
+ m = {
+ id: id,
+ uri: __filename,
+ exports: e
+ };
+ r = makeRequire(require, e, m, id);
+ } else {
+ //Only support one define call per file
+ if (alreadyCalled) {
+ throw new Error('amdefine with no module ID cannot be called more than once per file.');
+ }
+ alreadyCalled = true;
+
+ //Use the real variables from node
+ //Use module.exports for exports, since
+ //the exports in here is amdefine exports.
+ e = module.exports;
+ m = module;
+ r = makeRequire(require, e, m, module.id);
+ }
+
+ //If there are dependencies, they are strings, so need
+ //to convert them to dependency values.
+ if (deps) {
+ deps = deps.map(function (depName) {
+ return r(depName);
+ });
+ }
+
+ //Call the factory with the right dependencies.
+ if (typeof factory === 'function') {
+ result = factory.apply(module.exports, deps);
+ } else {
+ result = factory;
+ }
+
+ if (result !== undefined) {
+ m.exports = result;
+ if (id) {
+ loaderCache[id] = m.exports;
+ }
+ }
+ }
+
+ stringRequire = function (systemRequire, exports, module, id, relId) {
+        //Split the ID by a ! so that a loader plugin prefix, if present, can be detected.
+ var index = id.indexOf('!'),
+ originalId = id,
+ prefix, plugin;
+
+ if (index === -1) {
+ id = normalize(id, relId);
+
+ //Straight module lookup. If it is one of the special dependencies,
+ //deal with it, otherwise, delegate to node.
+ if (id === 'require') {
+ return makeRequire(systemRequire, exports, module, relId);
+ } else if (id === 'exports') {
+ return exports;
+ } else if (id === 'module') {
+ return module;
+ } else if (loaderCache.hasOwnProperty(id)) {
+ return loaderCache[id];
+ } else if (defineCache[id]) {
+ runFactory.apply(null, defineCache[id]);
+ return loaderCache[id];
+ } else {
+ if(systemRequire) {
+ return systemRequire(originalId);
+ } else {
+ throw new Error('No module with ID: ' + id);
+ }
+ }
+ } else {
+ //There is a plugin in play.
+ prefix = id.substring(0, index);
+ id = id.substring(index + 1, id.length);
+
+ plugin = stringRequire(systemRequire, exports, module, prefix, relId);
+
+ if (plugin.normalize) {
+ id = plugin.normalize(id, makeNormalize(relId));
+ } else {
+ //Normalize the ID normally.
+ id = normalize(id, relId);
+ }
+
+ if (loaderCache[id]) {
+ return loaderCache[id];
+ } else {
+ plugin.load(id, makeRequire(systemRequire, exports, module, relId), makeLoad(id), {});
+
+ return loaderCache[id];
+ }
+ }
+ };
+
+ //Create a define function specific to the module asking for amdefine.
+ function define(id, deps, factory) {
+ if (Array.isArray(id)) {
+ factory = deps;
+ deps = id;
+ id = undefined;
+ } else if (typeof id !== 'string') {
+ factory = id;
+ id = deps = undefined;
+ }
+
+ if (deps && !Array.isArray(deps)) {
+ factory = deps;
+ deps = undefined;
+ }
+
+ if (!deps) {
+ deps = ['require', 'exports', 'module'];
+ }
+
+ //Set up properties for this module. If an ID, then use
+ //internal cache. If no ID, then use the external variables
+ //for this node module.
+ if (id) {
+ //Put the module in deep freeze until there is a
+ //require call for it.
+ defineCache[id] = [id, deps, factory];
+ } else {
+ runFactory(id, deps, factory);
+ }
+ }
+
+ //define.require, which has access to all the values in the
+ //cache. Useful for AMD modules that all have IDs in the file,
+ //but need to finally export a value to node based on one of those
+ //IDs.
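+    //For example (illustrative):
+    //  define('a', function () { return 1; });
+    //  define('b', ['a'], function (a) { return a + 1; });
+    //  module.exports = define.require('b'); //exports 2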
+ define.require = function (id) {
+ if (loaderCache[id]) {
+ return loaderCache[id];
+ }
+
+ if (defineCache[id]) {
+ runFactory.apply(null, defineCache[id]);
+ return loaderCache[id];
+ }
+ };
+
+ define.amd = {};
+
+ return define;
+}
+
+module.exports = amdefine;
diff --git a/tools/node_modules/source-map/node_modules/amdefine/package.json b/tools/node_modules/source-map/node_modules/amdefine/package.json
new file mode 100644
index 00000000..87ac4163
--- /dev/null
+++ b/tools/node_modules/source-map/node_modules/amdefine/package.json
@@ -0,0 +1,33 @@
+{
+ "name": "amdefine",
+ "description": "Provide AMD's define() API for declaring modules in the AMD format",
+ "version": "0.0.5",
+ "homepage": "http://github.com/jrburke/amdefine",
+ "author": {
+ "name": "James Burke",
+ "email": "jrburke@gmail.com",
+ "url": "http://github.com/jrburke"
+ },
+ "licenses": [
+ {
+ "type": "BSD",
+ "url": "https://github.com/jrburke/amdefine/blob/master/LICENSE"
+ },
+ {
+ "type": "MIT",
+ "url": "https://github.com/jrburke/amdefine/blob/master/LICENSE"
+ }
+ ],
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/jrburke/amdefine.git"
+ },
+ "main": "./amdefine.js",
+ "engines": {
+ "node": ">=0.4.2"
+ },
+ "readme": "# amdefine\n\nA module that can be used to implement AMD's define() in Node. This allows you\nto code to the AMD API and have the module work in node programs without\nrequiring those other programs to use AMD.\n\n## Usage\n\n**1)** Update your package.json to indicate amdefine as a dependency:\n\n```javascript\n \"dependencies\": {\n \"amdefine\": \">=0.0.5\"\n }\n```\n\nThen run `npm install` to get amdefine into your project.\n\n**2)** At the top of each module that uses define(), place this code:\n\n```javascript\nif (typeof define !== 'function') { var define = require('amdefine')(module) }\n```\n\n**Only use these snippets** when loading amdefine. If you preserve the basic structure,\nwith the braces, it will be stripped out when using the [RequireJS optimizer](#optimizer).\n\nYou can add spaces, line breaks and even require amdefine with a local path, but\nkeep the rest of the structure to get the stripping behavior.\n\nAs you may know, because `if` statements in JavaScript don't have their own scope, the var\ndeclaration in the above snippet is made whether the `if` expression is truthy or not. If\nRequireJS is loaded then the declaration is superfluous because `define` is already already\ndeclared in the same scope in RequireJS. Fortunately JavaScript handles multiple `var`\ndeclarations of the same variable in the same scope gracefully.\n\nIf you want to deliver amdefine.js with your code rather than specifying it as a dependency\nwith npm, then just download the latest release and refer to it using a relative path:\n\n[Latest Version](https://github.com/jrburke/amdefine/raw/latest/amdefine.js)\n\n## define() usage\n\nIt is best if you use the anonymous forms of define() in your module:\n\n```javascript\ndefine(function (require) {\n var dependency = require('dependency');\n});\n```\n\nor\n\n```javascript\ndefine(['dependency'], function (dependency) {\n\n});\n```\n\n## RequireJS optimizer integration. <a name=\"optimizer\"></name>\n\nVersion 1.0.3 of the [RequireJS optimizer](http://requirejs.org/docs/optimization.html)\nwill have support for stripping the `if (typeof define !== 'function')` check\nmentioned above, so you can include this snippet for code that runs in the\nbrowser, but avoid taking the cost of the if() statement once the code is\noptimized for deployment.\n\n## Node 0.4 Support\n\nIf you want to support Node 0.4, then add `require` as the second parameter to amdefine:\n\n```javascript\n//Only if you want Node 0.4. If using 0.5 or later, use the above snippet.\nif (typeof define !== 'function') { var define = require('amdefine')(module, require) }\n```\n\n## Limitations\n\n### Synchronous vs Asynchronous\n\namdefine creates a define() function that is callable by your code. It will\nexecute and trace dependencies and call the factory function *synchronously*,\nto keep the behavior in line with Node's synchronous dependency tracing.\n\nThe exception: calling AMD's callback-style require() from inside a factory\nfunction. The require callback is called on process.nextTick():\n\n```javascript\ndefine(function (require) {\n require(['a'], function(a) {\n //'a' is loaded synchronously, but\n //this callback is called on process.nextTick().\n });\n});\n```\n\n### Loader Plugins\n\nLoader plugins are supported as long as they call their load() callbacks\nsynchronously. So ones that do network requests will not work. 
However plugins\nlike [text](http://requirejs.org/docs/api.html#text) can load text files locally.\n\nThe plugin API's `load.fromText()` is **not supported** in amdefine, so this means\ntranspiler plugins like the [CoffeeScript loader plugin](https://github.com/jrburke/require-cs)\nwill not work. This may be fixable, but it is a bit complex, and I do not have\nenough node-fu to figure it out yet. See the source for amdefine.js if you want\nto get an idea of the issues involved.\n\n## Tests\n\nTo run the tests, cd to **tests** and run:\n\n```\nnode all.js\n```\n\n## License\n\nNew BSD and MIT. Check the LICENSE file for all the details.\n",
+ "readmeFilename": "README.md",
+ "_id": "amdefine@0.0.5",
+ "_from": "amdefine@>=0.0.4"
+}
diff --git a/tools/node_modules/source-map/package.json b/tools/node_modules/source-map/package.json
new file mode 100644
index 00000000..730e042e
--- /dev/null
+++ b/tools/node_modules/source-map/package.json
@@ -0,0 +1,74 @@
+{
+ "name": "source-map",
+ "description": "Generates and consumes source maps",
+ "version": "0.1.23",
+ "homepage": "https://github.com/mozilla/source-map",
+ "author": {
+ "name": "Nick Fitzgerald",
+ "email": "nfitzgerald@mozilla.com"
+ },
+ "contributors": [
+ {
+ "name": "Stephen Crane"
+ },
+ {
+ "name": "Ryan Seddon"
+ },
+ {
+ "name": "Mihai Bazon",
+ "email": "mihai.bazon@gmail.com"
+ },
+ {
+ "name": "Michael Ficarra",
+ "email": "github.public.email@michael.ficarra.me"
+ },
+ {
+ "name": "Todd Wolfson",
+ "email": "todd@twolfson.com"
+ },
+ {
+ "name": "Alexander Solovyov",
+ "email": "alexander@solovyov.net"
+ },
+ {
+ "name": "Felix Gnass"
+ },
+ {
+ "name": "Conrad Irwin",
+ "email": "conrad.irwin@gmail.com"
+ },
+ {
+ "name": "github.com/usrbincc"
+ }
+ ],
+ "repository": {
+ "type": "git",
+ "url": "http://github.com/mozilla/source-map.git"
+ },
+ "directories": {
+ "lib": "./lib"
+ },
+ "main": "./lib/source-map.js",
+ "engines": {
+ "node": ">=0.8.0"
+ },
+ "licenses": [
+ {
+ "type": "BSD",
+ "url": "http://opensource.org/licenses/BSD-3-Clause"
+ }
+ ],
+ "dependencies": {
+ "amdefine": ">=0.0.4"
+ },
+ "devDependencies": {
+ "dryice": ">=0.4.8"
+ },
+ "scripts": {
+ "test": "node test/run-tests.js"
+ },
+ "readme": "# Source Map\n\nThis is a library to generate and consume the source map format\n[described here][format].\n\n[Learn more here][feature].\n\nThis library was written in the Asynchronous Module Definition\nformat. It should work in the following environments:\n\n* Modern Browsers (either after the build, or with an AMD loader such as\n RequireJS)\n\n* Inside Firefox (as a JSM file, after the build)\n\n* With NodeJS versions 0.8.X and higher\n\n## Installing with NPM (for use with NodeJS)\n\nSimply\n\n $ npm install source-map\n\nOr, if you'd like to hack on this library and have it installed via npm so you\ncan try out your changes:\n\n $ git clone https://fitzgen@github.com/mozilla/source-map.git\n $ cd source-map\n $ npm link .\n\n## Building from Source (for everywhere else)\n\nInstall Node and then run\n\n $ git clone https://fitzgen@github.com/mozilla/source-map.git\n $ cd source-map\n $ npm link .\n\nNext, run\n\n $ node Makefile.dryice.js`\n\nThis should create the following files:\n\n* `dist/source-map.js` - The unminified browser version.\n\n* `dist/source-map.min.js` - The minified browser version.\n\n* `dist/SourceMap.jsm` - The JavaScript Module for inclusion in Firefox\n source.\n\n## API\n\nGet a reference to the module:\n\n // NodeJS\n var sourceMap = require('source-map');\n\n // Browser builds\n var sourceMap = window.sourceMap;\n\n // Inside Firefox\n let sourceMap = {};\n Components.utils.import('resource:///modules/devtools/SourceMap.jsm', sourceMap);\n\n### SourceMapConsumer\n\nA SourceMapConsumer instance represents a parsed source map which we can query\nfor information about the original file positions by giving it a file position\nin the generated source.\n\n#### new SourceMapConsumer(rawSourceMap)\n\nThe only parameter is the raw source map (either as a string which can be\n`JSON.parse`'d, or an object). According to the spec, source maps have the\nfollowing attributes:\n\n* `version`: Which version of the source map spec this map is following.\n\n* `sources`: An array of URLs to the original source files.\n\n* `names`: An array of identifiers which can be referrenced by individual\n mappings.\n\n* `sourceRoot`: Optional. The URL root from which all sources are relative.\n\n* `sourcesContent`: Optional. An array of contents of the original source files.\n\n* `mappings`: A string of base64 VLQs which contain the actual mappings.\n\n* `file`: The generated filename this source map is associated with.\n\n#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)\n\nReturns the original source, line, and column information for the generated\nsource's line and column positions provided. The only argument is an object with\nthe following properties:\n\n* `line`: The line number in the generated source.\n\n* `column`: The column number in the generated source.\n\nand an object is returned with the following properties:\n\n* `source`: The original source file, or null if this information is not\n available.\n\n* `line`: The line number in the original source, or null if this information is\n not available.\n\n* `column`: The column number in the original source, or null or null if this\n information is not available.\n\n* `name`: The original identifier, or null if this information is not available.\n\n#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)\n\nReturns the generated line and column information for the original source,\nline, and column positions provided. 
The only argument is an object with\nthe following properties:\n\n* `source`: The filename of the original source.\n\n* `line`: The line number in the original source.\n\n* `column`: The column number in the original source.\n\nand an object is returned with the following properties:\n\n* `line`: The line number in the generated source, or null.\n\n* `column`: The column number in the generated source, or null.\n\n#### SourceMapConsumer.prototype.sourceContentFor(source)\n\nReturns the original source content for the source provided. The only\nargument is the URL of the original source file.\n\n#### SourceMapConsumer.prototype.eachMapping(callback, context, order)\n\nIterate over each mapping between an original source/line/column and a\ngenerated line/column in this source map.\n\n* `callback`: The function that is called with each mapping.\n\n* `context`: Optional. If specified, this object will be the value of `this`\n every time that `callback` is called.\n\n* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or\n `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to iterate over\n the mappings sorted by the generated file's line/column order or the\n original's source/line/column order, respectively. Defaults to\n `SourceMapConsumer.GENERATED_ORDER`.\n\n### SourceMapGenerator\n\nAn instance of the SourceMapGenerator represents a source map which is being\nbuilt incrementally.\n\n#### new SourceMapGenerator(startOfSourceMap)\n\nTo create a new one, you must pass an object with the following properties:\n\n* `file`: The filename of the generated source that this source map is\n associated with.\n\n* `sourceRoot`: An optional root for all relative URLs in this source map.\n\n#### SourceMapGenerator.fromSourceMap(sourceMapConsumer)\n\nCreates a new SourceMapGenerator based on a SourceMapConsumer\n\n* `sourceMapConsumer` The SourceMap.\n\n#### SourceMapGenerator.prototype.addMapping(mapping)\n\nAdd a single mapping from original source line and column to the generated\nsource's line and column for this source map being created. The mapping object\nshould have the following properties:\n\n* `generated`: An object with the generated line and column positions.\n\n* `original`: An object with the original line and column positions.\n\n* `source`: The original source file (relative to the sourceRoot).\n\n* `name`: An optional original token name for this mapping.\n\n#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)\n\nSet the source content for an original source file.\n\n* `sourceFile` the URL of the original source file.\n\n* `sourceContent` the content of the source file.\n\n#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile])\n\nApplies a SourceMap for a source file to the SourceMap.\nEach mapping to the supplied source file is rewritten using the\nsupplied SourceMap. Note: The resolution for the resulting mappings\nis the minimium of this map and the supplied map.\n\n* `sourceMapConsumer`: The SourceMap to be applied.\n\n* `sourceFile`: Optional. The filename of the source file.\n If omitted, sourceMapConsumer.file will be used.\n\n#### SourceMapGenerator.prototype.toString()\n\nRenders the source map being generated to a string.\n\n### SourceNode\n\nSourceNodes provide a way to abstract over interpolating and/or concatenating\nsnippets of generated JavaScript source code, while maintaining the line and\ncolumn information associated between those snippets and the original source\ncode. 
This is useful as the final intermediate representation a compiler might\nuse before outputting the generated JS and source map.\n\n#### new SourceNode(line, column, source[, chunk[, name]])\n\n* `line`: The original line number associated with this source node, or null if\n it isn't associated with an original line.\n\n* `column`: The original column number associated with this source node, or null\n if it isn't associated with an original column.\n\n* `source`: The original source's filename.\n\n* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see\n below.\n\n* `name`: Optional. The original identifier.\n\n#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer)\n\nCreates a SourceNode from generated code and a SourceMapConsumer.\n\n* `code`: The generated code\n\n* `sourceMapConsumer` The SourceMap for the generated code\n\n#### SourceNode.prototype.add(chunk)\n\nAdd a chunk of generated JS to this source node.\n\n* `chunk`: A string snippet of generated JS code, another instance of\n `SourceNode`, or an array where each member is one of those things.\n\n#### SourceNode.prototype.prepend(chunk)\n\nPrepend a chunk of generated JS to this source node.\n\n* `chunk`: A string snippet of generated JS code, another instance of\n `SourceNode`, or an array where each member is one of those things.\n\n#### SourceNode.prototype.setSourceContent(sourceFile, sourceContent)\n\nSet the source content for a source file. This will be added to the\n`SourceMap` in the `sourcesContent` field.\n\n* `sourceFile`: The filename of the source file\n\n* `sourceContent`: The content of the source file\n\n#### SourceNode.prototype.walk(fn)\n\nWalk over the tree of JS snippets in this node and its children. The walking\nfunction is called once for each snippet of JS and is passed that snippet and\nthe its original associated source's line/column location.\n\n* `fn`: The traversal function.\n\n#### SourceNode.prototype.walkSourceContents(fn)\n\nWalk over the tree of SourceNodes. The walking function is called for each\nsource file content and is passed the filename and source content.\n\n* `fn`: The traversal function.\n\n#### SourceNode.prototype.join(sep)\n\nLike `Array.prototype.join` except for SourceNodes. Inserts the separator\nbetween each of this source node's children.\n\n* `sep`: The separator.\n\n#### SourceNode.prototype.replaceRight(pattern, replacement)\n\nCall `String.prototype.replace` on the very right-most source snippet. Useful\nfor trimming whitespace from the end of a source node, etc.\n\n* `pattern`: The pattern to replace.\n\n* `replacement`: The thing to replace the pattern with.\n\n#### SourceNode.prototype.toString()\n\nReturn the string representation of this source node. 
Walks over the tree and\nconcatenates all the various snippets together to one string.\n\n### SourceNode.prototype.toStringWithSourceMap(startOfSourceMap)\n\nReturns the string representation of this tree of source nodes, plus a\nSourceMapGenerator which contains all the mappings between the generated and\noriginal sources.\n\nThe arguments are the same as those to `new SourceMapGenerator`.\n\n## Tests\n\n[![Build Status](https://travis-ci.org/mozilla/source-map.png?branch=master)](https://travis-ci.org/mozilla/source-map)\n\nInstall NodeJS version 0.8.0 or greater, then run `node test/run-tests.js`.\n\nTo add new tests, create a new file named `test/test-<your new test name>.js`\nand export your test functions with names that start with \"test\", for example\n\n exports[\"test doing the foo bar\"] = function (assert, util) {\n ...\n };\n\nThe new test will be located automatically when you run the suite.\n\nThe `util` argument is the test utility module located at `test/source-map/util`.\n\nThe `assert` argument is a cut down version of node's assert module. You have\naccess to the following assertion functions:\n\n* `doesNotThrow`\n\n* `equal`\n\n* `ok`\n\n* `strictEqual`\n\n* `throws`\n\n(The reason for the restricted set of test functions is because we need the\ntests to run inside Firefox's test suite as well and so the assert module is\nshimmed in that environment. See `build/assert-shim.js`.)\n\n[format]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit\n[feature]: https://wiki.mozilla.org/DevTools/Features/SourceMap\n[Dryice]: https://github.com/mozilla/dryice\n",
+ "readmeFilename": "README.md",
+ "_id": "source-map@0.1.23",
+ "_from": "source-map@"
+}
diff --git a/tools/node_modules/source-map/test/run-tests.js b/tools/node_modules/source-map/test/run-tests.js
new file mode 100755
index 00000000..8bf2256f
--- /dev/null
+++ b/tools/node_modules/source-map/test/run-tests.js
@@ -0,0 +1,73 @@
+#!/usr/bin/env node
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+var assert = require('assert');
+var fs = require('fs');
+var path = require('path');
+var util = require('./source-map/util');
+
+function run(tests) {
+ var failures = [];
+ var total = 0;
+ var passed = 0;
+
+ for (var i = 0; i < tests.length; i++) {
+ for (var k in tests[i].testCase) {
+ if (/^test/.test(k)) {
+ total++;
+ try {
+ tests[i].testCase[k](assert, util);
+ passed++;
+ process.stdout.write('.');
+ }
+ catch (e) {
+ failures.push({
+ name: tests[i].name + ': ' + k,
+ error: e
+ });
+ process.stdout.write('E');
+ }
+ }
+ }
+ }
+
+ process.stdout.write('\n');
+ console.log(passed + ' / ' + total + ' tests passed.');
+
+ failures.forEach(function (f) {
+ console.log('================================================================================');
+ console.log(f.name);
+ console.log('--------------------------------------------------------------------------------');
+ console.log(f.error.stack);
+ });
+
+ return failures.length;
+}
+
+var code;
+
+process.stdout.on('close', function () {
+ process.exit(code);
+});
+
+function isTestFile(f) {
+ return /^test\-.*?\.js/.test(f);
+}
+
+function toModule(f) {
+ return './source-map/' + f.replace(/\.js$/, '');
+}
+
+var requires = fs.readdirSync(path.join(__dirname, 'source-map')).filter(isTestFile).map(toModule);
+
+code = run(requires.map(require).map(function (mod, i) {
+ return {
+ name: requires[i],
+ testCase: mod
+ };
+}));
+process.exit(code);
diff --git a/tools/node_modules/source-map/test/source-map/test-api.js b/tools/node_modules/source-map/test/source-map/test-api.js
new file mode 100644
index 00000000..63eb45a0
--- /dev/null
+++ b/tools/node_modules/source-map/test/source-map/test-api.js
@@ -0,0 +1,26 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2012 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ var sourceMap;
+ try {
+ sourceMap = require('../../lib/source-map');
+ } catch (e) {
+ sourceMap = {};
+ Components.utils.import('resource:///modules/devtools/SourceMap.jsm', sourceMap);
+ }
+
+ exports['test that the api is properly exposed in the top level'] = function (assert, util) {
+ assert.equal(typeof sourceMap.SourceMapGenerator, "function");
+ assert.equal(typeof sourceMap.SourceMapConsumer, "function");
+ assert.equal(typeof sourceMap.SourceNode, "function");
+ };
+
+});
diff --git a/tools/node_modules/source-map/test/source-map/test-array-set.js b/tools/node_modules/source-map/test/source-map/test-array-set.js
new file mode 100644
index 00000000..c4e88d36
--- /dev/null
+++ b/tools/node_modules/source-map/test/source-map/test-array-set.js
@@ -0,0 +1,71 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ var ArraySet = require('../../lib/source-map/array-set').ArraySet;
+
+ function makeTestSet() {
+ var set = new ArraySet();
+ for (var i = 0; i < 100; i++) {
+ set.add(String(i));
+ }
+ return set;
+ }
+
+ exports['test .has() membership'] = function (assert, util) {
+ var set = makeTestSet();
+ for (var i = 0; i < 100; i++) {
+ assert.ok(set.has(String(i)));
+ }
+ };
+
+ exports['test .indexOf() elements'] = function (assert, util) {
+ var set = makeTestSet();
+ for (var i = 0; i < 100; i++) {
+ assert.strictEqual(set.indexOf(String(i)), i);
+ }
+ };
+
+ exports['test .at() indexing'] = function (assert, util) {
+ var set = makeTestSet();
+ for (var i = 0; i < 100; i++) {
+ assert.strictEqual(set.at(i), String(i));
+ }
+ };
+
+ exports['test creating from an array'] = function (assert, util) {
+ var set = ArraySet.fromArray(['foo', 'bar', 'baz', 'quux', 'hasOwnProperty']);
+
+ assert.ok(set.has('foo'));
+ assert.ok(set.has('bar'));
+ assert.ok(set.has('baz'));
+ assert.ok(set.has('quux'));
+ assert.ok(set.has('hasOwnProperty'));
+
+ assert.strictEqual(set.indexOf('foo'), 0);
+ assert.strictEqual(set.indexOf('bar'), 1);
+ assert.strictEqual(set.indexOf('baz'), 2);
+ assert.strictEqual(set.indexOf('quux'), 3);
+
+ assert.strictEqual(set.at(0), 'foo');
+ assert.strictEqual(set.at(1), 'bar');
+ assert.strictEqual(set.at(2), 'baz');
+ assert.strictEqual(set.at(3), 'quux');
+ };
+
+ exports['test that you can add __proto__; see github issue #30'] = function (assert, util) {
+ var set = new ArraySet();
+ set.add('__proto__');
+ assert.ok(set.has('__proto__'));
+ assert.strictEqual(set.at(0), '__proto__');
+ assert.strictEqual(set.indexOf('__proto__'), 0);
+ };
+
+});
diff --git a/tools/node_modules/source-map/test/source-map/test-base64-vlq.js b/tools/node_modules/source-map/test/source-map/test-base64-vlq.js
new file mode 100644
index 00000000..7100da35
--- /dev/null
+++ b/tools/node_modules/source-map/test/source-map/test-base64-vlq.js
@@ -0,0 +1,24 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ var base64VLQ = require('../../lib/source-map/base64-vlq');
+
+ exports['test normal encoding and decoding'] = function (assert, util) {
+ var result;
+ for (var i = -255; i < 256; i++) {
+ result = base64VLQ.decode(base64VLQ.encode(i));
+ assert.ok(result);
+ assert.equal(result.value, i);
+ assert.equal(result.rest, "");
+ }
+ };
+
+});
diff --git a/tools/node_modules/source-map/test/source-map/test-base64.js b/tools/node_modules/source-map/test/source-map/test-base64.js
new file mode 100644
index 00000000..23e0f886
--- /dev/null
+++ b/tools/node_modules/source-map/test/source-map/test-base64.js
@@ -0,0 +1,35 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ var base64 = require('../../lib/source-map/base64');
+
+ exports['test out of range encoding'] = function (assert, util) {
+ assert.throws(function () {
+ base64.encode(-1);
+ });
+ assert.throws(function () {
+ base64.encode(64);
+ });
+ };
+
+ exports['test out of range decoding'] = function (assert, util) {
+ assert.throws(function () {
+ base64.decode('=');
+ });
+ };
+
+ exports['test normal encoding and decoding'] = function (assert, util) {
+ for (var i = 0; i < 64; i++) {
+ assert.equal(base64.decode(base64.encode(i)), i);
+ }
+ };
+
+});
diff --git a/tools/node_modules/source-map/test/source-map/test-binary-search.js b/tools/node_modules/source-map/test/source-map/test-binary-search.js
new file mode 100644
index 00000000..41d0bf85
--- /dev/null
+++ b/tools/node_modules/source-map/test/source-map/test-binary-search.js
@@ -0,0 +1,54 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ var binarySearch = require('../../lib/source-map/binary-search');
+
+ function numberCompare(a, b) {
+ return a - b;
+ }
+
+ exports['test too high'] = function (assert, util) {
+ var needle = 30;
+ var haystack = [2,4,6,8,10,12,14,16,18,20];
+
+ assert.doesNotThrow(function () {
+ binarySearch.search(needle, haystack, numberCompare);
+ });
+
+ assert.equal(binarySearch.search(needle, haystack, numberCompare), 20);
+ };
+
+ exports['test too low'] = function (assert, util) {
+ var needle = 1;
+ var haystack = [2,4,6,8,10,12,14,16,18,20];
+
+ assert.doesNotThrow(function () {
+ binarySearch.search(needle, haystack, numberCompare);
+ });
+
+ assert.equal(binarySearch.search(needle, haystack, numberCompare), null);
+ };
+
+ exports['test exact search'] = function (assert, util) {
+ var needle = 4;
+ var haystack = [2,4,6,8,10,12,14,16,18,20];
+
+ assert.equal(binarySearch.search(needle, haystack, numberCompare), 4);
+ };
+
+ exports['test fuzzy search'] = function (assert, util) {
+ var needle = 19;
+ var haystack = [2,4,6,8,10,12,14,16,18,20];
+
+ assert.equal(binarySearch.search(needle, haystack, numberCompare), 18);
+ };
+
+});
diff --git a/tools/node_modules/source-map/test/source-map/test-dog-fooding.js b/tools/node_modules/source-map/test/source-map/test-dog-fooding.js
new file mode 100644
index 00000000..d433bf6b
--- /dev/null
+++ b/tools/node_modules/source-map/test/source-map/test-dog-fooding.js
@@ -0,0 +1,72 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
+ var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator;
+
+ exports['test eating our own dog food'] = function (assert, util) {
+ var smg = new SourceMapGenerator({
+ file: 'testing.js',
+ sourceRoot: '/wu/tang'
+ });
+
+ smg.addMapping({
+ source: 'gza.coffee',
+ original: { line: 1, column: 0 },
+ generated: { line: 2, column: 2 }
+ });
+
+ smg.addMapping({
+ source: 'gza.coffee',
+ original: { line: 2, column: 0 },
+ generated: { line: 3, column: 2 }
+ });
+
+ smg.addMapping({
+ source: 'gza.coffee',
+ original: { line: 3, column: 0 },
+ generated: { line: 4, column: 2 }
+ });
+
+ smg.addMapping({
+ source: 'gza.coffee',
+ original: { line: 4, column: 0 },
+ generated: { line: 5, column: 2 }
+ });
+
+ var smc = new SourceMapConsumer(smg.toString());
+
+ // Exact
+ util.assertMapping(2, 2, '/wu/tang/gza.coffee', 1, 0, null, smc, assert);
+ util.assertMapping(3, 2, '/wu/tang/gza.coffee', 2, 0, null, smc, assert);
+ util.assertMapping(4, 2, '/wu/tang/gza.coffee', 3, 0, null, smc, assert);
+ util.assertMapping(5, 2, '/wu/tang/gza.coffee', 4, 0, null, smc, assert);
+
+ // Fuzzy
+
+ // Original to generated
+ util.assertMapping(2, 0, null, null, null, null, smc, assert, true);
+ util.assertMapping(2, 9, '/wu/tang/gza.coffee', 1, 0, null, smc, assert, true);
+ util.assertMapping(3, 0, '/wu/tang/gza.coffee', 1, 0, null, smc, assert, true);
+ util.assertMapping(3, 9, '/wu/tang/gza.coffee', 2, 0, null, smc, assert, true);
+ util.assertMapping(4, 0, '/wu/tang/gza.coffee', 2, 0, null, smc, assert, true);
+ util.assertMapping(4, 9, '/wu/tang/gza.coffee', 3, 0, null, smc, assert, true);
+ util.assertMapping(5, 0, '/wu/tang/gza.coffee', 3, 0, null, smc, assert, true);
+ util.assertMapping(5, 9, '/wu/tang/gza.coffee', 4, 0, null, smc, assert, true);
+
+ // Generated to original
+ util.assertMapping(2, 2, '/wu/tang/gza.coffee', 1, 1, null, smc, assert, null, true);
+ util.assertMapping(3, 2, '/wu/tang/gza.coffee', 2, 3, null, smc, assert, null, true);
+ util.assertMapping(4, 2, '/wu/tang/gza.coffee', 3, 6, null, smc, assert, null, true);
+ util.assertMapping(5, 2, '/wu/tang/gza.coffee', 4, 9, null, smc, assert, null, true);
+ };
+
+});
diff --git a/tools/node_modules/source-map/test/source-map/test-source-map-consumer.js b/tools/node_modules/source-map/test/source-map/test-source-map-consumer.js
new file mode 100644
index 00000000..2f9a23d0
--- /dev/null
+++ b/tools/node_modules/source-map/test/source-map/test-source-map-consumer.js
@@ -0,0 +1,306 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
+ var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator;
+
+  exports['test that we can instantiate with a string or an object'] = function (assert, util) {
+ assert.doesNotThrow(function () {
+ var map = new SourceMapConsumer(util.testMap);
+ });
+ assert.doesNotThrow(function () {
+ var map = new SourceMapConsumer(JSON.stringify(util.testMap));
+ });
+ };
+
+ exports['test that the `sources` field has the original sources'] = function (assert, util) {
+ var map = new SourceMapConsumer(util.testMap);
+ var sources = map.sources;
+
+ assert.equal(sources[0], '/the/root/one.js');
+ assert.equal(sources[1], '/the/root/two.js');
+ assert.equal(sources.length, 2);
+ };
+
+ exports['test that the source root is reflected in a mapping\'s source field'] = function (assert, util) {
+ var map = new SourceMapConsumer(util.testMap);
+ var mapping;
+
+ mapping = map.originalPositionFor({
+ line: 2,
+ column: 1
+ });
+ assert.equal(mapping.source, '/the/root/two.js');
+
+ mapping = map.originalPositionFor({
+ line: 1,
+ column: 1
+ });
+ assert.equal(mapping.source, '/the/root/one.js');
+ };
+
+ exports['test mapping tokens back exactly'] = function (assert, util) {
+ var map = new SourceMapConsumer(util.testMap);
+
+ util.assertMapping(1, 1, '/the/root/one.js', 1, 1, null, map, assert);
+ util.assertMapping(1, 5, '/the/root/one.js', 1, 5, null, map, assert);
+ util.assertMapping(1, 9, '/the/root/one.js', 1, 11, null, map, assert);
+ util.assertMapping(1, 18, '/the/root/one.js', 1, 21, 'bar', map, assert);
+ util.assertMapping(1, 21, '/the/root/one.js', 2, 3, null, map, assert);
+ util.assertMapping(1, 28, '/the/root/one.js', 2, 10, 'baz', map, assert);
+ util.assertMapping(1, 32, '/the/root/one.js', 2, 14, 'bar', map, assert);
+
+ util.assertMapping(2, 1, '/the/root/two.js', 1, 1, null, map, assert);
+ util.assertMapping(2, 5, '/the/root/two.js', 1, 5, null, map, assert);
+ util.assertMapping(2, 9, '/the/root/two.js', 1, 11, null, map, assert);
+ util.assertMapping(2, 18, '/the/root/two.js', 1, 21, 'n', map, assert);
+ util.assertMapping(2, 21, '/the/root/two.js', 2, 3, null, map, assert);
+ util.assertMapping(2, 28, '/the/root/two.js', 2, 10, 'n', map, assert);
+ };
+
+ exports['test mapping tokens fuzzy'] = function (assert, util) {
+ var map = new SourceMapConsumer(util.testMap);
+
+ // Finding original positions
+ util.assertMapping(1, 20, '/the/root/one.js', 1, 21, 'bar', map, assert, true);
+ util.assertMapping(1, 30, '/the/root/one.js', 2, 10, 'baz', map, assert, true);
+ util.assertMapping(2, 12, '/the/root/two.js', 1, 11, null, map, assert, true);
+
+ // Finding generated positions
+ util.assertMapping(1, 18, '/the/root/one.js', 1, 22, 'bar', map, assert, null, true);
+ util.assertMapping(1, 28, '/the/root/one.js', 2, 13, 'baz', map, assert, null, true);
+ util.assertMapping(2, 9, '/the/root/two.js', 1, 16, null, map, assert, null, true);
+ };
+
+ exports['test creating source map consumers with )]}\' prefix'] = function (assert, util) {
+ assert.doesNotThrow(function () {
+ var map = new SourceMapConsumer(")]}'" + JSON.stringify(util.testMap));
+ });
+ };
+
+ exports['test eachMapping'] = function (assert, util) {
+ var map = new SourceMapConsumer(util.testMap);
+ var previousLine = -Infinity;
+ var previousColumn = -Infinity;
+ map.eachMapping(function (mapping) {
+ assert.ok(mapping.generatedLine >= previousLine);
+
+ if (mapping.source) {
+ assert.equal(mapping.source.indexOf(util.testMap.sourceRoot), 0);
+ }
+
+ if (mapping.generatedLine === previousLine) {
+ assert.ok(mapping.generatedColumn >= previousColumn);
+ previousColumn = mapping.generatedColumn;
+ }
+ else {
+ previousLine = mapping.generatedLine;
+ previousColumn = -Infinity;
+ }
+ });
+ };
+
+ exports['test iterating over mappings in a different order'] = function (assert, util) {
+ var map = new SourceMapConsumer(util.testMap);
+ var previousLine = -Infinity;
+ var previousColumn = -Infinity;
+ var previousSource = "";
+ map.eachMapping(function (mapping) {
+ assert.ok(mapping.source >= previousSource);
+
+ if (mapping.source === previousSource) {
+ assert.ok(mapping.originalLine >= previousLine);
+
+ if (mapping.originalLine === previousLine) {
+ assert.ok(mapping.originalColumn >= previousColumn);
+ previousColumn = mapping.originalColumn;
+ }
+ else {
+ previousLine = mapping.originalLine;
+ previousColumn = -Infinity;
+ }
+ }
+ else {
+ previousSource = mapping.source;
+ previousLine = -Infinity;
+ previousColumn = -Infinity;
+ }
+ }, null, SourceMapConsumer.ORIGINAL_ORDER);
+ };
+
+ exports['test that we can set the context for `this` in eachMapping'] = function (assert, util) {
+ var map = new SourceMapConsumer(util.testMap);
+ var context = {};
+ map.eachMapping(function () {
+ assert.equal(this, context);
+ }, context);
+ };
+
+ exports['test that the `sourcesContent` field has the original sources'] = function (assert, util) {
+ var map = new SourceMapConsumer(util.testMapWithSourcesContent);
+ var sourcesContent = map.sourcesContent;
+
+ assert.equal(sourcesContent[0], ' ONE.foo = function (bar) {\n return baz(bar);\n };');
+ assert.equal(sourcesContent[1], ' TWO.inc = function (n) {\n return n + 1;\n };');
+ assert.equal(sourcesContent.length, 2);
+ };
+
+ exports['test that we can get the original sources for the sources'] = function (assert, util) {
+ var map = new SourceMapConsumer(util.testMapWithSourcesContent);
+ var sources = map.sources;
+
+ assert.equal(map.sourceContentFor(sources[0]), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
+ assert.equal(map.sourceContentFor(sources[1]), ' TWO.inc = function (n) {\n return n + 1;\n };');
+ assert.equal(map.sourceContentFor("one.js"), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
+ assert.equal(map.sourceContentFor("two.js"), ' TWO.inc = function (n) {\n return n + 1;\n };');
+ assert.throws(function () {
+ map.sourceContentFor("");
+ }, Error);
+ assert.throws(function () {
+ map.sourceContentFor("/the/root/three.js");
+ }, Error);
+ assert.throws(function () {
+ map.sourceContentFor("three.js");
+ }, Error);
+ };
+
+ exports['test sourceRoot + generatedPositionFor'] = function (assert, util) {
+ var map = new SourceMapGenerator({
+ sourceRoot: 'foo/bar',
+ file: 'baz.js'
+ });
+ map.addMapping({
+ original: { line: 1, column: 1 },
+ generated: { line: 2, column: 2 },
+ source: 'bang.coffee'
+ });
+ map.addMapping({
+ original: { line: 5, column: 5 },
+ generated: { line: 6, column: 6 },
+ source: 'bang.coffee'
+ });
+ map = new SourceMapConsumer(map.toString());
+
+ // Should handle without sourceRoot.
+ var pos = map.generatedPositionFor({
+ line: 1,
+ column: 1,
+ source: 'bang.coffee'
+ });
+
+ assert.equal(pos.line, 2);
+ assert.equal(pos.column, 2);
+
+ // Should handle with sourceRoot.
+ var pos = map.generatedPositionFor({
+ line: 1,
+ column: 1,
+ source: 'foo/bar/bang.coffee'
+ });
+
+ assert.equal(pos.line, 2);
+ assert.equal(pos.column, 2);
+ };
+
+ exports['test sourceRoot + originalPositionFor'] = function (assert, util) {
+ var map = new SourceMapGenerator({
+ sourceRoot: 'foo/bar',
+ file: 'baz.js'
+ });
+ map.addMapping({
+ original: { line: 1, column: 1 },
+ generated: { line: 2, column: 2 },
+ source: 'bang.coffee'
+ });
+ map = new SourceMapConsumer(map.toString());
+
+ var pos = map.originalPositionFor({
+ line: 2,
+ column: 2,
+ });
+
+ // Should always have the prepended source root
+ assert.equal(pos.source, 'foo/bar/bang.coffee');
+ assert.equal(pos.line, 1);
+ assert.equal(pos.column, 1);
+ };
+
+ exports['test github issue #56'] = function (assert, util) {
+ var map = new SourceMapGenerator({
+ sourceRoot: 'http://',
+ file: 'www.example.com/foo.js'
+ });
+ map.addMapping({
+ original: { line: 1, column: 1 },
+ generated: { line: 2, column: 2 },
+ source: 'www.example.com/original.js'
+ });
+ map = new SourceMapConsumer(map.toString());
+
+ var sources = map.sources;
+ assert.equal(sources.length, 1);
+ assert.equal(sources[0], 'http://www.example.com/original.js');
+ };
+
+ exports['test github issue #43'] = function (assert, util) {
+ var map = new SourceMapGenerator({
+ sourceRoot: 'http://example.com',
+ file: 'foo.js'
+ });
+ map.addMapping({
+ original: { line: 1, column: 1 },
+ generated: { line: 2, column: 2 },
+ source: 'http://cdn.example.com/original.js'
+ });
+ map = new SourceMapConsumer(map.toString());
+
+ var sources = map.sources;
+ assert.equal(sources.length, 1,
+ 'Should only be one source.');
+ assert.equal(sources[0], 'http://cdn.example.com/original.js',
+ 'Should not be joined with the sourceRoot.');
+ };
+
+ exports['test absolute path, but same host sources'] = function (assert, util) {
+ var map = new SourceMapGenerator({
+ sourceRoot: 'http://example.com/foo/bar',
+ file: 'foo.js'
+ });
+ map.addMapping({
+ original: { line: 1, column: 1 },
+ generated: { line: 2, column: 2 },
+ source: '/original.js'
+ });
+ map = new SourceMapConsumer(map.toString());
+
+ var sources = map.sources;
+ assert.equal(sources.length, 1,
+ 'Should only be one source.');
+    assert.equal(sources[0], 'http://example.com/original.js',
+                 'Source should be relative to the host of the source root.');
+ };
+
+ exports['test github issue #64'] = function (assert, util) {
+ var map = new SourceMapConsumer({
+ "version": 3,
+ "file": "foo.js",
+ "sourceRoot": "http://example.com/",
+ "sources": ["/a"],
+ "names": [],
+ "mappings": "AACA",
+ "sourcesContent": ["foo"]
+ });
+
+ assert.equal(map.sourceContentFor("a"), "foo");
+ assert.equal(map.sourceContentFor("/a"), "foo");
+ };
+
+});
diff --git a/tools/node_modules/source-map/test/source-map/test-source-map-generator.js b/tools/node_modules/source-map/test/source-map/test-source-map-generator.js
new file mode 100644
index 00000000..2c42c8ca
--- /dev/null
+++ b/tools/node_modules/source-map/test/source-map/test-source-map-generator.js
@@ -0,0 +1,391 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator;
+ var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
+ var SourceNode = require('../../lib/source-map/source-node').SourceNode;
+ var util = require('./util');
+
+ exports['test some simple stuff'] = function (assert, util) {
+ var map = new SourceMapGenerator({
+ file: 'foo.js',
+ sourceRoot: '.'
+ });
+ assert.ok(true);
+ };
+
+ exports['test JSON serialization'] = function (assert, util) {
+ var map = new SourceMapGenerator({
+ file: 'foo.js',
+ sourceRoot: '.'
+ });
+ assert.equal(map.toString(), JSON.stringify(map));
+ };
+
+ exports['test adding mappings (case 1)'] = function (assert, util) {
+ var map = new SourceMapGenerator({
+ file: 'generated-foo.js',
+ sourceRoot: '.'
+ });
+
+ assert.doesNotThrow(function () {
+ map.addMapping({
+ generated: { line: 1, column: 1 }
+ });
+ });
+ };
+
+ exports['test adding mappings (case 2)'] = function (assert, util) {
+ var map = new SourceMapGenerator({
+ file: 'generated-foo.js',
+ sourceRoot: '.'
+ });
+
+ assert.doesNotThrow(function () {
+ map.addMapping({
+ generated: { line: 1, column: 1 },
+ source: 'bar.js',
+ original: { line: 1, column: 1 }
+ });
+ });
+ };
+
+ exports['test adding mappings (case 3)'] = function (assert, util) {
+ var map = new SourceMapGenerator({
+ file: 'generated-foo.js',
+ sourceRoot: '.'
+ });
+
+ assert.doesNotThrow(function () {
+ map.addMapping({
+ generated: { line: 1, column: 1 },
+ source: 'bar.js',
+ original: { line: 1, column: 1 },
+ name: 'someToken'
+ });
+ });
+ };
+
+ exports['test adding mappings (invalid)'] = function (assert, util) {
+ var map = new SourceMapGenerator({
+ file: 'generated-foo.js',
+ sourceRoot: '.'
+ });
+
+ // Not enough info.
+ assert.throws(function () {
+ map.addMapping({});
+ });
+
+ // Original file position, but no source.
+ assert.throws(function () {
+ map.addMapping({
+ generated: { line: 1, column: 1 },
+ original: { line: 1, column: 1 }
+ });
+ });
+ };
+
+ exports['test that the correct mappings are being generated'] = function (assert, util) {
+ var map = new SourceMapGenerator({
+ file: 'min.js',
+ sourceRoot: '/the/root'
+ });
+
+ map.addMapping({
+ generated: { line: 1, column: 1 },
+ original: { line: 1, column: 1 },
+ source: 'one.js'
+ });
+ map.addMapping({
+ generated: { line: 1, column: 5 },
+ original: { line: 1, column: 5 },
+ source: 'one.js'
+ });
+ map.addMapping({
+ generated: { line: 1, column: 9 },
+ original: { line: 1, column: 11 },
+ source: 'one.js'
+ });
+ map.addMapping({
+ generated: { line: 1, column: 18 },
+ original: { line: 1, column: 21 },
+ source: 'one.js',
+ name: 'bar'
+ });
+ map.addMapping({
+ generated: { line: 1, column: 21 },
+ original: { line: 2, column: 3 },
+ source: 'one.js'
+ });
+ map.addMapping({
+ generated: { line: 1, column: 28 },
+ original: { line: 2, column: 10 },
+ source: 'one.js',
+ name: 'baz'
+ });
+ map.addMapping({
+ generated: { line: 1, column: 32 },
+ original: { line: 2, column: 14 },
+ source: 'one.js',
+ name: 'bar'
+ });
+
+ map.addMapping({
+ generated: { line: 2, column: 1 },
+ original: { line: 1, column: 1 },
+ source: 'two.js'
+ });
+ map.addMapping({
+ generated: { line: 2, column: 5 },
+ original: { line: 1, column: 5 },
+ source: 'two.js'
+ });
+ map.addMapping({
+ generated: { line: 2, column: 9 },
+ original: { line: 1, column: 11 },
+ source: 'two.js'
+ });
+ map.addMapping({
+ generated: { line: 2, column: 18 },
+ original: { line: 1, column: 21 },
+ source: 'two.js',
+ name: 'n'
+ });
+ map.addMapping({
+ generated: { line: 2, column: 21 },
+ original: { line: 2, column: 3 },
+ source: 'two.js'
+ });
+ map.addMapping({
+ generated: { line: 2, column: 28 },
+ original: { line: 2, column: 10 },
+ source: 'two.js',
+ name: 'n'
+ });
+
+ map = JSON.parse(map.toString());
+
+ util.assertEqualMaps(assert, map, util.testMap);
+ };
+
+ exports['test that source content can be set'] = function (assert, util) {
+ var map = new SourceMapGenerator({
+ file: 'min.js',
+ sourceRoot: '/the/root'
+ });
+ map.addMapping({
+ generated: { line: 1, column: 1 },
+ original: { line: 1, column: 1 },
+ source: 'one.js'
+ });
+ map.addMapping({
+ generated: { line: 2, column: 1 },
+ original: { line: 1, column: 1 },
+ source: 'two.js'
+ });
+ map.setSourceContent('one.js', 'one file content');
+
+ map = JSON.parse(map.toString());
+ assert.equal(map.sources[0], 'one.js');
+ assert.equal(map.sources[1], 'two.js');
+ assert.equal(map.sourcesContent[0], 'one file content');
+ assert.equal(map.sourcesContent[1], null);
+ };
+
+ exports['test .fromSourceMap'] = function (assert, util) {
+ var map = SourceMapGenerator.fromSourceMap(new SourceMapConsumer(util.testMap));
+ util.assertEqualMaps(assert, map.toJSON(), util.testMap);
+ };
+
+ exports['test .fromSourceMap with sourcesContent'] = function (assert, util) {
+ var map = SourceMapGenerator.fromSourceMap(
+ new SourceMapConsumer(util.testMapWithSourcesContent));
+ util.assertEqualMaps(assert, map.toJSON(), util.testMapWithSourcesContent);
+ };
+
+ exports['test applySourceMap'] = function (assert, util) {
+ var node = new SourceNode(null, null, null, [
+ new SourceNode(2, 0, 'fileX', 'lineX2\n'),
+ 'genA1\n',
+ new SourceNode(2, 0, 'fileY', 'lineY2\n'),
+ 'genA2\n',
+ new SourceNode(1, 0, 'fileX', 'lineX1\n'),
+ 'genA3\n',
+ new SourceNode(1, 0, 'fileY', 'lineY1\n')
+ ]);
+ var mapStep1 = node.toStringWithSourceMap({
+ file: 'fileA'
+ }).map;
+ mapStep1.setSourceContent('fileX', 'lineX1\nlineX2\n');
+ mapStep1 = mapStep1.toJSON();
+
+ node = new SourceNode(null, null, null, [
+ 'gen1\n',
+ new SourceNode(1, 0, 'fileA', 'lineA1\n'),
+ new SourceNode(2, 0, 'fileA', 'lineA2\n'),
+ new SourceNode(3, 0, 'fileA', 'lineA3\n'),
+ new SourceNode(4, 0, 'fileA', 'lineA4\n'),
+ new SourceNode(1, 0, 'fileB', 'lineB1\n'),
+ new SourceNode(2, 0, 'fileB', 'lineB2\n'),
+ 'gen2\n'
+ ]);
+ var mapStep2 = node.toStringWithSourceMap({
+ file: 'fileGen'
+ }).map;
+ mapStep2.setSourceContent('fileB', 'lineB1\nlineB2\n');
+ mapStep2 = mapStep2.toJSON();
+
+ node = new SourceNode(null, null, null, [
+ 'gen1\n',
+ new SourceNode(2, 0, 'fileX', 'lineA1\n'),
+ new SourceNode(2, 0, 'fileA', 'lineA2\n'),
+ new SourceNode(2, 0, 'fileY', 'lineA3\n'),
+ new SourceNode(4, 0, 'fileA', 'lineA4\n'),
+ new SourceNode(1, 0, 'fileB', 'lineB1\n'),
+ new SourceNode(2, 0, 'fileB', 'lineB2\n'),
+ 'gen2\n'
+ ]);
+ var expectedMap = node.toStringWithSourceMap({
+ file: 'fileGen'
+ }).map;
+ expectedMap.setSourceContent('fileX', 'lineX1\nlineX2\n');
+ expectedMap.setSourceContent('fileB', 'lineB1\nlineB2\n');
+ expectedMap = expectedMap.toJSON();
+
+ // apply source map "mapStep1" to "mapStep2"
+ var generator = SourceMapGenerator.fromSourceMap(new SourceMapConsumer(mapStep2));
+ generator.applySourceMap(new SourceMapConsumer(mapStep1));
+ var actualMap = generator.toJSON();
+
+ util.assertEqualMaps(assert, actualMap, expectedMap);
+ };
+
+ exports['test sorting with duplicate generated mappings'] = function (assert, util) {
+ var map = new SourceMapGenerator({
+ file: 'test.js'
+ });
+ map.addMapping({
+ generated: { line: 3, column: 0 },
+ original: { line: 2, column: 0 },
+ source: 'a.js'
+ });
+ map.addMapping({
+ generated: { line: 2, column: 0 }
+ });
+ map.addMapping({
+ generated: { line: 2, column: 0 }
+ });
+ map.addMapping({
+ generated: { line: 1, column: 0 },
+ original: { line: 1, column: 0 },
+ source: 'a.js'
+ });
+
+ util.assertEqualMaps(assert, map.toJSON(), {
+ version: 3,
+ file: 'test.js',
+ sources: ['a.js'],
+ names: [],
+ mappings: 'AAAA;A;AACA'
+ });
+ };
+
+ exports['test ignore duplicate mappings.'] = function (assert, util) {
+ var init = { file: 'min.js', sourceRoot: '/the/root' };
+ var map1, map2;
+
+ // null original source location
+ var nullMapping1 = {
+ generated: { line: 1, column: 0 }
+ };
+ var nullMapping2 = {
+ generated: { line: 2, column: 2 }
+ };
+
+ map1 = new SourceMapGenerator(init);
+ map2 = new SourceMapGenerator(init);
+
+ map1.addMapping(nullMapping1);
+ map1.addMapping(nullMapping1);
+
+ map2.addMapping(nullMapping1);
+
+ util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
+
+ map1.addMapping(nullMapping2);
+ map1.addMapping(nullMapping1);
+
+ map2.addMapping(nullMapping2);
+
+ util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
+
+ // original source location
+ var srcMapping1 = {
+ generated: { line: 1, column: 0 },
+ original: { line: 11, column: 0 },
+ source: 'srcMapping1.js'
+ };
+ var srcMapping2 = {
+ generated: { line: 2, column: 2 },
+ original: { line: 11, column: 0 },
+ source: 'srcMapping2.js'
+ };
+
+ map1 = new SourceMapGenerator(init);
+ map2 = new SourceMapGenerator(init);
+
+ map1.addMapping(srcMapping1);
+ map1.addMapping(srcMapping1);
+
+ map2.addMapping(srcMapping1);
+
+ util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
+
+ map1.addMapping(srcMapping2);
+ map1.addMapping(srcMapping1);
+
+ map2.addMapping(srcMapping2);
+
+ util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
+
+ // full original source and name information
+ var fullMapping1 = {
+ generated: { line: 1, column: 0 },
+ original: { line: 11, column: 0 },
+ source: 'fullMapping1.js',
+ name: 'fullMapping1'
+ };
+ var fullMapping2 = {
+ generated: { line: 2, column: 2 },
+ original: { line: 11, column: 0 },
+ source: 'fullMapping2.js',
+ name: 'fullMapping2'
+ };
+
+ map1 = new SourceMapGenerator(init);
+ map2 = new SourceMapGenerator(init);
+
+ map1.addMapping(fullMapping1);
+ map1.addMapping(fullMapping1);
+
+ map2.addMapping(fullMapping1);
+
+ util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
+
+ map1.addMapping(fullMapping2);
+ map1.addMapping(fullMapping1);
+
+ map2.addMapping(fullMapping2);
+
+ util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
+ };
+});
diff --git a/tools/node_modules/source-map/test/source-map/test-source-node.js b/tools/node_modules/source-map/test/source-map/test-source-node.js
new file mode 100644
index 00000000..231dd7bd
--- /dev/null
+++ b/tools/node_modules/source-map/test/source-map/test-source-node.js
@@ -0,0 +1,282 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator;
+ var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
+ var SourceNode = require('../../lib/source-map/source-node').SourceNode;
+
+ exports['test .add()'] = function (assert, util) {
+ var node = new SourceNode(null, null, null);
+
+ // Adding a string works.
+ node.add('function noop() {}');
+
+ // Adding another source node works.
+ node.add(new SourceNode(null, null, null));
+
+ // Adding an array works.
+ node.add(['function foo() {',
+ new SourceNode(null, null, null,
+ 'return 10;'),
+ '}']);
+
+ // Adding other stuff doesn't.
+ assert.throws(function () {
+ node.add({});
+ });
+ assert.throws(function () {
+ node.add(function () {});
+ });
+ };
+
+ exports['test .prepend()'] = function (assert, util) {
+ var node = new SourceNode(null, null, null);
+
+ // Prepending a string works.
+ node.prepend('function noop() {}');
+ assert.equal(node.children[0], 'function noop() {}');
+ assert.equal(node.children.length, 1);
+
+ // Prepending another source node works.
+ node.prepend(new SourceNode(null, null, null));
+ assert.equal(node.children[0], '');
+ assert.equal(node.children[1], 'function noop() {}');
+ assert.equal(node.children.length, 2);
+
+ // Prepending an array works.
+ node.prepend(['function foo() {',
+ new SourceNode(null, null, null,
+ 'return 10;'),
+ '}']);
+ assert.equal(node.children[0], 'function foo() {');
+ assert.equal(node.children[1], 'return 10;');
+ assert.equal(node.children[2], '}');
+ assert.equal(node.children[3], '');
+ assert.equal(node.children[4], 'function noop() {}');
+ assert.equal(node.children.length, 5);
+
+ // Prepending other stuff doesn't.
+ assert.throws(function () {
+ node.prepend({});
+ });
+ assert.throws(function () {
+ node.prepend(function () {});
+ });
+ };
+
+ exports['test .toString()'] = function (assert, util) {
+ assert.equal((new SourceNode(null, null, null,
+ ['function foo() {',
+ new SourceNode(null, null, null, 'return 10;'),
+ '}'])).toString(),
+ 'function foo() {return 10;}');
+ };
+
+ exports['test .join()'] = function (assert, util) {
+ assert.equal((new SourceNode(null, null, null,
+ ['a', 'b', 'c', 'd'])).join(', ').toString(),
+ 'a, b, c, d');
+ };
+
+ exports['test .walk()'] = function (assert, util) {
+ var node = new SourceNode(null, null, null,
+ ['(function () {\n',
+ ' ', new SourceNode(1, 0, 'a.js', ['someCall()']), ';\n',
+ ' ', new SourceNode(2, 0, 'b.js', ['if (foo) bar()']), ';\n',
+ '}());']);
+ var expected = [
+ { str: '(function () {\n', source: null, line: null, column: null },
+ { str: ' ', source: null, line: null, column: null },
+ { str: 'someCall()', source: 'a.js', line: 1, column: 0 },
+ { str: ';\n', source: null, line: null, column: null },
+ { str: ' ', source: null, line: null, column: null },
+ { str: 'if (foo) bar()', source: 'b.js', line: 2, column: 0 },
+ { str: ';\n', source: null, line: null, column: null },
+ { str: '}());', source: null, line: null, column: null },
+ ];
+ var i = 0;
+ node.walk(function (chunk, loc) {
+ assert.equal(expected[i].str, chunk);
+ assert.equal(expected[i].source, loc.source);
+ assert.equal(expected[i].line, loc.line);
+ assert.equal(expected[i].column, loc.column);
+ i++;
+ });
+ };
+
+ exports['test .replaceRight'] = function (assert, util) {
+ var node;
+
+ // Not nested
+ node = new SourceNode(null, null, null, 'hello world');
+ node.replaceRight(/world/, 'universe');
+ assert.equal(node.toString(), 'hello universe');
+
+ // Nested
+ node = new SourceNode(null, null, null,
+ [new SourceNode(null, null, null, 'hey sexy mama, '),
+ new SourceNode(null, null, null, 'want to kill all humans?')]);
+ node.replaceRight(/kill all humans/, 'watch Futurama');
+ assert.equal(node.toString(), 'hey sexy mama, want to watch Futurama?');
+ };
+
+ exports['test .toStringWithSourceMap()'] = function (assert, util) {
+ var node = new SourceNode(null, null, null,
+ ['(function () {\n',
+ ' ',
+ new SourceNode(1, 0, 'a.js', 'someCall', 'originalCall'),
+ new SourceNode(1, 8, 'a.js', '()'),
+ ';\n',
+ ' ', new SourceNode(2, 0, 'b.js', ['if (foo) bar()']), ';\n',
+ '}());']);
+ var map = node.toStringWithSourceMap({
+ file: 'foo.js'
+ }).map;
+
+ assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
+ map = new SourceMapConsumer(map.toString());
+
+ var actual;
+
+ actual = map.originalPositionFor({
+ line: 1,
+ column: 4
+ });
+ assert.equal(actual.source, null);
+ assert.equal(actual.line, null);
+ assert.equal(actual.column, null);
+
+ actual = map.originalPositionFor({
+ line: 2,
+ column: 2
+ });
+ assert.equal(actual.source, 'a.js');
+ assert.equal(actual.line, 1);
+ assert.equal(actual.column, 0);
+ assert.equal(actual.name, 'originalCall');
+
+ actual = map.originalPositionFor({
+ line: 3,
+ column: 2
+ });
+ assert.equal(actual.source, 'b.js');
+ assert.equal(actual.line, 2);
+ assert.equal(actual.column, 0);
+
+ actual = map.originalPositionFor({
+ line: 3,
+ column: 16
+ });
+ assert.equal(actual.source, null);
+ assert.equal(actual.line, null);
+ assert.equal(actual.column, null);
+
+ actual = map.originalPositionFor({
+ line: 4,
+ column: 2
+ });
+ assert.equal(actual.source, null);
+ assert.equal(actual.line, null);
+ assert.equal(actual.column, null);
+ };
+
+ exports['test .fromStringWithSourceMap()'] = function (assert, util) {
+ var node = SourceNode.fromStringWithSourceMap(
+ util.testGeneratedCode,
+ new SourceMapConsumer(util.testMap));
+
+ var result = node.toStringWithSourceMap({
+ file: 'min.js'
+ });
+ var map = result.map;
+ var code = result.code;
+
+ assert.equal(code, util.testGeneratedCode);
+ assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
+ map = map.toJSON();
+ assert.equal(map.version, util.testMap.version);
+ assert.equal(map.file, util.testMap.file);
+ assert.equal(map.mappings, util.testMap.mappings);
+ };
+
+ exports['test .fromStringWithSourceMap() complex version'] = function (assert, util) {
+ var input = new SourceNode(null, null, null, [
+ "(function() {\n",
+ " var Test = {};\n",
+ " ", new SourceNode(1, 0, "a.js", "Test.A = { value: 1234 };\n"),
+ " ", new SourceNode(2, 0, "a.js", "Test.A.x = 'xyz';"), "\n",
+ "}());\n",
+ "/* Generated Source */"]);
+ input = input.toStringWithSourceMap({
+ file: 'foo.js'
+ });
+
+ var node = SourceNode.fromStringWithSourceMap(
+ input.code,
+ new SourceMapConsumer(input.map.toString()));
+
+ var result = node.toStringWithSourceMap({
+ file: 'foo.js'
+ });
+ var map = result.map;
+ var code = result.code;
+
+ assert.equal(code, input.code);
+ assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
+ map = map.toJSON();
+ var inputMap = input.map.toJSON();
+ util.assertEqualMaps(assert, map, inputMap);
+ };
+
+ exports['test setSourceContent with toStringWithSourceMap'] = function (assert, util) {
+ var aNode = new SourceNode(1, 1, 'a.js', 'a');
+ aNode.setSourceContent('a.js', 'someContent');
+ var node = new SourceNode(null, null, null,
+ ['(function () {\n',
+ ' ', aNode,
+ ' ', new SourceNode(1, 1, 'b.js', 'b'),
+ '}());']);
+ node.setSourceContent('b.js', 'otherContent');
+ var map = node.toStringWithSourceMap({
+ file: 'foo.js'
+ }).map;
+
+ assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
+ map = new SourceMapConsumer(map.toString());
+
+ assert.equal(map.sources.length, 2);
+ assert.equal(map.sources[0], 'a.js');
+ assert.equal(map.sources[1], 'b.js');
+ assert.equal(map.sourcesContent.length, 2);
+ assert.equal(map.sourcesContent[0], 'someContent');
+ assert.equal(map.sourcesContent[1], 'otherContent');
+ };
+
+ exports['test walkSourceContents'] = function (assert, util) {
+ var aNode = new SourceNode(1, 1, 'a.js', 'a');
+ aNode.setSourceContent('a.js', 'someContent');
+ var node = new SourceNode(null, null, null,
+ ['(function () {\n',
+ ' ', aNode,
+ ' ', new SourceNode(1, 1, 'b.js', 'b'),
+ '}());']);
+ node.setSourceContent('b.js', 'otherContent');
+ var results = [];
+ node.walkSourceContents(function (sourceFile, sourceContent) {
+ results.push([sourceFile, sourceContent]);
+ });
+ assert.equal(results.length, 2);
+ assert.equal(results[0][0], 'a.js');
+ assert.equal(results[0][1], 'someContent');
+ assert.equal(results[1][0], 'b.js');
+ assert.equal(results[1][1], 'otherContent');
+ };
+});
diff --git a/tools/node_modules/source-map/test/source-map/util.js b/tools/node_modules/source-map/test/source-map/util.js
new file mode 100644
index 00000000..3e19fc90
--- /dev/null
+++ b/tools/node_modules/source-map/test/source-map/util.js
@@ -0,0 +1,152 @@
+/* -*- Mode: js; js-indent-level: 2; -*- */
+/*
+ * Copyright 2011 Mozilla Foundation and contributors
+ * Licensed under the New BSD license. See LICENSE or:
+ * http://opensource.org/licenses/BSD-3-Clause
+ */
+if (typeof define !== 'function') {
+ var define = require('amdefine')(module);
+}
+define(function (require, exports, module) {
+
+ var util = require('../../lib/source-map/util');
+
+ // This is a test mapping which maps functions from two different files
+ // (one.js and two.js) to a minified generated source.
+ //
+ // Here is one.js:
+ //
+ // ONE.foo = function (bar) {
+ // return baz(bar);
+ // };
+ //
+ // Here is two.js:
+ //
+ // TWO.inc = function (n) {
+ // return n + 1;
+ // };
+ //
+ // And here is the generated code (min.js):
+ //
+ // ONE.foo=function(a){return baz(a);};
+ // TWO.inc=function(a){return a+1;};
+ exports.testGeneratedCode = " ONE.foo=function(a){return baz(a);};\n"+
+ " TWO.inc=function(a){return a+1;};";
+ exports.testMap = {
+ version: 3,
+ file: 'min.js',
+ names: ['bar', 'baz', 'n'],
+ sources: ['one.js', 'two.js'],
+ sourceRoot: '/the/root',
+ mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
+ };
+ exports.testMapWithSourcesContent = {
+ version: 3,
+ file: 'min.js',
+ names: ['bar', 'baz', 'n'],
+ sources: ['one.js', 'two.js'],
+ sourcesContent: [
+ ' ONE.foo = function (bar) {\n' +
+ ' return baz(bar);\n' +
+ ' };',
+ ' TWO.inc = function (n) {\n' +
+ ' return n + 1;\n' +
+ ' };'
+ ],
+ sourceRoot: '/the/root',
+ mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
+ };
+
+ function assertMapping(generatedLine, generatedColumn, originalSource,
+ originalLine, originalColumn, name, map, assert,
+ dontTestGenerated, dontTestOriginal) {
+ if (!dontTestOriginal) {
+ var origMapping = map.originalPositionFor({
+ line: generatedLine,
+ column: generatedColumn
+ });
+ assert.equal(origMapping.name, name,
+ 'Incorrect name, expected ' + JSON.stringify(name)
+ + ', got ' + JSON.stringify(origMapping.name));
+ assert.equal(origMapping.line, originalLine,
+ 'Incorrect line, expected ' + JSON.stringify(originalLine)
+ + ', got ' + JSON.stringify(origMapping.line));
+ assert.equal(origMapping.column, originalColumn,
+ 'Incorrect column, expected ' + JSON.stringify(originalColumn)
+ + ', got ' + JSON.stringify(origMapping.column));
+
+ var expectedSource;
+
+ if (originalSource && map.sourceRoot && originalSource.indexOf(map.sourceRoot) === 0) {
+ expectedSource = originalSource;
+ } else if (originalSource) {
+ expectedSource = map.sourceRoot
+ ? util.join(map.sourceRoot, originalSource)
+ : originalSource;
+ } else {
+ expectedSource = null;
+ }
+
+ assert.equal(origMapping.source, expectedSource,
+ 'Incorrect source, expected ' + JSON.stringify(expectedSource)
+ + ', got ' + JSON.stringify(origMapping.source));
+ }
+
+ if (!dontTestGenerated) {
+ var genMapping = map.generatedPositionFor({
+ source: originalSource,
+ line: originalLine,
+ column: originalColumn
+ });
+ assert.equal(genMapping.line, generatedLine,
+ 'Incorrect line, expected ' + JSON.stringify(generatedLine)
+ + ', got ' + JSON.stringify(genMapping.line));
+ assert.equal(genMapping.column, generatedColumn,
+ 'Incorrect column, expected ' + JSON.stringify(generatedColumn)
+ + ', got ' + JSON.stringify(genMapping.column));
+ }
+ }
+ exports.assertMapping = assertMapping;
+
+ function assertEqualMaps(assert, actualMap, expectedMap) {
+ assert.equal(actualMap.version, expectedMap.version, "version mismatch");
+ assert.equal(actualMap.file, expectedMap.file, "file mismatch");
+ assert.equal(actualMap.names.length,
+ expectedMap.names.length,
+ "names length mismatch: " +
+ actualMap.names.join(", ") + " != " + expectedMap.names.join(", "));
+ for (var i = 0; i < actualMap.names.length; i++) {
+ assert.equal(actualMap.names[i],
+ expectedMap.names[i],
+ "names[" + i + "] mismatch: " +
+ actualMap.names.join(", ") + " != " + expectedMap.names.join(", "));
+ }
+ assert.equal(actualMap.sources.length,
+ expectedMap.sources.length,
+ "sources length mismatch: " +
+ actualMap.sources.join(", ") + " != " + expectedMap.sources.join(", "));
+ for (var i = 0; i < actualMap.sources.length; i++) {
+ assert.equal(actualMap.sources[i],
+ expectedMap.sources[i],
+ "sources[" + i + "] length mismatch: " +
+ actualMap.sources.join(", ") + " != " + expectedMap.sources.join(", "));
+ }
+ assert.equal(actualMap.sourceRoot,
+ expectedMap.sourceRoot,
+ "sourceRoot mismatch: " +
+ actualMap.sourceRoot + " != " + expectedMap.sourceRoot);
+ assert.equal(actualMap.mappings, expectedMap.mappings, "mappings mismatch");
+ if (actualMap.sourcesContent) {
+ assert.equal(actualMap.sourcesContent.length,
+ expectedMap.sourcesContent.length,
+ "sourcesContent length mismatch");
+ for (var i = 0; i < actualMap.sourcesContent.length; i++) {
+ assert.equal(actualMap.sourcesContent[i],
+ expectedMap.sourcesContent[i],
+ "sourcesContent[" + i + "] mismatch");
+ }
+ }
+ }
+ exports.assertEqualMaps = assertEqualMaps;
+
+});
diff --git a/tools/shared.py b/tools/shared.py
index a11122a3..c9efe6ef 100644
--- a/tools/shared.py
+++ b/tools/shared.py
@@ -1,6 +1,7 @@
import shutil, time, os, sys, json, tempfile, copy, shlex, atexit, subprocess, hashlib, cPickle, re
from subprocess import Popen, PIPE, STDOUT
from tempfile import mkstemp
+from distutils.spawn import find_executable
import jsrun, cache, tempfiles
from response_file import create_response_file
import logging, platform
@@ -204,25 +205,12 @@ else:
config_file = '\n'.join(config_file)
# autodetect some default paths
config_file = config_file.replace('{{{ EMSCRIPTEN_ROOT }}}', __rootpath__)
- llvm_root = '/usr/bin'
- try:
- llvm_root = os.path.dirname(Popen(['which', 'llvm-dis'], stdout=PIPE).communicate()[0].replace('\n', ''))
- except:
- pass
+ llvm_root = os.path.dirname(find_executable('llvm-dis') or '/usr/bin/llvm-dis')
config_file = config_file.replace('{{{ LLVM_ROOT }}}', llvm_root)
- node = 'node'
- try:
- node = Popen(['which', 'node'], stdout=PIPE).communicate()[0].replace('\n', '') or \
- Popen(['which', 'nodejs'], stdout=PIPE).communicate()[0].replace('\n', '') or node
- except:
- pass
+ node = find_executable('node') or find_executable('nodejs') or 'node'
config_file = config_file.replace('{{{ NODE }}}', node)
- python = sys.executable or 'python'
- try:
- python = Popen(['which', 'python2'], stdout=PIPE).communicate()[0].replace('\n', '') or \
- Popen(['which', 'python'], stdout=PIPE).communicate()[0].replace('\n', '') or python
- except:
- pass
+ python = find_executable('python2') or find_executable('python') or \
+ sys.executable or 'python'
config_file = config_file.replace('{{{ PYTHON }}}', python)
# write
@@ -324,6 +312,7 @@ def check_sanity(force=False):
if reason:
logging.warning('(Emscripten: %s, clearing cache)' % reason)
Cache.erase()
+ force = False # the check actually failed, so definitely write out the sanity file, to avoid others later seeing failures too
# some warning, not fatal checks - do them even if EM_IGNORE_SANITY is on
check_llvm_version()
@@ -944,7 +933,7 @@ set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)''' % { 'winfix': '' if not WINDOWS e
# Finish link
actual_files = unique_ordered(actual_files) # tolerate people trying to link a.so a.so etc.
- logging.debug('emcc: llvm-linking: %s', actual_files)
+ logging.debug('emcc: llvm-linking: %s to %s', actual_files, target)
# check for too-long command line
link_cmd = [LLVM_LINK] + actual_files + ['-o', target]
@@ -1218,8 +1207,8 @@ set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)''' % { 'winfix': '' if not WINDOWS e
return opts
@staticmethod
- def js_optimizer(filename, passes, jcache):
- return js_optimizer.run(filename, passes, listify(NODE_JS), jcache)
+ def js_optimizer(filename, passes, jcache, debug):
+ return js_optimizer.run(filename, passes, listify(NODE_JS), jcache, debug)
@staticmethod
def closure_compiler(filename, pretty=True):
@@ -1293,9 +1282,6 @@ set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)''' % { 'winfix': '' if not WINDOWS e
emcc_debug = os.environ.get('EMCC_DEBUG')
if emcc_debug: del os.environ['EMCC_DEBUG']
- emcc_optimize_normally = os.environ.get('EMCC_OPTIMIZE_NORMALLY')
- if emcc_optimize_normally: del os.environ['EMCC_OPTIMIZE_NORMALLY']
-
def make(opt_level):
raw = relooper + '.raw.js'
Building.emcc(os.path.join('relooper', 'Relooper.cpp'), ['-I' + os.path.join('relooper'), '--post-js',
@@ -1326,7 +1312,6 @@ set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)''' % { 'winfix': '' if not WINDOWS e
finally:
os.chdir(curr)
if emcc_debug: os.environ['EMCC_DEBUG'] = emcc_debug
- if emcc_optimize_normally: os.environ['EMCC_OPTIMIZE_NORMALLY'] = emcc_optimize_normally
if not ok:
logging.error('bootstrapping relooper failed. You may need to manually create relooper.js by compiling it, see src/relooper/emscripten')
1/0
diff --git a/tools/source-maps/sourcemap2json.js b/tools/source-maps/sourcemap2json.js
new file mode 100644
index 00000000..5dd162b2
--- /dev/null
+++ b/tools/source-maps/sourcemap2json.js
@@ -0,0 +1,15 @@
+/*
+ * Quick utility script for the Python test script to call. Could be replaced if
+ * a good Python source map library is found.
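+ *
+ * Usage: node sourcemap2json.js <path to .map file>
+ * Reads the source map given as the first argument and prints its mappings
+ * to stdout as a JSON array.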
+ */
+var SourceMapConsumer = require('source-map').SourceMapConsumer;
+var fs = require('fs');
+
+var consumer = new SourceMapConsumer(fs.readFileSync(process.argv[2], 'utf-8'));
+var mappings = [];
+
+consumer.eachMapping(function(mapping) {
+ mappings.push(mapping);
+});
+
+console.log(JSON.stringify(mappings));
diff --git a/tools/source-maps/sourcemapper.js b/tools/source-maps/sourcemapper.js
new file mode 100755
index 00000000..fa908900
--- /dev/null
+++ b/tools/source-maps/sourcemapper.js
@@ -0,0 +1,177 @@
+#! /usr/bin/env node
+
+"use strict";
+
+var fs = require('fs');
+var path = require('path');
+
+var START_MARKER = '// EMSCRIPTEN_START_FUNCS\n';
+var END_MARKER = '// EMSCRIPTEN_END_FUNCS\n';
+
+function countLines(s) {
+ var count = 0;
+ for (var i = 0, l = s.length; i < l; i ++) {
+ if (s[i] === '\n') count++;
+ }
+ return count;
+}
+
+/*
+ * Extracts the line (not block) comments from the generated function code and
+ * invokes commentHandler with (comment content, line number of comment). This
+ * function implements a simplistic lexer with the following assumptions:
+ * 1. "// EMSCRIPTEN_START_FUNCS" and "// EMSCRIPTEN_END_FUNCS" are unique
+ * markers for separating library code from generated function code. Things
+ * will break if they appear as part of a string in library code (but OK if
+ * they occur in generated code).
+ * 2. Between these two markers, no regexes are used.
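+ *
+ * Illustrative example (the "@line" comment format shown is an assumption,
+ * based on the regex used in getMappings below): for a generated line such as
+ *   _foo(); //@line 42 "src/foo.c"
+ * commentHandler is invoked with ('@line 42 "src/foo.c"', <generated line number>).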
+ */
+function extractComments(source, commentHandler) {
+ var state = 'code';
+ var commentContent = '';
+ var functionStartIdx = source.indexOf(START_MARKER);
+ var functionEndIdx = source.lastIndexOf(END_MARKER);
+ var lineCount = countLines(source.slice(0, functionStartIdx)) + 2;
+
+ for (var i = functionStartIdx + START_MARKER.length; i < functionEndIdx; i++) {
+ var c = source[i];
+ var nextC = source[i+1];
+ switch (state) {
+ case 'code':
+ if (c === '/') {
+ if (nextC === '/') { state = 'lineComment'; i++; }
+ else if (nextC === '*') { state = 'blockComment'; i++; }
+ }
+ else if (c === '"') state = 'doubleQuotedString';
+ else if (c === '\'') state = 'singleQuotedString';
+ break;
+ case 'lineComment':
+ if (c === '\n') {
+ state = 'code';
+ commentHandler(commentContent, lineCount);
+ commentContent = "";
+ } else {
+ commentContent += c;
+ }
+ break;
+ case 'blockComment':
+ if (c === '*' && nextC === '/') state = 'code';
+ case 'singleQuotedString':
+ if (c === '\\') i++;
+ else if (c === '\'') state = 'code';
+ break;
+ case 'doubleQuotedString':
+ if (c === '\\') i++;
+ else if (c === '"') state = 'code';
+ break;
+ }
+
+ if (c === '\n') lineCount++;
+ }
+}
+
+function getMappings(source) {
+ // generatedLineNumber -> { originalLineNumber, originalFileName }
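+ // e.g. (illustrative values only):
+ //   { 12: { originalLineNumber: 5, originalFileName: 'src/foo.c' } }
+ // comments without a quoted file name leave originalFileName undefined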
+ var mappings = {};
+ extractComments(source, function(content, generatedLineNumber) {
+ var matches = /@line (\d+)(?: "([^"]*)")?/.exec(content);
+ if (matches === null) return;
+ var originalFileName = matches[2];
+ mappings[generatedLineNumber] = {
+ originalLineNumber: parseInt(matches[1], 10),
+ originalFileName: originalFileName
+ };
+ });
+ return mappings;
+}
+
+function generateMap(mappings, sourceRoot, mapFileBaseName, generatedLineOffset) {
+ var SourceMapGenerator = require('source-map').SourceMapGenerator;
+
+ var generator = new SourceMapGenerator({ file: mapFileBaseName });
+ var seenFiles = Object.create(null);
+
+ for (var generatedLineNumber in mappings) {
+ var generatedLineNumber = parseInt(generatedLineNumber, 10);
+ var mapping = mappings[generatedLineNumber];
+ var originalFileName = mapping.originalFileName;
+ generator.addMapping({
+ generated: { line: generatedLineNumber + generatedLineOffset, column: 0 },
+ original: { line: mapping.originalLineNumber, column: 0 },
+ source: originalFileName
+ });
+
+ // we could call setSourceContent repeatedly, but readFileSync is slow, so
+ // avoid doing it unnecessarily
+ if (!(originalFileName in seenFiles)) {
+ seenFiles[originalFileName] = true;
+ var rootedPath = originalFileName[0] === path.sep ?
+ originalFileName : path.join(sourceRoot, originalFileName);
+ try {
+ generator.setSourceContent(originalFileName, fs.readFileSync(rootedPath, 'utf-8'));
+ } catch (e) {
+ console.warn("sourcemapper: Unable to find original file for " + originalFileName +
+ " at " + rootedPath);
+ }
+ }
+ }
+
+ fs.writeFileSync(mapFileBaseName + '.map', generator.toString());
+}
+
+function appendMappingURL(fileName, source, mapFileName) {
+ var lastLine = source.slice(source.lastIndexOf('\n'));
+ if (!/sourceMappingURL/.test(lastLine))
+ fs.appendFileSync(fileName, '//@ sourceMappingURL=' + path.basename(mapFileName));
+}
+
+function parseArgs(args) {
+ var rv = { _: [] }; // unflagged args go into `_`; similar to the optimist library
+ for (var i = 0; i < args.length; i++) {
+ if (/^--/.test(args[i])) rv[args[i].slice(2)] = args[++i];
+ else rv._.push(args[i]);
+ }
+ return rv;
+}
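+// For illustration: parseArgs(['a.js', '--offset', '3'])
+// returns { _: ['a.js'], offset: '3' }.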
+
+if (require.main === module) {
+ if (process.argv.length < 3) {
+ console.log('Usage: ./sourcemapper.js <original js> <optimized js file ...> \\\n' +
+ '\t--sourceRoot <default "."> \\\n' +
+ '\t--mapFileBaseName <default `filename`> \\\n' +
+ '\t--offset <default 0>');
+ process.exit(1);
+ } else {
+ var opts = parseArgs(process.argv.slice(2));
+ var fileName = opts._[0];
+ var sourceRoot = opts.sourceRoot ? opts.sourceRoot : ".";
+ var mapFileBaseName = opts.mapFileBaseName ? opts.mapFileBaseName : fileName;
+ var generatedLineOffset = opts.offset ? parseInt(opts.offset, 10) : 0;
+
+ var generatedSource = fs.readFileSync(fileName, 'utf-8');
+ var source = generatedSource;
+ var mappings = getMappings(generatedSource);
+ for (var i = 1, l = opts._.length; i < l; i ++) {
+ var optimizedSource = fs.readFileSync(opts._[i], 'utf-8');
+ var optimizedMappings = getMappings(optimizedSource);
+ var newMappings = {};
+ // uglify processes the code between EMSCRIPTEN_START_FUNCS and
+ // EMSCRIPTEN_END_FUNCS, so its line number maps are relative to those
+ // markers. we correct for that here. +2 = 1 for the newline in the marker
+ // and 1 to make it a 1-based index.
+ var startFuncsLineNumber = countLines(source.slice(0, source.indexOf(START_MARKER))) + 2;
+ for (var line in optimizedMappings) {
+ var originalLineNumber = optimizedMappings[line].originalLineNumber + startFuncsLineNumber;
+ if (originalLineNumber in mappings) {
+ newMappings[line] = mappings[originalLineNumber];
+ }
+ }
+ mappings = newMappings;
+ source = optimizedSource;
+ }
+
+ generateMap(mappings, sourceRoot, mapFileBaseName, generatedLineOffset);
+ appendMappingURL(opts._[opts._.length - 1], generatedSource,
+ mapFileBaseName + '.map');
+ }
+}
diff --git a/tools/test-js-optimizer-asm-pre-output.js b/tools/test-js-optimizer-asm-pre-output.js
index 2cd8d407..301a2ec8 100644
--- a/tools/test-js-optimizer-asm-pre-output.js
+++ b/tools/test-js-optimizer-asm-pre-output.js
@@ -19,6 +19,7 @@ function a() {
f(g() | 0 & -1);
f((g() | 0) >> 2);
$56 = _fcntl() | 0 | 1;
+ FUNCTION_TABLE_ii[55 & 127]() | 0;
}
function b($this, $__n) {
$this = $this | 0;
diff --git a/tools/test-js-optimizer-asm-pre.js b/tools/test-js-optimizer-asm-pre.js
index ca7d2894..c7c92124 100644
--- a/tools/test-js-optimizer-asm-pre.js
+++ b/tools/test-js-optimizer-asm-pre.js
@@ -20,6 +20,7 @@ function a() {
f(g() | 0 & -1);
f((g() | 0) >> 2);
$56 = (_fcntl() | 0) | 1;
+ FUNCTION_TABLE_ii[55 & 127]() | 0;
}
function b($this, $__n) {
$this = $this | 0;