path: root/tools/file_packager.py
author     Jukka Jylänki <jujjyl@gmail.com>    2013-05-12 15:25:27 +0300
committer  Jukka Jylänki <jujjyl@gmail.com>    2013-05-25 18:26:05 +0300
commit     540ad09dab13f9b5a2025e0994984b6a3402d559 (patch)
tree       0505865eb8b3be50e9cd32d5ca5caa36c51b616c /tools/file_packager.py
parent     bf8c659ebc868649ceb570d780a8b3de9b6d18b9 (diff)
Add support for specifying the target location of preloaded or embedded files on the virtual filesystem via --preload-file 'src@dst' notation.
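For example (the file names here are purely illustrative), a build can map a local asset into a different location on the virtual filesystem with:

    emcc main.c -o game.html --preload-file assets/title.png@data/title.png

so the local file assets/title.png becomes available to the compiled code as data/title.png; without the '@' part, the source path is also used as the destination path, as before.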
Diffstat (limited to 'tools/file_packager.py')
-rw-r--r--   tools/file_packager.py   69
1 file changed, 38 insertions, 31 deletions
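As a rough sketch of the behaviour the patch below adds (not part of the commit; the helper name and return shape are illustrative), each --preload/--embed argument is now split on '@' into a source path on the local filesystem and a destination path on the Emscripten virtual filesystem:

    import os

    def parse_data_file(arg, mode):
        # mode is 'preload' or 'embed'
        if '@' in arg:
            srcpath, dstpath = arg.split('@')  # user gave an explicit destination
        else:
            srcpath = dstpath = arg            # destination defaults to the source path
        if not (os.path.isfile(srcpath) or os.path.isdir(srcpath)):
            return None                        # missing sources are skipped with a warning
        return {'srcpath': srcpath, 'dstpath': dstpath, 'mode': mode}

Directories are then expanded file by file, with each file's destination computed relative to the directory's destination root (see the os.path.walk handler in the diff).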
diff --git a/tools/file_packager.py b/tools/file_packager.py
index f4cdafc8..c801fe81 100644
--- a/tools/file_packager.py
+++ b/tools/file_packager.py
@@ -112,16 +112,18 @@ for arg in sys.argv[1:]:
in_preload = False
in_embed = False
in_compress = 0
- elif in_preload:
- if os.path.isfile(arg) or os.path.isdir(arg):
- data_files.append({ 'name': arg, 'mode': 'preload' })
+ elif in_preload or in_embed:
+ mode = 'preload'
+ if in_embed:
+ mode = 'embed'
+ if '@' in arg:
+ srcpath, dstpath = arg.split('@') # User is specifying destination filename explicitly.
else:
- print >> sys.stderr, 'Warning: ' + arg + ' does not exist, ignoring.'
- elif in_embed:
- if os.path.isfile(arg) or os.path.isdir(arg):
- data_files.append({ 'name': arg, 'mode': 'embed' })
+ srcpath = dstpath = arg # Use source path as destination path.
+ if os.path.isfile(srcpath) or os.path.isdir(srcpath):
+ data_files.append({ 'srcpath': srcpath, 'dstpath': dstpath, 'mode': mode })
else:
- print >> sys.stderr, 'Warning:' + arg + ' does not exist, ignoring.'
+ print >> sys.stderr, 'Warning: ' + arg + ' does not exist, ignoring.'
elif in_compress:
if in_compress == 1:
Compression.encoder = arg
@@ -147,21 +149,26 @@ function assert(check, msg) {
'''
# Expand directories into individual files
-def add(mode, dirname, names):
+def add(arg, dirname, names):
+ # rootpathsrc: The path name of the root directory on the local FS we are adding to emscripten virtual FS.
+ # rootpathdst: The name we want to make the source path available on the emscripten virtual FS.
+ mode, rootpathsrc, rootpathdst = arg
for name in names:
fullname = os.path.join(dirname, name)
if not os.path.isdir(fullname):
- data_files.append({ 'name': fullname, 'mode': mode })
+ dstpath = os.path.join(rootpathdst, os.path.relpath(fullname, rootpathsrc)) # Convert source filename relative to root directory of target FS.
+ data_files.append({ 'srcpath': fullname, 'dstpath': dstpath, 'mode': mode })
for file_ in data_files:
- if os.path.isdir(file_['name']):
- os.path.walk(file_['name'], add, file_['mode'])
-data_files = filter(lambda file_: not os.path.isdir(file_['name']), data_files)
+ if os.path.isdir(file_['srcpath']):
+ os.path.walk(file_['srcpath'], add, [file_['mode'], file_['srcpath'], file_['dstpath']])
+data_files = filter(lambda file_: not os.path.isdir(file_['srcpath']), data_files)
for file_ in data_files:
- if file_['name'].startswith('./'): file_['name'] = file_['name'][2:] # remove redundant ./ prefix
- file_['name'] = file_['name'].replace(os.path.sep, '/') # name in the filesystem, native and emulated
- file_['localname'] = file_['name'] # name to actually load from local filesystem, after transformations
+ file_['dstpath'] = file_['dstpath'].replace(os.path.sep, '/') # name in the filesystem, native and emulated
+ if file_['dstpath'].endswith('/'): # If user has submitted a directory name as the destination but omitted the destination filename, use the filename from source file
+ file_['dstpath'] = file_['dstpath'] + os.path.basename(file_['srcpath'])
+ if file_['dstpath'].startswith('./'): file_['dstpath'] = file_['dstpath'][2:] # remove redundant ./ prefix
# Remove duplicates (can occur naively, for example preload dir/, preload dir/subdir/)
seen = {}
@@ -169,7 +176,7 @@ def was_seen(name):
if seen.get(name): return True
seen[name] = 1
return False
-data_files = filter(lambda file_: not was_seen(file_['name']), data_files)
+data_files = filter(lambda file_: not was_seen(file_['dstpath']), data_files)
if AV_WORKAROUND:
random.shuffle(data_files)
@@ -201,20 +208,20 @@ if crunch:
'''
for file_ in data_files:
- if file_['name'].endswith(CRUNCH_INPUT_SUFFIX):
+ if file_['dstpath'].endswith(CRUNCH_INPUT_SUFFIX):
# Do not crunch if crunched version exists and is more recent than dds source
- crunch_name = unsuffixed(file_['name']) + CRUNCH_OUTPUT_SUFFIX
- file_['localname'] = crunch_name
+ crunch_name = unsuffixed(file_['dstpath']) + CRUNCH_OUTPUT_SUFFIX
+ file_['dstpath'] = crunch_name
try:
crunch_time = os.stat(crunch_name).st_mtime
- dds_time = os.stat(file_['name']).st_mtime
+ dds_time = os.stat(file_['srcpath']).st_mtime
if dds_time < crunch_time: continue
except:
pass # if one of them does not exist, continue on
# guess at format. this lets us tell crunch to not try to be clever and use odd formats like DXT5_AGBR
try:
- format = Popen(['file', file_['name']], stdout=PIPE).communicate()[0]
+ format = Popen(['file', file_['srcpath']], stdout=PIPE).communicate()[0]
if 'DXT5' in format:
format = ['-dxt5']
elif 'DXT1' in format:
@@ -223,23 +230,23 @@ if crunch:
raise Exception('unknown format')
except:
format = []
- Popen([CRUNCH, '-file', file_['name'], '-quality', crunch] + format, stdout=sys.stderr).communicate()
+ Popen([CRUNCH, '-file', file_['srcpath'], '-quality', crunch] + format, stdout=sys.stderr).communicate()
#if not os.path.exists(os.path.basename(crunch_name)):
# print >> sys.stderr, 'Failed to crunch, perhaps a weird dxt format? Looking for a source PNG for the DDS'
- # Popen([CRUNCH, '-file', unsuffixed(file_['name']) + '.png', '-quality', crunch] + format, stdout=sys.stderr).communicate()
+ # Popen([CRUNCH, '-file', unsuffixed(file_['srcpath']) + '.png', '-quality', crunch] + format, stdout=sys.stderr).communicate()
assert os.path.exists(os.path.basename(crunch_name)), 'crunch failed to generate output'
shutil.move(os.path.basename(crunch_name), crunch_name) # crunch places files in the current dir
# prepend the dds header
crunched = open(crunch_name, 'rb').read()
c = open(crunch_name, 'wb')
- c.write(open(file_['name'], 'rb').read()[:DDS_HEADER_SIZE])
+ c.write(open(file_['srcpath'], 'rb').read()[:DDS_HEADER_SIZE])
c.write(crunched)
c.close()
# Set up folders
partial_dirs = []
for file_ in data_files:
- dirname = os.path.dirname(file_['name'])
+ dirname = os.path.dirname(file_['dstpath'])
dirname = dirname.lstrip('/') # absolute paths start with '/', remove that
if dirname != '':
parts = dirname.split('/')
@@ -255,10 +262,10 @@ if has_preloaded:
start = 0
for file_ in data_files:
file_['data_start'] = start
- curr = open(file_['localname'], 'rb').read()
+ curr = open(file_['srcpath'], 'rb').read()
file_['data_end'] = start + len(curr)
if AV_WORKAROUND: curr += '\x00'
- #print >> sys.stderr, 'bundling', file_['name'], file_['localname'], file_['data_start'], file_['data_end']
+ #print >> sys.stderr, 'bundling', file_['srcpath'], file_['dstpath'], file_['data_start'], file_['data_end']
start += len(curr)
data.write(curr)
data.close()
@@ -280,10 +287,10 @@ if has_preloaded:
counter = 0
for file_ in data_files:
- filename = file_['name']
+ filename = file_['dstpath']
if file_['mode'] == 'embed':
# Embed
- data = map(ord, open(file_['localname'], 'rb').read())
+ data = map(ord, open(file_['srcpath'], 'rb').read())
str_data = ''
chunk_size = 10240
while len(data) > 0:
@@ -357,7 +364,7 @@ if has_preloaded:
Module['HEAPU8'].set(data, ptr);
curr.response = Module['HEAPU8'].subarray(ptr, ptr + %d);
curr.onload();
- ''' % (file_['name'], file_['data_start'], file_['data_end'], file_['data_end'] - file_['data_start'], file_['data_end'] - file_['data_start'])
+ ''' % (file_['dstpath'], file_['data_start'], file_['data_end'], file_['data_end'] - file_['data_start'], file_['data_end'] - file_['data_start'])
use_data += " Module['removeRunDependency']('datafile_%s');\n" % data_target
if Compression.on: