//"use strict";
// LLVM assembly => internal intermediate representation, which is ready
// to be processed by the later stages.
// Counters for cache-hit ("fast") vs. cache-miss ("slow") tokenization paths.
var fastPaths = 0;
var slowPaths = 0;
// Interned token objects for the most frequent LLVM assembly tokens: each
// maps token text -> a single shared { text: ... } object, so the tokenizer
// can push one reusable object instead of allocating a fresh token per hit.
var tokenCache = {};
(function() {
  var common = [',', '{', '}', 'i32', 'label', ';', '4', '0', '1', '2', '255', 'align', 'i8*', 'i8', 'i16', 'getelementptr', 'inbounds', 'unnamed_addr', 'x', 'load', 'preds', 'br', 'i32*', 'i1', 'store', '<label>', 'constant', 'c', 'private', 'null', 'internal', 'to', 'bitcast', 'define', 'nounwind', 'nocapture', '%this', 'call', '...'];
  for (var i = 0; i < common.length; i++) {
    tokenCache[common[i]] = { text: common[i] };
  }
})();
//var tokenCacheMisses = {};
// Line tokenizer
// Tokenizes one line of LLVM assembly into a flat array of token objects of
// the form { text: ... }. A bracketed group becomes a single token carrying a
// nested .tokens array (via the recursive call below) and a .type marking its
// opening bracket. NOTE(review): this function continues past the visible end
// of this chunk; only the setup and the makeToken helper are documented here.
function tokenize(text) {
var tokens = [];
var quotes = 0; // presumably tracks "..." string nesting in the scan loop further down -- TODO confirm
var lastToken = null;
// Bracket pairing table: openers map to 0, closers map to their opener.
// Also doubles as the membership test `text[0] in enclosers` in makeToken.
var enclosers = {
'[': 0,
']': '[',
'(': 0,
')': '(',
'<': 0,
'>': '<',
'{': 0,
'}': '{',
};
var totalEnclosing = 0; // presumably the current bracket nesting depth, maintained by the scan loop further down -- TODO confirm
// Appends a token for `text` to `tokens`, with three special cases:
// (1) a run of '*'s is folded into the previous token as a pointer suffix,
// (2) common tokens reuse the shared interned object from tokenCache,
// (3) a type token followed by a function definition is merged into one.
function makeToken(text) {
if (text.length == 0) return;
// merge *..* into last token
// (the '*'s are the pointer suffix of the preceding type token)
if (lastToken && text[0] === '*') {
//assert(/^\**$/.test(text));
//assert(!(lastToken.text in tokenCache));
lastToken.text += text;
return;
}
// Fast path: push the shared interned object for common tokens. The cached
// object is pushed as-is, so it must never be mutated in place (see the
// copy-on-write in the merge branch below).
var cached = tokenCache[text];
if (cached) {
//assert(cached.text === text);
tokens.push(cached);
lastToken = cached;
return;
}
//tokenCacheMisses[text] = (tokenCacheMisses[text] || 0) + 1;
var token = {
text: text
};
// A token beginning with an opening bracket carries a whole bracketed group;
// recursively tokenize its interior (substr strips the two surrounding
// brackets). NOTE(review): closers are also keys of `enclosers`, so a token
// starting with e.g. ']' would pass this test too -- presumably the scan loop
// never produces such a token; confirm against the loop below.
if (text[0] in enclosers) {
token.tokens = tokenize(text.substr(1, text.length-2));
token.type = text[0];
}
// merge function definitions together
// (isType / isFunctionDef are defined elsewhere in this file)
if (lastToken && isType(lastToken.text) && isFunctionDef(token)) {
if (lastToken.text in tokenCache) {
// create a copy of the cached value
// (so the shared interned token object is never mutated)
lastToken = tokens[tokens.length-1] = { text: lastToken.text };
}
lastToken.text += ' ' + text;
} else {
tokens.push(token);
lastToken = token;
}