/**
 * @license r.js 2.3.6 Copyright jQuery Foundation and other contributors.
 * Released under MIT license, http://github.com/requirejs/r.js/LICENSE
 */

/*
 * This is a bootstrap script to allow running RequireJS in the command line
 * in either a Java/Rhino or Node environment. It is modified by the top-level
 * dist.js file to inject other files to completely enable this file. It is
 * the shell of the r.js file.
 */

/*jslint evil: true, nomen: true, sloppy: true */
/*global readFile: true, process: false, Packages: false, print: false,
console: false, java: false, module: false, requirejsVars, navigator,
document, importScripts, self, location, Components, FileUtils */

var requirejs, require, define, xpcUtil;
(function (console, args, readFileFunc) {
    var fileName, env, fs, vm, path, exec, rhinoContext, dir, nodeRequire,
        nodeDefine, exists, reqMain, loadedOptimizedLib, existsForNode, Cc, Ci,
        version = '2.3.6',
        jsSuffixRegExp = /\.js$/,
        commandOption = '',
        useLibLoaded = {},
        //Used by jslib/rhino/args.js
        rhinoArgs = args,
        //Used by jslib/xpconnect/args.js
        xpconnectArgs = args,
        readFile = typeof readFileFunc !== 'undefined' ? readFileFunc : null;

    function showHelp() {
        console.log('See https://github.com/requirejs/r.js for usage.');
    }

    if (typeof process !== 'undefined' && process.versions && !!process.versions.node) {
        env = 'node';

        //Get the fs module via Node's require before it
        //gets replaced. Used in require/node.js
        fs = require('fs');
        vm = require('vm');
        path = require('path');
        //In Node 0.7+ existsSync is on fs.
        existsForNode = fs.existsSync || path.existsSync;

        nodeRequire = require;
        nodeDefine = define;
        reqMain = require.main;

        //Temporarily hide require and define to allow require.js to define
        //them.
        require = undefined;
        define = undefined;

        readFile = function (path) {
            return fs.readFileSync(path, 'utf8');
        };

        exec = function (string, name) {
            return vm.runInThisContext(this.requirejsVars.require.makeNodeWrapper(string),
                name ? fs.realpathSync(name) : '');
        };

        exists = function (fileName) {
            return existsForNode(fileName);
        };


        fileName = process.argv[2];

        if (fileName && fileName.indexOf('-') === 0) {
            commandOption = fileName.substring(1);
            fileName = process.argv[3];
        }
    } else if (typeof Packages !== 'undefined') {
        env = 'rhino';

        fileName = args[0];

        if (fileName && fileName.indexOf('-') === 0) {
            commandOption = fileName.substring(1);
            fileName = args[1];
        }

        //Exec/readFile differs between Rhino and Nashorn. Rhino has an
        //importPackage where Nashorn does not, so branch on that. This is a
        //coarser check -- detecting readFile existence might also be enough for
        //this spot. However, sticking with importPackage to keep it the same
        //as other Rhino/Nashorn detection branches.
        if (typeof importPackage !== 'undefined') {
            rhinoContext = Packages.org.mozilla.javascript.ContextFactory.getGlobal().enterContext();

            exec = function (string, name) {
                return rhinoContext.evaluateString(this, string, name, 0, null);
            };
        } else {
            exec = function (string, name) {
                load({ script: string, name: name});
            };
            readFile = readFully;
        }

        exists = function (fileName) {
            return (new java.io.File(fileName)).exists();
        };

        //Define a console.log for easier logging. Don't
        //get fancy though.
        if (typeof console === 'undefined') {
            console = {
                log: function () {
                    print.apply(undefined, arguments);
                }
            };
        }
    } else if ((typeof navigator !== 'undefined' && typeof document !== 'undefined') ||
            (typeof importScripts !== 'undefined' && typeof self !== 'undefined')) {
        env = 'browser';

        readFile = function (path) {
            return fs.readFileSync(path, 'utf8');
        };

        exec = function (string) {
            return eval(string);
        };

        exists = function () {
            console.log('x.js exists not applicable in browser env');
            return false;
        };

    } else if (typeof Components !== 'undefined' && Components.classes && Components.interfaces) {
        env = 'xpconnect';

        Components.utils['import']('resource://gre/modules/FileUtils.jsm');
        Cc = Components.classes;
        Ci = Components.interfaces;

        fileName = args[0];

        if (fileName && fileName.indexOf('-') === 0) {
            commandOption = fileName.substring(1);
            fileName = args[1];
        }

        xpcUtil = {
            isWindows: ('@mozilla.org/windows-registry-key;1' in Cc),
            cwd: function () {
                return FileUtils.getFile("CurWorkD", []).path;
            },

            //Remove . and .. from paths, normalize on front slashes
            normalize: function (path) {
                //There has to be an easier way to do this.
                var i, part, ary,
                    firstChar = path.charAt(0);

                if (firstChar !== '/' &&
                        firstChar !== '\\' &&
                        path.indexOf(':') === -1) {
                    //A relative path. Use the current working directory.
                    path = xpcUtil.cwd() + '/' + path;
                }

                ary = path.replace(/\\/g, '/').split('/');

                for (i = 0; i < ary.length; i += 1) {
                    part = ary[i];
                    if (part === '.') {
                        ary.splice(i, 1);
                        i -= 1;
                    } else if (part === '..') {
                        ary.splice(i - 1, 2);
                        i -= 2;
                    }
                }
                return ary.join('/');
            },

            xpfile: function (path) {
                var fullPath;
                try {
                    fullPath = xpcUtil.normalize(path);
                    if (xpcUtil.isWindows) {
                        fullPath = fullPath.replace(/\//g, '\\');
                    }
                    return new FileUtils.File(fullPath);
                } catch (e) {
                    throw new Error((fullPath || path) + ' failed: ' + e);
                }
            },

            readFile: function (/*String*/path, /*String?*/encoding) {
                //A file read function that can deal with BOMs
                encoding = encoding || "utf-8";

                var inStream, convertStream,
                    readData = {},
                    fileObj = xpcUtil.xpfile(path);

                //XPCOM, you so crazy
                try {
                    inStream = Cc['@mozilla.org/network/file-input-stream;1']
                               .createInstance(Ci.nsIFileInputStream);
                    inStream.init(fileObj, 1, 0, false);

                    convertStream = Cc['@mozilla.org/intl/converter-input-stream;1']
                                    .createInstance(Ci.nsIConverterInputStream);
                    convertStream.init(inStream, encoding, inStream.available(),
                    Ci.nsIConverterInputStream.DEFAULT_REPLACEMENT_CHARACTER);

                    convertStream.readString(inStream.available(), readData);
                    return readData.value;
                } catch (e) {
                    throw new Error((fileObj && fileObj.path || '') + ': ' + e);
                } finally {
                    if (convertStream) {
                        convertStream.close();
                    }
                    if (inStream) {
                        inStream.close();
                    }
                }
            }
        };

        readFile = xpcUtil.readFile;

        exec = function (string) {
            return eval(string);
        };

        exists = function (fileName) {
            return xpcUtil.xpfile(fileName).exists();
        };

        //Define a console.log for easier logging. Don't
        //get fancy though.
        if (typeof console === 'undefined') {
            console = {
                log: function () {
                    print.apply(undefined, arguments);
                }
            };
        }
    }
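
    //Typical command-line usage of this file, as documented for r.js:
    //  node r.js -o build.js   -> run the optimizer with a build profile
    //  node r.js main.js       -> run main.js with the require()/define() shims
    //The leading '-' argument, minus the dash, is what ends up in
    //commandOption above; the next argument becomes fileName.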

/** vim: et:ts=4:sw=4:sts=4
 * @license RequireJS 2.3.6 Copyright jQuery Foundation and other contributors.
 * Released under MIT license, https://github.com/requirejs/requirejs/blob/master/LICENSE
 */
//Not using strict: uneven strict support in browsers, #392, and causes
//problems with requirejs.exec()/transpiler plugins that may not be strict.
/*jslint regexp: true, nomen: true, sloppy: true */
/*global window, navigator, document, importScripts, setTimeout, opera */


(function (global, setTimeout) {
    var req, s, head, baseElement, dataMain, src,
        interactiveScript, currentlyAddingScript, mainScript, subPath,
        version = '2.3.6',
        commentRegExp = /\/\*[\s\S]*?\*\/|([^:"'=]|^)\/\/.*$/mg,
        cjsRequireRegExp = /[^.]\s*require\s*\(\s*["']([^'"\s]+)["']\s*\)/g,
        jsSuffixRegExp = /\.js$/,
        currDirRegExp = /^\.\//,
        op = Object.prototype,
        ostring = op.toString,
        hasOwn = op.hasOwnProperty,
        isBrowser = !!(typeof window !== 'undefined' && typeof navigator !== 'undefined' && window.document),
        isWebWorker = !isBrowser && typeof importScripts !== 'undefined',
        //PS3 indicates loaded and complete, but need to wait for complete
        //specifically. Sequence is 'loading', 'loaded', execution,
        // then 'complete'. The UA check is unfortunate, but not sure how
        //to feature test w/o causing perf issues.
        readyRegExp = isBrowser && navigator.platform === 'PLAYSTATION 3' ?
                      /^complete$/ : /^(complete|loaded)$/,
        defContextName = '_',
        //Oh the tragedy, detecting opera. See the usage of isOpera for reason.
        isOpera = typeof opera !== 'undefined' && opera.toString() === '[object Opera]',
        contexts = {},
        cfg = {},
        globalDefQueue = [],
        useInteractive = false;

    //Could match something like ')//comment', do not lose the prefix to comment.
    function commentReplace(match, singlePrefix) {
        return singlePrefix || '';
    }

    function isFunction(it) {
        return ostring.call(it) === '[object Function]';
    }

    function isArray(it) {
        return ostring.call(it) === '[object Array]';
    }

    /**
     * Helper function for iterating over an array. If the func returns
     * a true value, it will break out of the loop.
     */
    function each(ary, func) {
        if (ary) {
            var i;
            for (i = 0; i < ary.length; i += 1) {
                if (ary[i] && func(ary[i], i, ary)) {
                    break;
                }
            }
        }
    }

    /**
     * Helper function for iterating over an array backwards. If the func
     * returns a true value, it will break out of the loop.
     */
    function eachReverse(ary, func) {
        if (ary) {
            var i;
            for (i = ary.length - 1; i > -1; i -= 1) {
                if (ary[i] && func(ary[i], i, ary)) {
                    break;
                }
            }
        }
    }

    function hasProp(obj, prop) {
        return hasOwn.call(obj, prop);
    }

    function getOwn(obj, prop) {
        return hasProp(obj, prop) && obj[prop];
    }

    /**
     * Cycles over properties in an object and calls a function for each
     * property value. If the function returns a truthy value, then the
     * iteration is stopped.
     */
    function eachProp(obj, func) {
        var prop;
        for (prop in obj) {
            if (hasProp(obj, prop)) {
                if (func(obj[prop], prop)) {
                    break;
                }
            }
        }
    }

    /**
     * Simple function to mix in properties from source into target,
     * but only if target does not already have a property of the same name.
     */
    function mixin(target, source, force, deepStringMixin) {
        if (source) {
            eachProp(source, function (value, prop) {
                if (force || !hasProp(target, prop)) {
                    if (deepStringMixin && typeof value === 'object' && value &&
                            !isArray(value) && !isFunction(value) &&
                            !(value instanceof RegExp)) {

                        if (!target[prop]) {
                            target[prop] = {};
                        }
                        mixin(target[prop], value, force, deepStringMixin);
                    } else {
                        target[prop] = value;
                    }
                }
            });
        }
        return target;
    }
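
    //For example, mixin({a: 1}, {a: 2, b: 3}) leaves a as 1 and adds b: 3,
    //while mixin({a: 1}, {a: 2}, true) overwrites a with 2. With
    //deepStringMixin set, plain nested objects are merged recursively.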

    //Similar to Function.prototype.bind, but the 'this' object is specified
    //first, since it is easier to read/figure out what 'this' will be.
    function bind(obj, fn) {
        return function () {
            return fn.apply(obj, arguments);
        };
    }

    function scripts() {
        return document.getElementsByTagName('script');
    }

    function defaultOnError(err) {
        throw err;
    }

    //Allow getting a global that is expressed in
    //dot notation, like 'a.b.c'.
    function getGlobal(value) {
        if (!value) {
            return value;
        }
        var g = global;
        each(value.split('.'), function (part) {
            g = g[part];
        });
        return g;
    }
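
    //For example, getGlobal('jQuery.fn') walks the dots and returns
    //global.jQuery.fn (or throws if an intermediate object is missing).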

    /**
     * Constructs an error with a pointer to an URL with more information.
     * @param {String} id the error ID that maps to an ID on a web page.
     * @param {String} message human readable error.
     * @param {Error} [err] the original error, if there is one.
     *
     * @returns {Error}
     */
    function makeError(id, msg, err, requireModules) {
        var e = new Error(msg + '\nhttps://requirejs.org/docs/errors.html#' + id);
        e.requireType = id;
        e.requireModules = requireModules;
        if (err) {
            e.originalError = err;
        }
        return e;
    }

    if (typeof define !== 'undefined') {
        //If a define is already in play via another AMD loader,
        //do not overwrite.
        return;
    }

    if (typeof requirejs !== 'undefined') {
        if (isFunction(requirejs)) {
            //Do not overwrite an existing requirejs instance.
            return;
        }
        cfg = requirejs;
        requirejs = undefined;
    }

    //Allow for a require config object
    if (typeof require !== 'undefined' && !isFunction(require)) {
        //assume it is a config object.
        cfg = require;
        require = undefined;
    }

    function newContext(contextName) {
        var inCheckLoaded, Module, context, handlers,
            checkLoadedTimeoutId,
            config = {
                //Defaults. Do not set a default for map
                //config to speed up normalize(), which
                //will run faster if there is no default.
                waitSeconds: 7,
                baseUrl: './',
                paths: {},
                bundles: {},
                pkgs: {},
                shim: {},
                config: {}
            },
            registry = {},
            //registry of just enabled modules, to speed
            //cycle breaking code when lots of modules
            //are registered, but not activated.
            enabledRegistry = {},
            undefEvents = {},
            defQueue = [],
            defined = {},
            urlFetched = {},
            bundlesMap = {},
            requireCounter = 1,
            unnormalizedCounter = 1;

        /**
         * Trims the . and .. from an array of path segments.
         * It will keep a leading path segment if a .. will become
         * the first path segment, to help with module name lookups,
         * which act like paths, but can be remapped. But the end result,
         * all paths that use this function should look normalized.
         * NOTE: this method MODIFIES the input array.
         * @param {Array} ary the array of path segments.
         */
        function trimDots(ary) {
            var i, part;
            for (i = 0; i < ary.length; i++) {
                part = ary[i];
                if (part === '.') {
                    ary.splice(i, 1);
                    i -= 1;
                } else if (part === '..') {
                    // If at the start, or previous value is still ..,
                    // keep them so that when converted to a path it may
                    // still work when converted to a path, even though
                    // as an ID it is less than ideal. In larger point
                    // releases, may be better to just kick out an error.
                    if (i === 0 || (i === 1 && ary[2] === '..') || ary[i - 1] === '..') {
                        continue;
                    } else if (i > 0) {
                        ary.splice(i - 1, 2);
                        i -= 2;
                    }
                }
            }
        }
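
        //For example, trimDots(['a', 'b', '..', 'c']) leaves ['a', 'c'],
        //while a leading '..' as in ['..', 'a'] is kept intact.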
|
|
|
|
/**
|
|
* Given a relative module name, like ./something, normalize it to
|
|
* a real name that can be mapped to a path.
|
|
* @param {String} name the relative name
|
|
* @param {String} baseName a real name that the name arg is relative
|
|
* to.
|
|
* @param {Boolean} applyMap apply the map config to the value. Should
|
|
* only be done if this normalization is for a dependency ID.
|
|
* @returns {String} normalized name
|
|
*/
|
|
function normalize(name, baseName, applyMap) {
|
|
var pkgMain, mapValue, nameParts, i, j, nameSegment, lastIndex,
|
|
foundMap, foundI, foundStarMap, starI, normalizedBaseParts,
|
|
baseParts = (baseName && baseName.split('/')),
|
|
map = config.map,
|
|
starMap = map && map['*'];
|
|
|
|
//Adjust any relative paths.
|
|
if (name) {
|
|
name = name.split('/');
|
|
lastIndex = name.length - 1;
|
|
|
|
// If wanting node ID compatibility, strip .js from end
|
|
// of IDs. Have to do this here, and not in nameToUrl
|
|
// because node allows either .js or non .js to map
|
|
// to same file.
|
|
if (config.nodeIdCompat && jsSuffixRegExp.test(name[lastIndex])) {
|
|
name[lastIndex] = name[lastIndex].replace(jsSuffixRegExp, '');
|
|
}
|
|
|
|
// Starts with a '.' so need the baseName
|
|
if (name[0].charAt(0) === '.' && baseParts) {
|
|
//Convert baseName to array, and lop off the last part,
|
|
//so that . matches that 'directory' and not name of the baseName's
|
|
//module. For instance, baseName of 'one/two/three', maps to
|
|
//'one/two/three.js', but we want the directory, 'one/two' for
|
|
//this normalization.
|
|
normalizedBaseParts = baseParts.slice(0, baseParts.length - 1);
|
|
name = normalizedBaseParts.concat(name);
|
|
}
|
|
|
|
trimDots(name);
|
|
name = name.join('/');
|
|
}
|
|
|
|
//Apply map config if available.
|
|
if (applyMap && map && (baseParts || starMap)) {
|
|
nameParts = name.split('/');
|
|
|
|
outerLoop: for (i = nameParts.length; i > 0; i -= 1) {
|
|
nameSegment = nameParts.slice(0, i).join('/');
|
|
|
|
if (baseParts) {
|
|
//Find the longest baseName segment match in the config.
|
|
//So, do joins on the biggest to smallest lengths of baseParts.
|
|
for (j = baseParts.length; j > 0; j -= 1) {
|
|
mapValue = getOwn(map, baseParts.slice(0, j).join('/'));
|
|
|
|
//baseName segment has config, find if it has one for
|
|
//this name.
|
|
if (mapValue) {
|
|
mapValue = getOwn(mapValue, nameSegment);
|
|
if (mapValue) {
|
|
//Match, update name to the new value.
|
|
foundMap = mapValue;
|
|
foundI = i;
|
|
break outerLoop;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
//Check for a star map match, but just hold on to it,
|
|
//if there is a shorter segment match later in a matching
|
|
//config, then favor over this star map.
|
|
if (!foundStarMap && starMap && getOwn(starMap, nameSegment)) {
|
|
foundStarMap = getOwn(starMap, nameSegment);
|
|
starI = i;
|
|
}
|
|
}
|
|
|
|
if (!foundMap && foundStarMap) {
|
|
foundMap = foundStarMap;
|
|
foundI = starI;
|
|
}
|
|
|
|
if (foundMap) {
|
|
nameParts.splice(0, foundI, foundMap);
|
|
name = nameParts.join('/');
|
|
}
|
|
}
|
|
|
|
// If the name points to a package's name, use
|
|
// the package main instead.
|
|
pkgMain = getOwn(config.pkgs, name);
|
|
|
|
return pkgMain ? pkgMain : name;
|
|
}
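
        //For example, with baseName 'foo/bar/baz', normalize('./util', 'foo/bar/baz')
        //resolves to 'foo/bar/util', and normalize('../other', 'foo/bar/baz')
        //resolves to 'foo/other'. With applyMap set, any matching 'map' config
        //is applied to the result as well.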
|
|
|
|
function removeScript(name) {
|
|
if (isBrowser) {
|
|
each(scripts(), function (scriptNode) {
|
|
if (scriptNode.getAttribute('data-requiremodule') === name &&
|
|
scriptNode.getAttribute('data-requirecontext') === context.contextName) {
|
|
scriptNode.parentNode.removeChild(scriptNode);
|
|
return true;
|
|
}
|
|
});
|
|
}
|
|
}
|
|
|
|
function hasPathFallback(id) {
|
|
var pathConfig = getOwn(config.paths, id);
|
|
if (pathConfig && isArray(pathConfig) && pathConfig.length > 1) {
|
|
//Pop off the first array value, since it failed, and
|
|
//retry
|
|
pathConfig.shift();
|
|
context.require.undef(id);
|
|
|
|
//Custom require that does not do map translation, since
|
|
//ID is "absolute", already mapped/resolved.
|
|
context.makeRequire(null, {
|
|
skipMap: true
|
|
})([id]);
|
|
|
|
return true;
|
|
}
|
|
}
|
|
|
|
//Turns a plugin!resource to [plugin, resource]
|
|
//with the plugin being undefined if the name
|
|
//did not have a plugin prefix.
|
|
function splitPrefix(name) {
|
|
var prefix,
|
|
index = name ? name.indexOf('!') : -1;
|
|
if (index > -1) {
|
|
prefix = name.substring(0, index);
|
|
name = name.substring(index + 1, name.length);
|
|
}
|
|
return [prefix, name];
|
|
}
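
        //For example, splitPrefix('text!tmpl/row.html') returns
        //['text', 'tmpl/row.html'], and splitPrefix('app/main') returns
        //[undefined, 'app/main'] since there is no plugin prefix.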
|
|
|
|
/**
|
|
* Creates a module mapping that includes plugin prefix, module
|
|
* name, and path. If parentModuleMap is provided it will
|
|
* also normalize the name via require.normalize()
|
|
*
|
|
* @param {String} name the module name
|
|
* @param {String} [parentModuleMap] parent module map
|
|
* for the module name, used to resolve relative names.
|
|
* @param {Boolean} isNormalized: is the ID already normalized.
|
|
* This is true if this call is done for a define() module ID.
|
|
* @param {Boolean} applyMap: apply the map config to the ID.
|
|
* Should only be true if this map is for a dependency.
|
|
*
|
|
* @returns {Object}
|
|
*/
|
|
function makeModuleMap(name, parentModuleMap, isNormalized, applyMap) {
|
|
var url, pluginModule, suffix, nameParts,
|
|
prefix = null,
|
|
parentName = parentModuleMap ? parentModuleMap.name : null,
|
|
originalName = name,
|
|
isDefine = true,
|
|
normalizedName = '';
|
|
|
|
//If no name, then it means it is a require call, generate an
|
|
//internal name.
|
|
if (!name) {
|
|
isDefine = false;
|
|
name = '_@r' + (requireCounter += 1);
|
|
}
|
|
|
|
nameParts = splitPrefix(name);
|
|
prefix = nameParts[0];
|
|
name = nameParts[1];
|
|
|
|
if (prefix) {
|
|
prefix = normalize(prefix, parentName, applyMap);
|
|
pluginModule = getOwn(defined, prefix);
|
|
}
|
|
|
|
//Account for relative paths if there is a base name.
|
|
if (name) {
|
|
if (prefix) {
|
|
if (isNormalized) {
|
|
normalizedName = name;
|
|
} else if (pluginModule && pluginModule.normalize) {
|
|
//Plugin is loaded, use its normalize method.
|
|
normalizedName = pluginModule.normalize(name, function (name) {
|
|
return normalize(name, parentName, applyMap);
|
|
});
|
|
} else {
|
|
// If nested plugin references, then do not try to
|
|
// normalize, as it will not normalize correctly. This
|
|
// places a restriction on resourceIds, and the longer
|
|
// term solution is not to normalize until plugins are
|
|
// loaded and all normalizations to allow for async
|
|
// loading of a loader plugin. But for now, fixes the
|
|
// common uses. Details in #1131
|
|
normalizedName = name.indexOf('!') === -1 ?
|
|
normalize(name, parentName, applyMap) :
|
|
name;
|
|
}
|
|
} else {
|
|
//A regular module.
|
|
normalizedName = normalize(name, parentName, applyMap);
|
|
|
|
//Normalized name may be a plugin ID due to map config
|
|
//application in normalize. The map config values must
|
|
//already be normalized, so do not need to redo that part.
|
|
nameParts = splitPrefix(normalizedName);
|
|
prefix = nameParts[0];
|
|
normalizedName = nameParts[1];
|
|
isNormalized = true;
|
|
|
|
url = context.nameToUrl(normalizedName);
|
|
}
|
|
}
|
|
|
|
//If the id is a plugin id that cannot be determined if it needs
|
|
//normalization, stamp it with a unique ID so two matching relative
|
|
//ids that may conflict can be separate.
|
|
suffix = prefix && !pluginModule && !isNormalized ?
|
|
'_unnormalized' + (unnormalizedCounter += 1) :
|
|
'';
|
|
|
|
return {
|
|
prefix: prefix,
|
|
name: normalizedName,
|
|
parentMap: parentModuleMap,
|
|
unnormalized: !!suffix,
|
|
url: url,
|
|
originalName: originalName,
|
|
isDefine: isDefine,
|
|
id: (prefix ?
|
|
prefix + '!' + normalizedName :
|
|
normalizedName) + suffix
|
|
};
|
|
}
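
        //The returned map for, say, 'text!tmpl/row.html' has prefix 'text',
        //name 'tmpl/row.html' and id 'text!tmpl/row.html'; a plain module
        //like 'app/main' also gets a url from context.nameToUrl().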
|
|
|
|
function getModule(depMap) {
|
|
var id = depMap.id,
|
|
mod = getOwn(registry, id);
|
|
|
|
if (!mod) {
|
|
mod = registry[id] = new context.Module(depMap);
|
|
}
|
|
|
|
return mod;
|
|
}
|
|
|
|
function on(depMap, name, fn) {
|
|
var id = depMap.id,
|
|
mod = getOwn(registry, id);
|
|
|
|
if (hasProp(defined, id) &&
|
|
(!mod || mod.defineEmitComplete)) {
|
|
if (name === 'defined') {
|
|
fn(defined[id]);
|
|
}
|
|
} else {
|
|
mod = getModule(depMap);
|
|
if (mod.error && name === 'error') {
|
|
fn(mod.error);
|
|
} else {
|
|
mod.on(name, fn);
|
|
}
|
|
}
|
|
}
|
|
|
|
function onError(err, errback) {
|
|
var ids = err.requireModules,
|
|
notified = false;
|
|
|
|
if (errback) {
|
|
errback(err);
|
|
} else {
|
|
each(ids, function (id) {
|
|
var mod = getOwn(registry, id);
|
|
if (mod) {
|
|
//Set error on module, so it skips timeout checks.
|
|
mod.error = err;
|
|
if (mod.events.error) {
|
|
notified = true;
|
|
mod.emit('error', err);
|
|
}
|
|
}
|
|
});
|
|
|
|
if (!notified) {
|
|
req.onError(err);
|
|
}
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Internal method to transfer globalQueue items to this context's
|
|
* defQueue.
|
|
*/
|
|
function takeGlobalQueue() {
|
|
//Push all the globalDefQueue items into the context's defQueue
|
|
if (globalDefQueue.length) {
|
|
each(globalDefQueue, function(queueItem) {
|
|
var id = queueItem[0];
|
|
if (typeof id === 'string') {
|
|
context.defQueueMap[id] = true;
|
|
}
|
|
defQueue.push(queueItem);
|
|
});
|
|
globalDefQueue = [];
|
|
}
|
|
}
|
|
|
|
handlers = {
|
|
'require': function (mod) {
|
|
if (mod.require) {
|
|
return mod.require;
|
|
} else {
|
|
return (mod.require = context.makeRequire(mod.map));
|
|
}
|
|
},
|
|
'exports': function (mod) {
|
|
mod.usingExports = true;
|
|
if (mod.map.isDefine) {
|
|
if (mod.exports) {
|
|
return (defined[mod.map.id] = mod.exports);
|
|
} else {
|
|
return (mod.exports = defined[mod.map.id] = {});
|
|
}
|
|
}
|
|
},
|
|
'module': function (mod) {
|
|
if (mod.module) {
|
|
return mod.module;
|
|
} else {
|
|
return (mod.module = {
|
|
id: mod.map.id,
|
|
uri: mod.map.url,
|
|
config: function () {
|
|
return getOwn(config.config, mod.map.id) || {};
|
|
},
|
|
exports: mod.exports || (mod.exports = {})
|
|
});
|
|
}
|
|
}
|
|
};
|
|
|
|
function cleanRegistry(id) {
|
|
//Clean up machinery used for waiting modules.
|
|
delete registry[id];
|
|
delete enabledRegistry[id];
|
|
}
|
|
|
|
function breakCycle(mod, traced, processed) {
|
|
var id = mod.map.id;
|
|
|
|
if (mod.error) {
|
|
mod.emit('error', mod.error);
|
|
} else {
|
|
traced[id] = true;
|
|
each(mod.depMaps, function (depMap, i) {
|
|
var depId = depMap.id,
|
|
dep = getOwn(registry, depId);
|
|
|
|
//Only force things that have not completed
|
|
//being defined, so still in the registry,
|
|
//and only if it has not been matched up
|
|
//in the module already.
|
|
if (dep && !mod.depMatched[i] && !processed[depId]) {
|
|
if (getOwn(traced, depId)) {
|
|
mod.defineDep(i, defined[depId]);
|
|
mod.check(); //pass false?
|
|
} else {
|
|
breakCycle(dep, traced, processed);
|
|
}
|
|
}
|
|
});
|
|
processed[id] = true;
|
|
}
|
|
}
|
|
|
|
function checkLoaded() {
|
|
var err, usingPathFallback,
|
|
waitInterval = config.waitSeconds * 1000,
|
|
//It is possible to disable the wait interval by using waitSeconds of 0.
|
|
expired = waitInterval && (context.startTime + waitInterval) < new Date().getTime(),
|
|
noLoads = [],
|
|
reqCalls = [],
|
|
stillLoading = false,
|
|
needCycleCheck = true;
|
|
|
|
//Do not bother if this call was a result of a cycle break.
|
|
if (inCheckLoaded) {
|
|
return;
|
|
}
|
|
|
|
inCheckLoaded = true;
|
|
|
|
//Figure out the state of all the modules.
|
|
eachProp(enabledRegistry, function (mod) {
|
|
var map = mod.map,
|
|
modId = map.id;
|
|
|
|
//Skip things that are not enabled or in error state.
|
|
if (!mod.enabled) {
|
|
return;
|
|
}
|
|
|
|
if (!map.isDefine) {
|
|
reqCalls.push(mod);
|
|
}
|
|
|
|
if (!mod.error) {
|
|
//If the module should be executed, and it has not
|
|
//been inited and time is up, remember it.
|
|
if (!mod.inited && expired) {
|
|
if (hasPathFallback(modId)) {
|
|
usingPathFallback = true;
|
|
stillLoading = true;
|
|
} else {
|
|
noLoads.push(modId);
|
|
removeScript(modId);
|
|
}
|
|
} else if (!mod.inited && mod.fetched && map.isDefine) {
|
|
stillLoading = true;
|
|
if (!map.prefix) {
|
|
//No reason to keep looking for unfinished
|
|
//loading. If the only stillLoading is a
|
|
//plugin resource though, keep going,
|
|
//because it may be that a plugin resource
|
|
//is waiting on a non-plugin cycle.
|
|
return (needCycleCheck = false);
|
|
}
|
|
}
|
|
}
|
|
});
|
|
|
|
if (expired && noLoads.length) {
|
|
//If wait time expired, throw error of unloaded modules.
|
|
err = makeError('timeout', 'Load timeout for modules: ' + noLoads, null, noLoads);
|
|
err.contextName = context.contextName;
|
|
return onError(err);
|
|
}
|
|
|
|
//Not expired, check for a cycle.
|
|
if (needCycleCheck) {
|
|
each(reqCalls, function (mod) {
|
|
breakCycle(mod, {}, {});
|
|
});
|
|
}
|
|
|
|
//If still waiting on loads, and the waiting load is something
|
|
//other than a plugin resource, or there are still outstanding
|
|
//scripts, then just try back later.
|
|
if ((!expired || usingPathFallback) && stillLoading) {
|
|
//Something is still waiting to load. Wait for it, but only
|
|
//if a timeout is not already in effect.
|
|
if ((isBrowser || isWebWorker) && !checkLoadedTimeoutId) {
|
|
checkLoadedTimeoutId = setTimeout(function () {
|
|
checkLoadedTimeoutId = 0;
|
|
checkLoaded();
|
|
}, 50);
|
|
}
|
|
}
|
|
|
|
inCheckLoaded = false;
|
|
}
|
|
|
|
Module = function (map) {
|
|
this.events = getOwn(undefEvents, map.id) || {};
|
|
this.map = map;
|
|
this.shim = getOwn(config.shim, map.id);
|
|
this.depExports = [];
|
|
this.depMaps = [];
|
|
this.depMatched = [];
|
|
this.pluginMaps = {};
|
|
this.depCount = 0;
|
|
|
|
/* this.exports this.factory
|
|
this.depMaps = [],
|
|
this.enabled, this.fetched
|
|
*/
|
|
};
|
|
|
|
Module.prototype = {
|
|
init: function (depMaps, factory, errback, options) {
|
|
options = options || {};
|
|
|
|
//Do not do more inits if already done. Can happen if there
|
|
//are multiple define calls for the same module. That is not
|
|
//a normal, common case, but it is also not unexpected.
|
|
if (this.inited) {
|
|
return;
|
|
}
|
|
|
|
this.factory = factory;
|
|
|
|
if (errback) {
|
|
//Register for errors on this module.
|
|
this.on('error', errback);
|
|
} else if (this.events.error) {
|
|
//If no errback already, but there are error listeners
|
|
//on this module, set up an errback to pass to the deps.
|
|
errback = bind(this, function (err) {
|
|
this.emit('error', err);
|
|
});
|
|
}
|
|
|
|
//Do a copy of the dependency array, so that
|
|
//source inputs are not modified. For example
|
|
//"shim" deps are passed in here directly, and
|
|
//doing a direct modification of the depMaps array
|
|
//would affect that config.
|
|
this.depMaps = depMaps && depMaps.slice(0);
|
|
|
|
this.errback = errback;
|
|
|
|
//Indicate this module has be initialized
|
|
this.inited = true;
|
|
|
|
this.ignore = options.ignore;
|
|
|
|
//Could have option to init this module in enabled mode,
|
|
//or could have been previously marked as enabled. However,
|
|
//the dependencies are not known until init is called. So
|
|
//if enabled previously, now trigger dependencies as enabled.
|
|
if (options.enabled || this.enabled) {
|
|
//Enable this module and dependencies.
|
|
//Will call this.check()
|
|
this.enable();
|
|
} else {
|
|
this.check();
|
|
}
|
|
},
|
|
|
|
defineDep: function (i, depExports) {
|
|
//Because of cycles, defined callback for a given
|
|
//export can be called more than once.
|
|
if (!this.depMatched[i]) {
|
|
this.depMatched[i] = true;
|
|
this.depCount -= 1;
|
|
this.depExports[i] = depExports;
|
|
}
|
|
},
|
|
|
|
fetch: function () {
|
|
if (this.fetched) {
|
|
return;
|
|
}
|
|
this.fetched = true;
|
|
|
|
context.startTime = (new Date()).getTime();
|
|
|
|
var map = this.map;
|
|
|
|
//If the manager is for a plugin managed resource,
|
|
//ask the plugin to load it now.
|
|
if (this.shim) {
|
|
context.makeRequire(this.map, {
|
|
enableBuildCallback: true
|
|
})(this.shim.deps || [], bind(this, function () {
|
|
return map.prefix ? this.callPlugin() : this.load();
|
|
}));
|
|
} else {
|
|
//Regular dependency.
|
|
return map.prefix ? this.callPlugin() : this.load();
|
|
}
|
|
},
|
|
|
|
load: function () {
|
|
var url = this.map.url;
|
|
|
|
//Regular dependency.
|
|
if (!urlFetched[url]) {
|
|
urlFetched[url] = true;
|
|
context.load(this.map.id, url);
|
|
}
|
|
},
|
|
|
|
/**
|
|
* Checks if the module is ready to define itself, and if so,
|
|
* define it.
|
|
*/
|
|
check: function () {
|
|
if (!this.enabled || this.enabling) {
|
|
return;
|
|
}
|
|
|
|
var err, cjsModule,
|
|
id = this.map.id,
|
|
depExports = this.depExports,
|
|
exports = this.exports,
|
|
factory = this.factory;
|
|
|
|
if (!this.inited) {
|
|
// Only fetch if not already in the defQueue.
|
|
if (!hasProp(context.defQueueMap, id)) {
|
|
this.fetch();
|
|
}
|
|
} else if (this.error) {
|
|
this.emit('error', this.error);
|
|
} else if (!this.defining) {
|
|
//The factory could trigger another require call
|
|
//that would result in checking this module to
|
|
//define itself again. If already in the process
|
|
//of doing that, skip this work.
|
|
this.defining = true;
|
|
|
|
if (this.depCount < 1 && !this.defined) {
|
|
if (isFunction(factory)) {
|
|
//If there is an error listener, favor passing
|
|
//to that instead of throwing an error. However,
|
|
//only do it for define()'d modules. require
|
|
//errbacks should not be called for failures in
|
|
//their callbacks (#699). However if a global
|
|
//onError is set, use that.
|
|
if ((this.events.error && this.map.isDefine) ||
|
|
req.onError !== defaultOnError) {
|
|
try {
|
|
exports = context.execCb(id, factory, depExports, exports);
|
|
} catch (e) {
|
|
err = e;
|
|
}
|
|
} else {
|
|
exports = context.execCb(id, factory, depExports, exports);
|
|
}
|
|
|
|
// Favor return value over exports. If node/cjs in play,
|
|
// then will not have a return value anyway. Favor
|
|
// module.exports assignment over exports object.
|
|
if (this.map.isDefine && exports === undefined) {
|
|
cjsModule = this.module;
|
|
if (cjsModule) {
|
|
exports = cjsModule.exports;
|
|
} else if (this.usingExports) {
|
|
//exports already set the defined value.
|
|
exports = this.exports;
|
|
}
|
|
}
|
|
|
|
if (err) {
|
|
err.requireMap = this.map;
|
|
err.requireModules = this.map.isDefine ? [this.map.id] : null;
|
|
err.requireType = this.map.isDefine ? 'define' : 'require';
|
|
return onError((this.error = err));
|
|
}
|
|
|
|
} else {
|
|
//Just a literal value
|
|
exports = factory;
|
|
}
|
|
|
|
this.exports = exports;
|
|
|
|
if (this.map.isDefine && !this.ignore) {
|
|
defined[id] = exports;
|
|
|
|
if (req.onResourceLoad) {
|
|
var resLoadMaps = [];
|
|
each(this.depMaps, function (depMap) {
|
|
resLoadMaps.push(depMap.normalizedMap || depMap);
|
|
});
|
|
req.onResourceLoad(context, this.map, resLoadMaps);
|
|
}
|
|
}
|
|
|
|
//Clean up
|
|
cleanRegistry(id);
|
|
|
|
this.defined = true;
|
|
}
|
|
|
|
//Finished the define stage. Allow calling check again
|
|
//to allow define notifications below in the case of a
|
|
//cycle.
|
|
this.defining = false;
|
|
|
|
if (this.defined && !this.defineEmitted) {
|
|
this.defineEmitted = true;
|
|
this.emit('defined', this.exports);
|
|
this.defineEmitComplete = true;
|
|
}
|
|
|
|
}
|
|
},
|
|
|
|
callPlugin: function () {
|
|
var map = this.map,
|
|
id = map.id,
|
|
//Map already normalized the prefix.
|
|
pluginMap = makeModuleMap(map.prefix);
|
|
|
|
//Mark this as a dependency for this plugin, so it
|
|
//can be traced for cycles.
|
|
this.depMaps.push(pluginMap);
|
|
|
|
on(pluginMap, 'defined', bind(this, function (plugin) {
|
|
var load, normalizedMap, normalizedMod,
|
|
bundleId = getOwn(bundlesMap, this.map.id),
|
|
name = this.map.name,
|
|
parentName = this.map.parentMap ? this.map.parentMap.name : null,
|
|
localRequire = context.makeRequire(map.parentMap, {
|
|
enableBuildCallback: true
|
|
});
|
|
|
|
//If current map is not normalized, wait for that
|
|
//normalized name to load instead of continuing.
|
|
if (this.map.unnormalized) {
|
|
//Normalize the ID if the plugin allows it.
|
|
if (plugin.normalize) {
|
|
name = plugin.normalize(name, function (name) {
|
|
return normalize(name, parentName, true);
|
|
}) || '';
|
|
}
|
|
|
|
//prefix and name should already be normalized, no need
|
|
//for applying map config again either.
|
|
normalizedMap = makeModuleMap(map.prefix + '!' + name,
|
|
this.map.parentMap,
|
|
true);
|
|
on(normalizedMap,
|
|
'defined', bind(this, function (value) {
|
|
this.map.normalizedMap = normalizedMap;
|
|
this.init([], function () { return value; }, null, {
|
|
enabled: true,
|
|
ignore: true
|
|
});
|
|
}));
|
|
|
|
normalizedMod = getOwn(registry, normalizedMap.id);
|
|
if (normalizedMod) {
|
|
//Mark this as a dependency for this plugin, so it
|
|
//can be traced for cycles.
|
|
this.depMaps.push(normalizedMap);
|
|
|
|
if (this.events.error) {
|
|
normalizedMod.on('error', bind(this, function (err) {
|
|
this.emit('error', err);
|
|
}));
|
|
}
|
|
normalizedMod.enable();
|
|
}
|
|
|
|
return;
|
|
}
|
|
|
|
//If a paths config, then just load that file instead to
|
|
//resolve the plugin, as it is built into that paths layer.
|
|
if (bundleId) {
|
|
this.map.url = context.nameToUrl(bundleId);
|
|
this.load();
|
|
return;
|
|
}
|
|
|
|
load = bind(this, function (value) {
|
|
this.init([], function () { return value; }, null, {
|
|
enabled: true
|
|
});
|
|
});
|
|
|
|
load.error = bind(this, function (err) {
|
|
this.inited = true;
|
|
this.error = err;
|
|
err.requireModules = [id];
|
|
|
|
//Remove temp unnormalized modules for this module,
|
|
//since they will never be resolved otherwise now.
|
|
eachProp(registry, function (mod) {
|
|
if (mod.map.id.indexOf(id + '_unnormalized') === 0) {
|
|
cleanRegistry(mod.map.id);
|
|
}
|
|
});
|
|
|
|
onError(err);
|
|
});
|
|
|
|
//Allow plugins to load other code without having to know the
|
|
//context or how to 'complete' the load.
|
|
load.fromText = bind(this, function (text, textAlt) {
|
|
/*jslint evil: true */
|
|
var moduleName = map.name,
|
|
moduleMap = makeModuleMap(moduleName),
|
|
hasInteractive = useInteractive;
|
|
|
|
//As of 2.1.0, support just passing the text, to reinforce
|
|
//fromText only being called once per resource. Still
|
|
//support old style of passing moduleName but discard
|
|
//that moduleName in favor of the internal ref.
|
|
if (textAlt) {
|
|
text = textAlt;
|
|
}
|
|
|
|
//Turn off interactive script matching for IE for any define
|
|
//calls in the text, then turn it back on at the end.
|
|
if (hasInteractive) {
|
|
useInteractive = false;
|
|
}
|
|
|
|
//Prime the system by creating a module instance for
|
|
//it.
|
|
getModule(moduleMap);
|
|
|
|
//Transfer any config to this other module.
|
|
if (hasProp(config.config, id)) {
|
|
config.config[moduleName] = config.config[id];
|
|
}
|
|
|
|
try {
|
|
req.exec(text);
|
|
} catch (e) {
|
|
return onError(makeError('fromtexteval',
|
|
'fromText eval for ' + id +
|
|
' failed: ' + e,
|
|
e,
|
|
[id]));
|
|
}
|
|
|
|
if (hasInteractive) {
|
|
useInteractive = true;
|
|
}
|
|
|
|
//Mark this as a dependency for the plugin
|
|
//resource
|
|
this.depMaps.push(moduleMap);
|
|
|
|
//Support anonymous modules.
|
|
context.completeLoad(moduleName);
|
|
|
|
//Bind the value of that module to the value for this
|
|
//resource ID.
|
|
localRequire([moduleName], load);
|
|
});
|
|
|
|
//Use parentName here since the plugin's name is not reliable,
|
|
//could be some weird string with no path that actually wants to
|
|
//reference the parentName's path.
|
|
plugin.load(map.name, localRequire, load, config);
|
|
}));
|
|
|
|
context.enable(pluginMap, this);
|
|
this.pluginMaps[pluginMap.id] = pluginMap;
|
|
},
|
|
|
|
enable: function () {
|
|
enabledRegistry[this.map.id] = this;
|
|
this.enabled = true;
|
|
|
|
//Set flag mentioning that the module is enabling,
|
|
//so that immediate calls to the defined callbacks
|
|
//for dependencies do not trigger inadvertent load
|
|
//with the depCount still being zero.
|
|
this.enabling = true;
|
|
|
|
//Enable each dependency
|
|
each(this.depMaps, bind(this, function (depMap, i) {
|
|
var id, mod, handler;
|
|
|
|
if (typeof depMap === 'string') {
|
|
//Dependency needs to be converted to a depMap
|
|
//and wired up to this module.
|
|
depMap = makeModuleMap(depMap,
|
|
(this.map.isDefine ? this.map : this.map.parentMap),
|
|
false,
|
|
!this.skipMap);
|
|
this.depMaps[i] = depMap;
|
|
|
|
handler = getOwn(handlers, depMap.id);
|
|
|
|
if (handler) {
|
|
this.depExports[i] = handler(this);
|
|
return;
|
|
}
|
|
|
|
this.depCount += 1;
|
|
|
|
on(depMap, 'defined', bind(this, function (depExports) {
|
|
if (this.undefed) {
|
|
return;
|
|
}
|
|
this.defineDep(i, depExports);
|
|
this.check();
|
|
}));
|
|
|
|
if (this.errback) {
|
|
on(depMap, 'error', bind(this, this.errback));
|
|
} else if (this.events.error) {
|
|
// No direct errback on this module, but something
|
|
// else is listening for errors, so be sure to
|
|
// propagate the error correctly.
|
|
on(depMap, 'error', bind(this, function(err) {
|
|
this.emit('error', err);
|
|
}));
|
|
}
|
|
}
|
|
|
|
id = depMap.id;
|
|
mod = registry[id];
|
|
|
|
//Skip special modules like 'require', 'exports', 'module'
|
|
//Also, don't call enable if it is already enabled,
|
|
//important in circular dependency cases.
|
|
if (!hasProp(handlers, id) && mod && !mod.enabled) {
|
|
context.enable(depMap, this);
|
|
}
|
|
}));
|
|
|
|
//Enable each plugin that is used in
|
|
//a dependency
|
|
eachProp(this.pluginMaps, bind(this, function (pluginMap) {
|
|
var mod = getOwn(registry, pluginMap.id);
|
|
if (mod && !mod.enabled) {
|
|
context.enable(pluginMap, this);
|
|
}
|
|
}));
|
|
|
|
this.enabling = false;
|
|
|
|
this.check();
|
|
},
|
|
|
|
on: function (name, cb) {
|
|
var cbs = this.events[name];
|
|
if (!cbs) {
|
|
cbs = this.events[name] = [];
|
|
}
|
|
cbs.push(cb);
|
|
},
|
|
|
|
emit: function (name, evt) {
|
|
each(this.events[name], function (cb) {
|
|
cb(evt);
|
|
});
|
|
if (name === 'error') {
|
|
//Now that the error handler was triggered, remove
|
|
//the listeners, since this broken Module instance
|
|
//can stay around for a while in the registry.
|
|
delete this.events[name];
|
|
}
|
|
}
|
|
};
|
|
|
|
function callGetModule(args) {
|
|
//Skip modules already defined.
|
|
if (!hasProp(defined, args[0])) {
|
|
getModule(makeModuleMap(args[0], null, true)).init(args[1], args[2]);
|
|
}
|
|
}
|
|
|
|
function removeListener(node, func, name, ieName) {
|
|
//Favor detachEvent because of IE9
|
|
//issue, see attachEvent/addEventListener comment elsewhere
|
|
//in this file.
|
|
if (node.detachEvent && !isOpera) {
|
|
//Probably IE. If not it will throw an error, which will be
|
|
//useful to know.
|
|
if (ieName) {
|
|
node.detachEvent(ieName, func);
|
|
}
|
|
} else {
|
|
node.removeEventListener(name, func, false);
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Given an event from a script node, get the requirejs info from it,
|
|
* and then removes the event listeners on the node.
|
|
* @param {Event} evt
|
|
* @returns {Object}
|
|
*/
|
|
function getScriptData(evt) {
|
|
//Using currentTarget instead of target for Firefox 2.0's sake. Not
|
|
//all old browsers will be supported, but this one was easy enough
|
|
//to support and still makes sense.
|
|
var node = evt.currentTarget || evt.srcElement;
|
|
|
|
//Remove the listeners once here.
|
|
removeListener(node, context.onScriptLoad, 'load', 'onreadystatechange');
|
|
removeListener(node, context.onScriptError, 'error');
|
|
|
|
return {
|
|
node: node,
|
|
id: node && node.getAttribute('data-requiremodule')
|
|
};
|
|
}
|
|
|
|
function intakeDefines() {
|
|
var args;
|
|
|
|
//Any defined modules in the global queue, intake them now.
|
|
takeGlobalQueue();
|
|
|
|
//Make sure any remaining defQueue items get properly processed.
|
|
while (defQueue.length) {
|
|
args = defQueue.shift();
|
|
if (args[0] === null) {
|
|
return onError(makeError('mismatch', 'Mismatched anonymous define() module: ' +
|
|
args[args.length - 1]));
|
|
} else {
|
|
//args are id, deps, factory. Should be normalized by the
|
|
//define() function.
|
|
callGetModule(args);
|
|
}
|
|
}
|
|
context.defQueueMap = {};
|
|
}
|
|
|
|
context = {
|
|
config: config,
|
|
contextName: contextName,
|
|
registry: registry,
|
|
defined: defined,
|
|
urlFetched: urlFetched,
|
|
defQueue: defQueue,
|
|
defQueueMap: {},
|
|
Module: Module,
|
|
makeModuleMap: makeModuleMap,
|
|
nextTick: req.nextTick,
|
|
onError: onError,
|
|
|
|
/**
|
|
* Set a configuration for the context.
|
|
* @param {Object} cfg config object to integrate.
|
|
*/
|
|
configure: function (cfg) {
|
|
//Make sure the baseUrl ends in a slash.
|
|
if (cfg.baseUrl) {
|
|
if (cfg.baseUrl.charAt(cfg.baseUrl.length - 1) !== '/') {
|
|
cfg.baseUrl += '/';
|
|
}
|
|
}
|
|
|
|
// Convert old style urlArgs string to a function.
|
|
if (typeof cfg.urlArgs === 'string') {
|
|
var urlArgs = cfg.urlArgs;
|
|
cfg.urlArgs = function(id, url) {
|
|
return (url.indexOf('?') === -1 ? '?' : '&') + urlArgs;
|
|
};
|
|
}
|
|
|
|
//Save off the paths since they require special processing,
|
|
//they are additive.
|
|
var shim = config.shim,
|
|
objs = {
|
|
paths: true,
|
|
bundles: true,
|
|
config: true,
|
|
map: true
|
|
};
|
|
|
|
eachProp(cfg, function (value, prop) {
|
|
if (objs[prop]) {
|
|
if (!config[prop]) {
|
|
config[prop] = {};
|
|
}
|
|
mixin(config[prop], value, true, true);
|
|
} else {
|
|
config[prop] = value;
|
|
}
|
|
});
|
|
|
|
//Reverse map the bundles
|
|
if (cfg.bundles) {
|
|
eachProp(cfg.bundles, function (value, prop) {
|
|
each(value, function (v) {
|
|
if (v !== prop) {
|
|
bundlesMap[v] = prop;
|
|
}
|
|
});
|
|
});
|
|
}
|
|
|
|
//Merge shim
|
|
if (cfg.shim) {
|
|
eachProp(cfg.shim, function (value, id) {
|
|
//Normalize the structure
|
|
if (isArray(value)) {
|
|
value = {
|
|
deps: value
|
|
};
|
|
}
|
|
if ((value.exports || value.init) && !value.exportsFn) {
|
|
value.exportsFn = context.makeShimExports(value);
|
|
}
|
|
shim[id] = value;
|
|
});
|
|
config.shim = shim;
|
|
}
|
|
|
|
//Adjust packages if necessary.
|
|
if (cfg.packages) {
|
|
each(cfg.packages, function (pkgObj) {
|
|
var location, name;
|
|
|
|
pkgObj = typeof pkgObj === 'string' ? {name: pkgObj} : pkgObj;
|
|
|
|
name = pkgObj.name;
|
|
location = pkgObj.location;
|
|
if (location) {
|
|
config.paths[name] = pkgObj.location;
|
|
}
|
|
|
|
//Save pointer to main module ID for pkg name.
|
|
//Remove leading dot in main, so main paths are normalized,
|
|
//and remove any trailing .js, since different package
|
|
//envs have different conventions: some use a module name,
|
|
//some use a file name.
|
|
config.pkgs[name] = pkgObj.name + '/' + (pkgObj.main || 'main')
|
|
.replace(currDirRegExp, '')
|
|
.replace(jsSuffixRegExp, '');
|
|
});
|
|
}
|
|
|
|
//If there are any "waiting to execute" modules in the registry,
|
|
//update the maps for them, since their info, like URLs to load,
|
|
//may have changed.
|
|
eachProp(registry, function (mod, id) {
|
|
//If module already has init called, since it is too
|
|
//late to modify them, and ignore unnormalized ones
|
|
//since they are transient.
|
|
if (!mod.inited && !mod.map.unnormalized) {
|
|
mod.map = makeModuleMap(id, null, true);
|
|
}
|
|
});
|
|
|
|
//If a deps array or a config callback is specified, then call
|
|
//require with those args. This is useful when require is defined as a
|
|
//config object before require.js is loaded.
|
|
if (cfg.deps || cfg.callback) {
|
|
context.require(cfg.deps || [], cfg.callback);
|
|
}
|
|
},
|
|
|
|
makeShimExports: function (value) {
|
|
function fn() {
|
|
var ret;
|
|
if (value.init) {
|
|
ret = value.init.apply(global, arguments);
|
|
}
|
|
return ret || (value.exports && getGlobal(value.exports));
|
|
}
|
|
return fn;
|
|
},
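
            //A typical shim config, merged in configure() above and turned
            //into an exportsFn by makeShimExports:
            //  shim: {
            //      'backbone': {
            //          deps: ['underscore', 'jquery'],
            //          exports: 'Backbone'
            //      }
            //  }
            //exports names a global (resolved via getGlobal) used as the
            //module value when the script does not call define().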
|
|
|
|
makeRequire: function (relMap, options) {
|
|
options = options || {};
|
|
|
|
function localRequire(deps, callback, errback) {
|
|
var id, map, requireMod;
|
|
|
|
if (options.enableBuildCallback && callback && isFunction(callback)) {
|
|
callback.__requireJsBuild = true;
|
|
}
|
|
|
|
if (typeof deps === 'string') {
|
|
if (isFunction(callback)) {
|
|
//Invalid call
|
|
return onError(makeError('requireargs', 'Invalid require call'), errback);
|
|
}
|
|
|
|
//If require|exports|module are requested, get the
|
|
//value for them from the special handlers. Caveat:
|
|
//this only works while module is being defined.
|
|
if (relMap && hasProp(handlers, deps)) {
|
|
return handlers[deps](registry[relMap.id]);
|
|
}
|
|
|
|
//Synchronous access to one module. If require.get is
|
|
//available (as in the Node adapter), prefer that.
|
|
if (req.get) {
|
|
return req.get(context, deps, relMap, localRequire);
|
|
}
|
|
|
|
//Normalize module name, if it contains . or ..
|
|
map = makeModuleMap(deps, relMap, false, true);
|
|
id = map.id;
|
|
|
|
if (!hasProp(defined, id)) {
|
|
return onError(makeError('notloaded', 'Module name "' +
|
|
id +
|
|
'" has not been loaded yet for context: ' +
|
|
contextName +
|
|
(relMap ? '' : '. Use require([])')));
|
|
}
|
|
return defined[id];
|
|
}
|
|
|
|
//Grab defines waiting in the global queue.
|
|
intakeDefines();
|
|
|
|
//Mark all the dependencies as needing to be loaded.
|
|
context.nextTick(function () {
|
|
//Some defines could have been added since the
|
|
//require call, collect them.
|
|
intakeDefines();
|
|
|
|
requireMod = getModule(makeModuleMap(null, relMap));
|
|
|
|
//Store if map config should be applied to this require
|
|
//call for dependencies.
|
|
requireMod.skipMap = options.skipMap;
|
|
|
|
requireMod.init(deps, callback, errback, {
|
|
enabled: true
|
|
});
|
|
|
|
checkLoaded();
|
|
});
|
|
|
|
return localRequire;
|
|
}
|
|
|
|
mixin(localRequire, {
|
|
isBrowser: isBrowser,
|
|
|
|
/**
|
|
* Converts a module name + .extension into an URL path.
|
|
* *Requires* the use of a module name. It does not support using
|
|
* plain URLs like nameToUrl.
|
|
*/
|
|
toUrl: function (moduleNamePlusExt) {
|
|
var ext,
|
|
index = moduleNamePlusExt.lastIndexOf('.'),
|
|
segment = moduleNamePlusExt.split('/')[0],
|
|
isRelative = segment === '.' || segment === '..';
|
|
|
|
//Have a file extension alias, and it is not the
|
|
//dots from a relative path.
|
|
if (index !== -1 && (!isRelative || index > 1)) {
|
|
ext = moduleNamePlusExt.substring(index, moduleNamePlusExt.length);
|
|
moduleNamePlusExt = moduleNamePlusExt.substring(0, index);
|
|
}
|
|
|
|
return context.nameToUrl(normalize(moduleNamePlusExt,
|
|
relMap && relMap.id, true), ext, true);
|
|
},
|
|
|
|
defined: function (id) {
|
|
return hasProp(defined, makeModuleMap(id, relMap, false, true).id);
|
|
},
|
|
|
|
specified: function (id) {
|
|
id = makeModuleMap(id, relMap, false, true).id;
|
|
return hasProp(defined, id) || hasProp(registry, id);
|
|
}
|
|
});
|
|
|
|
//Only allow undef on top level require calls
|
|
if (!relMap) {
|
|
localRequire.undef = function (id) {
|
|
//Bind any waiting define() calls to this context,
|
|
//fix for #408
|
|
takeGlobalQueue();
|
|
|
|
var map = makeModuleMap(id, relMap, true),
|
|
mod = getOwn(registry, id);
|
|
|
|
mod.undefed = true;
|
|
removeScript(id);
|
|
|
|
delete defined[id];
|
|
delete urlFetched[map.url];
|
|
delete undefEvents[id];
|
|
|
|
//Clean queued defines too. Go backwards
|
|
//in array so that the splices do not
|
|
//mess up the iteration.
|
|
eachReverse(defQueue, function(args, i) {
|
|
if (args[0] === id) {
|
|
defQueue.splice(i, 1);
|
|
}
|
|
});
|
|
delete context.defQueueMap[id];
|
|
|
|
if (mod) {
|
|
//Hold on to listeners in case the
|
|
//module will be attempted to be reloaded
|
|
//using a different config.
|
|
if (mod.events.defined) {
|
|
undefEvents[id] = mod.events;
|
|
}
|
|
|
|
cleanRegistry(id);
|
|
}
|
|
};
|
|
}
|
|
|
|
return localRequire;
|
|
},
|
|
|
|
/**
|
|
* Called to enable a module if it is still in the registry
|
|
* awaiting enablement. A second arg, parent, the parent module,
|
|
* is passed in for context, when this method is overridden by
|
|
* the optimizer. Not shown here to keep code compact.
|
|
*/
|
|
enable: function (depMap) {
|
|
var mod = getOwn(registry, depMap.id);
|
|
if (mod) {
|
|
getModule(depMap).enable();
|
|
}
|
|
},
|
|
|
|
/**
|
|
* Internal method used by environment adapters to complete a load event.
|
|
* A load event could be a script load or just a load pass from a synchronous
|
|
* load call.
|
|
* @param {String} moduleName the name of the module to potentially complete.
|
|
*/
|
|
completeLoad: function (moduleName) {
|
|
var found, args, mod,
|
|
shim = getOwn(config.shim, moduleName) || {},
|
|
shExports = shim.exports;
|
|
|
|
takeGlobalQueue();
|
|
|
|
while (defQueue.length) {
|
|
args = defQueue.shift();
|
|
if (args[0] === null) {
|
|
args[0] = moduleName;
|
|
//If already found an anonymous module and bound it
|
|
//to this name, then this is some other anon module
|
|
//waiting for its completeLoad to fire.
|
|
if (found) {
|
|
break;
|
|
}
|
|
found = true;
|
|
} else if (args[0] === moduleName) {
|
|
//Found matching define call for this script!
|
|
found = true;
|
|
}
|
|
|
|
callGetModule(args);
|
|
}
|
|
context.defQueueMap = {};
|
|
|
|
//Do this after the cycle of callGetModule in case the result
|
|
//of those calls/init calls changes the registry.
|
|
mod = getOwn(registry, moduleName);
|
|
|
|
if (!found && !hasProp(defined, moduleName) && mod && !mod.inited) {
|
|
if (config.enforceDefine && (!shExports || !getGlobal(shExports))) {
|
|
if (hasPathFallback(moduleName)) {
|
|
return;
|
|
} else {
|
|
return onError(makeError('nodefine',
|
|
'No define call for ' + moduleName,
|
|
null,
|
|
[moduleName]));
|
|
}
|
|
} else {
|
|
//A script that does not call define(), so just simulate
|
|
//the call for it.
|
|
callGetModule([moduleName, (shim.deps || []), shim.exportsFn]);
|
|
}
|
|
}
|
|
|
|
checkLoaded();
|
|
},
|
|
|
|
/**
|
|
* Converts a module name to a file path. Supports cases where
|
|
* moduleName may actually be just an URL.
|
|
* Note that it **does not** call normalize on the moduleName,
|
|
* it is assumed to have already been normalized. This is an
|
|
* internal API, not a public one. Use toUrl for the public API.
|
|
*/
|
|
nameToUrl: function (moduleName, ext, skipExt) {
|
|
var paths, syms, i, parentModule, url,
|
|
parentPath, bundleId,
|
|
pkgMain = getOwn(config.pkgs, moduleName);
|
|
|
|
if (pkgMain) {
|
|
moduleName = pkgMain;
|
|
}
|
|
|
|
bundleId = getOwn(bundlesMap, moduleName);
|
|
|
|
if (bundleId) {
|
|
return context.nameToUrl(bundleId, ext, skipExt);
|
|
}
|
|
|
|
//If a colon is in the URL, it indicates a protocol is used and it is just
|
|
//an URL to a file, or if it starts with a slash, contains a query arg (i.e. ?)
|
|
//or ends with .js, then assume the user meant to use an url and not a module id.
|
|
//The slash is important for protocol-less URLs as well as full paths.
|
|
if (req.jsExtRegExp.test(moduleName)) {
|
|
//Just a plain path, not module name lookup, so just return it.
|
|
//Add extension if it is included. This is a bit wonky, only non-.js things pass
|
|
//an extension, this method probably needs to be reworked.
|
|
url = moduleName + (ext || '');
|
|
} else {
|
|
//A module that needs to be converted to a path.
|
|
paths = config.paths;
|
|
|
|
syms = moduleName.split('/');
|
|
//For each module name segment, see if there is a path
|
|
//registered for it. Start with most specific name
|
|
//and work up from it.
|
|
for (i = syms.length; i > 0; i -= 1) {
|
|
parentModule = syms.slice(0, i).join('/');
|
|
|
|
parentPath = getOwn(paths, parentModule);
|
|
if (parentPath) {
|
|
//If an array, it means there are a few choices,
|
|
//Choose the one that is desired
|
|
if (isArray(parentPath)) {
|
|
parentPath = parentPath[0];
|
|
}
|
|
syms.splice(0, i, parentPath);
|
|
break;
|
|
}
|
|
}
|
|
|
|
//Join the path parts together, then figure out if baseUrl is needed.
|
|
url = syms.join('/');
|
|
url += (ext || (/^data\:|^blob\:|\?/.test(url) || skipExt ? '' : '.js'));
|
|
url = (url.charAt(0) === '/' || url.match(/^[\w\+\.\-]+:/) ? '' : config.baseUrl) + url;
|
|
}
|
|
|
|
return config.urlArgs && !/^blob\:/.test(url) ?
|
|
url + config.urlArgs(moduleName, url) : url;
|
|
},
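
            //For example, with baseUrl 'js/' and paths: {'app': 'lib/app'},
            //nameToUrl('app/main') yields 'js/lib/app/main.js', while a
            //plain URL such as 'http://cdn.example.com/a.js' is returned
            //as-is, with only urlArgs (if configured) appended.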
|
|
|
|
//Delegates to req.load. Broken out as a separate function to
|
|
//allow overriding in the optimizer.
|
|
load: function (id, url) {
|
|
req.load(context, id, url);
|
|
},
|
|
|
|
/**
|
|
* Executes a module callback function. Broken out as a separate function
|
|
* solely to allow the build system to sequence the files in the built
|
|
* layer in the right sequence.
|
|
*
|
|
* @private
|
|
*/
|
|
execCb: function (name, callback, args, exports) {
|
|
return callback.apply(exports, args);
|
|
},
|
|
|
|
/**
|
|
* callback for script loads, used to check status of loading.
|
|
*
|
|
* @param {Event} evt the event from the browser for the script
|
|
* that was loaded.
|
|
*/
|
|
onScriptLoad: function (evt) {
|
|
//Using currentTarget instead of target for Firefox 2.0's sake. Not
|
|
//all old browsers will be supported, but this one was easy enough
|
|
//to support and still makes sense.
|
|
if (evt.type === 'load' ||
|
|
(readyRegExp.test((evt.currentTarget || evt.srcElement).readyState))) {
|
|
//Reset interactive script so a script node is not held onto for
|
|
//to long.
|
|
interactiveScript = null;
|
|
|
|
//Pull out the name of the module and the context.
|
|
var data = getScriptData(evt);
|
|
context.completeLoad(data.id);
|
|
}
|
|
},
|
|
|
|
/**
|
|
* Callback for script errors.
|
|
*/
|
|
onScriptError: function (evt) {
|
|
var data = getScriptData(evt);
|
|
if (!hasPathFallback(data.id)) {
|
|
var parents = [];
|
|
eachProp(registry, function(value, key) {
|
|
if (key.indexOf('_@r') !== 0) {
|
|
each(value.depMaps, function(depMap) {
|
|
if (depMap.id === data.id) {
|
|
parents.push(key);
|
|
return true;
|
|
}
|
|
});
|
|
}
|
|
});
|
|
return onError(makeError('scripterror', 'Script error for "' + data.id +
|
|
(parents.length ?
|
|
'", needed by: ' + parents.join(', ') :
|
|
'"'), evt, [data.id]));
|
|
}
|
|
}
|
|
};
|
|
|
|
context.require = context.makeRequire();
|
|
return context;
|
|
}
|
|
|
|
/**
|
|
* Main entry point.
|
|
*
|
|
* If the only argument to require is a string, then the module that
|
|
* is represented by that string is fetched for the appropriate context.
|
|
*
|
|
* If the first argument is an array, then it will be treated as an array
|
|
* of dependency string names to fetch. An optional function callback can
|
|
* be specified to execute when all of those dependencies are available.
|
|
*
|
|
* Make a local req variable to help Caja compliance (it assumes things
|
|
* on a require that are not standardized), and to give a short
|
|
* name for minification/local scope use.
|
|
*/
|
|
req = requirejs = function (deps, callback, errback, optional) {
|
|
|
|
//Find the right context, use default
|
|
var context, config,
|
|
contextName = defContextName;
|
|
|
|
// Determine if there is a config object in the call.
|
|
if (!isArray(deps) && typeof deps !== 'string') {
|
|
// deps is a config object
|
|
config = deps;
|
|
if (isArray(callback)) {
|
|
// Adjust args if there are dependencies
|
|
deps = callback;
|
|
callback = errback;
|
|
errback = optional;
|
|
} else {
|
|
deps = [];
|
|
}
|
|
}
|
|
|
|
if (config && config.context) {
|
|
contextName = config.context;
|
|
}
|
|
|
|
context = getOwn(contexts, contextName);
|
|
if (!context) {
|
|
context = contexts[contextName] = req.s.newContext(contextName);
|
|
}
|
|
|
|
if (config) {
|
|
context.configure(config);
|
|
}
|
|
|
|
return context.require(deps, callback, errback);
|
|
};
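//Usage sketch (illustrative only; the module ids are hypothetical):
//
//  requirejs(['app/main', 'app/util'], function (main, util) {
//      main.start(util);
//  });
//
//A config object may be passed as the first argument, optionally naming a
//context so the dependencies load in their own isolated context:
//
//  requirejs({ context: 'docs' }, ['docs/viewer'], function (viewer) {
//      viewer.render();
//  });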
/**
|
|
* Support require.config() to make it easier to cooperate with other
|
|
* AMD loaders on globally agreed names.
|
|
*/
|
|
req.config = function (config) {
|
|
return req(config);
|
|
};
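//Typical usage sketch (all values below are hypothetical):
//
//  require.config({
//      baseUrl: 'js',
//      paths: { jquery: 'vendor/jquery' },
//      urlArgs: function (id, url) {
//          return (url.indexOf('?') === -1 ? '?' : '&') + 'v=1';
//      }
//  });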
/**
|
|
* Execute something after the current tick
|
|
* of the event loop. Override for other envs
|
|
* that have a better solution than setTimeout.
|
|
* @param {Function} fn function to execute later.
|
|
*/
|
|
req.nextTick = typeof setTimeout !== 'undefined' ? function (fn) {
|
|
setTimeout(fn, 4);
|
|
} : function (fn) { fn(); };
|
|
|
|
/**
|
|
* Export require as a global, but only if it does not already exist.
|
|
*/
|
|
if (!require) {
|
|
require = req;
|
|
}
|
|
|
|
req.version = version;
|
|
|
|
//Used to filter out dependencies that are already paths.
|
|
req.jsExtRegExp = /^\/|:|\?|\.js$/;
|
|
req.isBrowser = isBrowser;
|
|
s = req.s = {
|
|
contexts: contexts,
|
|
newContext: newContext
|
|
};
|
|
|
|
//Create default context.
|
|
req({});
|
|
|
|
//Exports some context-sensitive methods on global require.
|
|
each([
|
|
'toUrl',
|
|
'undef',
|
|
'defined',
|
|
'specified'
|
|
], function (prop) {
|
|
//Reference from contexts instead of early binding to default context,
|
|
//so that during builds, the latest instance of the default context
|
|
//with its config gets used.
|
|
req[prop] = function () {
|
|
var ctx = contexts[defContextName];
|
|
return ctx.require[prop].apply(ctx, arguments);
|
|
};
|
|
});
|
|
|
|
if (isBrowser) {
|
|
head = s.head = document.getElementsByTagName('head')[0];
|
|
//If BASE tag is in play, using appendChild is a problem for IE6.
|
|
//When that browser dies, this can be removed. Details in this jQuery bug:
|
|
//http://dev.jquery.com/ticket/2709
|
|
baseElement = document.getElementsByTagName('base')[0];
|
|
if (baseElement) {
|
|
head = s.head = baseElement.parentNode;
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Any errors that require explicitly generates will be passed to this
|
|
* function. Intercept/override it if you want custom error handling.
|
|
* @param {Error} err the error object.
|
|
*/
|
|
req.onError = defaultOnError;
|
|
|
|
/**
|
|
* Creates the node for the load command. Only used in browser envs.
|
|
*/
|
|
req.createNode = function (config, moduleName, url) {
|
|
var node = config.xhtml ?
|
|
document.createElementNS('http://www.w3.org/1999/xhtml', 'html:script') :
|
|
document.createElement('script');
|
|
node.type = config.scriptType || 'text/javascript';
|
|
node.charset = 'utf-8';
|
|
node.async = true;
|
|
return node;
|
|
};
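//req.createNode can be overridden before any modules load, for example to
//stamp extra attributes onto every generated script tag. A hedged sketch
//(the nonce value is hypothetical, not something this file sets up):
//
//  var oldCreateNode = require.createNode;
//  require.createNode = function (config, moduleName, url) {
//      var node = oldCreateNode(config, moduleName, url);
//      node.setAttribute('nonce', 'abc123');
//      return node;
//  };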
/**
|
|
* Does the request to load a module for the browser case.
|
|
* Make this a separate function to allow other environments
|
|
* to override it.
|
|
*
|
|
* @param {Object} context the require context to find state.
|
|
* @param {String} moduleName the name of the module.
|
|
* @param {Object} url the URL to the module.
|
|
*/
|
|
req.load = function (context, moduleName, url) {
|
|
var config = (context && context.config) || {},
|
|
node;
|
|
if (isBrowser) {
|
|
//In the browser so use a script tag
|
|
node = req.createNode(config, moduleName, url);
|
|
|
|
node.setAttribute('data-requirecontext', context.contextName);
|
|
node.setAttribute('data-requiremodule', moduleName);
|
|
|
|
//Set up load listener. Test attachEvent first because IE9 has
|
|
//a subtle issue in its addEventListener and script onload firings
|
|
//that do not match the behavior of all other browsers with
|
|
//addEventListener support, which fire the onload event for a
|
|
//script right after the script execution. See:
|
|
//https://connect.microsoft.com/IE/feedback/details/648057/script-onload-event-is-not-fired-immediately-after-script-execution
|
|
//UNFORTUNATELY Opera implements attachEvent but does not follow the
//script execution behavior described above.
|
|
if (node.attachEvent &&
|
|
//Check if node.attachEvent is artificially added by custom script or
|
|
//natively supported by browser
|
|
//read https://github.com/requirejs/requirejs/issues/187
|
|
//if we can NOT find [native code] then it must NOT be natively supported.
|
|
//in IE8, node.attachEvent does not have toString()
|
|
//Note the test for "[native code" with no closing brace, see:
|
|
//https://github.com/requirejs/requirejs/issues/273
|
|
!(node.attachEvent.toString && node.attachEvent.toString().indexOf('[native code') < 0) &&
|
|
!isOpera) {
|
|
//Probably IE. IE (at least 6-8) does not fire
|
|
//script onload right after executing the script, so
|
|
//we cannot tie the anonymous define call to a name.
|
|
//However, IE reports the script as being in 'interactive'
|
|
//readyState at the time of the define call.
|
|
useInteractive = true;
|
|
|
|
node.attachEvent('onreadystatechange', context.onScriptLoad);
|
|
//It would be great to add an error handler here to catch
|
|
//404s in IE9+. However, onreadystatechange will fire before
|
|
//the error handler, so that does not help. If addEventListener
|
|
//is used, then IE will fire error before load, but we cannot
|
|
//use that pathway given the connect.microsoft.com issue
|
|
//mentioned above about not doing the 'script execute,
|
|
//then fire the script load event listener before execute
|
|
//next script' that other browsers do.
|
|
//Best hope: IE10 fixes the issues,
|
|
//and then destroys all installs of IE 6-9.
|
|
//node.attachEvent('onerror', context.onScriptError);
|
|
} else {
|
|
node.addEventListener('load', context.onScriptLoad, false);
|
|
node.addEventListener('error', context.onScriptError, false);
|
|
}
|
|
node.src = url;
|
|
|
|
//Calling onNodeCreated after all properties on the node have been
|
|
//set, but before it is placed in the DOM.
|
|
if (config.onNodeCreated) {
|
|
config.onNodeCreated(node, config, moduleName, url);
|
|
}
|
|
|
|
//For some cache cases in IE 6-8, the script executes before the end
|
|
//of the appendChild execution, so to tie an anonymous define
|
|
//call to the module name (which is stored on the node), hold on
|
|
//to a reference to this node, but clear after the DOM insertion.
|
|
currentlyAddingScript = node;
|
|
if (baseElement) {
|
|
head.insertBefore(node, baseElement);
|
|
} else {
|
|
head.appendChild(node);
|
|
}
|
|
currentlyAddingScript = null;
|
|
|
|
return node;
|
|
} else if (isWebWorker) {
|
|
try {
|
|
//In a web worker, use importScripts. This is not a very
//efficient use of importScripts; it will block until
//its script is downloaded and evaluated. However, if web workers
|
|
//are in play, the expectation is that a build has been done so
|
|
//that only one script needs to be loaded anyway. This may need
|
|
//to be reevaluated if other use cases become common.
|
|
|
|
// Post a task to the event loop to work around a bug in WebKit
|
|
// where the worker gets garbage-collected after calling
|
|
// importScripts(): https://webkit.org/b/153317
|
|
setTimeout(function() {}, 0);
|
|
importScripts(url);
|
|
|
|
//Account for anonymous modules
|
|
context.completeLoad(moduleName);
|
|
} catch (e) {
|
|
context.onError(makeError('importscripts',
|
|
'importScripts failed for ' +
|
|
moduleName + ' at ' + url,
|
|
e,
|
|
[moduleName]));
|
|
}
|
|
}
|
|
};
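//The onNodeCreated hook used above runs after the script node is fully
//configured but before it is inserted into the DOM. Sketch (the attribute
//chosen here is just an example):
//
//  require.config({
//      onNodeCreated: function (node, config, moduleName, url) {
//          node.setAttribute('crossorigin', 'anonymous');
//      }
//  });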
function getInteractiveScript() {
|
|
if (interactiveScript && interactiveScript.readyState === 'interactive') {
|
|
return interactiveScript;
|
|
}
|
|
|
|
eachReverse(scripts(), function (script) {
|
|
if (script.readyState === 'interactive') {
|
|
return (interactiveScript = script);
|
|
}
|
|
});
|
|
return interactiveScript;
|
|
}
|
|
|
|
//Look for a data-main script attribute, which could also adjust the baseUrl.
|
|
if (isBrowser && !cfg.skipDataMain) {
|
|
//Figure out baseUrl. Get it from the script tag with require.js in it.
|
|
eachReverse(scripts(), function (script) {
|
|
//Set the 'head' where we can append children by
|
|
//using the script's parent.
|
|
if (!head) {
|
|
head = script.parentNode;
|
|
}
|
|
|
|
//Look for a data-main attribute to set main script for the page
|
|
//to load. If it is there, the path to data main becomes the
|
|
//baseUrl, if it is not already set.
|
|
dataMain = script.getAttribute('data-main');
|
|
if (dataMain) {
|
|
//Preserve dataMain in case it is a path (i.e. contains '?')
|
|
mainScript = dataMain;
|
|
|
|
//Set final baseUrl if there is not already an explicit one,
|
|
//but only do so if the data-main value is not a loader plugin
|
|
//module ID.
|
|
if (!cfg.baseUrl && mainScript.indexOf('!') === -1) {
|
|
//Pull off the directory of data-main for use as the
|
|
//baseUrl.
|
|
src = mainScript.split('/');
|
|
mainScript = src.pop();
|
|
subPath = src.length ? src.join('/') + '/' : './';
|
|
|
|
cfg.baseUrl = subPath;
|
|
}
|
|
|
|
//Strip off any trailing .js since mainScript is now
|
|
//like a module name.
|
|
mainScript = mainScript.replace(jsSuffixRegExp, '');
|
|
|
|
//If mainScript is still a path, fall back to dataMain
|
|
if (req.jsExtRegExp.test(mainScript)) {
|
|
mainScript = dataMain;
|
|
}
|
|
|
|
//Put the data-main script in the files to load.
|
|
cfg.deps = cfg.deps ? cfg.deps.concat(mainScript) : [mainScript];
|
|
|
|
return true;
|
|
}
|
|
});
|
|
}
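//For reference, the data-main handling above corresponds to markup like the
//following (paths are hypothetical); the directory part of data-main becomes
//the baseUrl unless one is set explicitly in the config:
//
//  <script data-main="scripts/main" src="scripts/require.js"></script>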
/**
|
|
* The function that handles definitions of modules. Differs from
|
|
* require() in that a string for the module should be the first argument,
|
|
* and the function to execute after dependencies are loaded should
|
|
* return a value to define the module corresponding to the first argument's
|
|
* name.
|
|
*/
|
|
define = function (name, deps, callback) {
|
|
var node, context;
|
|
|
|
//Allow for anonymous modules
|
|
if (typeof name !== 'string') {
|
|
//Adjust args appropriately
|
|
callback = deps;
|
|
deps = name;
|
|
name = null;
|
|
}
|
|
|
|
//This module may not have dependencies
|
|
if (!isArray(deps)) {
|
|
callback = deps;
|
|
deps = null;
|
|
}
|
|
|
|
//If no name, and callback is a function, then figure out if it is a
//CommonJS thing with dependencies.
|
|
if (!deps && isFunction(callback)) {
|
|
deps = [];
|
|
//Remove comments from the callback string,
|
|
//look for require calls, and pull them into the dependencies,
|
|
//but only if there are function args.
|
|
if (callback.length) {
|
|
callback
|
|
.toString()
|
|
.replace(commentRegExp, commentReplace)
|
|
.replace(cjsRequireRegExp, function (match, dep) {
|
|
deps.push(dep);
|
|
});
|
|
|
|
//May be a CommonJS thing even without require calls, but still
|
|
//could use exports, and module. Avoid doing exports and module
|
|
//work though if it just needs require.
|
|
//REQUIRES the function to expect the CommonJS variables in the
|
|
//order listed below.
|
|
deps = (callback.length === 1 ? ['require'] : ['require', 'exports', 'module']).concat(deps);
|
|
}
|
|
}
|
|
|
|
//If in IE 6-8 and hit an anonymous define() call, do the interactive
|
|
//work.
|
|
if (useInteractive) {
|
|
node = currentlyAddingScript || getInteractiveScript();
|
|
if (node) {
|
|
if (!name) {
|
|
name = node.getAttribute('data-requiremodule');
|
|
}
|
|
context = contexts[node.getAttribute('data-requirecontext')];
|
|
}
|
|
}
|
|
|
|
//Always save off evaluating the def call until the script onload handler.
|
|
//This allows multiple modules to be in a file without prematurely
|
|
//tracing dependencies, and allows for anonymous module support,
|
|
//where the module name is not known until the script onload event
|
|
//occurs. If no context, use the global queue, and get it processed
|
|
//in the onscript load callback.
|
|
if (context) {
|
|
context.defQueue.push([name, deps, callback]);
|
|
context.defQueueMap[name] = true;
|
|
} else {
|
|
globalDefQueue.push([name, deps, callback]);
|
|
}
|
|
};
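//Common call shapes handled above (module ids and bodies are illustrative):
//
//  //Named module with explicit dependencies:
//  define('app/greeter', ['app/format'], function (format) {
//      return { hello: function (name) { return format('Hi, ' + name); } };
//  });
//
//  //Anonymous module using the CommonJS sugar; dependencies are sniffed
//  //from the factory's require() calls:
//  define(function (require, exports, module) {
//      var format = require('app/format');
//      exports.hello = function (name) { return format('Hi, ' + name); };
//  });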
define.amd = {
|
|
jQuery: true
|
|
};
|
|
|
|
/**
|
|
* Executes the text. Normally just uses eval, but can be modified
|
|
* to use a better, environment-specific call. Only used for transpiling
|
|
* loader plugins, not for plain JS modules.
|
|
* @param {String} text the text to execute/evaluate.
|
|
*/
|
|
req.exec = function (text) {
|
|
/*jslint evil: true */
|
|
return eval(text);
|
|
};
|
|
|
|
//Set up with config info.
|
|
req(cfg);
|
|
}(this, (typeof setTimeout === 'undefined' ? undefined : setTimeout)));
|
|
|
|
|
|
|
|
this.requirejsVars = {
|
|
require: require,
|
|
requirejs: require,
|
|
define: define
|
|
};
|
|
|
|
if (env === 'browser') {
|
|
//sloppy since eval enclosed with use strict causes problems if the source
|
|
//text is not strict-compliant.
|
|
/*jslint sloppy: true, evil: true */
|
|
/*global require, XMLHttpRequest */
|
|
|
|
(function () {
|
|
// Separate function to avoid eval pollution, same with arguments use.
|
|
function exec() {
|
|
eval(arguments[0]);
|
|
}
|
|
|
|
require.load = function (context, moduleName, url) {
|
|
var xhr = new XMLHttpRequest();
|
|
|
|
xhr.open('GET', url, true);
|
|
xhr.send();
|
|
|
|
xhr.onreadystatechange = function () {
|
|
if (xhr.readyState === 4) {
|
|
exec(xhr.responseText);
|
|
|
|
//Support anonymous modules.
|
|
context.completeLoad(moduleName);
|
|
}
|
|
};
|
|
};
|
|
}());
|
|
} else if (env === 'rhino') {
|
|
/*global require: false, java: false, load: false */
|
|
|
|
(function () {
|
|
'use strict';
|
|
require.load = function (context, moduleName, url) {
|
|
|
|
load(url);
|
|
|
|
//Support anonymous modules.
|
|
context.completeLoad(moduleName);
|
|
};
|
|
|
|
}());
|
|
} else if (env === 'node') {
|
|
this.requirejsVars.nodeRequire = nodeRequire;
|
|
require.nodeRequire = nodeRequire;
|
|
|
|
//Explicitly not strict, since this file contains an eval call and we do not want
//to enforce strict mode on code evaluated that way. See
|
|
//https://github.com/requirejs/r.js/issues/774
|
|
/*jslint regexp: false, sloppy: true*/
|
|
/*global require: false, define: false, requirejsVars: false, process: false */
|
|
|
|
/**
|
|
* This adapter assumes that x.js has loaded it and set up
|
|
* some variables. This adapter just allows limited RequireJS
|
|
* usage from within the requirejs directory. The general
|
|
* node adapter is r.js.
|
|
*/
|
|
|
|
(function () {
|
|
var nodeReq = requirejsVars.nodeRequire,
|
|
req = requirejsVars.require,
|
|
def = requirejsVars.define,
|
|
fs = nodeReq('fs'),
|
|
path = nodeReq('path'),
|
|
vm = nodeReq('vm'),
|
|
//In Node 0.7+ existsSync is on fs.
|
|
exists = fs.existsSync || path.existsSync,
|
|
hasOwn = Object.prototype.hasOwnProperty;
|
|
|
|
function hasProp(obj, prop) {
|
|
return hasOwn.call(obj, prop);
|
|
}
|
|
|
|
function syncTick(fn) {
|
|
fn();
|
|
}
|
|
|
|
function makeError(message, moduleName) {
|
|
var err = new Error(message);
|
|
err.requireModules = [moduleName];
|
|
return err;
|
|
}
|
|
|
|
//Supply an implementation that allows synchronous get of a module.
|
|
req.get = function (context, moduleName, relModuleMap, localRequire) {
|
|
if (moduleName === "require" || moduleName === "exports" || moduleName === "module") {
|
|
context.onError(makeError("Explicit require of " + moduleName + " is not allowed.", moduleName));
|
|
}
|
|
|
|
var ret, oldTick,
|
|
moduleMap = context.makeModuleMap(moduleName, relModuleMap, false, true);
|
|
|
|
//Normalize module name, if it contains . or ..
|
|
moduleName = moduleMap.id;
|
|
|
|
if (hasProp(context.defined, moduleName)) {
|
|
ret = context.defined[moduleName];
|
|
} else {
|
|
if (ret === undefined) {
|
|
//Make sure nextTick for this type of call is sync-based.
|
|
oldTick = context.nextTick;
|
|
context.nextTick = syncTick;
|
|
try {
|
|
if (moduleMap.prefix) {
|
|
//A plugin, call requirejs to handle it. Now that
|
|
//nextTick is syncTick, the require will complete
|
|
//synchronously.
|
|
localRequire([moduleMap.originalName]);
|
|
|
|
//Now that plugin is loaded, can regenerate the moduleMap
|
|
//to get the final, normalized ID.
|
|
moduleMap = context.makeModuleMap(moduleMap.originalName, relModuleMap, false, true);
|
|
moduleName = moduleMap.id;
|
|
} else {
|
|
//Try to dynamically fetch it.
|
|
req.load(context, moduleName, moduleMap.url);
|
|
|
|
//Enable the module
|
|
context.enable(moduleMap, relModuleMap);
|
|
}
|
|
|
|
//Break any cycles by requiring it normally, but this will
|
|
//finish synchronously
|
|
context.require([moduleName]);
|
|
|
|
//The above calls are sync, so can do the next thing safely.
|
|
ret = context.defined[moduleName];
|
|
} finally {
|
|
context.nextTick = oldTick;
|
|
}
|
|
}
|
|
}
|
|
|
|
return ret;
|
|
};
|
|
|
|
req.nextTick = function (fn) {
|
|
process.nextTick(fn);
|
|
};
|
|
|
|
//Add a wrapper around the code so that it gets the requirejs
//API instead of the Node API, and it is done lexically so
//that it survives later execution.
|
|
req.makeNodeWrapper = function (contents) {
|
|
return '(function (require, requirejs, define) { ' +
|
|
contents +
|
|
'\n}(requirejsVars.require, requirejsVars.requirejs, requirejsVars.define));';
|
|
};
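//Sketch of the wrapping above (the input text is hypothetical): a file
//containing
//
//  define(function () { return 42; });
//
//is evaluated as
//
//  (function (require, requirejs, define) { define(function () { return 42; });
//  }(requirejsVars.require, requirejsVars.requirejs, requirejsVars.define));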
req.load = function (context, moduleName, url) {
|
|
var contents, err,
|
|
config = context.config;
|
|
|
|
if (config.shim[moduleName] && (!config.suppress || !config.suppress.nodeShim)) {
|
|
console.warn('Shim config not supported in Node, may or may not work. Detected ' +
|
|
'for module: ' + moduleName);
|
|
}
|
|
|
|
if (exists(url)) {
|
|
contents = fs.readFileSync(url, 'utf8');
|
|
|
|
contents = req.makeNodeWrapper(contents);
|
|
try {
|
|
vm.runInThisContext(contents, fs.realpathSync(url));
|
|
} catch (e) {
|
|
err = new Error('Evaluating ' + url + ' as module "' +
|
|
moduleName + '" failed with error: ' + e);
|
|
err.originalError = e;
|
|
err.moduleName = moduleName;
|
|
err.requireModules = [moduleName];
|
|
err.fileName = url;
|
|
return context.onError(err);
|
|
}
|
|
} else {
|
|
def(moduleName, function () {
|
|
//Get the original name, since relative requires may be
|
|
//resolved differently in node (issue #202). Also, if relative,
|
|
//make it relative to the URL of the item requesting it
|
|
//(issue #393)
|
|
var dirName,
|
|
map = hasProp(context.registry, moduleName) &&
|
|
context.registry[moduleName].map,
|
|
parentMap = map && map.parentMap,
|
|
originalName = map && map.originalName;
|
|
|
|
if (originalName.charAt(0) === '.' && parentMap) {
|
|
dirName = parentMap.url.split('/');
|
|
dirName.pop();
|
|
originalName = dirName.join('/') + '/' + originalName;
|
|
}
|
|
|
|
try {
|
|
return (context.config.nodeRequire || req.nodeRequire)(originalName);
|
|
} catch (e) {
|
|
err = new Error('Tried loading "' + moduleName + '" at ' +
|
|
url + ' then tried node\'s require("' +
|
|
originalName + '") and it failed ' +
|
|
'with error: ' + e);
|
|
err.originalError = e;
|
|
err.moduleName = originalName;
|
|
err.requireModules = [moduleName];
|
|
throw err;
|
|
}
|
|
});
|
|
}
|
|
|
|
//Support anonymous modules.
|
|
context.completeLoad(moduleName);
|
|
};
|
|
|
|
//Override to provide the function wrapper for define/require.
|
|
req.exec = function (text) {
|
|
/*jslint evil: true */
|
|
text = req.makeNodeWrapper(text);
|
|
return eval(text);
|
|
};
|
|
}());
|
|
|
|
} else if (env === 'xpconnect') {
|
|
/*jslint */
|
|
/*global require, load */
|
|
|
|
(function () {
|
|
'use strict';
|
|
require.load = function (context, moduleName, url) {
|
|
|
|
load(url);
|
|
|
|
//Support anonymous modules.
|
|
context.completeLoad(moduleName);
|
|
};
|
|
|
|
}());
|
|
|
|
}
|
|
|
|
//Support a default file name to execute. Useful for hosted envs
|
|
//like Joyent where it defaults to a server.js as the only executed
|
|
//script. But only do it if this is not an optimization run.
|
|
if (commandOption !== 'o' && (!fileName || !jsSuffixRegExp.test(fileName))) {
|
|
fileName = 'main.js';
|
|
}
|
|
|
|
/**
|
|
* Loads the library files that can be used for the optimizer, or for other
|
|
* tasks.
|
|
*/
|
|
function loadLib() {
|
|
/*jslint strict: false */
|
|
/*global Packages: false, process: false, window: false, navigator: false,
|
|
document: false, define: false */
|
|
|
|
/**
|
|
* A plugin that modifies any /env/ path to be the right path based on
|
|
* the host environment. Right now only works for Node, Rhino and browser.
|
|
*/
|
|
(function () {
|
|
var pathRegExp = /(\/|^)env\/|\{env\}/,
|
|
env = 'unknown';
|
|
|
|
if (typeof process !== 'undefined' && process.versions && !!process.versions.node) {
|
|
env = 'node';
|
|
} else if (typeof Packages !== 'undefined') {
|
|
env = 'rhino';
|
|
} else if ((typeof navigator !== 'undefined' && typeof document !== 'undefined') ||
|
|
(typeof importScripts !== 'undefined' && typeof self !== 'undefined')) {
|
|
env = 'browser';
|
|
} else if (typeof Components !== 'undefined' && Components.classes && Components.interfaces) {
|
|
env = 'xpconnect';
|
|
}
|
|
|
|
define('env', {
|
|
get: function () {
|
|
return env;
|
|
},
|
|
|
|
load: function (name, req, load, config) {
|
|
//Allow override in the config.
|
|
if (config.env) {
|
|
env = config.env;
|
|
}
|
|
|
|
name = name.replace(pathRegExp, function (match, prefix) {
|
|
if (match.indexOf('{') === -1) {
|
|
return prefix + env + '/';
|
|
} else {
|
|
return env;
|
|
}
|
|
});
|
|
|
|
req([name], function (mod) {
|
|
load(mod);
|
|
});
|
|
}
|
|
});
|
|
}());
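//Usage sketch for the 'env' plugin defined above: a dependency id written as
//'env!env/file' (or containing '{env}') is rewritten to the detected host
//before loading, e.g. 'node/file' under Node or 'rhino/file' under Rhino,
//and config.env can force a particular host:
//
//  define(['env!env/file'], function (file) { /* host-specific file module */ });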
/*jslint plusplus: true */
|
|
/*global define, java */
|
|
|
|
define('lang', function () {
|
|
'use strict';
|
|
|
|
var lang, isJavaObj,
|
|
hasOwn = Object.prototype.hasOwnProperty;
|
|
|
|
function hasProp(obj, prop) {
|
|
return hasOwn.call(obj, prop);
|
|
}
|
|
|
|
isJavaObj = function () {
|
|
return false;
|
|
};
|
|
|
|
//Rhino, but not Nashorn (detected by importPackage not existing),
//can have some strange foreign objects.
|
|
if (typeof java !== 'undefined' && java.lang && java.lang.Object && typeof importPackage !== 'undefined') {
|
|
isJavaObj = function (obj) {
|
|
return obj instanceof java.lang.Object;
|
|
};
|
|
}
|
|
|
|
lang = {
|
|
backSlashRegExp: /\\/g,
|
|
ostring: Object.prototype.toString,
|
|
|
|
isArray: Array.isArray || function (it) {
|
|
return lang.ostring.call(it) === "[object Array]";
|
|
},
|
|
|
|
isFunction: function(it) {
|
|
return lang.ostring.call(it) === "[object Function]";
|
|
},
|
|
|
|
isRegExp: function(it) {
|
|
return it && it instanceof RegExp;
|
|
},
|
|
|
|
hasProp: hasProp,
|
|
|
|
//returns true if the object does not have an own property prop,
|
|
//or if it does, it is a falsy value.
|
|
falseProp: function (obj, prop) {
|
|
return !hasProp(obj, prop) || !obj[prop];
|
|
},
|
|
|
|
//gets own property value for given prop on object
|
|
getOwn: function (obj, prop) {
|
|
return hasProp(obj, prop) && obj[prop];
|
|
},
|
|
|
|
_mixin: function(dest, source, override){
|
|
var name;
|
|
for (name in source) {
|
|
if(source.hasOwnProperty(name) &&
|
|
(override || !dest.hasOwnProperty(name))) {
|
|
dest[name] = source[name];
|
|
}
|
|
}
|
|
|
|
return dest; // Object
|
|
},
|
|
|
|
/**
|
|
* mixin({}, obj1, obj2) is allowed. If the last argument is a boolean,
|
|
* then the source objects' properties are force-copied over to dest.
|
|
*/
|
|
mixin: function(dest){
|
|
var parameters = Array.prototype.slice.call(arguments),
|
|
override, i, l;
|
|
|
|
if (!dest) { dest = {}; }
|
|
|
|
if (parameters.length > 2 && typeof arguments[parameters.length-1] === 'boolean') {
|
|
override = parameters.pop();
|
|
}
|
|
|
|
for (i = 1, l = parameters.length; i < l; i++) {
|
|
lang._mixin(dest, parameters[i], override);
|
|
}
|
|
return dest; // Object
|
|
},
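//Sketch of lang.mixin above (values are illustrative):
//lang.mixin({a: 1}, {a: 2, b: 3}) keeps the existing 'a' and yields
//{a: 1, b: 3}; passing true as the final argument force-copies,
//yielding {a: 2, b: 3}.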
/**
|
|
* Does a deep mix of source into dest, where source values override
|
|
* dest values if a winner is needed.
|
|
* @param {Object} dest destination object that receives the mixed
|
|
* values.
|
|
* @param {Object} source source object contributing properties to mix
|
|
* in.
|
|
* @return {[Object]} returns dest object with the modification.
|
|
*/
|
|
deepMix: function(dest, source) {
|
|
lang.eachProp(source, function (value, prop) {
|
|
if (typeof value === 'object' && value &&
|
|
!lang.isArray(value) && !lang.isFunction(value) &&
|
|
!(value instanceof RegExp)) {
|
|
|
|
if (!dest[prop]) {
|
|
dest[prop] = {};
|
|
}
|
|
lang.deepMix(dest[prop], value);
|
|
} else {
|
|
dest[prop] = value;
|
|
}
|
|
});
|
|
return dest;
|
|
},
|
|
|
|
/**
|
|
* Does a type of deep copy. Do not give it anything fancy, best
|
|
* for basic object copies of objects that also work well as
|
|
* JSON-serialized things, or have properties pointing to functions.
|
|
* For non-array/object values, just returns the same object.
|
|
* @param {Object} obj copy properties from this object
|
|
* @param {Object} [ignoredProps] optional object whose own properties
|
|
* are keys that should be ignored.
|
|
* @return {Object}
|
|
*/
|
|
deeplikeCopy: function (obj, ignoredProps) {
|
|
var type, result;
|
|
|
|
if (lang.isArray(obj)) {
|
|
result = [];
|
|
obj.forEach(function(value) {
|
|
result.push(lang.deeplikeCopy(value, ignoredProps));
|
|
});
|
|
return result;
|
|
}
|
|
|
|
type = typeof obj;
|
|
if (obj === null || obj === undefined || type === 'boolean' ||
|
|
type === 'string' || type === 'number' || lang.isFunction(obj) ||
|
|
lang.isRegExp(obj)|| isJavaObj(obj)) {
|
|
return obj;
|
|
}
|
|
|
|
//Anything else is an object, hopefully.
|
|
result = {};
|
|
lang.eachProp(obj, function(value, key) {
|
|
if (!ignoredProps || !hasProp(ignoredProps, key)) {
|
|
result[key] = lang.deeplikeCopy(value, ignoredProps);
|
|
}
|
|
});
|
|
return result;
|
|
},
|
|
|
|
delegate: (function () {
|
|
// boodman/crockford delegation w/ cornford optimization
|
|
function TMP() {}
|
|
return function (obj, props) {
|
|
TMP.prototype = obj;
|
|
var tmp = new TMP();
|
|
TMP.prototype = null;
|
|
if (props) {
|
|
lang.mixin(tmp, props);
|
|
}
|
|
return tmp; // Object
|
|
};
|
|
}()),
|
|
|
|
/**
|
|
* Helper function for iterating over an array. If the func returns
|
|
* a true value, it will break out of the loop.
|
|
*/
|
|
each: function each(ary, func) {
|
|
if (ary) {
|
|
var i;
|
|
for (i = 0; i < ary.length; i += 1) {
|
|
if (func(ary[i], i, ary)) {
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
},
|
|
|
|
/**
|
|
* Cycles over properties in an object and calls a function for each
|
|
* property value. If the function returns a truthy value, then the
|
|
* iteration is stopped.
|
|
*/
|
|
eachProp: function eachProp(obj, func) {
|
|
var prop;
|
|
for (prop in obj) {
|
|
if (hasProp(obj, prop)) {
|
|
if (func(obj[prop], prop)) {
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
},
|
|
|
|
//Similar to Function.prototype.bind, but the "this" object is specified
|
|
//first, since it is easier to read/figure out what "this" will be.
|
|
bind: function bind(obj, fn) {
|
|
return function () {
|
|
return fn.apply(obj, arguments);
|
|
};
|
|
},
|
|
|
|
//Escapes a content string so that its characters are escaped
//for inclusion as part of a JS string.
|
|
jsEscape: function (content) {
|
|
return content.replace(/(["'\\])/g, '\\$1')
|
|
.replace(/[\f]/g, "\\f")
|
|
.replace(/[\b]/g, "\\b")
|
|
.replace(/[\n]/g, "\\n")
|
|
.replace(/[\t]/g, "\\t")
|
|
.replace(/[\r]/g, "\\r");
|
|
}
|
|
};
|
|
return lang;
|
|
});
|
|
/**
|
|
* prim 0.0.1 Copyright (c) 2012-2014, The Dojo Foundation All Rights Reserved.
|
|
* Available via the MIT or new BSD license.
|
|
* see: http://github.com/requirejs/prim for details
|
|
*/
|
|
|
|
/*global setImmediate, process, setTimeout, define, module */
|
|
|
|
//Set prim.hideResolutionConflict = true to allow "resolution-races"
//in promise-tests to pass.
//Since the goal of prim is to be a small impl for trusted code, it is
//more important to normally throw in this case so that we can find
//logic errors more quickly.
|
|
|
|
var prim;
|
|
(function () {
|
|
'use strict';
|
|
var op = Object.prototype,
|
|
hasOwn = op.hasOwnProperty;
|
|
|
|
function hasProp(obj, prop) {
|
|
return hasOwn.call(obj, prop);
|
|
}
|
|
|
|
/**
|
|
* Helper function for iterating over an array. If the func returns
|
|
* a true value, it will break out of the loop.
|
|
*/
|
|
function each(ary, func) {
|
|
if (ary) {
|
|
var i;
|
|
for (i = 0; i < ary.length; i += 1) {
|
|
if (ary[i]) {
|
|
func(ary[i], i, ary);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
function check(p) {
|
|
if (hasProp(p, 'e') || hasProp(p, 'v')) {
|
|
if (!prim.hideResolutionConflict) {
|
|
throw new Error('Prim promise already resolved: ' +
|
|
JSON.stringify(p));
|
|
}
|
|
return false;
|
|
}
|
|
return true;
|
|
}
|
|
|
|
function notify(ary, value) {
|
|
prim.nextTick(function () {
|
|
each(ary, function (item) {
|
|
item(value);
|
|
});
|
|
});
|
|
}
|
|
|
|
prim = function prim() {
|
|
var p,
|
|
ok = [],
|
|
fail = [];
|
|
|
|
return (p = {
|
|
callback: function (yes, no) {
|
|
if (no) {
|
|
p.errback(no);
|
|
}
|
|
|
|
if (hasProp(p, 'v')) {
|
|
prim.nextTick(function () {
|
|
yes(p.v);
|
|
});
|
|
} else {
|
|
ok.push(yes);
|
|
}
|
|
},
|
|
|
|
errback: function (no) {
|
|
if (hasProp(p, 'e')) {
|
|
prim.nextTick(function () {
|
|
no(p.e);
|
|
});
|
|
} else {
|
|
fail.push(no);
|
|
}
|
|
},
|
|
|
|
finished: function () {
|
|
return hasProp(p, 'e') || hasProp(p, 'v');
|
|
},
|
|
|
|
rejected: function () {
|
|
return hasProp(p, 'e');
|
|
},
|
|
|
|
resolve: function (v) {
|
|
if (check(p)) {
|
|
p.v = v;
|
|
notify(ok, v);
|
|
}
|
|
return p;
|
|
},
|
|
reject: function (e) {
|
|
if (check(p)) {
|
|
p.e = e;
|
|
notify(fail, e);
|
|
}
|
|
return p;
|
|
},
|
|
|
|
start: function (fn) {
|
|
p.resolve();
|
|
return p.promise.then(fn);
|
|
},
|
|
|
|
promise: {
|
|
then: function (yes, no) {
|
|
var next = prim();
|
|
|
|
p.callback(function (v) {
|
|
try {
|
|
if (yes && typeof yes === 'function') {
|
|
v = yes(v);
|
|
}
|
|
|
|
if (v && v.then) {
|
|
v.then(next.resolve, next.reject);
|
|
} else {
|
|
next.resolve(v);
|
|
}
|
|
} catch (e) {
|
|
next.reject(e);
|
|
}
|
|
}, function (e) {
|
|
var err;
|
|
|
|
try {
|
|
if (!no || typeof no !== 'function') {
|
|
next.reject(e);
|
|
} else {
|
|
err = no(e);
|
|
|
|
if (err && err.then) {
|
|
err.then(next.resolve, next.reject);
|
|
} else {
|
|
next.resolve(err);
|
|
}
|
|
}
|
|
} catch (e2) {
|
|
next.reject(e2);
|
|
}
|
|
});
|
|
|
|
return next.promise;
|
|
},
|
|
|
|
fail: function (no) {
|
|
return p.promise.then(null, no);
|
|
},
|
|
|
|
end: function () {
|
|
p.errback(function (e) {
|
|
throw e;
|
|
});
|
|
}
|
|
}
|
|
});
|
|
};
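//Minimal usage sketch of the prim promise above (values are illustrative):
//
//  var d = prim();
//  d.promise.then(function (v) {
//      console.log('got', v);
//  }, function (err) {
//      console.error(err);
//  });
//  d.resolve('done');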
prim.serial = function (ary) {
|
|
var result = prim().resolve().promise;
|
|
each(ary, function (item) {
|
|
result = result.then(function () {
|
|
return item();
|
|
});
|
|
});
|
|
return result;
|
|
};
|
|
|
|
prim.nextTick = typeof setImmediate === 'function' ? setImmediate :
|
|
(typeof process !== 'undefined' && process.nextTick ?
|
|
process.nextTick : (typeof setTimeout !== 'undefined' ?
|
|
function (fn) {
|
|
setTimeout(fn, 0);
|
|
} : function (fn) {
|
|
fn();
|
|
}));
|
|
|
|
if (typeof define === 'function' && define.amd) {
|
|
define('prim', function () { return prim; });
|
|
} else if (typeof module !== 'undefined' && module.exports) {
|
|
module.exports = prim;
|
|
}
|
|
}());
|
|
if(env === 'browser') {
|
|
/*jslint strict: false */
|
|
/*global define: false, load: false */
|
|
|
|
//Just a stub for use with uglify's consolidator.js
|
|
define('browser/assert', function () {
|
|
return {};
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'node') {
|
|
/*jslint strict: false */
|
|
/*global define: false, load: false */
|
|
|
|
//Needed so that rhino/assert can return a stub for uglify's consolidator.js
|
|
define('node/assert', ['assert'], function (assert) {
|
|
return assert;
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'rhino') {
|
|
/*jslint strict: false */
|
|
/*global define: false, load: false */
|
|
|
|
//Just a stub for use with uglify's consolidator.js
|
|
define('rhino/assert', function () {
|
|
return {};
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'xpconnect') {
|
|
/*jslint strict: false */
|
|
/*global define: false, load: false */
|
|
|
|
//Just a stub for use with uglify's consolidator.js
|
|
define('xpconnect/assert', function () {
|
|
return {};
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'browser') {
|
|
/*jslint strict: false */
|
|
/*global define: false, process: false */
|
|
|
|
define('browser/args', function () {
|
|
//Always expect config via an API call
|
|
return [];
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'node') {
|
|
/*jslint strict: false */
|
|
/*global define: false, process: false */
|
|
|
|
define('node/args', function () {
|
|
//Do not return the "node" or "r.js" arguments
|
|
var args = process.argv.slice(2);
|
|
|
|
//Ignore any command option used for main x.js branching
|
|
if (args[0] && args[0].indexOf('-') === 0) {
|
|
args = args.slice(1);
|
|
}
|
|
|
|
return args;
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'rhino') {
|
|
/*jslint strict: false */
|
|
/*global define: false, process: false */
|
|
|
|
var jsLibRhinoArgs = (typeof rhinoArgs !== 'undefined' && rhinoArgs) || [].concat(Array.prototype.slice.call(arguments, 0));
|
|
|
|
define('rhino/args', function () {
|
|
var args = jsLibRhinoArgs;
|
|
|
|
//Ignore any command option used for main x.js branching
|
|
if (args[0] && args[0].indexOf('-') === 0) {
|
|
args = args.slice(1);
|
|
}
|
|
|
|
return args;
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'xpconnect') {
|
|
/*jslint strict: false */
|
|
/*global define, xpconnectArgs */
|
|
|
|
var jsLibXpConnectArgs = (typeof xpconnectArgs !== 'undefined' && xpconnectArgs) || [].concat(Array.prototype.slice.call(arguments, 0));
|
|
|
|
define('xpconnect/args', function () {
|
|
var args = jsLibXpConnectArgs;
|
|
|
|
//Ignore any command option used for main x.js branching
|
|
if (args[0] && args[0].indexOf('-') === 0) {
|
|
args = args.slice(1);
|
|
}
|
|
|
|
return args;
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'browser') {
|
|
/*jslint strict: false */
|
|
/*global define: false, console: false */
|
|
|
|
define('browser/load', ['./file'], function (file) {
|
|
function load(fileName) {
|
|
eval(file.readFile(fileName));
|
|
}
|
|
|
|
return load;
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'node') {
|
|
/*jslint strict: false */
|
|
/*global define: false, console: false */
|
|
|
|
define('node/load', ['fs'], function (fs) {
|
|
function load(fileName) {
|
|
var contents = fs.readFileSync(fileName, 'utf8');
|
|
process.compile(contents, fileName);
|
|
}
|
|
|
|
return load;
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'rhino') {
|
|
/*jslint strict: false */
|
|
/*global define: false, load: false */
|
|
|
|
define('rhino/load', function () {
|
|
return load;
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'xpconnect') {
|
|
/*jslint strict: false */
|
|
/*global define: false, load: false */
|
|
|
|
define('xpconnect/load', function () {
|
|
return load;
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'browser') {
|
|
/*jslint sloppy: true, nomen: true */
|
|
/*global require, define, console, XMLHttpRequest, requirejs, location */
|
|
|
|
define('browser/file', ['prim'], function (prim) {
|
|
|
|
var file,
|
|
currDirRegExp = /^\.(\/|$)/;
|
|
|
|
function frontSlash(path) {
|
|
return path.replace(/\\/g, '/');
|
|
}
|
|
|
|
function exists(path) {
|
|
var status, xhr = new XMLHttpRequest();
|
|
|
|
//Oh yeah, that is right SYNC IO. Behold its glory
|
|
//and horrible blocking behavior.
|
|
xhr.open('HEAD', path, false);
|
|
xhr.send();
|
|
status = xhr.status;
|
|
|
|
return status === 200 || status === 304;
|
|
}
|
|
|
|
function mkDir(dir) {
|
|
console.log('mkDir is no-op in browser');
|
|
}
|
|
|
|
function mkFullDir(dir) {
|
|
console.log('mkFullDir is no-op in browser');
|
|
}
|
|
|
|
file = {
|
|
backSlashRegExp: /\\/g,
|
|
exclusionRegExp: /^\./,
|
|
getLineSeparator: function () {
|
|
return '/';
|
|
},
|
|
|
|
exists: function (fileName) {
|
|
return exists(fileName);
|
|
},
|
|
|
|
parent: function (fileName) {
|
|
var parts = fileName.split('/');
|
|
parts.pop();
|
|
return parts.join('/');
|
|
},
|
|
|
|
/**
|
|
* Gets the absolute file path as a string, normalized
|
|
* to using front slashes for path separators.
|
|
* @param {String} fileName
|
|
*/
|
|
absPath: function (fileName) {
|
|
var dir;
|
|
if (currDirRegExp.test(fileName)) {
|
|
dir = frontSlash(location.href);
|
|
if (dir.indexOf('/') !== -1) {
|
|
dir = dir.split('/');
|
|
|
|
//Pull off protocol and host, just want
|
|
//to allow paths (other build parts, like
|
|
//require._isSupportedBuildUrl do not support
|
|
//full URLs), but a full path from
|
|
//the root.
|
|
dir.splice(0, 3);
|
|
|
|
dir.pop();
|
|
dir = '/' + dir.join('/');
|
|
}
|
|
|
|
fileName = dir + fileName.substring(1);
|
|
}
|
|
|
|
return fileName;
|
|
},
|
|
|
|
normalize: function (fileName) {
|
|
return fileName;
|
|
},
|
|
|
|
isFile: function (path) {
|
|
return true;
|
|
},
|
|
|
|
isDirectory: function (path) {
|
|
return false;
|
|
},
|
|
|
|
getFilteredFileList: function (startDir, regExpFilters, makeUnixPaths) {
|
|
console.log('file.getFilteredFileList is no-op in browser');
|
|
},
|
|
|
|
copyDir: function (srcDir, destDir, regExpFilter, onlyCopyNew) {
|
|
console.log('file.copyDir is no-op in browser');
|
|
|
|
},
|
|
|
|
copyFile: function (srcFileName, destFileName, onlyCopyNew) {
|
|
console.log('file.copyFile is no-op in browser');
|
|
},
|
|
|
|
/**
|
|
* Renames a file. May fail if "to" already exists or is on another drive.
|
|
*/
|
|
renameFile: function (from, to) {
|
|
console.log('file.renameFile is no-op in browser');
|
|
},
|
|
|
|
/**
|
|
* Reads a *text* file.
|
|
*/
|
|
readFile: function (path, encoding) {
|
|
var xhr = new XMLHttpRequest();
|
|
|
|
//Oh yeah, that is right SYNC IO. Behold its glory
|
|
//and horrible blocking behavior.
|
|
xhr.open('GET', path, false);
|
|
xhr.send();
|
|
|
|
return xhr.responseText;
|
|
},
|
|
|
|
readFileAsync: function (path, encoding) {
|
|
var xhr = new XMLHttpRequest(),
|
|
d = prim();
|
|
|
|
xhr.open('GET', path, true);
|
|
xhr.send();
|
|
|
|
xhr.onreadystatechange = function () {
|
|
if (xhr.readyState === 4) {
|
|
if (xhr.status > 400) {
|
|
d.reject(new Error('Status: ' + xhr.status + ': ' + xhr.statusText));
|
|
} else {
|
|
d.resolve(xhr.responseText);
|
|
}
|
|
}
|
|
};
|
|
|
|
return d.promise;
|
|
},
|
|
|
|
saveUtf8File: function (fileName, fileContents) {
|
|
//summary: saves a *text* file using UTF-8 encoding.
|
|
file.saveFile(fileName, fileContents, "utf8");
|
|
},
|
|
|
|
saveFile: function (fileName, fileContents, encoding) {
|
|
requirejs.browser.saveFile(fileName, fileContents, encoding);
|
|
},
|
|
|
|
deleteFile: function (fileName) {
|
|
console.log('file.deleteFile is no-op in browser');
|
|
},
|
|
|
|
/**
|
|
* Deletes any empty directories under the given directory.
|
|
*/
|
|
deleteEmptyDirs: function (startDir) {
|
|
console.log('file.deleteEmptyDirs is no-op in browser');
|
|
}
|
|
};
|
|
|
|
return file;
|
|
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'node') {
|
|
/*jslint plusplus: false, octal:false, strict: false */
|
|
/*global define: false, process: false */
|
|
|
|
define('node/file', ['fs', 'path', 'prim'], function (fs, path, prim) {
|
|
|
|
var isWindows = process.platform === 'win32',
|
|
windowsDriveRegExp = /^[a-zA-Z]\:\/$/,
|
|
file;
|
|
|
|
function frontSlash(path) {
|
|
return path.replace(/\\/g, '/');
|
|
}
|
|
|
|
function exists(path) {
|
|
if (isWindows && path.charAt(path.length - 1) === '/' &&
|
|
path.charAt(path.length - 2) !== ':') {
|
|
path = path.substring(0, path.length - 1);
|
|
}
|
|
|
|
try {
|
|
fs.statSync(path);
|
|
return true;
|
|
} catch (e) {
|
|
return false;
|
|
}
|
|
}
|
|
|
|
function mkDir(dir) {
|
|
if (!exists(dir) && (!isWindows || !windowsDriveRegExp.test(dir))) {
|
|
fs.mkdirSync(dir, 511);
|
|
}
|
|
}
|
|
|
|
function mkFullDir(dir) {
|
|
var parts = dir.split('/'),
|
|
currDir = '',
|
|
first = true;
|
|
|
|
parts.forEach(function (part) {
|
|
//First part may be empty string if path starts with a slash.
|
|
currDir += part + '/';
|
|
first = false;
|
|
|
|
if (part) {
|
|
mkDir(currDir);
|
|
}
|
|
});
|
|
}
|
|
|
|
file = {
|
|
backSlashRegExp: /\\/g,
|
|
exclusionRegExp: /^\./,
|
|
getLineSeparator: function () {
|
|
return '/';
|
|
},
|
|
|
|
exists: function (fileName) {
|
|
return exists(fileName);
|
|
},
|
|
|
|
parent: function (fileName) {
|
|
var parts = fileName.split('/');
|
|
parts.pop();
|
|
return parts.join('/');
|
|
},
|
|
|
|
/**
|
|
* Gets the absolute file path as a string, normalized
|
|
* to using front slashes for path separators.
|
|
* @param {String} fileName
|
|
*/
|
|
absPath: function (fileName) {
|
|
return frontSlash(path.normalize(frontSlash(fs.realpathSync(fileName))));
|
|
},
|
|
|
|
normalize: function (fileName) {
|
|
return frontSlash(path.normalize(fileName));
|
|
},
|
|
|
|
isFile: function (path) {
|
|
return fs.statSync(path).isFile();
|
|
},
|
|
|
|
isDirectory: function (path) {
|
|
return fs.statSync(path).isDirectory();
|
|
},
|
|
|
|
getFilteredFileList: function (/*String*/startDir, /*RegExp*/regExpFilters, /*boolean?*/makeUnixPaths) {
|
|
//summary: Recurses startDir and finds matches to the files that match regExpFilters.include
|
|
//and do not match regExpFilters.exclude. Or just one regexp can be passed in for regExpFilters,
|
|
//and it will be treated as the "include" case.
|
|
//Ignores files/directories that start with a period (.) unless exclusionRegExp
|
|
//is set to another value.
|
|
var files = [], topDir, regExpInclude, regExpExclude, dirFileArray,
|
|
i, stat, filePath, ok, dirFiles, fileName;
|
|
|
|
topDir = startDir;
|
|
|
|
regExpInclude = regExpFilters.include || regExpFilters;
|
|
regExpExclude = regExpFilters.exclude || null;
|
|
|
|
if (file.exists(topDir)) {
|
|
dirFileArray = fs.readdirSync(topDir);
|
|
for (i = 0; i < dirFileArray.length; i++) {
|
|
fileName = dirFileArray[i];
|
|
filePath = path.join(topDir, fileName);
|
|
stat = fs.statSync(filePath);
|
|
if (stat.isFile()) {
|
|
if (makeUnixPaths) {
|
|
//Make sure we have a JS string.
|
|
if (filePath.indexOf("/") === -1) {
|
|
filePath = frontSlash(filePath);
|
|
}
|
|
}
|
|
|
|
ok = true;
|
|
if (regExpInclude) {
|
|
ok = filePath.match(regExpInclude);
|
|
}
|
|
if (ok && regExpExclude) {
|
|
ok = !filePath.match(regExpExclude);
|
|
}
|
|
|
|
if (ok && (!file.exclusionRegExp ||
|
|
!file.exclusionRegExp.test(fileName))) {
|
|
files.push(filePath);
|
|
}
|
|
} else if (stat.isDirectory() &&
|
|
(!file.exclusionRegExp || !file.exclusionRegExp.test(fileName))) {
|
|
dirFiles = this.getFilteredFileList(filePath, regExpFilters, makeUnixPaths);
|
|
//Do not use push.apply for dir listings, can hit limit of max number
|
|
//of arguments to a function call, #921.
|
|
dirFiles.forEach(function (dirFile) {
|
|
files.push(dirFile);
|
|
});
|
|
}
|
|
}
|
|
}
|
|
|
|
return files; //Array
|
|
},
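//Sketch of calling the helper above (paths and patterns are hypothetical):
//
//  var jsFiles = file.getFilteredFileList('src', {
//      include: /\.js$/,
//      exclude: /spec/
//  }, true);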
copyDir: function (/*String*/srcDir, /*String*/destDir, /*RegExp?*/regExpFilter, /*boolean?*/onlyCopyNew) {
|
|
//summary: copies files from srcDir to destDir using the regExpFilter to determine if the
|
|
//file should be copied. Returns a list of file name strings for the destinations that were copied.
|
|
regExpFilter = regExpFilter || /\w/;
|
|
|
|
//Normalize the directory names, but keep front slashes.
//The path module on Windows now returns backslashed paths.
|
|
srcDir = frontSlash(path.normalize(srcDir));
|
|
destDir = frontSlash(path.normalize(destDir));
|
|
|
|
var fileNames = file.getFilteredFileList(srcDir, regExpFilter, true),
|
|
copiedFiles = [], i, srcFileName, destFileName;
|
|
|
|
for (i = 0; i < fileNames.length; i++) {
|
|
srcFileName = fileNames[i];
|
|
destFileName = srcFileName.replace(srcDir, destDir);
|
|
|
|
if (file.copyFile(srcFileName, destFileName, onlyCopyNew)) {
|
|
copiedFiles.push(destFileName);
|
|
}
|
|
}
|
|
|
|
return copiedFiles.length ? copiedFiles : null; //Array or null
|
|
},
|
|
|
|
copyFile: function (/*String*/srcFileName, /*String*/destFileName, /*boolean?*/onlyCopyNew) {
|
|
//summary: copies srcFileName to destFileName. If onlyCopyNew is set, it only copies the file if
|
|
//srcFileName is newer than destFileName. Returns a boolean indicating if the copy occurred.
|
|
var parentDir;
|
|
|
|
//logger.trace("Src filename: " + srcFileName);
|
|
//logger.trace("Dest filename: " + destFileName);
|
|
|
|
//If onlyCopyNew is true, then compare dates and only copy if the src is newer
|
|
//than dest.
|
|
if (onlyCopyNew) {
|
|
if (file.exists(destFileName) && fs.statSync(destFileName).mtime.getTime() >= fs.statSync(srcFileName).mtime.getTime()) {
|
|
return false; //Boolean
|
|
}
|
|
}
|
|
|
|
//Make sure destination dir exists.
|
|
parentDir = path.dirname(destFileName);
|
|
if (!file.exists(parentDir)) {
|
|
mkFullDir(parentDir);
|
|
}
|
|
|
|
fs.writeFileSync(destFileName, fs.readFileSync(srcFileName, 'binary'), 'binary');
|
|
|
|
return true; //Boolean
|
|
},
|
|
|
|
/**
|
|
* Renames a file. May fail if "to" already exists or is on another drive.
|
|
*/
|
|
renameFile: function (from, to) {
|
|
return fs.renameSync(from, to);
|
|
},
|
|
|
|
/**
|
|
* Reads a *text* file.
|
|
*/
|
|
readFile: function (/*String*/path, /*String?*/encoding) {
|
|
if (encoding === 'utf-8') {
|
|
encoding = 'utf8';
|
|
}
|
|
if (!encoding) {
|
|
encoding = 'utf8';
|
|
}
|
|
|
|
var text = fs.readFileSync(path, encoding);
|
|
|
|
//Hmm, would not expect to get a BOM, but it seems to happen;
//remove it just in case.
|
|
if (text.indexOf('\uFEFF') === 0) {
|
|
text = text.substring(1, text.length);
|
|
}
|
|
|
|
return text;
|
|
},
|
|
|
|
readFileAsync: function (path, encoding) {
|
|
var d = prim();
|
|
try {
|
|
d.resolve(file.readFile(path, encoding));
|
|
} catch (e) {
|
|
d.reject(e);
|
|
}
|
|
return d.promise;
|
|
},
|
|
|
|
saveUtf8File: function (/*String*/fileName, /*String*/fileContents) {
|
|
//summary: saves a *text* file using UTF-8 encoding.
|
|
file.saveFile(fileName, fileContents, "utf8");
|
|
},
|
|
|
|
saveFile: function (/*String*/fileName, /*String*/fileContents, /*String?*/encoding) {
|
|
//summary: saves a *text* file.
|
|
var parentDir;
|
|
|
|
if (encoding === 'utf-8') {
|
|
encoding = 'utf8';
|
|
}
|
|
if (!encoding) {
|
|
encoding = 'utf8';
|
|
}
|
|
|
|
//Make sure destination directories exist.
|
|
parentDir = path.dirname(fileName);
|
|
if (!file.exists(parentDir)) {
|
|
mkFullDir(parentDir);
|
|
}
|
|
|
|
fs.writeFileSync(fileName, fileContents, encoding);
|
|
},
|
|
|
|
deleteFile: function (/*String*/fileName) {
|
|
//summary: deletes a file or directory if it exists.
|
|
var files, i, stat;
|
|
if (file.exists(fileName)) {
|
|
stat = fs.lstatSync(fileName);
|
|
if (stat.isDirectory()) {
|
|
files = fs.readdirSync(fileName);
|
|
for (i = 0; i < files.length; i++) {
|
|
this.deleteFile(path.join(fileName, files[i]));
|
|
}
|
|
fs.rmdirSync(fileName);
|
|
} else {
|
|
fs.unlinkSync(fileName);
|
|
}
|
|
}
|
|
},
|
|
|
|
|
|
/**
|
|
* Deletes any empty directories under the given directory.
|
|
*/
|
|
deleteEmptyDirs: function (startDir) {
|
|
var dirFileArray, i, fileName, filePath, stat;
|
|
|
|
if (file.exists(startDir)) {
|
|
dirFileArray = fs.readdirSync(startDir);
|
|
for (i = 0; i < dirFileArray.length; i++) {
|
|
fileName = dirFileArray[i];
|
|
filePath = path.join(startDir, fileName);
|
|
stat = fs.lstatSync(filePath);
|
|
if (stat.isDirectory()) {
|
|
file.deleteEmptyDirs(filePath);
|
|
}
|
|
}
|
|
|
|
//If directory is now empty, remove it.
|
|
if (fs.readdirSync(startDir).length === 0) {
|
|
file.deleteFile(startDir);
|
|
}
|
|
}
|
|
}
|
|
};
|
|
|
|
return file;
|
|
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'rhino') {
|
|
//Helper functions to deal with file I/O.
|
|
|
|
/*jslint plusplus: false */
|
|
/*global java: false, define: false */
|
|
|
|
define('rhino/file', ['prim'], function (prim) {
|
|
var file = {
|
|
backSlashRegExp: /\\/g,
|
|
|
|
exclusionRegExp: /^\./,
|
|
|
|
getLineSeparator: function () {
|
|
return file.lineSeparator;
|
|
},
|
|
|
|
lineSeparator: java.lang.System.getProperty("line.separator"), //Java String
|
|
|
|
exists: function (fileName) {
|
|
return (new java.io.File(fileName)).exists();
|
|
},
|
|
|
|
parent: function (fileName) {
|
|
return file.absPath((new java.io.File(fileName)).getParentFile());
|
|
},
|
|
|
|
normalize: function (fileName) {
|
|
return file.absPath(fileName);
|
|
},
|
|
|
|
isFile: function (path) {
|
|
return (new java.io.File(path)).isFile();
|
|
},
|
|
|
|
isDirectory: function (path) {
|
|
return (new java.io.File(path)).isDirectory();
|
|
},
|
|
|
|
/**
|
|
* Gets the absolute file path as a string, normalized
|
|
* to using front slashes for path separators.
|
|
* @param {java.io.File||String} file
|
|
*/
|
|
absPath: function (fileObj) {
|
|
if (typeof fileObj === "string") {
|
|
fileObj = new java.io.File(fileObj);
|
|
}
|
|
return (fileObj.getCanonicalPath() + "").replace(file.backSlashRegExp, "/");
|
|
},
|
|
|
|
getFilteredFileList: function (/*String*/startDir, /*RegExp*/regExpFilters, /*boolean?*/makeUnixPaths, /*boolean?*/startDirIsJavaObject) {
|
|
//summary: Recurses startDir and finds matches to the files that match regExpFilters.include
|
|
//and do not match regExpFilters.exclude. Or just one regexp can be passed in for regExpFilters,
|
|
//and it will be treated as the "include" case.
|
|
//Ignores files/directories that start with a period (.) unless exclusionRegExp
|
|
//is set to another value.
|
|
var files = [], topDir, regExpInclude, regExpExclude, dirFileArray,
|
|
i, fileObj, filePath, ok, dirFiles;
|
|
|
|
topDir = startDir;
|
|
if (!startDirIsJavaObject) {
|
|
topDir = new java.io.File(startDir);
|
|
}
|
|
|
|
regExpInclude = regExpFilters.include || regExpFilters;
|
|
regExpExclude = regExpFilters.exclude || null;
|
|
|
|
if (topDir.exists()) {
|
|
dirFileArray = topDir.listFiles();
|
|
for (i = 0; i < dirFileArray.length; i++) {
|
|
fileObj = dirFileArray[i];
|
|
if (fileObj.isFile()) {
|
|
filePath = fileObj.getPath();
|
|
if (makeUnixPaths) {
|
|
//Make sure we have a JS string.
|
|
filePath = String(filePath);
|
|
if (filePath.indexOf("/") === -1) {
|
|
filePath = filePath.replace(/\\/g, "/");
|
|
}
|
|
}
|
|
|
|
ok = true;
|
|
if (regExpInclude) {
|
|
ok = filePath.match(regExpInclude);
|
|
}
|
|
if (ok && regExpExclude) {
|
|
ok = !filePath.match(regExpExclude);
|
|
}
|
|
|
|
if (ok && (!file.exclusionRegExp ||
|
|
!file.exclusionRegExp.test(fileObj.getName()))) {
|
|
files.push(filePath);
|
|
}
|
|
} else if (fileObj.isDirectory() &&
|
|
(!file.exclusionRegExp || !file.exclusionRegExp.test(fileObj.getName()))) {
|
|
dirFiles = this.getFilteredFileList(fileObj, regExpFilters, makeUnixPaths, true);
|
|
//Do not use push.apply for dir listings, can hit limit of max number
|
|
//of arguments to a function call, #921.
|
|
dirFiles.forEach(function (dirFile) {
|
|
files.push(dirFile);
|
|
});
|
|
}
|
|
}
|
|
}
|
|
|
|
return files; //Array
|
|
},
|
|
|
|
copyDir: function (/*String*/srcDir, /*String*/destDir, /*RegExp?*/regExpFilter, /*boolean?*/onlyCopyNew) {
|
|
//summary: copies files from srcDir to destDir using the regExpFilter to determine if the
|
|
//file should be copied. Returns a list of file name strings for the destinations that were copied.
|
|
regExpFilter = regExpFilter || /\w/;
|
|
|
|
var fileNames = file.getFilteredFileList(srcDir, regExpFilter, true),
|
|
copiedFiles = [], i, srcFileName, destFileName;
|
|
|
|
for (i = 0; i < fileNames.length; i++) {
|
|
srcFileName = fileNames[i];
|
|
destFileName = srcFileName.replace(srcDir, destDir);
|
|
|
|
if (file.copyFile(srcFileName, destFileName, onlyCopyNew)) {
|
|
copiedFiles.push(destFileName);
|
|
}
|
|
}
|
|
|
|
return copiedFiles.length ? copiedFiles : null; //Array or null
|
|
},
|
|
|
|
copyFile: function (/*String*/srcFileName, /*String*/destFileName, /*boolean?*/onlyCopyNew) {
|
|
//summary: copies srcFileName to destFileName. If onlyCopyNew is set, it only copies the file if
|
|
//srcFileName is newer than destFileName. Returns a boolean indicating if the copy occurred.
|
|
var destFile = new java.io.File(destFileName), srcFile, parentDir,
|
|
srcChannel, destChannel;
|
|
|
|
//logger.trace("Src filename: " + srcFileName);
|
|
//logger.trace("Dest filename: " + destFileName);
|
|
|
|
//If onlyCopyNew is true, then compare dates and only copy if the src is newer
|
|
//than dest.
|
|
if (onlyCopyNew) {
|
|
srcFile = new java.io.File(srcFileName);
|
|
if (destFile.exists() && destFile.lastModified() >= srcFile.lastModified()) {
|
|
return false; //Boolean
|
|
}
|
|
}
|
|
|
|
//Make sure destination dir exists.
|
|
parentDir = destFile.getParentFile();
|
|
if (!parentDir.exists()) {
|
|
if (!parentDir.mkdirs()) {
|
|
throw "Could not create directory: " + parentDir.getCanonicalPath();
|
|
}
|
|
}
|
|
|
|
//Java's version of copy file.
|
|
srcChannel = new java.io.FileInputStream(srcFileName).getChannel();
|
|
destChannel = new java.io.FileOutputStream(destFileName).getChannel();
|
|
destChannel.transferFrom(srcChannel, 0, srcChannel.size());
|
|
srcChannel.close();
|
|
destChannel.close();
|
|
|
|
return true; //Boolean
|
|
},
|
|
|
|
/**
|
|
* Renames a file. May fail if "to" already exists or is on another drive.
|
|
*/
|
|
renameFile: function (from, to) {
|
|
return (new java.io.File(from)).renameTo((new java.io.File(to)));
|
|
},
|
|
|
|
readFile: function (/*String*/path, /*String?*/encoding) {
|
|
//A file read function that can deal with BOMs
|
|
encoding = encoding || "utf-8";
|
|
var fileObj = new java.io.File(path),
|
|
input = new java.io.BufferedReader(new java.io.InputStreamReader(new java.io.FileInputStream(fileObj), encoding)),
|
|
stringBuffer, line;
|
|
try {
|
|
stringBuffer = new java.lang.StringBuffer();
|
|
line = input.readLine();
|
|
|
|
// Byte Order Mark (BOM) - The Unicode Standard, version 3.0, page 324
|
|
// http://www.unicode.org/faq/utf_bom.html
|
|
|
|
// Note that when we use utf-8, the BOM should appear as "EF BB BF", but it doesn't due to this bug in the JDK:
|
|
// http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4508058
|
|
if (line && line.length() && line.charAt(0) === 0xfeff) {
|
|
// Eat the BOM, since we've already found the encoding on this file,
|
|
// and we plan to concatenate this buffer with others; the BOM should
|
|
// only appear at the top of a file.
|
|
line = line.substring(1);
|
|
}
|
|
while (line !== null) {
|
|
stringBuffer.append(line);
|
|
stringBuffer.append(file.lineSeparator);
|
|
line = input.readLine();
|
|
}
|
|
//Make sure we return a JavaScript string and not a Java string.
|
|
return String(stringBuffer.toString()); //String
|
|
} finally {
|
|
input.close();
|
|
}
|
|
},
|
|
|
|
readFileAsync: function (path, encoding) {
|
|
var d = prim();
|
|
try {
|
|
d.resolve(file.readFile(path, encoding));
|
|
} catch (e) {
|
|
d.reject(e);
|
|
}
|
|
return d.promise;
|
|
},
|
|
|
|
saveUtf8File: function (/*String*/fileName, /*String*/fileContents) {
|
|
//summary: saves a file using UTF-8 encoding.
|
|
file.saveFile(fileName, fileContents, "utf-8");
|
|
},
|
|
|
|
saveFile: function (/*String*/fileName, /*String*/fileContents, /*String?*/encoding) {
|
|
//summary: saves a file.
|
|
var outFile = new java.io.File(fileName), outWriter, parentDir, os;
|
|
|
|
parentDir = outFile.getAbsoluteFile().getParentFile();
|
|
if (!parentDir.exists()) {
|
|
if (!parentDir.mkdirs()) {
|
|
throw "Could not create directory: " + parentDir.getAbsolutePath();
|
|
}
|
|
}
|
|
|
|
if (encoding) {
|
|
outWriter = new java.io.OutputStreamWriter(new java.io.FileOutputStream(outFile), encoding);
|
|
} else {
|
|
outWriter = new java.io.OutputStreamWriter(new java.io.FileOutputStream(outFile));
|
|
}
|
|
|
|
os = new java.io.BufferedWriter(outWriter);
|
|
try {
|
|
//If in Nashorn, need to coerce the JS string to a Java string so that
|
|
//writer.write method dispatch correctly detects the type.
|
|
if (typeof importPackage !== 'undefined') {
|
|
os.write(fileContents);
|
|
} else {
|
|
os.write(new java.lang.String(fileContents));
|
|
}
|
|
} finally {
|
|
os.close();
|
|
}
|
|
},
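        //Illustrative usage only (hypothetical path and contents): saveFile creates
        //any missing parent directories, then writes with the given encoding;
        //saveUtf8File is shorthand for encoding "utf-8".
        //
        //  file.saveUtf8File('build/out.js', 'define(function () {});\n');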
|
|
|
|
deleteFile: function (/*String*/fileName) {
|
|
//summary: deletes a file or directory if it exists.
|
|
var fileObj = new java.io.File(fileName), files, i;
|
|
if (fileObj.exists()) {
|
|
if (fileObj.isDirectory()) {
|
|
files = fileObj.listFiles();
|
|
for (i = 0; i < files.length; i++) {
|
|
this.deleteFile(files[i]);
|
|
}
|
|
}
|
|
fileObj["delete"]();
|
|
}
|
|
},
|
|
|
|
/**
|
|
* Deletes any empty directories under the given directory.
|
|
* The startDirIsJavaObject is private to this implementation's
|
|
* recursion needs.
|
|
*/
|
|
deleteEmptyDirs: function (startDir, startDirIsJavaObject) {
|
|
var topDir = startDir,
|
|
dirFileArray, i, fileObj;
|
|
|
|
if (!startDirIsJavaObject) {
|
|
topDir = new java.io.File(startDir);
|
|
}
|
|
|
|
if (topDir.exists()) {
|
|
dirFileArray = topDir.listFiles();
|
|
for (i = 0; i < dirFileArray.length; i++) {
|
|
fileObj = dirFileArray[i];
|
|
if (fileObj.isDirectory()) {
|
|
file.deleteEmptyDirs(fileObj, true);
|
|
}
|
|
}
|
|
|
|
//If the directory is empty now, delete it.
|
|
if (topDir.listFiles().length === 0) {
|
|
file.deleteFile(String(topDir.getPath()));
|
|
}
|
|
}
|
|
}
|
|
};
|
|
|
|
return file;
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'xpconnect') {
|
|
//Helper functions to deal with file I/O.
|
|
|
|
/*jslint plusplus: false */
|
|
/*global define, Components, xpcUtil */
|
|
|
|
define('xpconnect/file', ['prim'], function (prim) {
|
|
var file,
|
|
Cc = Components.classes,
|
|
Ci = Components.interfaces,
|
|
//Depends on xpcUtil which is set up in x.js
|
|
xpfile = xpcUtil.xpfile;
|
|
|
|
function mkFullDir(dirObj) {
|
|
//1 is DIRECTORY_TYPE, 511 is 0777 permissions
|
|
if (!dirObj.exists()) {
|
|
dirObj.create(1, 511);
|
|
}
|
|
}
|
|
|
|
file = {
|
|
backSlashRegExp: /\\/g,
|
|
|
|
exclusionRegExp: /^\./,
|
|
|
|
getLineSeparator: function () {
|
|
return file.lineSeparator;
|
|
},
|
|
|
|
lineSeparator: ('@mozilla.org/windows-registry-key;1' in Cc) ?
|
|
'\r\n' : '\n',
|
|
|
|
exists: function (fileName) {
|
|
return xpfile(fileName).exists();
|
|
},
|
|
|
|
parent: function (fileName) {
|
|
return xpfile(fileName).parent;
|
|
},
|
|
|
|
normalize: function (fileName) {
|
|
return file.absPath(fileName);
|
|
},
|
|
|
|
isFile: function (path) {
|
|
return xpfile(path).isFile();
|
|
},
|
|
|
|
isDirectory: function (path) {
|
|
return xpfile(path).isDirectory();
|
|
},
|
|
|
|
/**
|
|
* Gets the absolute file path as a string, normalized
|
|
* to using front slashes for path separators.
|
|
* @param {java.io.File||String} file
|
|
*/
|
|
absPath: function (fileObj) {
|
|
if (typeof fileObj === "string") {
|
|
fileObj = xpfile(fileObj);
|
|
}
|
|
return fileObj.path;
|
|
},
|
|
|
|
getFilteredFileList: function (/*String*/startDir, /*RegExp*/regExpFilters, /*boolean?*/makeUnixPaths, /*boolean?*/startDirIsObject) {
|
|
//summary: Recurses startDir and finds matches to the files that match regExpFilters.include
|
|
//and do not match regExpFilters.exclude. Or just one regexp can be passed in for regExpFilters,
|
|
//and it will be treated as the "include" case.
|
|
//Ignores files/directories that start with a period (.) unless exclusionRegExp
|
|
//is set to another value.
|
|
var files = [], topDir, regExpInclude, regExpExclude, dirFileArray,
|
|
fileObj, filePath, ok, dirFiles;
|
|
|
|
topDir = startDir;
|
|
if (!startDirIsObject) {
|
|
topDir = xpfile(startDir);
|
|
}
|
|
|
|
regExpInclude = regExpFilters.include || regExpFilters;
|
|
regExpExclude = regExpFilters.exclude || null;
|
|
|
|
if (topDir.exists()) {
|
|
dirFileArray = topDir.directoryEntries;
|
|
while (dirFileArray.hasMoreElements()) {
|
|
fileObj = dirFileArray.getNext().QueryInterface(Ci.nsILocalFile);
|
|
if (fileObj.isFile()) {
|
|
filePath = fileObj.path;
|
|
if (makeUnixPaths) {
|
|
if (filePath.indexOf("/") === -1) {
|
|
filePath = filePath.replace(/\\/g, "/");
|
|
}
|
|
}
|
|
|
|
ok = true;
|
|
if (regExpInclude) {
|
|
ok = filePath.match(regExpInclude);
|
|
}
|
|
if (ok && regExpExclude) {
|
|
ok = !filePath.match(regExpExclude);
|
|
}
|
|
|
|
if (ok && (!file.exclusionRegExp ||
|
|
!file.exclusionRegExp.test(fileObj.leafName))) {
|
|
files.push(filePath);
|
|
}
|
|
} else if (fileObj.isDirectory() &&
|
|
(!file.exclusionRegExp || !file.exclusionRegExp.test(fileObj.leafName))) {
|
|
dirFiles = this.getFilteredFileList(fileObj, regExpFilters, makeUnixPaths, true);
|
|
//Do not use push.apply for dir listings, can hit limit of max number
|
|
//of arguments to a function call, #921.
|
|
dirFiles.forEach(function (dirFile) {
|
|
files.push(dirFile);
|
|
});
|
|
}
|
|
}
|
|
}
|
|
|
|
return files; //Array
|
|
},
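            //Illustrative usage only (hypothetical directory): collect .js files while
            //skipping anything under a "tests" path. A single regexp may be passed
            //instead of the object and is treated as the include filter.
            //
            //  var jsFiles = file.getFilteredFileList('lib', {
            //      include: /\.js$/,
            //      exclude: /tests/
            //  }, true);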
|
|
|
|
copyDir: function (/*String*/srcDir, /*String*/destDir, /*RegExp?*/regExpFilter, /*boolean?*/onlyCopyNew) {
|
|
//summary: copies files from srcDir to destDir using the regExpFilter to determine if the
|
|
            //file should be copied. Returns a list of file name strings for the destinations that were copied.
|
|
regExpFilter = regExpFilter || /\w/;
|
|
|
|
var fileNames = file.getFilteredFileList(srcDir, regExpFilter, true),
|
|
copiedFiles = [], i, srcFileName, destFileName;
|
|
|
|
for (i = 0; i < fileNames.length; i += 1) {
|
|
srcFileName = fileNames[i];
|
|
destFileName = srcFileName.replace(srcDir, destDir);
|
|
|
|
if (file.copyFile(srcFileName, destFileName, onlyCopyNew)) {
|
|
copiedFiles.push(destFileName);
|
|
}
|
|
}
|
|
|
|
return copiedFiles.length ? copiedFiles : null; //Array or null
|
|
},
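            //Illustrative usage only (hypothetical directories): mirror the .js files
            //from src into build, copying only files that changed. Returns null when
            //nothing was copied.
            //
            //  var copied = file.copyDir('src', 'build', /\.js$/, true);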
|
|
|
|
copyFile: function (/*String*/srcFileName, /*String*/destFileName, /*boolean?*/onlyCopyNew) {
|
|
//summary: copies srcFileName to destFileName. If onlyCopyNew is set, it only copies the file if
|
|
//srcFileName is newer than destFileName. Returns a boolean indicating if the copy occurred.
|
|
var destFile = xpfile(destFileName),
|
|
srcFile = xpfile(srcFileName);
|
|
|
|
//logger.trace("Src filename: " + srcFileName);
|
|
//logger.trace("Dest filename: " + destFileName);
|
|
|
|
//If onlyCopyNew is true, then compare dates and only copy if the src is newer
|
|
//than dest.
|
|
if (onlyCopyNew) {
|
|
if (destFile.exists() && destFile.lastModifiedTime >= srcFile.lastModifiedTime) {
|
|
return false; //Boolean
|
|
}
|
|
}
|
|
|
|
srcFile.copyTo(destFile.parent, destFile.leafName);
|
|
|
|
return true; //Boolean
|
|
},
|
|
|
|
/**
|
|
* Renames a file. May fail if "to" already exists or is on another drive.
|
|
*/
|
|
renameFile: function (from, to) {
|
|
var toFile = xpfile(to);
|
|
return xpfile(from).moveTo(toFile.parent, toFile.leafName);
|
|
},
|
|
|
|
readFile: xpcUtil.readFile,
|
|
|
|
readFileAsync: function (path, encoding) {
|
|
var d = prim();
|
|
try {
|
|
d.resolve(file.readFile(path, encoding));
|
|
} catch (e) {
|
|
d.reject(e);
|
|
}
|
|
return d.promise;
|
|
},
|
|
|
|
saveUtf8File: function (/*String*/fileName, /*String*/fileContents) {
|
|
//summary: saves a file using UTF-8 encoding.
|
|
file.saveFile(fileName, fileContents, "utf-8");
|
|
},
|
|
|
|
saveFile: function (/*String*/fileName, /*String*/fileContents, /*String?*/encoding) {
|
|
var outStream, convertStream,
|
|
fileObj = xpfile(fileName);
|
|
|
|
mkFullDir(fileObj.parent);
|
|
|
|
try {
|
|
outStream = Cc['@mozilla.org/network/file-output-stream;1']
|
|
.createInstance(Ci.nsIFileOutputStream);
|
|
                    //511 is decimal for 0777 permissions
|
|
outStream.init(fileObj, 0x02 | 0x08 | 0x20, 511, 0);
|
|
|
|
convertStream = Cc['@mozilla.org/intl/converter-output-stream;1']
|
|
.createInstance(Ci.nsIConverterOutputStream);
|
|
|
|
convertStream.init(outStream, encoding, 0, 0);
|
|
convertStream.writeString(fileContents);
|
|
} catch (e) {
|
|
throw new Error((fileObj && fileObj.path || '') + ': ' + e);
|
|
} finally {
|
|
if (convertStream) {
|
|
convertStream.close();
|
|
}
|
|
if (outStream) {
|
|
outStream.close();
|
|
}
|
|
}
|
|
},
|
|
|
|
deleteFile: function (/*String*/fileName) {
|
|
//summary: deletes a file or directory if it exists.
|
|
var fileObj = xpfile(fileName);
|
|
if (fileObj.exists()) {
|
|
fileObj.remove(true);
|
|
}
|
|
},
|
|
|
|
/**
|
|
* Deletes any empty directories under the given directory.
|
|
             * The startDirIsObject flag is private to this implementation's
|
|
* recursion needs.
|
|
*/
|
|
deleteEmptyDirs: function (startDir, startDirIsObject) {
|
|
var topDir = startDir,
|
|
dirFileArray, fileObj;
|
|
|
|
if (!startDirIsObject) {
|
|
topDir = xpfile(startDir);
|
|
}
|
|
|
|
if (topDir.exists()) {
|
|
dirFileArray = topDir.directoryEntries;
|
|
while (dirFileArray.hasMoreElements()) {
|
|
fileObj = dirFileArray.getNext().QueryInterface(Ci.nsILocalFile);
|
|
|
|
if (fileObj.isDirectory()) {
|
|
file.deleteEmptyDirs(fileObj, true);
|
|
}
|
|
}
|
|
|
|
//If the directory is empty now, delete it.
|
|
dirFileArray = topDir.directoryEntries;
|
|
if (!dirFileArray.hasMoreElements()) {
|
|
file.deleteFile(topDir.path);
|
|
}
|
|
}
|
|
}
|
|
};
|
|
|
|
return file;
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'browser') {
|
|
/*global process */
|
|
define('browser/quit', function () {
|
|
'use strict';
|
|
return function (code) {
|
|
};
|
|
});
|
|
}
|
|
|
|
if(env === 'node') {
|
|
/*global process */
|
|
define('node/quit', function () {
|
|
'use strict';
|
|
return function (code) {
|
|
var draining = 0;
|
|
var exit = function () {
|
|
if (draining === 0) {
|
|
process.exit(code);
|
|
} else {
|
|
draining -= 1;
|
|
}
|
|
};
|
|
if (process.stdout.bufferSize) {
|
|
draining += 1;
|
|
process.stdout.once('drain', exit);
|
|
}
|
|
if (process.stderr.bufferSize) {
|
|
draining += 1;
|
|
process.stderr.once('drain', exit);
|
|
}
|
|
exit();
|
|
};
|
|
});
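    //Note on the exit logic above: process.exit() can drop output that is still
    //buffered on stdout/stderr, so quit() counts the streams reporting a non-zero
    //bufferSize and defers the exit until each of them has emitted 'drain'.
    //Illustrative usage only; the 'env!env/quit' id is an assumption here,
    //mirroring the 'env!env/print' dependency used by the logger module below:
    //
    //  require(['env!env/quit'], function (quit) { quit(1); });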
|
|
|
|
}
|
|
|
|
if(env === 'rhino') {
|
|
/*global quit */
|
|
define('rhino/quit', function () {
|
|
'use strict';
|
|
return function (code) {
|
|
return quit(code);
|
|
};
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'xpconnect') {
|
|
/*global quit */
|
|
define('xpconnect/quit', function () {
|
|
'use strict';
|
|
return function (code) {
|
|
return quit(code);
|
|
};
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'browser') {
|
|
/*jslint strict: false */
|
|
/*global define: false, console: false */
|
|
|
|
define('browser/print', function () {
|
|
function print(msg) {
|
|
console.log(msg);
|
|
}
|
|
|
|
return print;
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'node') {
|
|
/*jslint strict: false */
|
|
/*global define: false, console: false */
|
|
|
|
define('node/print', function () {
|
|
function print(msg) {
|
|
console.log(msg);
|
|
}
|
|
|
|
return print;
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'rhino') {
|
|
/*jslint strict: false */
|
|
/*global define: false, print: false */
|
|
|
|
define('rhino/print', function () {
|
|
return print;
|
|
});
|
|
|
|
}
|
|
|
|
if(env === 'xpconnect') {
|
|
/*jslint strict: false */
|
|
/*global define: false, print: false */
|
|
|
|
define('xpconnect/print', function () {
|
|
return print;
|
|
});
|
|
|
|
}
|
|
/*jslint nomen: false, strict: false */
|
|
/*global define: false */
|
|
|
|
define('logger', ['env!env/print'], function (print) {
|
|
var logger = {
|
|
TRACE: 0,
|
|
INFO: 1,
|
|
WARN: 2,
|
|
ERROR: 3,
|
|
SILENT: 4,
|
|
level: 0,
|
|
logPrefix: "",
|
|
|
|
logLevel: function( level ) {
|
|
this.level = level;
|
|
},
|
|
|
|
trace: function (message) {
|
|
if (this.level <= this.TRACE) {
|
|
this._print(message);
|
|
}
|
|
},
|
|
|
|
info: function (message) {
|
|
if (this.level <= this.INFO) {
|
|
this._print(message);
|
|
}
|
|
},
|
|
|
|
warn: function (message) {
|
|
if (this.level <= this.WARN) {
|
|
this._print(message);
|
|
}
|
|
},
|
|
|
|
error: function (message) {
|
|
if (this.level <= this.ERROR) {
|
|
this._print(message);
|
|
}
|
|
},
|
|
|
|
_print: function (message) {
|
|
this._sysPrint((this.logPrefix ? (this.logPrefix + " ") : "") + message);
|
|
},
|
|
|
|
_sysPrint: function (message) {
|
|
print(message);
|
|
}
|
|
};
|
|
|
|
return logger;
|
|
});
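//Illustrative usage only: levels are cumulative, so setting WARN silences
//trace() and info() while warn() and error() still print.
//
//  logger.logLevel(logger.WARN);
//  logger.logPrefix = '[r.js]';
//  logger.warn('output directory already exists');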
|
|
//Just a blank file to use when building the optimizer with the optimizer,
|
|
//so that the build does not attempt to inline some env modules,
|
|
//like Node's fs and path.
|
|
|
|
(function webpackUniversalModuleDefinition(root, factory) {
|
|
/* istanbul ignore next */
|
|
if(typeof define === 'function' && define.amd)
|
|
define('esprima', [], factory);
|
|
else if(typeof exports === 'object' && typeof module === 'object')
|
|
module.exports = factory();
|
|
/* istanbul ignore next */
|
|
else if(typeof exports === 'object')
|
|
exports["esprima"] = factory();
|
|
else
|
|
root["esprima"] = factory();
|
|
})(this, function() {
|
|
return /******/ (function(modules) { // webpackBootstrap
|
|
/******/ // The module cache
|
|
/******/ var installedModules = {};
|
|
|
|
/******/ // The require function
|
|
/******/ function __webpack_require__(moduleId) {
|
|
|
|
/******/ // Check if module is in cache
|
|
/* istanbul ignore if */
|
|
/******/ if(installedModules[moduleId])
|
|
/******/ return installedModules[moduleId].exports;
|
|
|
|
/******/ // Create a new module (and put it into the cache)
|
|
/******/ var module = installedModules[moduleId] = {
|
|
/******/ exports: {},
|
|
/******/ id: moduleId,
|
|
/******/ loaded: false
|
|
/******/ };
|
|
|
|
/******/ // Execute the module function
|
|
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
|
|
|
|
/******/ // Flag the module as loaded
|
|
/******/ module.loaded = true;
|
|
|
|
/******/ // Return the exports of the module
|
|
/******/ return module.exports;
|
|
/******/ }
|
|
|
|
|
|
/******/ // expose the modules object (__webpack_modules__)
|
|
/******/ __webpack_require__.m = modules;
|
|
|
|
/******/ // expose the module cache
|
|
/******/ __webpack_require__.c = installedModules;
|
|
|
|
/******/ // __webpack_public_path__
|
|
/******/ __webpack_require__.p = "";
|
|
|
|
/******/ // Load entry module and return exports
|
|
/******/ return __webpack_require__(0);
|
|
/******/ })
|
|
/************************************************************************/
|
|
/******/ ([
|
|
/* 0 */
|
|
/***/ function(module, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
/*
|
|
Copyright JS Foundation and other contributors, https://js.foundation/
|
|
|
|
Redistribution and use in source and binary forms, with or without
|
|
modification, are permitted provided that the following conditions are met:
|
|
|
|
* Redistributions of source code must retain the above copyright
|
|
notice, this list of conditions and the following disclaimer.
|
|
* Redistributions in binary form must reproduce the above copyright
|
|
notice, this list of conditions and the following disclaimer in the
|
|
documentation and/or other materials provided with the distribution.
|
|
|
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
|
|
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
|
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
|
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
*/
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
var comment_handler_1 = __webpack_require__(1);
|
|
var jsx_parser_1 = __webpack_require__(3);
|
|
var parser_1 = __webpack_require__(8);
|
|
var tokenizer_1 = __webpack_require__(15);
|
|
function parse(code, options, delegate) {
|
|
var commentHandler = null;
|
|
var proxyDelegate = function (node, metadata) {
|
|
if (delegate) {
|
|
delegate(node, metadata);
|
|
}
|
|
if (commentHandler) {
|
|
commentHandler.visit(node, metadata);
|
|
}
|
|
};
|
|
var parserDelegate = (typeof delegate === 'function') ? proxyDelegate : null;
|
|
var collectComment = false;
|
|
if (options) {
|
|
collectComment = (typeof options.comment === 'boolean' && options.comment);
|
|
var attachComment = (typeof options.attachComment === 'boolean' && options.attachComment);
|
|
if (collectComment || attachComment) {
|
|
commentHandler = new comment_handler_1.CommentHandler();
|
|
commentHandler.attach = attachComment;
|
|
options.comment = true;
|
|
parserDelegate = proxyDelegate;
|
|
}
|
|
}
|
|
var isModule = false;
|
|
if (options && typeof options.sourceType === 'string') {
|
|
isModule = (options.sourceType === 'module');
|
|
}
|
|
var parser;
|
|
if (options && typeof options.jsx === 'boolean' && options.jsx) {
|
|
parser = new jsx_parser_1.JSXParser(code, options, parserDelegate);
|
|
}
|
|
else {
|
|
parser = new parser_1.Parser(code, options, parserDelegate);
|
|
}
|
|
var program = isModule ? parser.parseModule() : parser.parseScript();
|
|
var ast = program;
|
|
if (collectComment && commentHandler) {
|
|
ast.comments = commentHandler.comments;
|
|
}
|
|
if (parser.config.tokens) {
|
|
ast.tokens = parser.tokens;
|
|
}
|
|
if (parser.config.tolerant) {
|
|
ast.errors = parser.errorHandler.errors;
|
|
}
|
|
return ast;
|
|
}
|
|
exports.parse = parse;
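    // Illustrative usage only (sample source string): the options mirror the
    // flags read above, so the extra arrays appear on the returned program node
    // when requested.
    //
    //   var ast = parse('var answer = 42; // why', {
    //       comment: true,
    //       tokens: true,
    //       tolerant: true
    //   });
    //   // ast.comments, ast.tokens and ast.errors are then populated.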
|
|
function parseModule(code, options, delegate) {
|
|
var parsingOptions = options || {};
|
|
parsingOptions.sourceType = 'module';
|
|
return parse(code, parsingOptions, delegate);
|
|
}
|
|
exports.parseModule = parseModule;
|
|
function parseScript(code, options, delegate) {
|
|
var parsingOptions = options || {};
|
|
parsingOptions.sourceType = 'script';
|
|
return parse(code, parsingOptions, delegate);
|
|
}
|
|
exports.parseScript = parseScript;
|
|
function tokenize(code, options, delegate) {
|
|
var tokenizer = new tokenizer_1.Tokenizer(code, options);
|
|
var tokens;
|
|
tokens = [];
|
|
try {
|
|
while (true) {
|
|
var token = tokenizer.getNextToken();
|
|
if (!token) {
|
|
break;
|
|
}
|
|
if (delegate) {
|
|
token = delegate(token);
|
|
}
|
|
tokens.push(token);
|
|
}
|
|
}
|
|
catch (e) {
|
|
tokenizer.errorHandler.tolerate(e);
|
|
}
|
|
if (tokenizer.errorHandler.tolerant) {
|
|
tokens.errors = tokenizer.errors();
|
|
}
|
|
return tokens;
|
|
}
|
|
exports.tokenize = tokenize;
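    // Illustrative usage only: tokenize() returns a flat token list, and the
    // optional delegate can rewrite each token before it is collected.
    //
    //   var tokens = tokenize('answer = 42', {}, function (token) {
    //       token.note = 'seen';
    //       return token;
    //   });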
|
|
var syntax_1 = __webpack_require__(2);
|
|
exports.Syntax = syntax_1.Syntax;
|
|
// Sync with *.json manifests.
|
|
exports.version = '4.0.1';
|
|
|
|
|
|
/***/ },
|
|
/* 1 */
|
|
/***/ function(module, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
var syntax_1 = __webpack_require__(2);
|
|
var CommentHandler = (function () {
|
|
function CommentHandler() {
|
|
this.attach = false;
|
|
this.comments = [];
|
|
this.stack = [];
|
|
this.leading = [];
|
|
this.trailing = [];
|
|
}
|
|
CommentHandler.prototype.insertInnerComments = function (node, metadata) {
|
|
            // innerComments are attached to a node with an empty block body, e.g.
|
|
// `function a() {/** comments **\/}`
|
|
if (node.type === syntax_1.Syntax.BlockStatement && node.body.length === 0) {
|
|
var innerComments = [];
|
|
for (var i = this.leading.length - 1; i >= 0; --i) {
|
|
var entry = this.leading[i];
|
|
if (metadata.end.offset >= entry.start) {
|
|
innerComments.unshift(entry.comment);
|
|
this.leading.splice(i, 1);
|
|
this.trailing.splice(i, 1);
|
|
}
|
|
}
|
|
if (innerComments.length) {
|
|
node.innerComments = innerComments;
|
|
}
|
|
}
|
|
};
|
|
CommentHandler.prototype.findTrailingComments = function (metadata) {
|
|
var trailingComments = [];
|
|
if (this.trailing.length > 0) {
|
|
for (var i = this.trailing.length - 1; i >= 0; --i) {
|
|
var entry_1 = this.trailing[i];
|
|
if (entry_1.start >= metadata.end.offset) {
|
|
trailingComments.unshift(entry_1.comment);
|
|
}
|
|
}
|
|
this.trailing.length = 0;
|
|
return trailingComments;
|
|
}
|
|
var entry = this.stack[this.stack.length - 1];
|
|
if (entry && entry.node.trailingComments) {
|
|
var firstComment = entry.node.trailingComments[0];
|
|
if (firstComment && firstComment.range[0] >= metadata.end.offset) {
|
|
trailingComments = entry.node.trailingComments;
|
|
delete entry.node.trailingComments;
|
|
}
|
|
}
|
|
return trailingComments;
|
|
};
|
|
CommentHandler.prototype.findLeadingComments = function (metadata) {
|
|
var leadingComments = [];
|
|
var target;
|
|
while (this.stack.length > 0) {
|
|
var entry = this.stack[this.stack.length - 1];
|
|
if (entry && entry.start >= metadata.start.offset) {
|
|
target = entry.node;
|
|
this.stack.pop();
|
|
}
|
|
else {
|
|
break;
|
|
}
|
|
}
|
|
if (target) {
|
|
var count = target.leadingComments ? target.leadingComments.length : 0;
|
|
for (var i = count - 1; i >= 0; --i) {
|
|
var comment = target.leadingComments[i];
|
|
if (comment.range[1] <= metadata.start.offset) {
|
|
leadingComments.unshift(comment);
|
|
target.leadingComments.splice(i, 1);
|
|
}
|
|
}
|
|
if (target.leadingComments && target.leadingComments.length === 0) {
|
|
delete target.leadingComments;
|
|
}
|
|
return leadingComments;
|
|
}
|
|
for (var i = this.leading.length - 1; i >= 0; --i) {
|
|
var entry = this.leading[i];
|
|
if (entry.start <= metadata.start.offset) {
|
|
leadingComments.unshift(entry.comment);
|
|
this.leading.splice(i, 1);
|
|
}
|
|
}
|
|
return leadingComments;
|
|
};
|
|
CommentHandler.prototype.visitNode = function (node, metadata) {
|
|
if (node.type === syntax_1.Syntax.Program && node.body.length > 0) {
|
|
return;
|
|
}
|
|
this.insertInnerComments(node, metadata);
|
|
var trailingComments = this.findTrailingComments(metadata);
|
|
var leadingComments = this.findLeadingComments(metadata);
|
|
if (leadingComments.length > 0) {
|
|
node.leadingComments = leadingComments;
|
|
}
|
|
if (trailingComments.length > 0) {
|
|
node.trailingComments = trailingComments;
|
|
}
|
|
this.stack.push({
|
|
node: node,
|
|
start: metadata.start.offset
|
|
});
|
|
};
|
|
CommentHandler.prototype.visitComment = function (node, metadata) {
|
|
var type = (node.type[0] === 'L') ? 'Line' : 'Block';
|
|
var comment = {
|
|
type: type,
|
|
value: node.value
|
|
};
|
|
if (node.range) {
|
|
comment.range = node.range;
|
|
}
|
|
if (node.loc) {
|
|
comment.loc = node.loc;
|
|
}
|
|
this.comments.push(comment);
|
|
if (this.attach) {
|
|
var entry = {
|
|
comment: {
|
|
type: type,
|
|
value: node.value,
|
|
range: [metadata.start.offset, metadata.end.offset]
|
|
},
|
|
start: metadata.start.offset
|
|
};
|
|
if (node.loc) {
|
|
entry.comment.loc = node.loc;
|
|
}
|
|
node.type = type;
|
|
this.leading.push(entry);
|
|
this.trailing.push(entry);
|
|
}
|
|
};
|
|
CommentHandler.prototype.visit = function (node, metadata) {
|
|
if (node.type === 'LineComment') {
|
|
this.visitComment(node, metadata);
|
|
}
|
|
else if (node.type === 'BlockComment') {
|
|
this.visitComment(node, metadata);
|
|
}
|
|
else if (this.attach) {
|
|
this.visitNode(node, metadata);
|
|
}
|
|
};
|
|
return CommentHandler;
|
|
}());
|
|
exports.CommentHandler = CommentHandler;
|
|
|
|
|
|
/***/ },
|
|
/* 2 */
|
|
/***/ function(module, exports) {
|
|
|
|
"use strict";
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
exports.Syntax = {
|
|
AssignmentExpression: 'AssignmentExpression',
|
|
AssignmentPattern: 'AssignmentPattern',
|
|
ArrayExpression: 'ArrayExpression',
|
|
ArrayPattern: 'ArrayPattern',
|
|
ArrowFunctionExpression: 'ArrowFunctionExpression',
|
|
AwaitExpression: 'AwaitExpression',
|
|
BlockStatement: 'BlockStatement',
|
|
BinaryExpression: 'BinaryExpression',
|
|
BreakStatement: 'BreakStatement',
|
|
CallExpression: 'CallExpression',
|
|
CatchClause: 'CatchClause',
|
|
ClassBody: 'ClassBody',
|
|
ClassDeclaration: 'ClassDeclaration',
|
|
ClassExpression: 'ClassExpression',
|
|
ConditionalExpression: 'ConditionalExpression',
|
|
ContinueStatement: 'ContinueStatement',
|
|
DoWhileStatement: 'DoWhileStatement',
|
|
DebuggerStatement: 'DebuggerStatement',
|
|
EmptyStatement: 'EmptyStatement',
|
|
ExportAllDeclaration: 'ExportAllDeclaration',
|
|
ExportDefaultDeclaration: 'ExportDefaultDeclaration',
|
|
ExportNamedDeclaration: 'ExportNamedDeclaration',
|
|
ExportSpecifier: 'ExportSpecifier',
|
|
ExpressionStatement: 'ExpressionStatement',
|
|
ForStatement: 'ForStatement',
|
|
ForOfStatement: 'ForOfStatement',
|
|
ForInStatement: 'ForInStatement',
|
|
FunctionDeclaration: 'FunctionDeclaration',
|
|
FunctionExpression: 'FunctionExpression',
|
|
Identifier: 'Identifier',
|
|
IfStatement: 'IfStatement',
|
|
ImportDeclaration: 'ImportDeclaration',
|
|
ImportDefaultSpecifier: 'ImportDefaultSpecifier',
|
|
ImportNamespaceSpecifier: 'ImportNamespaceSpecifier',
|
|
ImportSpecifier: 'ImportSpecifier',
|
|
Literal: 'Literal',
|
|
LabeledStatement: 'LabeledStatement',
|
|
LogicalExpression: 'LogicalExpression',
|
|
MemberExpression: 'MemberExpression',
|
|
MetaProperty: 'MetaProperty',
|
|
MethodDefinition: 'MethodDefinition',
|
|
NewExpression: 'NewExpression',
|
|
ObjectExpression: 'ObjectExpression',
|
|
ObjectPattern: 'ObjectPattern',
|
|
Program: 'Program',
|
|
Property: 'Property',
|
|
RestElement: 'RestElement',
|
|
ReturnStatement: 'ReturnStatement',
|
|
SequenceExpression: 'SequenceExpression',
|
|
SpreadElement: 'SpreadElement',
|
|
Super: 'Super',
|
|
SwitchCase: 'SwitchCase',
|
|
SwitchStatement: 'SwitchStatement',
|
|
TaggedTemplateExpression: 'TaggedTemplateExpression',
|
|
TemplateElement: 'TemplateElement',
|
|
TemplateLiteral: 'TemplateLiteral',
|
|
ThisExpression: 'ThisExpression',
|
|
ThrowStatement: 'ThrowStatement',
|
|
TryStatement: 'TryStatement',
|
|
UnaryExpression: 'UnaryExpression',
|
|
UpdateExpression: 'UpdateExpression',
|
|
VariableDeclaration: 'VariableDeclaration',
|
|
VariableDeclarator: 'VariableDeclarator',
|
|
WhileStatement: 'WhileStatement',
|
|
WithStatement: 'WithStatement',
|
|
YieldExpression: 'YieldExpression'
|
|
};
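    // Illustrative usage only: these string constants are the values of the
    // "type" field on AST nodes, so visitors compare against them directly.
    //
    //   if (node.type === exports.Syntax.CallExpression) {
    //       // e.g. inspect node.callee and node.arguments here
    //   }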
|
|
|
|
|
|
/***/ },
|
|
/* 3 */
|
|
/***/ function(module, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
/* istanbul ignore next */
|
|
var __extends = (this && this.__extends) || (function () {
|
|
var extendStatics = Object.setPrototypeOf ||
|
|
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
|
|
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
|
|
return function (d, b) {
|
|
extendStatics(d, b);
|
|
function __() { this.constructor = d; }
|
|
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
|
|
};
|
|
})();
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
var character_1 = __webpack_require__(4);
|
|
var JSXNode = __webpack_require__(5);
|
|
var jsx_syntax_1 = __webpack_require__(6);
|
|
var Node = __webpack_require__(7);
|
|
var parser_1 = __webpack_require__(8);
|
|
var token_1 = __webpack_require__(13);
|
|
var xhtml_entities_1 = __webpack_require__(14);
|
|
token_1.TokenName[100 /* Identifier */] = 'JSXIdentifier';
|
|
token_1.TokenName[101 /* Text */] = 'JSXText';
|
|
// Fully qualified element name, e.g. <svg:path> returns "svg:path"
|
|
function getQualifiedElementName(elementName) {
|
|
var qualifiedName;
|
|
switch (elementName.type) {
|
|
case jsx_syntax_1.JSXSyntax.JSXIdentifier:
|
|
var id = elementName;
|
|
qualifiedName = id.name;
|
|
break;
|
|
case jsx_syntax_1.JSXSyntax.JSXNamespacedName:
|
|
var ns = elementName;
|
|
qualifiedName = getQualifiedElementName(ns.namespace) + ':' +
|
|
getQualifiedElementName(ns.name);
|
|
break;
|
|
case jsx_syntax_1.JSXSyntax.JSXMemberExpression:
|
|
var expr = elementName;
|
|
qualifiedName = getQualifiedElementName(expr.object) + '.' +
|
|
getQualifiedElementName(expr.property);
|
|
break;
|
|
/* istanbul ignore next */
|
|
default:
|
|
break;
|
|
}
|
|
return qualifiedName;
|
|
}
|
|
var JSXParser = (function (_super) {
|
|
__extends(JSXParser, _super);
|
|
function JSXParser(code, options, delegate) {
|
|
return _super.call(this, code, options, delegate) || this;
|
|
}
|
|
JSXParser.prototype.parsePrimaryExpression = function () {
|
|
return this.match('<') ? this.parseJSXRoot() : _super.prototype.parsePrimaryExpression.call(this);
|
|
};
|
|
JSXParser.prototype.startJSX = function () {
|
|
// Unwind the scanner before the lookahead token.
|
|
this.scanner.index = this.startMarker.index;
|
|
this.scanner.lineNumber = this.startMarker.line;
|
|
this.scanner.lineStart = this.startMarker.index - this.startMarker.column;
|
|
};
|
|
JSXParser.prototype.finishJSX = function () {
|
|
// Prime the next lookahead.
|
|
this.nextToken();
|
|
};
|
|
JSXParser.prototype.reenterJSX = function () {
|
|
this.startJSX();
|
|
this.expectJSX('}');
|
|
// Pop the closing '}' added from the lookahead.
|
|
if (this.config.tokens) {
|
|
this.tokens.pop();
|
|
}
|
|
};
|
|
JSXParser.prototype.createJSXNode = function () {
|
|
this.collectComments();
|
|
return {
|
|
index: this.scanner.index,
|
|
line: this.scanner.lineNumber,
|
|
column: this.scanner.index - this.scanner.lineStart
|
|
};
|
|
};
|
|
JSXParser.prototype.createJSXChildNode = function () {
|
|
return {
|
|
index: this.scanner.index,
|
|
line: this.scanner.lineNumber,
|
|
column: this.scanner.index - this.scanner.lineStart
|
|
};
|
|
};
|
|
JSXParser.prototype.scanXHTMLEntity = function (quote) {
|
|
var result = '&';
|
|
var valid = true;
|
|
var terminated = false;
|
|
var numeric = false;
|
|
var hex = false;
|
|
while (!this.scanner.eof() && valid && !terminated) {
|
|
var ch = this.scanner.source[this.scanner.index];
|
|
if (ch === quote) {
|
|
break;
|
|
}
|
|
terminated = (ch === ';');
|
|
result += ch;
|
|
++this.scanner.index;
|
|
if (!terminated) {
|
|
switch (result.length) {
|
|
case 2:
|
|
                            // e.g. '&#123;'
|
|
numeric = (ch === '#');
|
|
break;
|
|
case 3:
|
|
if (numeric) {
|
|
                                // e.g. '&#x41;'
|
|
hex = (ch === 'x');
|
|
valid = hex || character_1.Character.isDecimalDigit(ch.charCodeAt(0));
|
|
numeric = numeric && !hex;
|
|
}
|
|
break;
|
|
default:
|
|
valid = valid && !(numeric && !character_1.Character.isDecimalDigit(ch.charCodeAt(0)));
|
|
valid = valid && !(hex && !character_1.Character.isHexDigit(ch.charCodeAt(0)));
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
if (valid && terminated && result.length > 2) {
|
|
                // e.g. '&#x41;' becomes just '#x41'
|
|
var str = result.substr(1, result.length - 2);
|
|
if (numeric && str.length > 1) {
|
|
result = String.fromCharCode(parseInt(str.substr(1), 10));
|
|
}
|
|
else if (hex && str.length > 2) {
|
|
result = String.fromCharCode(parseInt('0' + str.substr(1), 16));
|
|
}
|
|
else if (!numeric && !hex && xhtml_entities_1.XHTMLEntities[str]) {
|
|
result = xhtml_entities_1.XHTMLEntities[str];
|
|
}
|
|
}
|
|
return result;
|
|
};
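        // Behavior summary (illustrative inputs, not part of the original source):
        // '&#65;' and '&#x41;' both decode to 'A', a named entity such as '&amp;'
        // resolves through the XHTML entity table, and an unterminated or unknown
        // entity is returned as the raw accumulated text.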
|
|
// Scan the next JSX token. This replaces Scanner#lex when in JSX mode.
|
|
JSXParser.prototype.lexJSX = function () {
|
|
var cp = this.scanner.source.charCodeAt(this.scanner.index);
|
|
// < > / : = { }
|
|
if (cp === 60 || cp === 62 || cp === 47 || cp === 58 || cp === 61 || cp === 123 || cp === 125) {
|
|
var value = this.scanner.source[this.scanner.index++];
|
|
return {
|
|
type: 7 /* Punctuator */,
|
|
value: value,
|
|
lineNumber: this.scanner.lineNumber,
|
|
lineStart: this.scanner.lineStart,
|
|
start: this.scanner.index - 1,
|
|
end: this.scanner.index
|
|
};
|
|
}
|
|
// " '
|
|
if (cp === 34 || cp === 39) {
|
|
var start = this.scanner.index;
|
|
var quote = this.scanner.source[this.scanner.index++];
|
|
var str = '';
|
|
while (!this.scanner.eof()) {
|
|
var ch = this.scanner.source[this.scanner.index++];
|
|
if (ch === quote) {
|
|
break;
|
|
}
|
|
else if (ch === '&') {
|
|
str += this.scanXHTMLEntity(quote);
|
|
}
|
|
else {
|
|
str += ch;
|
|
}
|
|
}
|
|
return {
|
|
type: 8 /* StringLiteral */,
|
|
value: str,
|
|
lineNumber: this.scanner.lineNumber,
|
|
lineStart: this.scanner.lineStart,
|
|
start: start,
|
|
end: this.scanner.index
|
|
};
|
|
}
|
|
// ... or .
|
|
if (cp === 46) {
|
|
var n1 = this.scanner.source.charCodeAt(this.scanner.index + 1);
|
|
var n2 = this.scanner.source.charCodeAt(this.scanner.index + 2);
|
|
var value = (n1 === 46 && n2 === 46) ? '...' : '.';
|
|
var start = this.scanner.index;
|
|
this.scanner.index += value.length;
|
|
return {
|
|
type: 7 /* Punctuator */,
|
|
value: value,
|
|
lineNumber: this.scanner.lineNumber,
|
|
lineStart: this.scanner.lineStart,
|
|
start: start,
|
|
end: this.scanner.index
|
|
};
|
|
}
|
|
// `
|
|
if (cp === 96) {
|
|
// Only placeholder, since it will be rescanned as a real assignment expression.
|
|
return {
|
|
type: 10 /* Template */,
|
|
value: '',
|
|
lineNumber: this.scanner.lineNumber,
|
|
lineStart: this.scanner.lineStart,
|
|
start: this.scanner.index,
|
|
end: this.scanner.index
|
|
};
|
|
}
|
|
        // Identifier cannot contain a backslash (char code 92).
|
|
if (character_1.Character.isIdentifierStart(cp) && (cp !== 92)) {
|
|
var start = this.scanner.index;
|
|
++this.scanner.index;
|
|
while (!this.scanner.eof()) {
|
|
var ch = this.scanner.source.charCodeAt(this.scanner.index);
|
|
if (character_1.Character.isIdentifierPart(ch) && (ch !== 92)) {
|
|
++this.scanner.index;
|
|
}
|
|
else if (ch === 45) {
|
|
// Hyphen (char code 45) can be part of an identifier.
|
|
++this.scanner.index;
|
|
}
|
|
else {
|
|
break;
|
|
}
|
|
}
|
|
var id = this.scanner.source.slice(start, this.scanner.index);
|
|
return {
|
|
type: 100 /* Identifier */,
|
|
value: id,
|
|
lineNumber: this.scanner.lineNumber,
|
|
lineStart: this.scanner.lineStart,
|
|
start: start,
|
|
end: this.scanner.index
|
|
};
|
|
}
|
|
return this.scanner.lex();
|
|
};
|
|
JSXParser.prototype.nextJSXToken = function () {
|
|
this.collectComments();
|
|
this.startMarker.index = this.scanner.index;
|
|
this.startMarker.line = this.scanner.lineNumber;
|
|
this.startMarker.column = this.scanner.index - this.scanner.lineStart;
|
|
var token = this.lexJSX();
|
|
this.lastMarker.index = this.scanner.index;
|
|
this.lastMarker.line = this.scanner.lineNumber;
|
|
this.lastMarker.column = this.scanner.index - this.scanner.lineStart;
|
|
if (this.config.tokens) {
|
|
this.tokens.push(this.convertToken(token));
|
|
}
|
|
return token;
|
|
};
|
|
JSXParser.prototype.nextJSXText = function () {
|
|
this.startMarker.index = this.scanner.index;
|
|
this.startMarker.line = this.scanner.lineNumber;
|
|
this.startMarker.column = this.scanner.index - this.scanner.lineStart;
|
|
var start = this.scanner.index;
|
|
var text = '';
|
|
while (!this.scanner.eof()) {
|
|
var ch = this.scanner.source[this.scanner.index];
|
|
if (ch === '{' || ch === '<') {
|
|
break;
|
|
}
|
|
++this.scanner.index;
|
|
text += ch;
|
|
if (character_1.Character.isLineTerminator(ch.charCodeAt(0))) {
|
|
++this.scanner.lineNumber;
|
|
if (ch === '\r' && this.scanner.source[this.scanner.index] === '\n') {
|
|
++this.scanner.index;
|
|
}
|
|
this.scanner.lineStart = this.scanner.index;
|
|
}
|
|
}
|
|
this.lastMarker.index = this.scanner.index;
|
|
this.lastMarker.line = this.scanner.lineNumber;
|
|
this.lastMarker.column = this.scanner.index - this.scanner.lineStart;
|
|
var token = {
|
|
type: 101 /* Text */,
|
|
value: text,
|
|
lineNumber: this.scanner.lineNumber,
|
|
lineStart: this.scanner.lineStart,
|
|
start: start,
|
|
end: this.scanner.index
|
|
};
|
|
if ((text.length > 0) && this.config.tokens) {
|
|
this.tokens.push(this.convertToken(token));
|
|
}
|
|
return token;
|
|
};
|
|
JSXParser.prototype.peekJSXToken = function () {
|
|
var state = this.scanner.saveState();
|
|
this.scanner.scanComments();
|
|
var next = this.lexJSX();
|
|
this.scanner.restoreState(state);
|
|
return next;
|
|
};
|
|
// Expect the next JSX token to match the specified punctuator.
|
|
// If not, an exception will be thrown.
|
|
JSXParser.prototype.expectJSX = function (value) {
|
|
var token = this.nextJSXToken();
|
|
if (token.type !== 7 /* Punctuator */ || token.value !== value) {
|
|
this.throwUnexpectedToken(token);
|
|
}
|
|
};
|
|
// Return true if the next JSX token matches the specified punctuator.
|
|
JSXParser.prototype.matchJSX = function (value) {
|
|
var next = this.peekJSXToken();
|
|
return next.type === 7 /* Punctuator */ && next.value === value;
|
|
};
|
|
JSXParser.prototype.parseJSXIdentifier = function () {
|
|
var node = this.createJSXNode();
|
|
var token = this.nextJSXToken();
|
|
if (token.type !== 100 /* Identifier */) {
|
|
this.throwUnexpectedToken(token);
|
|
}
|
|
return this.finalize(node, new JSXNode.JSXIdentifier(token.value));
|
|
};
|
|
JSXParser.prototype.parseJSXElementName = function () {
|
|
var node = this.createJSXNode();
|
|
var elementName = this.parseJSXIdentifier();
|
|
if (this.matchJSX(':')) {
|
|
var namespace = elementName;
|
|
this.expectJSX(':');
|
|
var name_1 = this.parseJSXIdentifier();
|
|
elementName = this.finalize(node, new JSXNode.JSXNamespacedName(namespace, name_1));
|
|
}
|
|
else if (this.matchJSX('.')) {
|
|
while (this.matchJSX('.')) {
|
|
var object = elementName;
|
|
this.expectJSX('.');
|
|
var property = this.parseJSXIdentifier();
|
|
elementName = this.finalize(node, new JSXNode.JSXMemberExpression(object, property));
|
|
}
|
|
}
|
|
return elementName;
|
|
};
|
|
JSXParser.prototype.parseJSXAttributeName = function () {
|
|
var node = this.createJSXNode();
|
|
var attributeName;
|
|
var identifier = this.parseJSXIdentifier();
|
|
if (this.matchJSX(':')) {
|
|
var namespace = identifier;
|
|
this.expectJSX(':');
|
|
var name_2 = this.parseJSXIdentifier();
|
|
attributeName = this.finalize(node, new JSXNode.JSXNamespacedName(namespace, name_2));
|
|
}
|
|
else {
|
|
attributeName = identifier;
|
|
}
|
|
return attributeName;
|
|
};
|
|
JSXParser.prototype.parseJSXStringLiteralAttribute = function () {
|
|
var node = this.createJSXNode();
|
|
var token = this.nextJSXToken();
|
|
if (token.type !== 8 /* StringLiteral */) {
|
|
this.throwUnexpectedToken(token);
|
|
}
|
|
var raw = this.getTokenRaw(token);
|
|
return this.finalize(node, new Node.Literal(token.value, raw));
|
|
};
|
|
JSXParser.prototype.parseJSXExpressionAttribute = function () {
|
|
var node = this.createJSXNode();
|
|
this.expectJSX('{');
|
|
this.finishJSX();
|
|
if (this.match('}')) {
|
|
this.tolerateError('JSX attributes must only be assigned a non-empty expression');
|
|
}
|
|
var expression = this.parseAssignmentExpression();
|
|
this.reenterJSX();
|
|
return this.finalize(node, new JSXNode.JSXExpressionContainer(expression));
|
|
};
|
|
JSXParser.prototype.parseJSXAttributeValue = function () {
|
|
return this.matchJSX('{') ? this.parseJSXExpressionAttribute() :
|
|
this.matchJSX('<') ? this.parseJSXElement() : this.parseJSXStringLiteralAttribute();
|
|
};
|
|
JSXParser.prototype.parseJSXNameValueAttribute = function () {
|
|
var node = this.createJSXNode();
|
|
var name = this.parseJSXAttributeName();
|
|
var value = null;
|
|
if (this.matchJSX('=')) {
|
|
this.expectJSX('=');
|
|
value = this.parseJSXAttributeValue();
|
|
}
|
|
return this.finalize(node, new JSXNode.JSXAttribute(name, value));
|
|
};
|
|
JSXParser.prototype.parseJSXSpreadAttribute = function () {
|
|
var node = this.createJSXNode();
|
|
this.expectJSX('{');
|
|
this.expectJSX('...');
|
|
this.finishJSX();
|
|
var argument = this.parseAssignmentExpression();
|
|
this.reenterJSX();
|
|
return this.finalize(node, new JSXNode.JSXSpreadAttribute(argument));
|
|
};
|
|
JSXParser.prototype.parseJSXAttributes = function () {
|
|
var attributes = [];
|
|
while (!this.matchJSX('/') && !this.matchJSX('>')) {
|
|
var attribute = this.matchJSX('{') ? this.parseJSXSpreadAttribute() :
|
|
this.parseJSXNameValueAttribute();
|
|
attributes.push(attribute);
|
|
}
|
|
return attributes;
|
|
};
|
|
JSXParser.prototype.parseJSXOpeningElement = function () {
|
|
var node = this.createJSXNode();
|
|
this.expectJSX('<');
|
|
var name = this.parseJSXElementName();
|
|
var attributes = this.parseJSXAttributes();
|
|
var selfClosing = this.matchJSX('/');
|
|
if (selfClosing) {
|
|
this.expectJSX('/');
|
|
}
|
|
this.expectJSX('>');
|
|
return this.finalize(node, new JSXNode.JSXOpeningElement(name, selfClosing, attributes));
|
|
};
|
|
JSXParser.prototype.parseJSXBoundaryElement = function () {
|
|
var node = this.createJSXNode();
|
|
this.expectJSX('<');
|
|
if (this.matchJSX('/')) {
|
|
this.expectJSX('/');
|
|
var name_3 = this.parseJSXElementName();
|
|
this.expectJSX('>');
|
|
return this.finalize(node, new JSXNode.JSXClosingElement(name_3));
|
|
}
|
|
var name = this.parseJSXElementName();
|
|
var attributes = this.parseJSXAttributes();
|
|
var selfClosing = this.matchJSX('/');
|
|
if (selfClosing) {
|
|
this.expectJSX('/');
|
|
}
|
|
this.expectJSX('>');
|
|
return this.finalize(node, new JSXNode.JSXOpeningElement(name, selfClosing, attributes));
|
|
};
|
|
JSXParser.prototype.parseJSXEmptyExpression = function () {
|
|
var node = this.createJSXChildNode();
|
|
this.collectComments();
|
|
this.lastMarker.index = this.scanner.index;
|
|
this.lastMarker.line = this.scanner.lineNumber;
|
|
this.lastMarker.column = this.scanner.index - this.scanner.lineStart;
|
|
return this.finalize(node, new JSXNode.JSXEmptyExpression());
|
|
};
|
|
JSXParser.prototype.parseJSXExpressionContainer = function () {
|
|
var node = this.createJSXNode();
|
|
this.expectJSX('{');
|
|
var expression;
|
|
if (this.matchJSX('}')) {
|
|
expression = this.parseJSXEmptyExpression();
|
|
this.expectJSX('}');
|
|
}
|
|
else {
|
|
this.finishJSX();
|
|
expression = this.parseAssignmentExpression();
|
|
this.reenterJSX();
|
|
}
|
|
return this.finalize(node, new JSXNode.JSXExpressionContainer(expression));
|
|
};
|
|
JSXParser.prototype.parseJSXChildren = function () {
|
|
var children = [];
|
|
while (!this.scanner.eof()) {
|
|
var node = this.createJSXChildNode();
|
|
var token = this.nextJSXText();
|
|
if (token.start < token.end) {
|
|
var raw = this.getTokenRaw(token);
|
|
var child = this.finalize(node, new JSXNode.JSXText(token.value, raw));
|
|
children.push(child);
|
|
}
|
|
if (this.scanner.source[this.scanner.index] === '{') {
|
|
var container = this.parseJSXExpressionContainer();
|
|
children.push(container);
|
|
}
|
|
else {
|
|
break;
|
|
}
|
|
}
|
|
return children;
|
|
};
|
|
JSXParser.prototype.parseComplexJSXElement = function (el) {
|
|
var stack = [];
|
|
while (!this.scanner.eof()) {
|
|
el.children = el.children.concat(this.parseJSXChildren());
|
|
var node = this.createJSXChildNode();
|
|
var element = this.parseJSXBoundaryElement();
|
|
if (element.type === jsx_syntax_1.JSXSyntax.JSXOpeningElement) {
|
|
var opening = element;
|
|
if (opening.selfClosing) {
|
|
var child = this.finalize(node, new JSXNode.JSXElement(opening, [], null));
|
|
el.children.push(child);
|
|
}
|
|
else {
|
|
stack.push(el);
|
|
el = { node: node, opening: opening, closing: null, children: [] };
|
|
}
|
|
}
|
|
if (element.type === jsx_syntax_1.JSXSyntax.JSXClosingElement) {
|
|
el.closing = element;
|
|
var open_1 = getQualifiedElementName(el.opening.name);
|
|
var close_1 = getQualifiedElementName(el.closing.name);
|
|
if (open_1 !== close_1) {
|
|
this.tolerateError('Expected corresponding JSX closing tag for %0', open_1);
|
|
}
|
|
if (stack.length > 0) {
|
|
var child = this.finalize(el.node, new JSXNode.JSXElement(el.opening, el.children, el.closing));
|
|
el = stack[stack.length - 1];
|
|
el.children.push(child);
|
|
stack.pop();
|
|
}
|
|
else {
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
return el;
|
|
};
|
|
JSXParser.prototype.parseJSXElement = function () {
|
|
var node = this.createJSXNode();
|
|
var opening = this.parseJSXOpeningElement();
|
|
var children = [];
|
|
var closing = null;
|
|
if (!opening.selfClosing) {
|
|
var el = this.parseComplexJSXElement({ node: node, opening: opening, closing: closing, children: children });
|
|
children = el.children;
|
|
closing = el.closing;
|
|
}
|
|
return this.finalize(node, new JSXNode.JSXElement(opening, children, closing));
|
|
};
|
|
JSXParser.prototype.parseJSXRoot = function () {
|
|
// Pop the opening '<' added from the lookahead.
|
|
if (this.config.tokens) {
|
|
this.tokens.pop();
|
|
}
|
|
this.startJSX();
|
|
var element = this.parseJSXElement();
|
|
this.finishJSX();
|
|
return element;
|
|
};
|
|
JSXParser.prototype.isStartOfExpression = function () {
|
|
return _super.prototype.isStartOfExpression.call(this) || this.match('<');
|
|
};
|
|
return JSXParser;
|
|
}(parser_1.Parser));
|
|
exports.JSXParser = JSXParser;
|
|
|
|
|
|
/***/ },
|
|
/* 4 */
|
|
/***/ function(module, exports) {
|
|
|
|
"use strict";
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
// See also tools/generate-unicode-regex.js.
|
|
var Regex = {
|
|
// Unicode v8.0.0 NonAsciiIdentifierStart:
|
|
NonAsciiIdentifierStart: /[\xAA\xB5\xBA\xC0-\xD6\xD8-\xF6\xF8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376\u0377\u037A-\u037D\u037F\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5\u03F7-\u0481\u048A-\u052F\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA\u05F0-\u05F2\u0620-\u064A\u066E\u066F\u0671-\u06D3\u06D5\u06E5\u06E6\u06EE\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u07A5\u07B1\u07CA-\u07EA\u07F4\u07F5\u07FA\u0800-\u0815\u081A\u0824\u0828\u0840-\u0858\u08A0-\u08B4\u0904-\u0939\u093D\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BD\u09CE\u09DC\u09DD\u09DF-\u09E1\u09F0\u09F1\u0A05-\u0A0A\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0\u0AE1\u0AF9\u0B05-\u0B0C\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3D\u0B5C\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C39\u0C3D\u0C58-\u0C5A\u0C60\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0\u0CE1\u0CF1\u0CF2\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D3A\u0D3D\u0D4E\u0D5F-\u0D61\u0D7A-\u0D7F\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32\u0E33\u0E40-\u0E46\u0E81\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB0\u0EB2\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6\u0EDC-\u0EDF\u0F00\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8C\u1000-\u102A\u103F\u1050-\u1055\u105A-\u105D\u1061\u1065\u1066\u106E-\u1070\u1075-\u1081\u108E\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F5\u13F8-\u13FD\u1401-\u166C\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u16EE-\u16F8\u1700-\u170C\u170E-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7\u17DC\u1820-\u1877\u1880-\u18A8\u18AA\u18B0-\u18F5\u1900-\u191E\u1950-\u196D\u1970-\u1974\u1980-\u19AB\u19B0-\u19C9\u1A00-\u1A16\u1A20-\u1A54\u1AA7\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE\u1BAF\u1BBA-\u1BE5\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1CE9-\u1CEC\u1CEE-\u1CF1\u1CF5\u1CF6\u1D00-\u1DBF\u1E00-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F\u2090-\u209C\u2102\u2107\u210A-\u2113\u2115\u2118-\u211D\u2124\u2126\u2128\u212A-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2160-\u2188\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CEE\u2CF2\u2CF3\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303C\u3041-\u3096\u309B-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FD5\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA61F\uA62A\uA62B\uA640-\uA66E\uA67F-\uA69D\uA6A0-\uA6EF\uA717-\uA71F\uA722-\uA788\uA78B-\uA7AD\uA7B0-\uA7B7\uA7F7-\uA801\uA803-\uA805\uA807-\uA80A\uA80C-\uA822\uA840-\uA873\uA882-\uA8B3\uA8F2-\uA8F7\uA8FB\uA8FD
\uA90A-\uA925\uA930-\uA946\uA960-\uA97C\uA984-\uA9B2\uA9CF\uA9E0-\uA9E4\uA9E6-\uA9EF\uA9FA-\uA9FE\uAA00-\uAA28\uAA40-\uAA42\uAA44-\uAA4B\uAA60-\uAA76\uAA7A\uAA7E-\uAAAF\uAAB1\uAAB5\uAAB6\uAAB9-\uAABD\uAAC0\uAAC2\uAADB-\uAADD\uAAE0-\uAAEA\uAAF2-\uAAF4\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E\uAB30-\uAB5A\uAB5C-\uAB65\uAB70-\uABE2\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE70-\uFE74\uFE76-\uFEFC\uFF21-\uFF3A\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF\uFFD2-\uFFD7\uFFDA-\uFFDC]|\uD800[\uDC00-\uDC0B\uDC0D-\uDC26\uDC28-\uDC3A\uDC3C\uDC3D\uDC3F-\uDC4D\uDC50-\uDC5D\uDC80-\uDCFA\uDD40-\uDD74\uDE80-\uDE9C\uDEA0-\uDED0\uDF00-\uDF1F\uDF30-\uDF4A\uDF50-\uDF75\uDF80-\uDF9D\uDFA0-\uDFC3\uDFC8-\uDFCF\uDFD1-\uDFD5]|\uD801[\uDC00-\uDC9D\uDD00-\uDD27\uDD30-\uDD63\uDE00-\uDF36\uDF40-\uDF55\uDF60-\uDF67]|\uD802[\uDC00-\uDC05\uDC08\uDC0A-\uDC35\uDC37\uDC38\uDC3C\uDC3F-\uDC55\uDC60-\uDC76\uDC80-\uDC9E\uDCE0-\uDCF2\uDCF4\uDCF5\uDD00-\uDD15\uDD20-\uDD39\uDD80-\uDDB7\uDDBE\uDDBF\uDE00\uDE10-\uDE13\uDE15-\uDE17\uDE19-\uDE33\uDE60-\uDE7C\uDE80-\uDE9C\uDEC0-\uDEC7\uDEC9-\uDEE4\uDF00-\uDF35\uDF40-\uDF55\uDF60-\uDF72\uDF80-\uDF91]|\uD803[\uDC00-\uDC48\uDC80-\uDCB2\uDCC0-\uDCF2]|\uD804[\uDC03-\uDC37\uDC83-\uDCAF\uDCD0-\uDCE8\uDD03-\uDD26\uDD50-\uDD72\uDD76\uDD83-\uDDB2\uDDC1-\uDDC4\uDDDA\uDDDC\uDE00-\uDE11\uDE13-\uDE2B\uDE80-\uDE86\uDE88\uDE8A-\uDE8D\uDE8F-\uDE9D\uDE9F-\uDEA8\uDEB0-\uDEDE\uDF05-\uDF0C\uDF0F\uDF10\uDF13-\uDF28\uDF2A-\uDF30\uDF32\uDF33\uDF35-\uDF39\uDF3D\uDF50\uDF5D-\uDF61]|\uD805[\uDC80-\uDCAF\uDCC4\uDCC5\uDCC7\uDD80-\uDDAE\uDDD8-\uDDDB\uDE00-\uDE2F\uDE44\uDE80-\uDEAA\uDF00-\uDF19]|\uD806[\uDCA0-\uDCDF\uDCFF\uDEC0-\uDEF8]|\uD808[\uDC00-\uDF99]|\uD809[\uDC00-\uDC6E\uDC80-\uDD43]|[\uD80C\uD840-\uD868\uD86A-\uD86C\uD86F-\uD872][\uDC00-\uDFFF]|\uD80D[\uDC00-\uDC2E]|\uD811[\uDC00-\uDE46]|\uD81A[\uDC00-\uDE38\uDE40-\uDE5E\uDED0-\uDEED\uDF00-\uDF2F\uDF40-\uDF43\uDF63-\uDF77\uDF7D-\uDF8F]|\uD81B[\uDF00-\uDF44\uDF50\uDF93-\uDF9F]|\uD82C[\uDC00\uDC01]|\uD82F[\uDC00-\uDC6A\uDC70-\uDC7C\uDC80-\uDC88\uDC90-\uDC99]|\uD835[\uDC00-\uDC54\uDC56-\uDC9C\uDC9E\uDC9F\uDCA2\uDCA5\uDCA6\uDCA9-\uDCAC\uDCAE-\uDCB9\uDCBB\uDCBD-\uDCC3\uDCC5-\uDD05\uDD07-\uDD0A\uDD0D-\uDD14\uDD16-\uDD1C\uDD1E-\uDD39\uDD3B-\uDD3E\uDD40-\uDD44\uDD46\uDD4A-\uDD50\uDD52-\uDEA5\uDEA8-\uDEC0\uDEC2-\uDEDA\uDEDC-\uDEFA\uDEFC-\uDF14\uDF16-\uDF34\uDF36-\uDF4E\uDF50-\uDF6E\uDF70-\uDF88\uDF8A-\uDFA8\uDFAA-\uDFC2\uDFC4-\uDFCB]|\uD83A[\uDC00-\uDCC4]|\uD83B[\uDE00-\uDE03\uDE05-\uDE1F\uDE21\uDE22\uDE24\uDE27\uDE29-\uDE32\uDE34-\uDE37\uDE39\uDE3B\uDE42\uDE47\uDE49\uDE4B\uDE4D-\uDE4F\uDE51\uDE52\uDE54\uDE57\uDE59\uDE5B\uDE5D\uDE5F\uDE61\uDE62\uDE64\uDE67-\uDE6A\uDE6C-\uDE72\uDE74-\uDE77\uDE79-\uDE7C\uDE7E\uDE80-\uDE89\uDE8B-\uDE9B\uDEA1-\uDEA3\uDEA5-\uDEA9\uDEAB-\uDEBB]|\uD869[\uDC00-\uDED6\uDF00-\uDFFF]|\uD86D[\uDC00-\uDF34\uDF40-\uDFFF]|\uD86E[\uDC00-\uDC1D\uDC20-\uDFFF]|\uD873[\uDC00-\uDEA1]|\uD87E[\uDC00-\uDE1D]/,
|
|
// Unicode v8.0.0 NonAsciiIdentifierPart:
|
|
NonAsciiIdentifierPart: /[\xAA\xB5\xB7\xBA\xC0-\xD6\xD8-\xF6\xF8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0300-\u0374\u0376\u0377\u037A-\u037D\u037F\u0386-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5\u03F7-\u0481\u0483-\u0487\u048A-\u052F\u0531-\u0556\u0559\u0561-\u0587\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u05D0-\u05EA\u05F0-\u05F2\u0610-\u061A\u0620-\u0669\u066E-\u06D3\u06D5-\u06DC\u06DF-\u06E8\u06EA-\u06FC\u06FF\u0710-\u074A\u074D-\u07B1\u07C0-\u07F5\u07FA\u0800-\u082D\u0840-\u085B\u08A0-\u08B4\u08E3-\u0963\u0966-\u096F\u0971-\u0983\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BC-\u09C4\u09C7\u09C8\u09CB-\u09CE\u09D7\u09DC\u09DD\u09DF-\u09E3\u09E6-\u09F1\u0A01-\u0A03\u0A05-\u0A0A\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A59-\u0A5C\u0A5E\u0A66-\u0A75\u0A81-\u0A83\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABC-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AD0\u0AE0-\u0AE3\u0AE6-\u0AEF\u0AF9\u0B01-\u0B03\u0B05-\u0B0C\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3C-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B5C\u0B5D\u0B5F-\u0B63\u0B66-\u0B6F\u0B71\u0B82\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD0\u0BD7\u0BE6-\u0BEF\u0C00-\u0C03\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C39\u0C3D-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C58-\u0C5A\u0C60-\u0C63\u0C66-\u0C6F\u0C81-\u0C83\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBC-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CDE\u0CE0-\u0CE3\u0CE6-\u0CEF\u0CF1\u0CF2\u0D01-\u0D03\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D3A\u0D3D-\u0D44\u0D46-\u0D48\u0D4A-\u0D4E\u0D57\u0D5F-\u0D63\u0D66-\u0D6F\u0D7A-\u0D7F\u0D82\u0D83\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DE6-\u0DEF\u0DF2\u0DF3\u0E01-\u0E3A\u0E40-\u0E4E\u0E50-\u0E59\u0E81\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB9\u0EBB-\u0EBD\u0EC0-\u0EC4\u0EC6\u0EC8-\u0ECD\u0ED0-\u0ED9\u0EDC-\u0EDF\u0F00\u0F18\u0F19\u0F20-\u0F29\u0F35\u0F37\u0F39\u0F3E-\u0F47\u0F49-\u0F6C\u0F71-\u0F84\u0F86-\u0F97\u0F99-\u0FBC\u0FC6\u1000-\u1049\u1050-\u109D\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u135D-\u135F\u1369-\u1371\u1380-\u138F\u13A0-\u13F5\u13F8-\u13FD\u1401-\u166C\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u16EE-\u16F8\u1700-\u170C\u170E-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176C\u176E-\u1770\u1772\u1773\u1780-\u17D3\u17D7\u17DC\u17DD\u17E0-\u17E9\u180B-\u180D\u1810-\u1819\u1820-\u1877\u1880-\u18AA\u18B0-\u18F5\u1900-\u191E\u1920-\u192B\u1930-\u193B\u1946-\u196D\u1970-\u1974\u1980-\u19AB\u19B0-\u19C9\u19D0-\u19DA\u1A00-\u1A1B\u1A20-\u1A5E\u1A60-\u1A7C\u1A7F-\u1A89\u1A90-\u1A99\u1AA7\u1AB0-\u1ABD\u1B00-\u1B4B\u1B50-\u1B59\u1B6B-\u1B73\u1B80-\u1BF3\u1C00-\u1C37\u1C40-\u1C49\u1C4D-\u1C7D\u1CD0-\u1CD2\u1CD4-\u1CF6\u1CF8\u1CF9\u1D00-\u1DF5\u1DFC-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u200C\u200D\u203F\u2040\u2054\u2071\u207F\u2090-\u209C\
u20D0-\u20DC\u20E1\u20E5-\u20F0\u2102\u2107\u210A-\u2113\u2115\u2118-\u211D\u2124\u2126\u2128\u212A-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2160-\u2188\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CF3\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D7F-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u2DE0-\u2DFF\u3005-\u3007\u3021-\u302F\u3031-\u3035\u3038-\u303C\u3041-\u3096\u3099-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FD5\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA62B\uA640-\uA66F\uA674-\uA67D\uA67F-\uA6F1\uA717-\uA71F\uA722-\uA788\uA78B-\uA7AD\uA7B0-\uA7B7\uA7F7-\uA827\uA840-\uA873\uA880-\uA8C4\uA8D0-\uA8D9\uA8E0-\uA8F7\uA8FB\uA8FD\uA900-\uA92D\uA930-\uA953\uA960-\uA97C\uA980-\uA9C0\uA9CF-\uA9D9\uA9E0-\uA9FE\uAA00-\uAA36\uAA40-\uAA4D\uAA50-\uAA59\uAA60-\uAA76\uAA7A-\uAAC2\uAADB-\uAADD\uAAE0-\uAAEF\uAAF2-\uAAF6\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E\uAB30-\uAB5A\uAB5C-\uAB65\uAB70-\uABEA\uABEC\uABED\uABF0-\uABF9\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE00-\uFE0F\uFE20-\uFE2F\uFE33\uFE34\uFE4D-\uFE4F\uFE70-\uFE74\uFE76-\uFEFC\uFF10-\uFF19\uFF21-\uFF3A\uFF3F\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF\uFFD2-\uFFD7\uFFDA-\uFFDC]|\uD800[\uDC00-\uDC0B\uDC0D-\uDC26\uDC28-\uDC3A\uDC3C\uDC3D\uDC3F-\uDC4D\uDC50-\uDC5D\uDC80-\uDCFA\uDD40-\uDD74\uDDFD\uDE80-\uDE9C\uDEA0-\uDED0\uDEE0\uDF00-\uDF1F\uDF30-\uDF4A\uDF50-\uDF7A\uDF80-\uDF9D\uDFA0-\uDFC3\uDFC8-\uDFCF\uDFD1-\uDFD5]|\uD801[\uDC00-\uDC9D\uDCA0-\uDCA9\uDD00-\uDD27\uDD30-\uDD63\uDE00-\uDF36\uDF40-\uDF55\uDF60-\uDF67]|\uD802[\uDC00-\uDC05\uDC08\uDC0A-\uDC35\uDC37\uDC38\uDC3C\uDC3F-\uDC55\uDC60-\uDC76\uDC80-\uDC9E\uDCE0-\uDCF2\uDCF4\uDCF5\uDD00-\uDD15\uDD20-\uDD39\uDD80-\uDDB7\uDDBE\uDDBF\uDE00-\uDE03\uDE05\uDE06\uDE0C-\uDE13\uDE15-\uDE17\uDE19-\uDE33\uDE38-\uDE3A\uDE3F\uDE60-\uDE7C\uDE80-\uDE9C\uDEC0-\uDEC7\uDEC9-\uDEE6\uDF00-\uDF35\uDF40-\uDF55\uDF60-\uDF72\uDF80-\uDF91]|\uD803[\uDC00-\uDC48\uDC80-\uDCB2\uDCC0-\uDCF2]|\uD804[\uDC00-\uDC46\uDC66-\uDC6F\uDC7F-\uDCBA\uDCD0-\uDCE8\uDCF0-\uDCF9\uDD00-\uDD34\uDD36-\uDD3F\uDD50-\uDD73\uDD76\uDD80-\uDDC4\uDDCA-\uDDCC\uDDD0-\uDDDA\uDDDC\uDE00-\uDE11\uDE13-\uDE37\uDE80-\uDE86\uDE88\uDE8A-\uDE8D\uDE8F-\uDE9D\uDE9F-\uDEA8\uDEB0-\uDEEA\uDEF0-\uDEF9\uDF00-\uDF03\uDF05-\uDF0C\uDF0F\uDF10\uDF13-\uDF28\uDF2A-\uDF30\uDF32\uDF33\uDF35-\uDF39\uDF3C-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF50\uDF57\uDF5D-\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDC80-\uDCC5\uDCC7\uDCD0-\uDCD9\uDD80-\uDDB5\uDDB8-\uDDC0\uDDD8-\uDDDD\uDE00-\uDE40\uDE44\uDE50-\uDE59\uDE80-\uDEB7\uDEC0-\uDEC9\uDF00-\uDF19\uDF1D-\uDF2B\uDF30-\uDF39]|\uD806[\uDCA0-\uDCE9\uDCFF\uDEC0-\uDEF8]|\uD808[\uDC00-\uDF99]|\uD809[\uDC00-\uDC6E\uDC80-\uDD43]|[\uD80C\uD840-\uD868\uD86A-\uD86C\uD86F-\uD872][\uDC00-\uDFFF]|\uD80D[\uDC00-\uDC2E]|\uD811[\uDC00-\uDE46]|\uD81A[\uDC00-\uDE38\uDE40-\uDE5E\uDE60-\uDE69\uDED0-\uDEED\uDEF0-\uDEF4\uDF00-\uDF36\uDF40-\uDF43\uDF50-\uDF59\uDF63-\uDF77\uDF7D-\uDF8F]|\uD81B[\uDF00-\uDF44\uDF50-\uDF7E\uDF8F-\uDF9F]|\uD82C[\uDC00\uDC01]|\uD82F[\uDC00-\uDC6A\uDC70-\uDC7C\uDC80-\uDC88\uDC90-\uDC99\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD835[\uDC00-\uDC54\uDC56-\uDC9C\uDC9E\uDC9F\uDCA2\uDCA5\uDCA6\uDCA9-\uDCAC\uDCAE-\uDCB9\uDCBB\uDC
BD-\uDCC3\uDCC5-\uDD05\uDD07-\uDD0A\uDD0D-\uDD14\uDD16-\uDD1C\uDD1E-\uDD39\uDD3B-\uDD3E\uDD40-\uDD44\uDD46\uDD4A-\uDD50\uDD52-\uDEA5\uDEA8-\uDEC0\uDEC2-\uDEDA\uDEDC-\uDEFA\uDEFC-\uDF14\uDF16-\uDF34\uDF36-\uDF4E\uDF50-\uDF6E\uDF70-\uDF88\uDF8A-\uDFA8\uDFAA-\uDFC2\uDFC4-\uDFCB\uDFCE-\uDFFF]|\uD836[\uDE00-\uDE36\uDE3B-\uDE6C\uDE75\uDE84\uDE9B-\uDE9F\uDEA1-\uDEAF]|\uD83A[\uDC00-\uDCC4\uDCD0-\uDCD6]|\uD83B[\uDE00-\uDE03\uDE05-\uDE1F\uDE21\uDE22\uDE24\uDE27\uDE29-\uDE32\uDE34-\uDE37\uDE39\uDE3B\uDE42\uDE47\uDE49\uDE4B\uDE4D-\uDE4F\uDE51\uDE52\uDE54\uDE57\uDE59\uDE5B\uDE5D\uDE5F\uDE61\uDE62\uDE64\uDE67-\uDE6A\uDE6C-\uDE72\uDE74-\uDE77\uDE79-\uDE7C\uDE7E\uDE80-\uDE89\uDE8B-\uDE9B\uDEA1-\uDEA3\uDEA5-\uDEA9\uDEAB-\uDEBB]|\uD869[\uDC00-\uDED6\uDF00-\uDFFF]|\uD86D[\uDC00-\uDF34\uDF40-\uDFFF]|\uD86E[\uDC00-\uDC1D\uDC20-\uDFFF]|\uD873[\uDC00-\uDEA1]|\uD87E[\uDC00-\uDE1D]|\uDB40[\uDD00-\uDDEF]/
|
|
};
|
|
exports.Character = {
|
|
/* tslint:disable:no-bitwise */
|
|
fromCodePoint: function (cp) {
|
|
return (cp < 0x10000) ? String.fromCharCode(cp) :
|
|
String.fromCharCode(0xD800 + ((cp - 0x10000) >> 10)) +
|
|
String.fromCharCode(0xDC00 + ((cp - 0x10000) & 1023));
|
|
},
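            // Worked example for the surrogate-pair branch above (illustrative note):
            // for cp = 0x1D306, (cp - 0x10000) >> 10 is 0x34 and (cp - 0x10000) & 1023
            // is 0x306, so fromCodePoint(0x1D306) returns '\uD834\uDF06', the UTF-16
            // surrogate pair encoding of the code point.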
|
|
// https://tc39.github.io/ecma262/#sec-white-space
|
|
isWhiteSpace: function (cp) {
|
|
return (cp === 0x20) || (cp === 0x09) || (cp === 0x0B) || (cp === 0x0C) || (cp === 0xA0) ||
|
|
(cp >= 0x1680 && [0x1680, 0x2000, 0x2001, 0x2002, 0x2003, 0x2004, 0x2005, 0x2006, 0x2007, 0x2008, 0x2009, 0x200A, 0x202F, 0x205F, 0x3000, 0xFEFF].indexOf(cp) >= 0);
|
|
},
|
|
// https://tc39.github.io/ecma262/#sec-line-terminators
|
|
isLineTerminator: function (cp) {
|
|
return (cp === 0x0A) || (cp === 0x0D) || (cp === 0x2028) || (cp === 0x2029);
|
|
},
|
|
// https://tc39.github.io/ecma262/#sec-names-and-keywords
|
|
isIdentifierStart: function (cp) {
|
|
return (cp === 0x24) || (cp === 0x5F) ||
|
|
(cp >= 0x41 && cp <= 0x5A) ||
|
|
(cp >= 0x61 && cp <= 0x7A) ||
|
|
(cp === 0x5C) ||
|
|
((cp >= 0x80) && Regex.NonAsciiIdentifierStart.test(exports.Character.fromCodePoint(cp)));
|
|
},
|
|
isIdentifierPart: function (cp) {
|
|
return (cp === 0x24) || (cp === 0x5F) ||
|
|
(cp >= 0x41 && cp <= 0x5A) ||
|
|
(cp >= 0x61 && cp <= 0x7A) ||
|
|
(cp >= 0x30 && cp <= 0x39) ||
|
|
(cp === 0x5C) ||
|
|
((cp >= 0x80) && Regex.NonAsciiIdentifierPart.test(exports.Character.fromCodePoint(cp)));
|
|
},
|
|
// https://tc39.github.io/ecma262/#sec-literals-numeric-literals
|
|
isDecimalDigit: function (cp) {
|
|
return (cp >= 0x30 && cp <= 0x39); // 0..9
|
|
},
|
|
isHexDigit: function (cp) {
|
|
return (cp >= 0x30 && cp <= 0x39) ||
|
|
(cp >= 0x41 && cp <= 0x46) ||
|
|
(cp >= 0x61 && cp <= 0x66); // a..f
|
|
},
|
|
isOctalDigit: function (cp) {
|
|
return (cp >= 0x30 && cp <= 0x37); // 0..7
|
|
}
|
|
};
|
|
|
|
|
|
/***/ },
|
|
/* 5 */
|
|
/***/ function(module, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
var jsx_syntax_1 = __webpack_require__(6);
|
|
/* tslint:disable:max-classes-per-file */
|
|
var JSXClosingElement = (function () {
|
|
function JSXClosingElement(name) {
|
|
this.type = jsx_syntax_1.JSXSyntax.JSXClosingElement;
|
|
this.name = name;
|
|
}
|
|
return JSXClosingElement;
|
|
}());
|
|
exports.JSXClosingElement = JSXClosingElement;
|
|
var JSXElement = (function () {
|
|
function JSXElement(openingElement, children, closingElement) {
|
|
this.type = jsx_syntax_1.JSXSyntax.JSXElement;
|
|
this.openingElement = openingElement;
|
|
this.children = children;
|
|
this.closingElement = closingElement;
|
|
}
|
|
return JSXElement;
|
|
}());
|
|
exports.JSXElement = JSXElement;
|
|
var JSXEmptyExpression = (function () {
|
|
function JSXEmptyExpression() {
|
|
this.type = jsx_syntax_1.JSXSyntax.JSXEmptyExpression;
|
|
}
|
|
return JSXEmptyExpression;
|
|
}());
|
|
exports.JSXEmptyExpression = JSXEmptyExpression;
|
|
var JSXExpressionContainer = (function () {
|
|
function JSXExpressionContainer(expression) {
|
|
this.type = jsx_syntax_1.JSXSyntax.JSXExpressionContainer;
|
|
this.expression = expression;
|
|
}
|
|
return JSXExpressionContainer;
|
|
}());
|
|
exports.JSXExpressionContainer = JSXExpressionContainer;
|
|
var JSXIdentifier = (function () {
|
|
function JSXIdentifier(name) {
|
|
this.type = jsx_syntax_1.JSXSyntax.JSXIdentifier;
|
|
this.name = name;
|
|
}
|
|
return JSXIdentifier;
|
|
}());
|
|
exports.JSXIdentifier = JSXIdentifier;
|
|
var JSXMemberExpression = (function () {
|
|
function JSXMemberExpression(object, property) {
|
|
this.type = jsx_syntax_1.JSXSyntax.JSXMemberExpression;
|
|
this.object = object;
|
|
this.property = property;
|
|
}
|
|
return JSXMemberExpression;
|
|
}());
|
|
exports.JSXMemberExpression = JSXMemberExpression;
|
|
var JSXAttribute = (function () {
|
|
function JSXAttribute(name, value) {
|
|
this.type = jsx_syntax_1.JSXSyntax.JSXAttribute;
|
|
this.name = name;
|
|
this.value = value;
|
|
}
|
|
return JSXAttribute;
|
|
}());
|
|
exports.JSXAttribute = JSXAttribute;
|
|
var JSXNamespacedName = (function () {
|
|
function JSXNamespacedName(namespace, name) {
|
|
this.type = jsx_syntax_1.JSXSyntax.JSXNamespacedName;
|
|
this.namespace = namespace;
|
|
this.name = name;
|
|
}
|
|
return JSXNamespacedName;
|
|
}());
|
|
exports.JSXNamespacedName = JSXNamespacedName;
|
|
var JSXOpeningElement = (function () {
|
|
function JSXOpeningElement(name, selfClosing, attributes) {
|
|
this.type = jsx_syntax_1.JSXSyntax.JSXOpeningElement;
|
|
this.name = name;
|
|
this.selfClosing = selfClosing;
|
|
this.attributes = attributes;
|
|
}
|
|
return JSXOpeningElement;
|
|
}());
|
|
exports.JSXOpeningElement = JSXOpeningElement;
|
|
var JSXSpreadAttribute = (function () {
|
|
function JSXSpreadAttribute(argument) {
|
|
this.type = jsx_syntax_1.JSXSyntax.JSXSpreadAttribute;
|
|
this.argument = argument;
|
|
}
|
|
return JSXSpreadAttribute;
|
|
}());
|
|
exports.JSXSpreadAttribute = JSXSpreadAttribute;
|
|
var JSXText = (function () {
|
|
function JSXText(value, raw) {
|
|
this.type = jsx_syntax_1.JSXSyntax.JSXText;
|
|
this.value = value;
|
|
this.raw = raw;
|
|
}
|
|
return JSXText;
|
|
}());
|
|
exports.JSXText = JSXText;
|
|
|
|
|
|
/***/ },
|
|
/* 6 */
|
|
/***/ function(module, exports) {
|
|
|
|
"use strict";
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
exports.JSXSyntax = {
|
|
JSXAttribute: 'JSXAttribute',
|
|
JSXClosingElement: 'JSXClosingElement',
|
|
JSXElement: 'JSXElement',
|
|
JSXEmptyExpression: 'JSXEmptyExpression',
|
|
JSXExpressionContainer: 'JSXExpressionContainer',
|
|
JSXIdentifier: 'JSXIdentifier',
|
|
JSXMemberExpression: 'JSXMemberExpression',
|
|
JSXNamespacedName: 'JSXNamespacedName',
|
|
JSXOpeningElement: 'JSXOpeningElement',
|
|
JSXSpreadAttribute: 'JSXSpreadAttribute',
|
|
JSXText: 'JSXText'
|
|
};
|
|
|
|
|
|
/***/ },
|
|
/* 7 */
|
|
/***/ function(module, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
var syntax_1 = __webpack_require__(2);
|
|
/* tslint:disable:max-classes-per-file */
|
|
var ArrayExpression = (function () {
|
|
function ArrayExpression(elements) {
|
|
this.type = syntax_1.Syntax.ArrayExpression;
|
|
this.elements = elements;
|
|
}
|
|
return ArrayExpression;
|
|
}());
|
|
exports.ArrayExpression = ArrayExpression;
|
|
var ArrayPattern = (function () {
|
|
function ArrayPattern(elements) {
|
|
this.type = syntax_1.Syntax.ArrayPattern;
|
|
this.elements = elements;
|
|
}
|
|
return ArrayPattern;
|
|
}());
|
|
exports.ArrayPattern = ArrayPattern;
|
|
var ArrowFunctionExpression = (function () {
|
|
function ArrowFunctionExpression(params, body, expression) {
|
|
this.type = syntax_1.Syntax.ArrowFunctionExpression;
|
|
this.id = null;
|
|
this.params = params;
|
|
this.body = body;
|
|
this.generator = false;
|
|
this.expression = expression;
|
|
this.async = false;
|
|
}
|
|
return ArrowFunctionExpression;
|
|
}());
|
|
exports.ArrowFunctionExpression = ArrowFunctionExpression;
|
|
var AssignmentExpression = (function () {
|
|
function AssignmentExpression(operator, left, right) {
|
|
this.type = syntax_1.Syntax.AssignmentExpression;
|
|
this.operator = operator;
|
|
this.left = left;
|
|
this.right = right;
|
|
}
|
|
return AssignmentExpression;
|
|
}());
|
|
exports.AssignmentExpression = AssignmentExpression;
|
|
var AssignmentPattern = (function () {
|
|
function AssignmentPattern(left, right) {
|
|
this.type = syntax_1.Syntax.AssignmentPattern;
|
|
this.left = left;
|
|
this.right = right;
|
|
}
|
|
return AssignmentPattern;
|
|
}());
|
|
exports.AssignmentPattern = AssignmentPattern;
|
|
var AsyncArrowFunctionExpression = (function () {
|
|
function AsyncArrowFunctionExpression(params, body, expression) {
|
|
this.type = syntax_1.Syntax.ArrowFunctionExpression;
|
|
this.id = null;
|
|
this.params = params;
|
|
this.body = body;
|
|
this.generator = false;
|
|
this.expression = expression;
|
|
this.async = true;
|
|
}
|
|
return AsyncArrowFunctionExpression;
|
|
}());
|
|
exports.AsyncArrowFunctionExpression = AsyncArrowFunctionExpression;
|
|
var AsyncFunctionDeclaration = (function () {
|
|
function AsyncFunctionDeclaration(id, params, body) {
|
|
this.type = syntax_1.Syntax.FunctionDeclaration;
|
|
this.id = id;
|
|
this.params = params;
|
|
this.body = body;
|
|
this.generator = false;
|
|
this.expression = false;
|
|
this.async = true;
|
|
}
|
|
return AsyncFunctionDeclaration;
|
|
}());
|
|
exports.AsyncFunctionDeclaration = AsyncFunctionDeclaration;
|
|
var AsyncFunctionExpression = (function () {
|
|
function AsyncFunctionExpression(id, params, body) {
|
|
this.type = syntax_1.Syntax.FunctionExpression;
|
|
this.id = id;
|
|
this.params = params;
|
|
this.body = body;
|
|
this.generator = false;
|
|
this.expression = false;
|
|
this.async = true;
|
|
}
|
|
return AsyncFunctionExpression;
|
|
}());
|
|
exports.AsyncFunctionExpression = AsyncFunctionExpression;
|
|
var AwaitExpression = (function () {
|
|
function AwaitExpression(argument) {
|
|
this.type = syntax_1.Syntax.AwaitExpression;
|
|
this.argument = argument;
|
|
}
|
|
return AwaitExpression;
|
|
}());
|
|
exports.AwaitExpression = AwaitExpression;
|
|
var BinaryExpression = (function () {
|
|
function BinaryExpression(operator, left, right) {
|
|
var logical = (operator === '||' || operator === '&&');
|
|
this.type = logical ? syntax_1.Syntax.LogicalExpression : syntax_1.Syntax.BinaryExpression;
|
|
this.operator = operator;
|
|
this.left = left;
|
|
this.right = right;
|
|
}
|
|
return BinaryExpression;
|
|
}());
|
|
exports.BinaryExpression = BinaryExpression;
|
|
var BlockStatement = (function () {
|
|
function BlockStatement(body) {
|
|
this.type = syntax_1.Syntax.BlockStatement;
|
|
this.body = body;
|
|
}
|
|
return BlockStatement;
|
|
}());
|
|
exports.BlockStatement = BlockStatement;
|
|
var BreakStatement = (function () {
|
|
function BreakStatement(label) {
|
|
this.type = syntax_1.Syntax.BreakStatement;
|
|
this.label = label;
|
|
}
|
|
return BreakStatement;
|
|
}());
|
|
exports.BreakStatement = BreakStatement;
|
|
var CallExpression = (function () {
|
|
function CallExpression(callee, args) {
|
|
this.type = syntax_1.Syntax.CallExpression;
|
|
this.callee = callee;
|
|
this.arguments = args;
|
|
}
|
|
return CallExpression;
|
|
}());
|
|
exports.CallExpression = CallExpression;
|
|
var CatchClause = (function () {
|
|
function CatchClause(param, body) {
|
|
this.type = syntax_1.Syntax.CatchClause;
|
|
this.param = param;
|
|
this.body = body;
|
|
}
|
|
return CatchClause;
|
|
}());
|
|
exports.CatchClause = CatchClause;
|
|
var ClassBody = (function () {
|
|
function ClassBody(body) {
|
|
this.type = syntax_1.Syntax.ClassBody;
|
|
this.body = body;
|
|
}
|
|
return ClassBody;
|
|
}());
|
|
exports.ClassBody = ClassBody;
|
|
var ClassDeclaration = (function () {
|
|
function ClassDeclaration(id, superClass, body) {
|
|
this.type = syntax_1.Syntax.ClassDeclaration;
|
|
this.id = id;
|
|
this.superClass = superClass;
|
|
this.body = body;
|
|
}
|
|
return ClassDeclaration;
|
|
}());
|
|
exports.ClassDeclaration = ClassDeclaration;
|
|
var ClassExpression = (function () {
|
|
function ClassExpression(id, superClass, body) {
|
|
this.type = syntax_1.Syntax.ClassExpression;
|
|
this.id = id;
|
|
this.superClass = superClass;
|
|
this.body = body;
|
|
}
|
|
return ClassExpression;
|
|
}());
|
|
exports.ClassExpression = ClassExpression;
|
|
var ComputedMemberExpression = (function () {
|
|
function ComputedMemberExpression(object, property) {
|
|
this.type = syntax_1.Syntax.MemberExpression;
|
|
this.computed = true;
|
|
this.object = object;
|
|
this.property = property;
|
|
}
|
|
return ComputedMemberExpression;
|
|
}());
|
|
exports.ComputedMemberExpression = ComputedMemberExpression;
|
|
var ConditionalExpression = (function () {
|
|
function ConditionalExpression(test, consequent, alternate) {
|
|
this.type = syntax_1.Syntax.ConditionalExpression;
|
|
this.test = test;
|
|
this.consequent = consequent;
|
|
this.alternate = alternate;
|
|
}
|
|
return ConditionalExpression;
|
|
}());
|
|
exports.ConditionalExpression = ConditionalExpression;
|
|
var ContinueStatement = (function () {
|
|
function ContinueStatement(label) {
|
|
this.type = syntax_1.Syntax.ContinueStatement;
|
|
this.label = label;
|
|
}
|
|
return ContinueStatement;
|
|
}());
|
|
exports.ContinueStatement = ContinueStatement;
|
|
var DebuggerStatement = (function () {
|
|
function DebuggerStatement() {
|
|
this.type = syntax_1.Syntax.DebuggerStatement;
|
|
}
|
|
return DebuggerStatement;
|
|
}());
|
|
exports.DebuggerStatement = DebuggerStatement;
|
|
var Directive = (function () {
|
|
function Directive(expression, directive) {
|
|
this.type = syntax_1.Syntax.ExpressionStatement;
|
|
this.expression = expression;
|
|
this.directive = directive;
|
|
}
|
|
return Directive;
|
|
}());
|
|
exports.Directive = Directive;
|
|
var DoWhileStatement = (function () {
|
|
function DoWhileStatement(body, test) {
|
|
this.type = syntax_1.Syntax.DoWhileStatement;
|
|
this.body = body;
|
|
this.test = test;
|
|
}
|
|
return DoWhileStatement;
|
|
}());
|
|
exports.DoWhileStatement = DoWhileStatement;
|
|
var EmptyStatement = (function () {
|
|
function EmptyStatement() {
|
|
this.type = syntax_1.Syntax.EmptyStatement;
|
|
}
|
|
return EmptyStatement;
|
|
}());
|
|
exports.EmptyStatement = EmptyStatement;
|
|
var ExportAllDeclaration = (function () {
|
|
function ExportAllDeclaration(source) {
|
|
this.type = syntax_1.Syntax.ExportAllDeclaration;
|
|
this.source = source;
|
|
}
|
|
return ExportAllDeclaration;
|
|
}());
|
|
exports.ExportAllDeclaration = ExportAllDeclaration;
|
|
var ExportDefaultDeclaration = (function () {
|
|
function ExportDefaultDeclaration(declaration) {
|
|
this.type = syntax_1.Syntax.ExportDefaultDeclaration;
|
|
this.declaration = declaration;
|
|
}
|
|
return ExportDefaultDeclaration;
|
|
}());
|
|
exports.ExportDefaultDeclaration = ExportDefaultDeclaration;
|
|
var ExportNamedDeclaration = (function () {
|
|
function ExportNamedDeclaration(declaration, specifiers, source) {
|
|
this.type = syntax_1.Syntax.ExportNamedDeclaration;
|
|
this.declaration = declaration;
|
|
this.specifiers = specifiers;
|
|
this.source = source;
|
|
}
|
|
return ExportNamedDeclaration;
|
|
}());
|
|
exports.ExportNamedDeclaration = ExportNamedDeclaration;
|
|
var ExportSpecifier = (function () {
|
|
function ExportSpecifier(local, exported) {
|
|
this.type = syntax_1.Syntax.ExportSpecifier;
|
|
this.exported = exported;
|
|
this.local = local;
|
|
}
|
|
return ExportSpecifier;
|
|
}());
|
|
exports.ExportSpecifier = ExportSpecifier;
|
|
var ExpressionStatement = (function () {
|
|
function ExpressionStatement(expression) {
|
|
this.type = syntax_1.Syntax.ExpressionStatement;
|
|
this.expression = expression;
|
|
}
|
|
return ExpressionStatement;
|
|
}());
|
|
exports.ExpressionStatement = ExpressionStatement;
|
|
var ForInStatement = (function () {
|
|
function ForInStatement(left, right, body) {
|
|
this.type = syntax_1.Syntax.ForInStatement;
|
|
this.left = left;
|
|
this.right = right;
|
|
this.body = body;
|
|
this.each = false;
|
|
}
|
|
return ForInStatement;
|
|
}());
|
|
exports.ForInStatement = ForInStatement;
|
|
var ForOfStatement = (function () {
|
|
function ForOfStatement(left, right, body) {
|
|
this.type = syntax_1.Syntax.ForOfStatement;
|
|
this.left = left;
|
|
this.right = right;
|
|
this.body = body;
|
|
}
|
|
return ForOfStatement;
|
|
}());
|
|
exports.ForOfStatement = ForOfStatement;
|
|
var ForStatement = (function () {
|
|
function ForStatement(init, test, update, body) {
|
|
this.type = syntax_1.Syntax.ForStatement;
|
|
this.init = init;
|
|
this.test = test;
|
|
this.update = update;
|
|
this.body = body;
|
|
}
|
|
return ForStatement;
|
|
}());
|
|
exports.ForStatement = ForStatement;
|
|
var FunctionDeclaration = (function () {
|
|
function FunctionDeclaration(id, params, body, generator) {
|
|
this.type = syntax_1.Syntax.FunctionDeclaration;
|
|
this.id = id;
|
|
this.params = params;
|
|
this.body = body;
|
|
this.generator = generator;
|
|
this.expression = false;
|
|
this.async = false;
|
|
}
|
|
return FunctionDeclaration;
|
|
}());
|
|
exports.FunctionDeclaration = FunctionDeclaration;
|
|
var FunctionExpression = (function () {
|
|
function FunctionExpression(id, params, body, generator) {
|
|
this.type = syntax_1.Syntax.FunctionExpression;
|
|
this.id = id;
|
|
this.params = params;
|
|
this.body = body;
|
|
this.generator = generator;
|
|
this.expression = false;
|
|
this.async = false;
|
|
}
|
|
return FunctionExpression;
|
|
}());
|
|
exports.FunctionExpression = FunctionExpression;
|
|
var Identifier = (function () {
|
|
function Identifier(name) {
|
|
this.type = syntax_1.Syntax.Identifier;
|
|
this.name = name;
|
|
}
|
|
return Identifier;
|
|
}());
|
|
exports.Identifier = Identifier;
|
|
var IfStatement = (function () {
|
|
function IfStatement(test, consequent, alternate) {
|
|
this.type = syntax_1.Syntax.IfStatement;
|
|
this.test = test;
|
|
this.consequent = consequent;
|
|
this.alternate = alternate;
|
|
}
|
|
return IfStatement;
|
|
}());
|
|
exports.IfStatement = IfStatement;
|
|
var ImportDeclaration = (function () {
|
|
function ImportDeclaration(specifiers, source) {
|
|
this.type = syntax_1.Syntax.ImportDeclaration;
|
|
this.specifiers = specifiers;
|
|
this.source = source;
|
|
}
|
|
return ImportDeclaration;
|
|
}());
|
|
exports.ImportDeclaration = ImportDeclaration;
|
|
var ImportDefaultSpecifier = (function () {
|
|
function ImportDefaultSpecifier(local) {
|
|
this.type = syntax_1.Syntax.ImportDefaultSpecifier;
|
|
this.local = local;
|
|
}
|
|
return ImportDefaultSpecifier;
|
|
}());
|
|
exports.ImportDefaultSpecifier = ImportDefaultSpecifier;
|
|
var ImportNamespaceSpecifier = (function () {
|
|
function ImportNamespaceSpecifier(local) {
|
|
this.type = syntax_1.Syntax.ImportNamespaceSpecifier;
|
|
this.local = local;
|
|
}
|
|
return ImportNamespaceSpecifier;
|
|
}());
|
|
exports.ImportNamespaceSpecifier = ImportNamespaceSpecifier;
|
|
var ImportSpecifier = (function () {
|
|
function ImportSpecifier(local, imported) {
|
|
this.type = syntax_1.Syntax.ImportSpecifier;
|
|
this.local = local;
|
|
this.imported = imported;
|
|
}
|
|
return ImportSpecifier;
|
|
}());
|
|
exports.ImportSpecifier = ImportSpecifier;
|
|
var LabeledStatement = (function () {
|
|
function LabeledStatement(label, body) {
|
|
this.type = syntax_1.Syntax.LabeledStatement;
|
|
this.label = label;
|
|
this.body = body;
|
|
}
|
|
return LabeledStatement;
|
|
}());
|
|
exports.LabeledStatement = LabeledStatement;
|
|
var Literal = (function () {
|
|
function Literal(value, raw) {
|
|
this.type = syntax_1.Syntax.Literal;
|
|
this.value = value;
|
|
this.raw = raw;
|
|
}
|
|
return Literal;
|
|
}());
|
|
exports.Literal = Literal;
|
|
var MetaProperty = (function () {
|
|
function MetaProperty(meta, property) {
|
|
this.type = syntax_1.Syntax.MetaProperty;
|
|
this.meta = meta;
|
|
this.property = property;
|
|
}
|
|
return MetaProperty;
|
|
}());
|
|
exports.MetaProperty = MetaProperty;
|
|
var MethodDefinition = (function () {
|
|
function MethodDefinition(key, computed, value, kind, isStatic) {
|
|
this.type = syntax_1.Syntax.MethodDefinition;
|
|
this.key = key;
|
|
this.computed = computed;
|
|
this.value = value;
|
|
this.kind = kind;
|
|
this.static = isStatic;
|
|
}
|
|
return MethodDefinition;
|
|
}());
|
|
exports.MethodDefinition = MethodDefinition;
|
|
var Module = (function () {
|
|
function Module(body) {
|
|
this.type = syntax_1.Syntax.Program;
|
|
this.body = body;
|
|
this.sourceType = 'module';
|
|
}
|
|
return Module;
|
|
}());
|
|
exports.Module = Module;
|
|
var NewExpression = (function () {
|
|
function NewExpression(callee, args) {
|
|
this.type = syntax_1.Syntax.NewExpression;
|
|
this.callee = callee;
|
|
this.arguments = args;
|
|
}
|
|
return NewExpression;
|
|
}());
|
|
exports.NewExpression = NewExpression;
|
|
var ObjectExpression = (function () {
|
|
function ObjectExpression(properties) {
|
|
this.type = syntax_1.Syntax.ObjectExpression;
|
|
this.properties = properties;
|
|
}
|
|
return ObjectExpression;
|
|
}());
|
|
exports.ObjectExpression = ObjectExpression;
|
|
var ObjectPattern = (function () {
|
|
function ObjectPattern(properties) {
|
|
this.type = syntax_1.Syntax.ObjectPattern;
|
|
this.properties = properties;
|
|
}
|
|
return ObjectPattern;
|
|
}());
|
|
exports.ObjectPattern = ObjectPattern;
|
|
var Property = (function () {
|
|
function Property(kind, key, computed, value, method, shorthand) {
|
|
this.type = syntax_1.Syntax.Property;
|
|
this.key = key;
|
|
this.computed = computed;
|
|
this.value = value;
|
|
this.kind = kind;
|
|
this.method = method;
|
|
this.shorthand = shorthand;
|
|
}
|
|
return Property;
|
|
}());
|
|
exports.Property = Property;
|
|
var RegexLiteral = (function () {
|
|
function RegexLiteral(value, raw, pattern, flags) {
|
|
this.type = syntax_1.Syntax.Literal;
|
|
this.value = value;
|
|
this.raw = raw;
|
|
this.regex = { pattern: pattern, flags: flags };
|
|
}
|
|
return RegexLiteral;
|
|
}());
|
|
exports.RegexLiteral = RegexLiteral;
|
|
var RestElement = (function () {
|
|
function RestElement(argument) {
|
|
this.type = syntax_1.Syntax.RestElement;
|
|
this.argument = argument;
|
|
}
|
|
return RestElement;
|
|
}());
|
|
exports.RestElement = RestElement;
|
|
var ReturnStatement = (function () {
|
|
function ReturnStatement(argument) {
|
|
this.type = syntax_1.Syntax.ReturnStatement;
|
|
this.argument = argument;
|
|
}
|
|
return ReturnStatement;
|
|
}());
|
|
exports.ReturnStatement = ReturnStatement;
|
|
var Script = (function () {
|
|
function Script(body) {
|
|
this.type = syntax_1.Syntax.Program;
|
|
this.body = body;
|
|
this.sourceType = 'script';
|
|
}
|
|
return Script;
|
|
}());
|
|
exports.Script = Script;
|
|
var SequenceExpression = (function () {
|
|
function SequenceExpression(expressions) {
|
|
this.type = syntax_1.Syntax.SequenceExpression;
|
|
this.expressions = expressions;
|
|
}
|
|
return SequenceExpression;
|
|
}());
|
|
exports.SequenceExpression = SequenceExpression;
|
|
var SpreadElement = (function () {
|
|
function SpreadElement(argument) {
|
|
this.type = syntax_1.Syntax.SpreadElement;
|
|
this.argument = argument;
|
|
}
|
|
return SpreadElement;
|
|
}());
|
|
exports.SpreadElement = SpreadElement;
|
|
var StaticMemberExpression = (function () {
|
|
function StaticMemberExpression(object, property) {
|
|
this.type = syntax_1.Syntax.MemberExpression;
|
|
this.computed = false;
|
|
this.object = object;
|
|
this.property = property;
|
|
}
|
|
return StaticMemberExpression;
|
|
}());
|
|
exports.StaticMemberExpression = StaticMemberExpression;
|
|
var Super = (function () {
|
|
function Super() {
|
|
this.type = syntax_1.Syntax.Super;
|
|
}
|
|
return Super;
|
|
}());
|
|
exports.Super = Super;
|
|
var SwitchCase = (function () {
|
|
function SwitchCase(test, consequent) {
|
|
this.type = syntax_1.Syntax.SwitchCase;
|
|
this.test = test;
|
|
this.consequent = consequent;
|
|
}
|
|
return SwitchCase;
|
|
}());
|
|
exports.SwitchCase = SwitchCase;
|
|
var SwitchStatement = (function () {
|
|
function SwitchStatement(discriminant, cases) {
|
|
this.type = syntax_1.Syntax.SwitchStatement;
|
|
this.discriminant = discriminant;
|
|
this.cases = cases;
|
|
}
|
|
return SwitchStatement;
|
|
}());
|
|
exports.SwitchStatement = SwitchStatement;
|
|
var TaggedTemplateExpression = (function () {
|
|
function TaggedTemplateExpression(tag, quasi) {
|
|
this.type = syntax_1.Syntax.TaggedTemplateExpression;
|
|
this.tag = tag;
|
|
this.quasi = quasi;
|
|
}
|
|
return TaggedTemplateExpression;
|
|
}());
|
|
exports.TaggedTemplateExpression = TaggedTemplateExpression;
|
|
var TemplateElement = (function () {
|
|
function TemplateElement(value, tail) {
|
|
this.type = syntax_1.Syntax.TemplateElement;
|
|
this.value = value;
|
|
this.tail = tail;
|
|
}
|
|
return TemplateElement;
|
|
}());
|
|
exports.TemplateElement = TemplateElement;
|
|
var TemplateLiteral = (function () {
|
|
function TemplateLiteral(quasis, expressions) {
|
|
this.type = syntax_1.Syntax.TemplateLiteral;
|
|
this.quasis = quasis;
|
|
this.expressions = expressions;
|
|
}
|
|
return TemplateLiteral;
|
|
}());
|
|
exports.TemplateLiteral = TemplateLiteral;
|
|
var ThisExpression = (function () {
|
|
function ThisExpression() {
|
|
this.type = syntax_1.Syntax.ThisExpression;
|
|
}
|
|
return ThisExpression;
|
|
}());
|
|
exports.ThisExpression = ThisExpression;
|
|
var ThrowStatement = (function () {
|
|
function ThrowStatement(argument) {
|
|
this.type = syntax_1.Syntax.ThrowStatement;
|
|
this.argument = argument;
|
|
}
|
|
return ThrowStatement;
|
|
}());
|
|
exports.ThrowStatement = ThrowStatement;
|
|
var TryStatement = (function () {
|
|
function TryStatement(block, handler, finalizer) {
|
|
this.type = syntax_1.Syntax.TryStatement;
|
|
this.block = block;
|
|
this.handler = handler;
|
|
this.finalizer = finalizer;
|
|
}
|
|
return TryStatement;
|
|
}());
|
|
exports.TryStatement = TryStatement;
|
|
var UnaryExpression = (function () {
|
|
function UnaryExpression(operator, argument) {
|
|
this.type = syntax_1.Syntax.UnaryExpression;
|
|
this.operator = operator;
|
|
this.argument = argument;
|
|
this.prefix = true;
|
|
}
|
|
return UnaryExpression;
|
|
}());
|
|
exports.UnaryExpression = UnaryExpression;
|
|
var UpdateExpression = (function () {
|
|
function UpdateExpression(operator, argument, prefix) {
|
|
this.type = syntax_1.Syntax.UpdateExpression;
|
|
this.operator = operator;
|
|
this.argument = argument;
|
|
this.prefix = prefix;
|
|
}
|
|
return UpdateExpression;
|
|
}());
|
|
exports.UpdateExpression = UpdateExpression;
|
|
var VariableDeclaration = (function () {
|
|
function VariableDeclaration(declarations, kind) {
|
|
this.type = syntax_1.Syntax.VariableDeclaration;
|
|
this.declarations = declarations;
|
|
this.kind = kind;
|
|
}
|
|
return VariableDeclaration;
|
|
}());
|
|
exports.VariableDeclaration = VariableDeclaration;
|
|
var VariableDeclarator = (function () {
|
|
function VariableDeclarator(id, init) {
|
|
this.type = syntax_1.Syntax.VariableDeclarator;
|
|
this.id = id;
|
|
this.init = init;
|
|
}
|
|
return VariableDeclarator;
|
|
}());
|
|
exports.VariableDeclarator = VariableDeclarator;
|
|
var WhileStatement = (function () {
|
|
function WhileStatement(test, body) {
|
|
this.type = syntax_1.Syntax.WhileStatement;
|
|
this.test = test;
|
|
this.body = body;
|
|
}
|
|
return WhileStatement;
|
|
}());
|
|
exports.WhileStatement = WhileStatement;
|
|
var WithStatement = (function () {
|
|
function WithStatement(object, body) {
|
|
this.type = syntax_1.Syntax.WithStatement;
|
|
this.object = object;
|
|
this.body = body;
|
|
}
|
|
return WithStatement;
|
|
}());
|
|
exports.WithStatement = WithStatement;
|
|
var YieldExpression = (function () {
|
|
function YieldExpression(argument, delegate) {
|
|
this.type = syntax_1.Syntax.YieldExpression;
|
|
this.argument = argument;
|
|
this.delegate = delegate;
|
|
}
|
|
return YieldExpression;
|
|
}());
|
|
exports.YieldExpression = YieldExpression;
|
|
|
|
|
|
/***/ },
|
|
/* 8 */
|
|
/***/ function(module, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
var assert_1 = __webpack_require__(9);
|
|
var error_handler_1 = __webpack_require__(10);
|
|
var messages_1 = __webpack_require__(11);
|
|
var Node = __webpack_require__(7);
|
|
var scanner_1 = __webpack_require__(12);
|
|
var syntax_1 = __webpack_require__(2);
|
|
var token_1 = __webpack_require__(13);
|
|
var ArrowParameterPlaceHolder = 'ArrowParameterPlaceHolder';
|
|
var Parser = (function () {
|
|
function Parser(code, options, delegate) {
|
|
if (options === void 0) { options = {}; }
|
|
this.config = {
|
|
range: (typeof options.range === 'boolean') && options.range,
|
|
loc: (typeof options.loc === 'boolean') && options.loc,
|
|
source: null,
|
|
tokens: (typeof options.tokens === 'boolean') && options.tokens,
|
|
comment: (typeof options.comment === 'boolean') && options.comment,
|
|
tolerant: (typeof options.tolerant === 'boolean') && options.tolerant
|
|
};
|
|
if (this.config.loc && options.source && options.source !== null) {
|
|
this.config.source = String(options.source);
|
|
}
|
|
this.delegate = delegate;
|
|
this.errorHandler = new error_handler_1.ErrorHandler();
|
|
this.errorHandler.tolerant = this.config.tolerant;
|
|
this.scanner = new scanner_1.Scanner(code, this.errorHandler);
|
|
this.scanner.trackComment = this.config.comment;
|
|
this.operatorPrecedence = {
|
|
')': 0,
|
|
';': 0,
|
|
',': 0,
|
|
'=': 0,
|
|
']': 0,
|
|
'||': 1,
|
|
'&&': 2,
|
|
'|': 3,
|
|
'^': 4,
|
|
'&': 5,
|
|
'==': 6,
|
|
'!=': 6,
|
|
'===': 6,
|
|
'!==': 6,
|
|
'<': 7,
|
|
'>': 7,
|
|
'<=': 7,
|
|
'>=': 7,
|
|
'<<': 8,
|
|
'>>': 8,
|
|
'>>>': 8,
|
|
'+': 9,
|
|
'-': 9,
|
|
'*': 11,
|
|
'/': 11,
|
|
'%': 11
|
|
};
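            // Illustrative note on the table above: larger values bind tighter when
            // binary operators are parsed. For example, `1 + 2 * 3` groups as
            // `1 + (2 * 3)` because '*' (11) outranks '+' (9), and `a || b && c`
            // groups as `a || (b && c)` because '&&' (2) outranks '||' (1).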
|
|
this.lookahead = {
|
|
type: 2 /* EOF */,
|
|
value: '',
|
|
lineNumber: this.scanner.lineNumber,
|
|
lineStart: 0,
|
|
start: 0,
|
|
end: 0
|
|
};
|
|
this.hasLineTerminator = false;
|
|
this.context = {
|
|
isModule: false,
|
|
await: false,
|
|
allowIn: true,
|
|
allowStrictDirective: true,
|
|
allowYield: true,
|
|
firstCoverInitializedNameError: null,
|
|
isAssignmentTarget: false,
|
|
isBindingElement: false,
|
|
inFunctionBody: false,
|
|
inIteration: false,
|
|
inSwitch: false,
|
|
labelSet: {},
|
|
strict: false
|
|
};
|
|
this.tokens = [];
|
|
this.startMarker = {
|
|
index: 0,
|
|
line: this.scanner.lineNumber,
|
|
column: 0
|
|
};
|
|
this.lastMarker = {
|
|
index: 0,
|
|
line: this.scanner.lineNumber,
|
|
column: 0
|
|
};
|
|
this.nextToken();
|
|
this.lastMarker = {
|
|
index: this.scanner.index,
|
|
line: this.scanner.lineNumber,
|
|
column: this.scanner.index - this.scanner.lineStart
|
|
};
|
|
}
|
|
Parser.prototype.throwError = function (messageFormat) {
|
|
var values = [];
|
|
for (var _i = 1; _i < arguments.length; _i++) {
|
|
values[_i - 1] = arguments[_i];
|
|
}
|
|
var args = Array.prototype.slice.call(arguments, 1);
|
|
var msg = messageFormat.replace(/%(\d)/g, function (whole, idx) {
|
|
assert_1.assert(idx < args.length, 'Message reference must be in range');
|
|
return args[idx];
|
|
});
|
|
var index = this.lastMarker.index;
|
|
var line = this.lastMarker.line;
|
|
var column = this.lastMarker.column + 1;
|
|
throw this.errorHandler.createError(index, line, column, msg);
|
|
};
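        // Illustrative note: with a hypothetical format string, a call such as
        // throwError('Unexpected token %0', '}') substitutes the extra argument for
        // the %0 placeholder and throws an error whose message reads "Unexpected token }".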
|
|
Parser.prototype.tolerateError = function (messageFormat) {
|
|
var values = [];
|
|
for (var _i = 1; _i < arguments.length; _i++) {
|
|
values[_i - 1] = arguments[_i];
|
|
}
|
|
var args = Array.prototype.slice.call(arguments, 1);
|
|
var msg = messageFormat.replace(/%(\d)/g, function (whole, idx) {
|
|
assert_1.assert(idx < args.length, 'Message reference must be in range');
|
|
return args[idx];
|
|
});
|
|
var index = this.lastMarker.index;
|
|
var line = this.scanner.lineNumber;
|
|
var column = this.lastMarker.column + 1;
|
|
this.errorHandler.tolerateError(index, line, column, msg);
|
|
};
|
|
// Throw an exception because of the token.
|
|
Parser.prototype.unexpectedTokenError = function (token, message) {
|
|
var msg = message || messages_1.Messages.UnexpectedToken;
|
|
var value;
|
|
if (token) {
|
|
if (!message) {
|
|
msg = (token.type === 2 /* EOF */) ? messages_1.Messages.UnexpectedEOS :
|
|
(token.type === 3 /* Identifier */) ? messages_1.Messages.UnexpectedIdentifier :
|
|
(token.type === 6 /* NumericLiteral */) ? messages_1.Messages.UnexpectedNumber :
|
|
(token.type === 8 /* StringLiteral */) ? messages_1.Messages.UnexpectedString :
|
|
(token.type === 10 /* Template */) ? messages_1.Messages.UnexpectedTemplate :
|
|
messages_1.Messages.UnexpectedToken;
|
|
if (token.type === 4 /* Keyword */) {
|
|
if (this.scanner.isFutureReservedWord(token.value)) {
|
|
msg = messages_1.Messages.UnexpectedReserved;
|
|
}
|
|
else if (this.context.strict && this.scanner.isStrictModeReservedWord(token.value)) {
|
|
msg = messages_1.Messages.StrictReservedWord;
|
|
}
|
|
}
|
|
}
|
|
value = token.value;
|
|
}
|
|
else {
|
|
value = 'ILLEGAL';
|
|
}
|
|
msg = msg.replace('%0', value);
|
|
if (token && typeof token.lineNumber === 'number') {
|
|
var index = token.start;
|
|
var line = token.lineNumber;
|
|
var lastMarkerLineStart = this.lastMarker.index - this.lastMarker.column;
|
|
var column = token.start - lastMarkerLineStart + 1;
|
|
return this.errorHandler.createError(index, line, column, msg);
|
|
}
|
|
else {
|
|
var index = this.lastMarker.index;
|
|
var line = this.lastMarker.line;
|
|
var column = this.lastMarker.column + 1;
|
|
return this.errorHandler.createError(index, line, column, msg);
|
|
}
|
|
};
|
|
Parser.prototype.throwUnexpectedToken = function (token, message) {
|
|
throw this.unexpectedTokenError(token, message);
|
|
};
|
|
Parser.prototype.tolerateUnexpectedToken = function (token, message) {
|
|
this.errorHandler.tolerate(this.unexpectedTokenError(token, message));
|
|
};
|
|
Parser.prototype.collectComments = function () {
|
|
if (!this.config.comment) {
|
|
this.scanner.scanComments();
|
|
}
|
|
else {
|
|
var comments = this.scanner.scanComments();
|
|
if (comments.length > 0 && this.delegate) {
|
|
for (var i = 0; i < comments.length; ++i) {
|
|
var e = comments[i];
|
|
var node = void 0;
|
|
node = {
|
|
type: e.multiLine ? 'BlockComment' : 'LineComment',
|
|
value: this.scanner.source.slice(e.slice[0], e.slice[1])
|
|
};
|
|
if (this.config.range) {
|
|
node.range = e.range;
|
|
}
|
|
if (this.config.loc) {
|
|
node.loc = e.loc;
|
|
}
|
|
var metadata = {
|
|
start: {
|
|
line: e.loc.start.line,
|
|
column: e.loc.start.column,
|
|
offset: e.range[0]
|
|
},
|
|
end: {
|
|
line: e.loc.end.line,
|
|
column: e.loc.end.column,
|
|
offset: e.range[1]
|
|
}
|
|
};
|
|
this.delegate(node, metadata);
|
|
}
|
|
}
|
|
}
|
|
};
|
|
// From internal representation to an external structure
|
|
Parser.prototype.getTokenRaw = function (token) {
|
|
return this.scanner.source.slice(token.start, token.end);
|
|
};
|
|
Parser.prototype.convertToken = function (token) {
|
|
var t = {
|
|
type: token_1.TokenName[token.type],
|
|
value: this.getTokenRaw(token)
|
|
};
|
|
if (this.config.range) {
|
|
t.range = [token.start, token.end];
|
|
}
|
|
if (this.config.loc) {
|
|
t.loc = {
|
|
start: {
|
|
line: this.startMarker.line,
|
|
column: this.startMarker.column
|
|
},
|
|
end: {
|
|
line: this.scanner.lineNumber,
|
|
column: this.scanner.index - this.scanner.lineStart
|
|
}
|
|
};
|
|
}
|
|
if (token.type === 9 /* RegularExpression */) {
|
|
var pattern = token.pattern;
|
|
var flags = token.flags;
|
|
t.regex = { pattern: pattern, flags: flags };
|
|
}
|
|
return t;
|
|
};
|
|
Parser.prototype.nextToken = function () {
|
|
var token = this.lookahead;
|
|
this.lastMarker.index = this.scanner.index;
|
|
this.lastMarker.line = this.scanner.lineNumber;
|
|
this.lastMarker.column = this.scanner.index - this.scanner.lineStart;
|
|
this.collectComments();
|
|
if (this.scanner.index !== this.startMarker.index) {
|
|
this.startMarker.index = this.scanner.index;
|
|
this.startMarker.line = this.scanner.lineNumber;
|
|
this.startMarker.column = this.scanner.index - this.scanner.lineStart;
|
|
}
|
|
var next = this.scanner.lex();
|
|
this.hasLineTerminator = (token.lineNumber !== next.lineNumber);
|
|
if (next && this.context.strict && next.type === 3 /* Identifier */) {
|
|
if (this.scanner.isStrictModeReservedWord(next.value)) {
|
|
next.type = 4 /* Keyword */;
|
|
}
|
|
}
|
|
this.lookahead = next;
|
|
if (this.config.tokens && next.type !== 2 /* EOF */) {
|
|
this.tokens.push(this.convertToken(next));
|
|
}
|
|
return token;
|
|
};
|
|
Parser.prototype.nextRegexToken = function () {
|
|
this.collectComments();
|
|
var token = this.scanner.scanRegExp();
|
|
if (this.config.tokens) {
|
|
// Pop the previous token, '/' or '/='
|
|
// This is added from the lookahead token.
|
|
this.tokens.pop();
|
|
this.tokens.push(this.convertToken(token));
|
|
}
|
|
// Prime the next lookahead.
|
|
this.lookahead = token;
|
|
this.nextToken();
|
|
return token;
|
|
};
|
|
Parser.prototype.createNode = function () {
|
|
return {
|
|
index: this.startMarker.index,
|
|
line: this.startMarker.line,
|
|
column: this.startMarker.column
|
|
};
|
|
};
|
|
Parser.prototype.startNode = function (token, lastLineStart) {
|
|
if (lastLineStart === void 0) { lastLineStart = 0; }
|
|
var column = token.start - token.lineStart;
|
|
var line = token.lineNumber;
|
|
if (column < 0) {
|
|
column += lastLineStart;
|
|
line--;
|
|
}
|
|
return {
|
|
index: token.start,
|
|
line: line,
|
|
column: column
|
|
};
|
|
};
|
|
Parser.prototype.finalize = function (marker, node) {
|
|
if (this.config.range) {
|
|
node.range = [marker.index, this.lastMarker.index];
|
|
}
|
|
if (this.config.loc) {
|
|
node.loc = {
|
|
start: {
|
|
line: marker.line,
|
|
column: marker.column,
|
|
},
|
|
end: {
|
|
line: this.lastMarker.line,
|
|
column: this.lastMarker.column
|
|
}
|
|
};
|
|
if (this.config.source) {
|
|
node.loc.source = this.config.source;
|
|
}
|
|
}
|
|
if (this.delegate) {
|
|
var metadata = {
|
|
start: {
|
|
line: marker.line,
|
|
column: marker.column,
|
|
offset: marker.index
|
|
},
|
|
end: {
|
|
line: this.lastMarker.line,
|
|
column: this.lastMarker.column,
|
|
offset: this.lastMarker.index
|
|
}
|
|
};
|
|
this.delegate(node, metadata);
|
|
}
|
|
return node;
|
|
};
|
|
// Expect the next token to match the specified punctuator.
|
|
// If not, an exception will be thrown.
|
|
Parser.prototype.expect = function (value) {
|
|
var token = this.nextToken();
|
|
if (token.type !== 7 /* Punctuator */ || token.value !== value) {
|
|
this.throwUnexpectedToken(token);
|
|
}
|
|
};
|
|
// Quietly expect a comma when in tolerant mode, otherwise delegates to expect().
|
|
Parser.prototype.expectCommaSeparator = function () {
|
|
if (this.config.tolerant) {
|
|
var token = this.lookahead;
|
|
if (token.type === 7 /* Punctuator */ && token.value === ',') {
|
|
this.nextToken();
|
|
}
|
|
else if (token.type === 7 /* Punctuator */ && token.value === ';') {
|
|
this.nextToken();
|
|
this.tolerateUnexpectedToken(token);
|
|
}
|
|
else {
|
|
this.tolerateUnexpectedToken(token, messages_1.Messages.UnexpectedToken);
|
|
}
|
|
}
|
|
else {
|
|
this.expect(',');
|
|
}
|
|
};
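        // Illustrative note: in tolerant mode a stray ';' where a ',' is expected
        // (for example the object literal `{ a: 1; b: 2 }`) is consumed and reported
        // through tolerateUnexpectedToken instead of aborting the parse; outside
        // tolerant mode expect(',') throws as usual.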
|
|
// Expect the next token to match the specified keyword.
|
|
// If not, an exception will be thrown.
|
|
Parser.prototype.expectKeyword = function (keyword) {
|
|
var token = this.nextToken();
|
|
if (token.type !== 4 /* Keyword */ || token.value !== keyword) {
|
|
this.throwUnexpectedToken(token);
|
|
}
|
|
};
|
|
// Return true if the next token matches the specified punctuator.
|
|
Parser.prototype.match = function (value) {
|
|
return this.lookahead.type === 7 /* Punctuator */ && this.lookahead.value === value;
|
|
};
|
|
// Return true if the next token matches the specified keyword
|
|
Parser.prototype.matchKeyword = function (keyword) {
|
|
return this.lookahead.type === 4 /* Keyword */ && this.lookahead.value === keyword;
|
|
};
|
|
// Return true if the next token matches the specified contextual keyword
|
|
// (where an identifier is sometimes a keyword depending on the context)
|
|
Parser.prototype.matchContextualKeyword = function (keyword) {
|
|
return this.lookahead.type === 3 /* Identifier */ && this.lookahead.value === keyword;
|
|
};
|
|
// Return true if the next token is an assignment operator
|
|
Parser.prototype.matchAssign = function () {
|
|
if (this.lookahead.type !== 7 /* Punctuator */) {
|
|
return false;
|
|
}
|
|
var op = this.lookahead.value;
|
|
return op === '=' ||
|
|
op === '*=' ||
|
|
op === '**=' ||
|
|
op === '/=' ||
|
|
op === '%=' ||
|
|
op === '+=' ||
|
|
op === '-=' ||
|
|
op === '<<=' ||
|
|
op === '>>=' ||
|
|
op === '>>>=' ||
|
|
op === '&=' ||
|
|
op === '^=' ||
|
|
op === '|=';
|
|
};
|
|
// Cover grammar support.
|
|
//
|
|
        // When an assignment expression position starts with a left parenthesis, the determination of the type
|
|
// of the syntax is to be deferred arbitrarily long until the end of the parentheses pair (plus a lookahead)
|
|
// or the first comma. This situation also defers the determination of all the expressions nested in the pair.
|
|
//
|
|
        // There are three productions that can be parsed in a parentheses pair that need to be determined
|
|
// after the outermost pair is closed. They are:
|
|
//
|
|
// 1. AssignmentExpression
|
|
// 2. BindingElements
|
|
// 3. AssignmentTargets
|
|
//
|
|
// In order to avoid exponential backtracking, we use two flags to denote if the production can be
|
|
// binding element or assignment target.
|
|
//
|
|
// The three productions have the relationship:
|
|
//
|
|
// BindingElements ⊆ AssignmentTargets ⊆ AssignmentExpression
|
|
//
|
|
        // with the single exception that CoverInitializedName, when used directly in an Expression, generates
|
|
// an early error. Therefore, we need the third state, firstCoverInitializedNameError, to track the
|
|
// first usage of CoverInitializedName and report it when we reached the end of the parentheses pair.
|
|
//
|
|
// isolateCoverGrammar function runs the given parser function with a new cover grammar context, and it does not
|
|
        // affect the current flags. This means the production the parser parses is only used as an expression. Therefore
|
|
// the CoverInitializedName check is conducted.
|
|
//
|
|
// inheritCoverGrammar function runs the given parse function with a new cover grammar context, and it propagates
|
|
// the flags outside of the parser. This means the production the parser parses is used as a part of a potential
|
|
// pattern. The CoverInitializedName check is deferred.
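        //
        // For instance, when the parser sees `({ a = 1 })` it cannot yet tell whether the braces are an
        // object literal or a destructuring pattern. The CoverInitializedName `a = 1` is only legal if the
        // whole construct later turns out to be a pattern (as in `({ a = 1 } = obj)` or an arrow parameter
        // list), so the offending token is recorded in firstCoverInitializedNameError and the error is
        // raised only if the expression is ultimately used as a plain expression.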
|
|
Parser.prototype.isolateCoverGrammar = function (parseFunction) {
|
|
var previousIsBindingElement = this.context.isBindingElement;
|
|
var previousIsAssignmentTarget = this.context.isAssignmentTarget;
|
|
var previousFirstCoverInitializedNameError = this.context.firstCoverInitializedNameError;
|
|
this.context.isBindingElement = true;
|
|
this.context.isAssignmentTarget = true;
|
|
this.context.firstCoverInitializedNameError = null;
|
|
var result = parseFunction.call(this);
|
|
if (this.context.firstCoverInitializedNameError !== null) {
|
|
this.throwUnexpectedToken(this.context.firstCoverInitializedNameError);
|
|
}
|
|
this.context.isBindingElement = previousIsBindingElement;
|
|
this.context.isAssignmentTarget = previousIsAssignmentTarget;
|
|
this.context.firstCoverInitializedNameError = previousFirstCoverInitializedNameError;
|
|
return result;
|
|
};
|
|
Parser.prototype.inheritCoverGrammar = function (parseFunction) {
|
|
var previousIsBindingElement = this.context.isBindingElement;
|
|
var previousIsAssignmentTarget = this.context.isAssignmentTarget;
|
|
var previousFirstCoverInitializedNameError = this.context.firstCoverInitializedNameError;
|
|
this.context.isBindingElement = true;
|
|
this.context.isAssignmentTarget = true;
|
|
this.context.firstCoverInitializedNameError = null;
|
|
var result = parseFunction.call(this);
|
|
this.context.isBindingElement = this.context.isBindingElement && previousIsBindingElement;
|
|
this.context.isAssignmentTarget = this.context.isAssignmentTarget && previousIsAssignmentTarget;
|
|
this.context.firstCoverInitializedNameError = previousFirstCoverInitializedNameError || this.context.firstCoverInitializedNameError;
|
|
return result;
|
|
};
|
|
Parser.prototype.consumeSemicolon = function () {
|
|
if (this.match(';')) {
|
|
this.nextToken();
|
|
}
|
|
else if (!this.hasLineTerminator) {
|
|
if (this.lookahead.type !== 2 /* EOF */ && !this.match('}')) {
|
|
this.throwUnexpectedToken(this.lookahead);
|
|
}
|
|
this.lastMarker.index = this.startMarker.index;
|
|
this.lastMarker.line = this.startMarker.line;
|
|
this.lastMarker.column = this.startMarker.column;
|
|
}
|
|
};
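        // consumeSemicolon implements automatic semicolon insertion: an explicit ';' is consumed,
        // while a line break, a '}' or the end of input lets the statement end without one. For
        // example `a = 1\nb = 2` parses as two statements, whereas `a = 1 b = 2` reaches the
        // throwUnexpectedToken branch above.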
|
|
// https://tc39.github.io/ecma262/#sec-primary-expression
|
|
Parser.prototype.parsePrimaryExpression = function () {
|
|
var node = this.createNode();
|
|
var expr;
|
|
var token, raw;
|
|
switch (this.lookahead.type) {
|
|
case 3 /* Identifier */:
|
|
if ((this.context.isModule || this.context.await) && this.lookahead.value === 'await') {
|
|
this.tolerateUnexpectedToken(this.lookahead);
|
|
}
|
|
expr = this.matchAsyncFunction() ? this.parseFunctionExpression() : this.finalize(node, new Node.Identifier(this.nextToken().value));
|
|
break;
|
|
case 6 /* NumericLiteral */:
|
|
case 8 /* StringLiteral */:
|
|
if (this.context.strict && this.lookahead.octal) {
|
|
this.tolerateUnexpectedToken(this.lookahead, messages_1.Messages.StrictOctalLiteral);
|
|
}
|
|
this.context.isAssignmentTarget = false;
|
|
this.context.isBindingElement = false;
|
|
token = this.nextToken();
|
|
raw = this.getTokenRaw(token);
|
|
expr = this.finalize(node, new Node.Literal(token.value, raw));
|
|
break;
|
|
case 1 /* BooleanLiteral */:
|
|
this.context.isAssignmentTarget = false;
|
|
this.context.isBindingElement = false;
|
|
token = this.nextToken();
|
|
raw = this.getTokenRaw(token);
|
|
expr = this.finalize(node, new Node.Literal(token.value === 'true', raw));
|
|
break;
|
|
case 5 /* NullLiteral */:
|
|
this.context.isAssignmentTarget = false;
|
|
this.context.isBindingElement = false;
|
|
token = this.nextToken();
|
|
raw = this.getTokenRaw(token);
|
|
expr = this.finalize(node, new Node.Literal(null, raw));
|
|
break;
|
|
case 10 /* Template */:
|
|
expr = this.parseTemplateLiteral();
|
|
break;
|
|
case 7 /* Punctuator */:
|
|
switch (this.lookahead.value) {
|
|
case '(':
|
|
this.context.isBindingElement = false;
|
|
expr = this.inheritCoverGrammar(this.parseGroupExpression);
|
|
break;
|
|
case '[':
|
|
expr = this.inheritCoverGrammar(this.parseArrayInitializer);
|
|
break;
|
|
case '{':
|
|
expr = this.inheritCoverGrammar(this.parseObjectInitializer);
|
|
break;
|
|
case '/':
|
|
case '/=':
|
|
this.context.isAssignmentTarget = false;
|
|
this.context.isBindingElement = false;
|
|
this.scanner.index = this.startMarker.index;
|
|
token = this.nextRegexToken();
|
|
raw = this.getTokenRaw(token);
|
|
expr = this.finalize(node, new Node.RegexLiteral(token.regex, raw, token.pattern, token.flags));
|
|
break;
|
|
default:
|
|
expr = this.throwUnexpectedToken(this.nextToken());
|
|
}
|
|
break;
|
|
case 4 /* Keyword */:
|
|
if (!this.context.strict && this.context.allowYield && this.matchKeyword('yield')) {
|
|
expr = this.parseIdentifierName();
|
|
}
|
|
else if (!this.context.strict && this.matchKeyword('let')) {
|
|
expr = this.finalize(node, new Node.Identifier(this.nextToken().value));
|
|
}
|
|
else {
|
|
this.context.isAssignmentTarget = false;
|
|
this.context.isBindingElement = false;
|
|
if (this.matchKeyword('function')) {
|
|
expr = this.parseFunctionExpression();
|
|
}
|
|
else if (this.matchKeyword('this')) {
|
|
this.nextToken();
|
|
expr = this.finalize(node, new Node.ThisExpression());
|
|
}
|
|
else if (this.matchKeyword('class')) {
|
|
expr = this.parseClassExpression();
|
|
}
|
|
else {
|
|
expr = this.throwUnexpectedToken(this.nextToken());
|
|
}
|
|
}
|
|
break;
|
|
default:
|
|
expr = this.throwUnexpectedToken(this.nextToken());
|
|
}
|
|
return expr;
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-array-initializer
|
|
Parser.prototype.parseSpreadElement = function () {
|
|
var node = this.createNode();
|
|
this.expect('...');
|
|
var arg = this.inheritCoverGrammar(this.parseAssignmentExpression);
|
|
return this.finalize(node, new Node.SpreadElement(arg));
|
|
};
|
|
Parser.prototype.parseArrayInitializer = function () {
|
|
var node = this.createNode();
|
|
var elements = [];
|
|
this.expect('[');
|
|
while (!this.match(']')) {
|
|
if (this.match(',')) {
|
|
this.nextToken();
|
|
elements.push(null);
|
|
}
|
|
else if (this.match('...')) {
|
|
var element = this.parseSpreadElement();
|
|
if (!this.match(']')) {
|
|
this.context.isAssignmentTarget = false;
|
|
this.context.isBindingElement = false;
|
|
this.expect(',');
|
|
}
|
|
elements.push(element);
|
|
}
|
|
else {
|
|
elements.push(this.inheritCoverGrammar(this.parseAssignmentExpression));
|
|
if (!this.match(']')) {
|
|
this.expect(',');
|
|
}
|
|
}
|
|
}
|
|
this.expect(']');
|
|
return this.finalize(node, new Node.ArrayExpression(elements));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-object-initializer
|
|
Parser.prototype.parsePropertyMethod = function (params) {
|
|
this.context.isAssignmentTarget = false;
|
|
this.context.isBindingElement = false;
|
|
var previousStrict = this.context.strict;
|
|
var previousAllowStrictDirective = this.context.allowStrictDirective;
|
|
this.context.allowStrictDirective = params.simple;
|
|
var body = this.isolateCoverGrammar(this.parseFunctionSourceElements);
|
|
if (this.context.strict && params.firstRestricted) {
|
|
this.tolerateUnexpectedToken(params.firstRestricted, params.message);
|
|
}
|
|
if (this.context.strict && params.stricted) {
|
|
this.tolerateUnexpectedToken(params.stricted, params.message);
|
|
}
|
|
this.context.strict = previousStrict;
|
|
this.context.allowStrictDirective = previousAllowStrictDirective;
|
|
return body;
|
|
};
|
|
Parser.prototype.parsePropertyMethodFunction = function () {
|
|
var isGenerator = false;
|
|
var node = this.createNode();
|
|
var previousAllowYield = this.context.allowYield;
|
|
this.context.allowYield = true;
|
|
var params = this.parseFormalParameters();
|
|
var method = this.parsePropertyMethod(params);
|
|
this.context.allowYield = previousAllowYield;
|
|
return this.finalize(node, new Node.FunctionExpression(null, params.params, method, isGenerator));
|
|
};
|
|
Parser.prototype.parsePropertyMethodAsyncFunction = function () {
|
|
var node = this.createNode();
|
|
var previousAllowYield = this.context.allowYield;
|
|
var previousAwait = this.context.await;
|
|
this.context.allowYield = false;
|
|
this.context.await = true;
|
|
var params = this.parseFormalParameters();
|
|
var method = this.parsePropertyMethod(params);
|
|
this.context.allowYield = previousAllowYield;
|
|
this.context.await = previousAwait;
|
|
return this.finalize(node, new Node.AsyncFunctionExpression(null, params.params, method));
|
|
};
|
|
Parser.prototype.parseObjectPropertyKey = function () {
|
|
var node = this.createNode();
|
|
var token = this.nextToken();
|
|
var key;
|
|
switch (token.type) {
|
|
case 8 /* StringLiteral */:
|
|
case 6 /* NumericLiteral */:
|
|
if (this.context.strict && token.octal) {
|
|
this.tolerateUnexpectedToken(token, messages_1.Messages.StrictOctalLiteral);
|
|
}
|
|
var raw = this.getTokenRaw(token);
|
|
key = this.finalize(node, new Node.Literal(token.value, raw));
|
|
break;
|
|
case 3 /* Identifier */:
|
|
case 1 /* BooleanLiteral */:
|
|
case 5 /* NullLiteral */:
|
|
case 4 /* Keyword */:
|
|
key = this.finalize(node, new Node.Identifier(token.value));
|
|
break;
|
|
case 7 /* Punctuator */:
|
|
if (token.value === '[') {
|
|
key = this.isolateCoverGrammar(this.parseAssignmentExpression);
|
|
this.expect(']');
|
|
}
|
|
else {
|
|
key = this.throwUnexpectedToken(token);
|
|
}
|
|
break;
|
|
default:
|
|
key = this.throwUnexpectedToken(token);
|
|
}
|
|
return key;
|
|
};
|
|
Parser.prototype.isPropertyKey = function (key, value) {
|
|
return (key.type === syntax_1.Syntax.Identifier && key.name === value) ||
|
|
(key.type === syntax_1.Syntax.Literal && key.value === value);
|
|
};
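        // isPropertyKey treats `__proto__` and `"__proto__"` alike, which is what lets
        // parseObjectProperty below flag duplicates: `{ __proto__: null, "__proto__": null }`
        // is reported as Messages.DuplicateProtoProperty, while a computed key such as
        // `{ ["__proto__"]: null }` is not, since the check is skipped when computed is true.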
|
|
Parser.prototype.parseObjectProperty = function (hasProto) {
|
|
var node = this.createNode();
|
|
var token = this.lookahead;
|
|
var kind;
|
|
var key = null;
|
|
var value = null;
|
|
var computed = false;
|
|
var method = false;
|
|
var shorthand = false;
|
|
var isAsync = false;
|
|
if (token.type === 3 /* Identifier */) {
|
|
var id = token.value;
|
|
this.nextToken();
|
|
computed = this.match('[');
|
|
isAsync = !this.hasLineTerminator && (id === 'async') &&
|
|
!this.match(':') && !this.match('(') && !this.match('*') && !this.match(',');
|
|
key = isAsync ? this.parseObjectPropertyKey() : this.finalize(node, new Node.Identifier(id));
|
|
}
|
|
else if (this.match('*')) {
|
|
this.nextToken();
|
|
}
|
|
else {
|
|
computed = this.match('[');
|
|
key = this.parseObjectPropertyKey();
|
|
}
|
|
var lookaheadPropertyKey = this.qualifiedPropertyName(this.lookahead);
|
|
if (token.type === 3 /* Identifier */ && !isAsync && token.value === 'get' && lookaheadPropertyKey) {
|
|
kind = 'get';
|
|
computed = this.match('[');
|
|
key = this.parseObjectPropertyKey();
|
|
this.context.allowYield = false;
|
|
value = this.parseGetterMethod();
|
|
}
|
|
else if (token.type === 3 /* Identifier */ && !isAsync && token.value === 'set' && lookaheadPropertyKey) {
|
|
kind = 'set';
|
|
computed = this.match('[');
|
|
key = this.parseObjectPropertyKey();
|
|
value = this.parseSetterMethod();
|
|
}
|
|
else if (token.type === 7 /* Punctuator */ && token.value === '*' && lookaheadPropertyKey) {
|
|
kind = 'init';
|
|
computed = this.match('[');
|
|
key = this.parseObjectPropertyKey();
|
|
value = this.parseGeneratorMethod();
|
|
method = true;
|
|
}
|
|
else {
|
|
if (!key) {
|
|
this.throwUnexpectedToken(this.lookahead);
|
|
}
|
|
kind = 'init';
|
|
if (this.match(':') && !isAsync) {
|
|
if (!computed && this.isPropertyKey(key, '__proto__')) {
|
|
if (hasProto.value) {
|
|
this.tolerateError(messages_1.Messages.DuplicateProtoProperty);
|
|
}
|
|
hasProto.value = true;
|
|
}
|
|
this.nextToken();
|
|
value = this.inheritCoverGrammar(this.parseAssignmentExpression);
|
|
}
|
|
else if (this.match('(')) {
|
|
value = isAsync ? this.parsePropertyMethodAsyncFunction() : this.parsePropertyMethodFunction();
|
|
method = true;
|
|
}
|
|
else if (token.type === 3 /* Identifier */) {
|
|
var id = this.finalize(node, new Node.Identifier(token.value));
|
|
if (this.match('=')) {
|
|
this.context.firstCoverInitializedNameError = this.lookahead;
|
|
this.nextToken();
|
|
shorthand = true;
|
|
var init = this.isolateCoverGrammar(this.parseAssignmentExpression);
|
|
value = this.finalize(node, new Node.AssignmentPattern(id, init));
|
|
}
|
|
else {
|
|
shorthand = true;
|
|
value = id;
|
|
}
|
|
}
|
|
else {
|
|
this.throwUnexpectedToken(this.nextToken());
|
|
}
|
|
}
|
|
return this.finalize(node, new Node.Property(kind, key, computed, value, method, shorthand));
|
|
};
|
|
Parser.prototype.parseObjectInitializer = function () {
|
|
var node = this.createNode();
|
|
this.expect('{');
|
|
var properties = [];
|
|
var hasProto = { value: false };
|
|
while (!this.match('}')) {
|
|
properties.push(this.parseObjectProperty(hasProto));
|
|
if (!this.match('}')) {
|
|
this.expectCommaSeparator();
|
|
}
|
|
}
|
|
this.expect('}');
|
|
return this.finalize(node, new Node.ObjectExpression(properties));
|
|
};
|
|
        // https://tc39.github.io/ecma262/#sec-template-literals
        Parser.prototype.parseTemplateHead = function () {
            assert_1.assert(this.lookahead.head, 'Template literal must start with a template head');
            var node = this.createNode();
            var token = this.nextToken();
            var raw = token.value;
            var cooked = token.cooked;
            return this.finalize(node, new Node.TemplateElement({ raw: raw, cooked: cooked }, token.tail));
        };
        Parser.prototype.parseTemplateElement = function () {
            if (this.lookahead.type !== 10 /* Template */) {
                this.throwUnexpectedToken();
            }
            var node = this.createNode();
            var token = this.nextToken();
            var raw = token.value;
            var cooked = token.cooked;
            return this.finalize(node, new Node.TemplateElement({ raw: raw, cooked: cooked }, token.tail));
        };
        Parser.prototype.parseTemplateLiteral = function () {
            var node = this.createNode();
            var expressions = [];
            var quasis = [];
            var quasi = this.parseTemplateHead();
            quasis.push(quasi);
            while (!quasi.tail) {
                expressions.push(this.parseExpression());
                quasi = this.parseTemplateElement();
                quasis.push(quasi);
            }
            return this.finalize(node, new Node.TemplateLiteral(quasis, expressions));
        };
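        // A template literal alternates quasis (TemplateElement nodes) and embedded
        // expressions: `a${b}c` yields two quasis with cooked values 'a' and 'c' and a
        // single expression for `b`. The loop keeps consuming elements until one is
        // flagged as the tail of the template.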
        // https://tc39.github.io/ecma262/#sec-grouping-operator
        Parser.prototype.reinterpretExpressionAsPattern = function (expr) {
            switch (expr.type) {
                case syntax_1.Syntax.Identifier:
                case syntax_1.Syntax.MemberExpression:
                case syntax_1.Syntax.RestElement:
                case syntax_1.Syntax.AssignmentPattern:
                    break;
                case syntax_1.Syntax.SpreadElement:
                    expr.type = syntax_1.Syntax.RestElement;
                    this.reinterpretExpressionAsPattern(expr.argument);
                    break;
                case syntax_1.Syntax.ArrayExpression:
                    expr.type = syntax_1.Syntax.ArrayPattern;
                    for (var i = 0; i < expr.elements.length; i++) {
                        if (expr.elements[i] !== null) {
                            this.reinterpretExpressionAsPattern(expr.elements[i]);
                        }
                    }
                    break;
                case syntax_1.Syntax.ObjectExpression:
                    expr.type = syntax_1.Syntax.ObjectPattern;
                    for (var i = 0; i < expr.properties.length; i++) {
                        this.reinterpretExpressionAsPattern(expr.properties[i].value);
                    }
                    break;
                case syntax_1.Syntax.AssignmentExpression:
                    expr.type = syntax_1.Syntax.AssignmentPattern;
                    delete expr.operator;
                    this.reinterpretExpressionAsPattern(expr.left);
                    break;
                default:
                    // Allow other node type for tolerant parsing.
                    break;
            }
        };
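        // Cover grammar helper: expressions that were parsed before an '=' or '=>' was
        // seen are rewritten in place into binding patterns. For example, in
        // `[a, ...rest] = xs` the left side is first parsed as an ArrayExpression
        // containing a SpreadElement and is then converted here into an ArrayPattern
        // containing a RestElement.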
Parser.prototype.parseGroupExpression = function () {
|
|
var expr;
|
|
this.expect('(');
|
|
if (this.match(')')) {
|
|
this.nextToken();
|
|
if (!this.match('=>')) {
|
|
this.expect('=>');
|
|
}
|
|
expr = {
|
|
type: ArrowParameterPlaceHolder,
|
|
params: [],
|
|
async: false
|
|
};
|
|
}
|
|
else {
|
|
var startToken = this.lookahead;
|
|
var params = [];
|
|
if (this.match('...')) {
|
|
expr = this.parseRestElement(params);
|
|
this.expect(')');
|
|
if (!this.match('=>')) {
|
|
this.expect('=>');
|
|
}
|
|
expr = {
|
|
type: ArrowParameterPlaceHolder,
|
|
params: [expr],
|
|
async: false
|
|
};
|
|
}
|
|
else {
|
|
var arrow = false;
|
|
this.context.isBindingElement = true;
|
|
expr = this.inheritCoverGrammar(this.parseAssignmentExpression);
|
|
if (this.match(',')) {
|
|
var expressions = [];
|
|
this.context.isAssignmentTarget = false;
|
|
expressions.push(expr);
|
|
while (this.lookahead.type !== 2 /* EOF */) {
|
|
if (!this.match(',')) {
|
|
break;
|
|
}
|
|
this.nextToken();
|
|
if (this.match(')')) {
|
|
this.nextToken();
|
|
for (var i = 0; i < expressions.length; i++) {
|
|
this.reinterpretExpressionAsPattern(expressions[i]);
|
|
}
|
|
arrow = true;
|
|
expr = {
|
|
type: ArrowParameterPlaceHolder,
|
|
params: expressions,
|
|
async: false
|
|
};
|
|
}
|
|
else if (this.match('...')) {
|
|
if (!this.context.isBindingElement) {
|
|
this.throwUnexpectedToken(this.lookahead);
|
|
}
|
|
expressions.push(this.parseRestElement(params));
|
|
this.expect(')');
|
|
if (!this.match('=>')) {
|
|
this.expect('=>');
|
|
}
|
|
this.context.isBindingElement = false;
|
|
for (var i = 0; i < expressions.length; i++) {
|
|
this.reinterpretExpressionAsPattern(expressions[i]);
|
|
}
|
|
arrow = true;
|
|
expr = {
|
|
type: ArrowParameterPlaceHolder,
|
|
params: expressions,
|
|
async: false
|
|
};
|
|
}
|
|
else {
|
|
expressions.push(this.inheritCoverGrammar(this.parseAssignmentExpression));
|
|
}
|
|
if (arrow) {
|
|
break;
|
|
}
|
|
}
|
|
if (!arrow) {
|
|
expr = this.finalize(this.startNode(startToken), new Node.SequenceExpression(expressions));
|
|
}
|
|
}
|
|
if (!arrow) {
|
|
this.expect(')');
|
|
if (this.match('=>')) {
|
|
if (expr.type === syntax_1.Syntax.Identifier && expr.name === 'yield') {
|
|
arrow = true;
|
|
expr = {
|
|
type: ArrowParameterPlaceHolder,
|
|
params: [expr],
|
|
async: false
|
|
};
|
|
}
|
|
if (!arrow) {
|
|
if (!this.context.isBindingElement) {
|
|
this.throwUnexpectedToken(this.lookahead);
|
|
}
|
|
if (expr.type === syntax_1.Syntax.SequenceExpression) {
|
|
for (var i = 0; i < expr.expressions.length; i++) {
|
|
this.reinterpretExpressionAsPattern(expr.expressions[i]);
|
|
}
|
|
}
|
|
else {
|
|
this.reinterpretExpressionAsPattern(expr);
|
|
}
|
|
var parameters = (expr.type === syntax_1.Syntax.SequenceExpression ? expr.expressions : [expr]);
|
|
expr = {
|
|
type: ArrowParameterPlaceHolder,
|
|
params: parameters,
|
|
async: false
|
|
};
|
|
}
|
|
}
|
|
this.context.isBindingElement = false;
|
|
}
|
|
}
|
|
}
|
|
return expr;
|
|
};
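        // Parenthesized expressions and arrow-function parameter lists share the entry
        // point above; when '=>' follows, the collected expressions are repackaged into
        // an ArrowParameterPlaceHolder object that parseAssignmentExpression later turns
        // into formal parameters (e.g. `(x, y) => x + y`).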
// https://tc39.github.io/ecma262/#sec-left-hand-side-expressions
|
|
Parser.prototype.parseArguments = function () {
|
|
this.expect('(');
|
|
var args = [];
|
|
if (!this.match(')')) {
|
|
while (true) {
|
|
var expr = this.match('...') ? this.parseSpreadElement() :
|
|
this.isolateCoverGrammar(this.parseAssignmentExpression);
|
|
args.push(expr);
|
|
if (this.match(')')) {
|
|
break;
|
|
}
|
|
this.expectCommaSeparator();
|
|
if (this.match(')')) {
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
this.expect(')');
|
|
return args;
|
|
};
|
|
Parser.prototype.isIdentifierName = function (token) {
|
|
return token.type === 3 /* Identifier */ ||
|
|
token.type === 4 /* Keyword */ ||
|
|
token.type === 1 /* BooleanLiteral */ ||
|
|
token.type === 5 /* NullLiteral */;
|
|
};
|
|
Parser.prototype.parseIdentifierName = function () {
|
|
var node = this.createNode();
|
|
var token = this.nextToken();
|
|
if (!this.isIdentifierName(token)) {
|
|
this.throwUnexpectedToken(token);
|
|
}
|
|
return this.finalize(node, new Node.Identifier(token.value));
|
|
};
|
|
Parser.prototype.parseNewExpression = function () {
|
|
var node = this.createNode();
|
|
var id = this.parseIdentifierName();
|
|
assert_1.assert(id.name === 'new', 'New expression must start with `new`');
|
|
var expr;
|
|
if (this.match('.')) {
|
|
this.nextToken();
|
|
if (this.lookahead.type === 3 /* Identifier */ && this.context.inFunctionBody && this.lookahead.value === 'target') {
|
|
var property = this.parseIdentifierName();
|
|
expr = new Node.MetaProperty(id, property);
|
|
}
|
|
else {
|
|
this.throwUnexpectedToken(this.lookahead);
|
|
}
|
|
}
|
|
else {
|
|
var callee = this.isolateCoverGrammar(this.parseLeftHandSideExpression);
|
|
var args = this.match('(') ? this.parseArguments() : [];
|
|
expr = new Node.NewExpression(callee, args);
|
|
this.context.isAssignmentTarget = false;
|
|
this.context.isBindingElement = false;
|
|
}
|
|
return this.finalize(node, expr);
|
|
};
|
|
Parser.prototype.parseAsyncArgument = function () {
|
|
var arg = this.parseAssignmentExpression();
|
|
this.context.firstCoverInitializedNameError = null;
|
|
return arg;
|
|
};
|
|
Parser.prototype.parseAsyncArguments = function () {
|
|
this.expect('(');
|
|
var args = [];
|
|
if (!this.match(')')) {
|
|
while (true) {
|
|
var expr = this.match('...') ? this.parseSpreadElement() :
|
|
this.isolateCoverGrammar(this.parseAsyncArgument);
|
|
args.push(expr);
|
|
if (this.match(')')) {
|
|
break;
|
|
}
|
|
this.expectCommaSeparator();
|
|
if (this.match(')')) {
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
this.expect(')');
|
|
return args;
|
|
};
|
|
Parser.prototype.parseLeftHandSideExpressionAllowCall = function () {
|
|
var startToken = this.lookahead;
|
|
var maybeAsync = this.matchContextualKeyword('async');
|
|
var previousAllowIn = this.context.allowIn;
|
|
this.context.allowIn = true;
|
|
var expr;
|
|
if (this.matchKeyword('super') && this.context.inFunctionBody) {
|
|
expr = this.createNode();
|
|
this.nextToken();
|
|
expr = this.finalize(expr, new Node.Super());
|
|
if (!this.match('(') && !this.match('.') && !this.match('[')) {
|
|
this.throwUnexpectedToken(this.lookahead);
|
|
}
|
|
}
|
|
else {
|
|
expr = this.inheritCoverGrammar(this.matchKeyword('new') ? this.parseNewExpression : this.parsePrimaryExpression);
|
|
}
|
|
while (true) {
|
|
if (this.match('.')) {
|
|
this.context.isBindingElement = false;
|
|
this.context.isAssignmentTarget = true;
|
|
this.expect('.');
|
|
var property = this.parseIdentifierName();
|
|
expr = this.finalize(this.startNode(startToken), new Node.StaticMemberExpression(expr, property));
|
|
}
|
|
else if (this.match('(')) {
|
|
var asyncArrow = maybeAsync && (startToken.lineNumber === this.lookahead.lineNumber);
|
|
this.context.isBindingElement = false;
|
|
this.context.isAssignmentTarget = false;
|
|
var args = asyncArrow ? this.parseAsyncArguments() : this.parseArguments();
|
|
expr = this.finalize(this.startNode(startToken), new Node.CallExpression(expr, args));
|
|
if (asyncArrow && this.match('=>')) {
|
|
for (var i = 0; i < args.length; ++i) {
|
|
this.reinterpretExpressionAsPattern(args[i]);
|
|
}
|
|
expr = {
|
|
type: ArrowParameterPlaceHolder,
|
|
params: args,
|
|
async: true
|
|
};
|
|
}
|
|
}
|
|
else if (this.match('[')) {
|
|
this.context.isBindingElement = false;
|
|
this.context.isAssignmentTarget = true;
|
|
this.expect('[');
|
|
var property = this.isolateCoverGrammar(this.parseExpression);
|
|
this.expect(']');
|
|
expr = this.finalize(this.startNode(startToken), new Node.ComputedMemberExpression(expr, property));
|
|
}
|
|
else if (this.lookahead.type === 10 /* Template */ && this.lookahead.head) {
|
|
var quasi = this.parseTemplateLiteral();
|
|
expr = this.finalize(this.startNode(startToken), new Node.TaggedTemplateExpression(expr, quasi));
|
|
}
|
|
else {
|
|
break;
|
|
}
|
|
}
|
|
this.context.allowIn = previousAllowIn;
|
|
return expr;
|
|
};
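        // parseLeftHandSideExpressionAllowCall above consumes member accesses, call
        // arguments, and tagged templates in a loop, so a chain such as `a.b(c)[d]`
        // becomes nested StaticMemberExpression, CallExpression, and
        // ComputedMemberExpression nodes. A call whose callee is the contextual keyword
        // `async` on the same line is kept as a candidate async arrow head, e.g.
        // `async (x) => x`.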
Parser.prototype.parseSuper = function () {
|
|
var node = this.createNode();
|
|
this.expectKeyword('super');
|
|
if (!this.match('[') && !this.match('.')) {
|
|
this.throwUnexpectedToken(this.lookahead);
|
|
}
|
|
return this.finalize(node, new Node.Super());
|
|
};
|
|
Parser.prototype.parseLeftHandSideExpression = function () {
|
|
assert_1.assert(this.context.allowIn, 'callee of new expression always allow in keyword.');
|
|
var node = this.startNode(this.lookahead);
|
|
var expr = (this.matchKeyword('super') && this.context.inFunctionBody) ? this.parseSuper() :
|
|
this.inheritCoverGrammar(this.matchKeyword('new') ? this.parseNewExpression : this.parsePrimaryExpression);
|
|
while (true) {
|
|
if (this.match('[')) {
|
|
this.context.isBindingElement = false;
|
|
this.context.isAssignmentTarget = true;
|
|
this.expect('[');
|
|
var property = this.isolateCoverGrammar(this.parseExpression);
|
|
this.expect(']');
|
|
expr = this.finalize(node, new Node.ComputedMemberExpression(expr, property));
|
|
}
|
|
else if (this.match('.')) {
|
|
this.context.isBindingElement = false;
|
|
this.context.isAssignmentTarget = true;
|
|
this.expect('.');
|
|
var property = this.parseIdentifierName();
|
|
expr = this.finalize(node, new Node.StaticMemberExpression(expr, property));
|
|
}
|
|
else if (this.lookahead.type === 10 /* Template */ && this.lookahead.head) {
|
|
var quasi = this.parseTemplateLiteral();
|
|
expr = this.finalize(node, new Node.TaggedTemplateExpression(expr, quasi));
|
|
}
|
|
else {
|
|
break;
|
|
}
|
|
}
|
|
return expr;
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-update-expressions
|
|
Parser.prototype.parseUpdateExpression = function () {
|
|
var expr;
|
|
var startToken = this.lookahead;
|
|
if (this.match('++') || this.match('--')) {
|
|
var node = this.startNode(startToken);
|
|
var token = this.nextToken();
|
|
expr = this.inheritCoverGrammar(this.parseUnaryExpression);
|
|
if (this.context.strict && expr.type === syntax_1.Syntax.Identifier && this.scanner.isRestrictedWord(expr.name)) {
|
|
this.tolerateError(messages_1.Messages.StrictLHSPrefix);
|
|
}
|
|
if (!this.context.isAssignmentTarget) {
|
|
this.tolerateError(messages_1.Messages.InvalidLHSInAssignment);
|
|
}
|
|
var prefix = true;
|
|
expr = this.finalize(node, new Node.UpdateExpression(token.value, expr, prefix));
|
|
this.context.isAssignmentTarget = false;
|
|
this.context.isBindingElement = false;
|
|
}
|
|
else {
|
|
expr = this.inheritCoverGrammar(this.parseLeftHandSideExpressionAllowCall);
|
|
if (!this.hasLineTerminator && this.lookahead.type === 7 /* Punctuator */) {
|
|
if (this.match('++') || this.match('--')) {
|
|
if (this.context.strict && expr.type === syntax_1.Syntax.Identifier && this.scanner.isRestrictedWord(expr.name)) {
|
|
this.tolerateError(messages_1.Messages.StrictLHSPostfix);
|
|
}
|
|
if (!this.context.isAssignmentTarget) {
|
|
this.tolerateError(messages_1.Messages.InvalidLHSInAssignment);
|
|
}
|
|
this.context.isAssignmentTarget = false;
|
|
this.context.isBindingElement = false;
|
|
var operator = this.nextToken().value;
|
|
var prefix = false;
|
|
expr = this.finalize(this.startNode(startToken), new Node.UpdateExpression(operator, expr, prefix));
|
|
}
|
|
}
|
|
}
|
|
return expr;
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-unary-operators
|
|
Parser.prototype.parseAwaitExpression = function () {
|
|
var node = this.createNode();
|
|
this.nextToken();
|
|
var argument = this.parseUnaryExpression();
|
|
return this.finalize(node, new Node.AwaitExpression(argument));
|
|
};
|
|
Parser.prototype.parseUnaryExpression = function () {
|
|
var expr;
|
|
if (this.match('+') || this.match('-') || this.match('~') || this.match('!') ||
|
|
this.matchKeyword('delete') || this.matchKeyword('void') || this.matchKeyword('typeof')) {
|
|
var node = this.startNode(this.lookahead);
|
|
var token = this.nextToken();
|
|
expr = this.inheritCoverGrammar(this.parseUnaryExpression);
|
|
expr = this.finalize(node, new Node.UnaryExpression(token.value, expr));
|
|
if (this.context.strict && expr.operator === 'delete' && expr.argument.type === syntax_1.Syntax.Identifier) {
|
|
this.tolerateError(messages_1.Messages.StrictDelete);
|
|
}
|
|
this.context.isAssignmentTarget = false;
|
|
this.context.isBindingElement = false;
|
|
}
|
|
else if (this.context.await && this.matchContextualKeyword('await')) {
|
|
expr = this.parseAwaitExpression();
|
|
}
|
|
else {
|
|
expr = this.parseUpdateExpression();
|
|
}
|
|
return expr;
|
|
};
|
|
Parser.prototype.parseExponentiationExpression = function () {
|
|
var startToken = this.lookahead;
|
|
var expr = this.inheritCoverGrammar(this.parseUnaryExpression);
|
|
if (expr.type !== syntax_1.Syntax.UnaryExpression && this.match('**')) {
|
|
this.nextToken();
|
|
this.context.isAssignmentTarget = false;
|
|
this.context.isBindingElement = false;
|
|
var left = expr;
|
|
var right = this.isolateCoverGrammar(this.parseExponentiationExpression);
|
|
expr = this.finalize(this.startNode(startToken), new Node.BinaryExpression('**', left, right));
|
|
}
|
|
return expr;
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-exp-operator
|
|
// https://tc39.github.io/ecma262/#sec-multiplicative-operators
|
|
// https://tc39.github.io/ecma262/#sec-additive-operators
|
|
// https://tc39.github.io/ecma262/#sec-bitwise-shift-operators
|
|
// https://tc39.github.io/ecma262/#sec-relational-operators
|
|
// https://tc39.github.io/ecma262/#sec-equality-operators
|
|
// https://tc39.github.io/ecma262/#sec-binary-bitwise-operators
|
|
// https://tc39.github.io/ecma262/#sec-binary-logical-operators
|
|
        Parser.prototype.binaryPrecedence = function (token) {
            var op = token.value;
            var precedence;
            if (token.type === 7 /* Punctuator */) {
                precedence = this.operatorPrecedence[op] || 0;
            }
            else if (token.type === 4 /* Keyword */) {
                precedence = (op === 'instanceof' || (this.context.allowIn && op === 'in')) ? 7 : 0;
            }
            else {
                precedence = 0;
            }
            return precedence;
        };
        Parser.prototype.parseBinaryExpression = function () {
            var startToken = this.lookahead;
            var expr = this.inheritCoverGrammar(this.parseExponentiationExpression);
            var token = this.lookahead;
            var prec = this.binaryPrecedence(token);
            if (prec > 0) {
                this.nextToken();
                this.context.isAssignmentTarget = false;
                this.context.isBindingElement = false;
                var markers = [startToken, this.lookahead];
                var left = expr;
                var right = this.isolateCoverGrammar(this.parseExponentiationExpression);
                var stack = [left, token.value, right];
                var precedences = [prec];
                while (true) {
                    prec = this.binaryPrecedence(this.lookahead);
                    if (prec <= 0) {
                        break;
                    }
                    // Reduce: make a binary expression from the three topmost entries.
                    while ((stack.length > 2) && (prec <= precedences[precedences.length - 1])) {
                        right = stack.pop();
                        var operator = stack.pop();
                        precedences.pop();
                        left = stack.pop();
                        markers.pop();
                        var node = this.startNode(markers[markers.length - 1]);
                        stack.push(this.finalize(node, new Node.BinaryExpression(operator, left, right)));
                    }
                    // Shift.
                    stack.push(this.nextToken().value);
                    precedences.push(prec);
                    markers.push(this.lookahead);
                    stack.push(this.isolateCoverGrammar(this.parseExponentiationExpression));
                }
                // Final reduce to clean-up the stack.
                var i = stack.length - 1;
                expr = stack[i];
                var lastMarker = markers.pop();
                while (i > 1) {
                    var marker = markers.pop();
                    var lastLineStart = lastMarker && lastMarker.lineStart;
                    var node = this.startNode(marker, lastLineStart);
                    var operator = stack[i - 1];
                    expr = this.finalize(node, new Node.BinaryExpression(operator, stack[i - 2], expr));
                    i -= 2;
                    lastMarker = marker;
                }
            }
            return expr;
        };
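        // parseBinaryExpression above is an operator-precedence (shift-reduce) parser:
        // operands and operator tokens are shifted onto `stack`, and whenever the
        // incoming operator does not bind tighter than the one on top, the three topmost
        // entries are reduced into a BinaryExpression. For `1 + 2 * 3` the `*` shifts
        // because it binds tighter than `+`, so the result groups as 1 + (2 * 3); the
        // trailing loop then folds whatever remains on the stack into nested
        // BinaryExpression nodes.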
// https://tc39.github.io/ecma262/#sec-conditional-operator
|
|
Parser.prototype.parseConditionalExpression = function () {
|
|
var startToken = this.lookahead;
|
|
var expr = this.inheritCoverGrammar(this.parseBinaryExpression);
|
|
if (this.match('?')) {
|
|
this.nextToken();
|
|
var previousAllowIn = this.context.allowIn;
|
|
this.context.allowIn = true;
|
|
var consequent = this.isolateCoverGrammar(this.parseAssignmentExpression);
|
|
this.context.allowIn = previousAllowIn;
|
|
this.expect(':');
|
|
var alternate = this.isolateCoverGrammar(this.parseAssignmentExpression);
|
|
expr = this.finalize(this.startNode(startToken), new Node.ConditionalExpression(expr, consequent, alternate));
|
|
this.context.isAssignmentTarget = false;
|
|
this.context.isBindingElement = false;
|
|
}
|
|
return expr;
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-assignment-operators
|
|
Parser.prototype.checkPatternParam = function (options, param) {
|
|
switch (param.type) {
|
|
case syntax_1.Syntax.Identifier:
|
|
this.validateParam(options, param, param.name);
|
|
break;
|
|
case syntax_1.Syntax.RestElement:
|
|
this.checkPatternParam(options, param.argument);
|
|
break;
|
|
case syntax_1.Syntax.AssignmentPattern:
|
|
this.checkPatternParam(options, param.left);
|
|
break;
|
|
case syntax_1.Syntax.ArrayPattern:
|
|
for (var i = 0; i < param.elements.length; i++) {
|
|
if (param.elements[i] !== null) {
|
|
this.checkPatternParam(options, param.elements[i]);
|
|
}
|
|
}
|
|
break;
|
|
case syntax_1.Syntax.ObjectPattern:
|
|
for (var i = 0; i < param.properties.length; i++) {
|
|
this.checkPatternParam(options, param.properties[i].value);
|
|
}
|
|
break;
|
|
default:
|
|
break;
|
|
}
|
|
options.simple = options.simple && (param instanceof Node.Identifier);
|
|
};
|
|
Parser.prototype.reinterpretAsCoverFormalsList = function (expr) {
|
|
var params = [expr];
|
|
var options;
|
|
var asyncArrow = false;
|
|
switch (expr.type) {
|
|
case syntax_1.Syntax.Identifier:
|
|
break;
|
|
case ArrowParameterPlaceHolder:
|
|
params = expr.params;
|
|
asyncArrow = expr.async;
|
|
break;
|
|
default:
|
|
return null;
|
|
}
|
|
options = {
|
|
simple: true,
|
|
paramSet: {}
|
|
};
|
|
for (var i = 0; i < params.length; ++i) {
|
|
var param = params[i];
|
|
if (param.type === syntax_1.Syntax.AssignmentPattern) {
|
|
if (param.right.type === syntax_1.Syntax.YieldExpression) {
|
|
if (param.right.argument) {
|
|
this.throwUnexpectedToken(this.lookahead);
|
|
}
|
|
param.right.type = syntax_1.Syntax.Identifier;
|
|
param.right.name = 'yield';
|
|
delete param.right.argument;
|
|
delete param.right.delegate;
|
|
}
|
|
}
|
|
else if (asyncArrow && param.type === syntax_1.Syntax.Identifier && param.name === 'await') {
|
|
this.throwUnexpectedToken(this.lookahead);
|
|
}
|
|
this.checkPatternParam(options, param);
|
|
params[i] = param;
|
|
}
|
|
if (this.context.strict || !this.context.allowYield) {
|
|
for (var i = 0; i < params.length; ++i) {
|
|
var param = params[i];
|
|
if (param.type === syntax_1.Syntax.YieldExpression) {
|
|
this.throwUnexpectedToken(this.lookahead);
|
|
}
|
|
}
|
|
}
|
|
if (options.message === messages_1.Messages.StrictParamDupe) {
|
|
var token = this.context.strict ? options.stricted : options.firstRestricted;
|
|
this.throwUnexpectedToken(token, options.message);
|
|
}
|
|
return {
|
|
simple: options.simple,
|
|
params: params,
|
|
stricted: options.stricted,
|
|
firstRestricted: options.firstRestricted,
|
|
message: options.message
|
|
};
|
|
};
|
|
Parser.prototype.parseAssignmentExpression = function () {
|
|
var expr;
|
|
if (!this.context.allowYield && this.matchKeyword('yield')) {
|
|
expr = this.parseYieldExpression();
|
|
}
|
|
else {
|
|
var startToken = this.lookahead;
|
|
var token = startToken;
|
|
expr = this.parseConditionalExpression();
|
|
if (token.type === 3 /* Identifier */ && (token.lineNumber === this.lookahead.lineNumber) && token.value === 'async') {
|
|
if (this.lookahead.type === 3 /* Identifier */ || this.matchKeyword('yield')) {
|
|
var arg = this.parsePrimaryExpression();
|
|
this.reinterpretExpressionAsPattern(arg);
|
|
expr = {
|
|
type: ArrowParameterPlaceHolder,
|
|
params: [arg],
|
|
async: true
|
|
};
|
|
}
|
|
}
|
|
if (expr.type === ArrowParameterPlaceHolder || this.match('=>')) {
|
|
// https://tc39.github.io/ecma262/#sec-arrow-function-definitions
|
|
this.context.isAssignmentTarget = false;
|
|
this.context.isBindingElement = false;
|
|
var isAsync = expr.async;
|
|
var list = this.reinterpretAsCoverFormalsList(expr);
|
|
if (list) {
|
|
if (this.hasLineTerminator) {
|
|
this.tolerateUnexpectedToken(this.lookahead);
|
|
}
|
|
this.context.firstCoverInitializedNameError = null;
|
|
var previousStrict = this.context.strict;
|
|
var previousAllowStrictDirective = this.context.allowStrictDirective;
|
|
this.context.allowStrictDirective = list.simple;
|
|
var previousAllowYield = this.context.allowYield;
|
|
var previousAwait = this.context.await;
|
|
this.context.allowYield = true;
|
|
this.context.await = isAsync;
|
|
var node = this.startNode(startToken);
|
|
this.expect('=>');
|
|
var body = void 0;
|
|
if (this.match('{')) {
|
|
var previousAllowIn = this.context.allowIn;
|
|
this.context.allowIn = true;
|
|
body = this.parseFunctionSourceElements();
|
|
this.context.allowIn = previousAllowIn;
|
|
}
|
|
else {
|
|
body = this.isolateCoverGrammar(this.parseAssignmentExpression);
|
|
}
|
|
var expression = body.type !== syntax_1.Syntax.BlockStatement;
|
|
if (this.context.strict && list.firstRestricted) {
|
|
this.throwUnexpectedToken(list.firstRestricted, list.message);
|
|
}
|
|
if (this.context.strict && list.stricted) {
|
|
this.tolerateUnexpectedToken(list.stricted, list.message);
|
|
}
|
|
expr = isAsync ? this.finalize(node, new Node.AsyncArrowFunctionExpression(list.params, body, expression)) :
|
|
this.finalize(node, new Node.ArrowFunctionExpression(list.params, body, expression));
|
|
this.context.strict = previousStrict;
|
|
this.context.allowStrictDirective = previousAllowStrictDirective;
|
|
this.context.allowYield = previousAllowYield;
|
|
this.context.await = previousAwait;
|
|
}
|
|
}
|
|
else {
|
|
if (this.matchAssign()) {
|
|
if (!this.context.isAssignmentTarget) {
|
|
this.tolerateError(messages_1.Messages.InvalidLHSInAssignment);
|
|
}
|
|
if (this.context.strict && expr.type === syntax_1.Syntax.Identifier) {
|
|
var id = expr;
|
|
if (this.scanner.isRestrictedWord(id.name)) {
|
|
this.tolerateUnexpectedToken(token, messages_1.Messages.StrictLHSAssignment);
|
|
}
|
|
if (this.scanner.isStrictModeReservedWord(id.name)) {
|
|
this.tolerateUnexpectedToken(token, messages_1.Messages.StrictReservedWord);
|
|
}
|
|
}
|
|
if (!this.match('=')) {
|
|
this.context.isAssignmentTarget = false;
|
|
this.context.isBindingElement = false;
|
|
}
|
|
else {
|
|
this.reinterpretExpressionAsPattern(expr);
|
|
}
|
|
token = this.nextToken();
|
|
var operator = token.value;
|
|
var right = this.isolateCoverGrammar(this.parseAssignmentExpression);
|
|
expr = this.finalize(this.startNode(startToken), new Node.AssignmentExpression(operator, expr, right));
|
|
this.context.firstCoverInitializedNameError = null;
|
|
}
|
|
}
|
|
}
|
|
return expr;
|
|
};
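        // parseAssignmentExpression above covers both plain assignments and arrow
        // functions: the left side is parsed as an ordinary conditional expression
        // first, and only when '=>' (or an ArrowParameterPlaceHolder) appears is it
        // reinterpreted as a formal-parameter list via reinterpretAsCoverFormalsList.
        // An assignment to a pattern such as `[a, b] = pair` likewise reinterprets the
        // target before the AssignmentExpression node is built.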
// https://tc39.github.io/ecma262/#sec-comma-operator
|
|
Parser.prototype.parseExpression = function () {
|
|
var startToken = this.lookahead;
|
|
var expr = this.isolateCoverGrammar(this.parseAssignmentExpression);
|
|
if (this.match(',')) {
|
|
var expressions = [];
|
|
expressions.push(expr);
|
|
while (this.lookahead.type !== 2 /* EOF */) {
|
|
if (!this.match(',')) {
|
|
break;
|
|
}
|
|
this.nextToken();
|
|
expressions.push(this.isolateCoverGrammar(this.parseAssignmentExpression));
|
|
}
|
|
expr = this.finalize(this.startNode(startToken), new Node.SequenceExpression(expressions));
|
|
}
|
|
return expr;
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-block
|
|
Parser.prototype.parseStatementListItem = function () {
|
|
var statement;
|
|
this.context.isAssignmentTarget = true;
|
|
this.context.isBindingElement = true;
|
|
if (this.lookahead.type === 4 /* Keyword */) {
|
|
switch (this.lookahead.value) {
|
|
case 'export':
|
|
if (!this.context.isModule) {
|
|
this.tolerateUnexpectedToken(this.lookahead, messages_1.Messages.IllegalExportDeclaration);
|
|
}
|
|
statement = this.parseExportDeclaration();
|
|
break;
|
|
case 'import':
|
|
if (!this.context.isModule) {
|
|
this.tolerateUnexpectedToken(this.lookahead, messages_1.Messages.IllegalImportDeclaration);
|
|
}
|
|
statement = this.parseImportDeclaration();
|
|
break;
|
|
case 'const':
|
|
statement = this.parseLexicalDeclaration({ inFor: false });
|
|
break;
|
|
case 'function':
|
|
statement = this.parseFunctionDeclaration();
|
|
break;
|
|
case 'class':
|
|
statement = this.parseClassDeclaration();
|
|
break;
|
|
case 'let':
|
|
statement = this.isLexicalDeclaration() ? this.parseLexicalDeclaration({ inFor: false }) : this.parseStatement();
|
|
break;
|
|
default:
|
|
statement = this.parseStatement();
|
|
break;
|
|
}
|
|
}
|
|
else {
|
|
statement = this.parseStatement();
|
|
}
|
|
return statement;
|
|
};
|
|
Parser.prototype.parseBlock = function () {
|
|
var node = this.createNode();
|
|
this.expect('{');
|
|
var block = [];
|
|
while (true) {
|
|
if (this.match('}')) {
|
|
break;
|
|
}
|
|
block.push(this.parseStatementListItem());
|
|
}
|
|
this.expect('}');
|
|
return this.finalize(node, new Node.BlockStatement(block));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-let-and-const-declarations
|
|
Parser.prototype.parseLexicalBinding = function (kind, options) {
|
|
var node = this.createNode();
|
|
var params = [];
|
|
var id = this.parsePattern(params, kind);
|
|
if (this.context.strict && id.type === syntax_1.Syntax.Identifier) {
|
|
if (this.scanner.isRestrictedWord(id.name)) {
|
|
this.tolerateError(messages_1.Messages.StrictVarName);
|
|
}
|
|
}
|
|
var init = null;
|
|
if (kind === 'const') {
|
|
if (!this.matchKeyword('in') && !this.matchContextualKeyword('of')) {
|
|
if (this.match('=')) {
|
|
this.nextToken();
|
|
init = this.isolateCoverGrammar(this.parseAssignmentExpression);
|
|
}
|
|
else {
|
|
this.throwError(messages_1.Messages.DeclarationMissingInitializer, 'const');
|
|
}
|
|
}
|
|
}
|
|
else if ((!options.inFor && id.type !== syntax_1.Syntax.Identifier) || this.match('=')) {
|
|
this.expect('=');
|
|
init = this.isolateCoverGrammar(this.parseAssignmentExpression);
|
|
}
|
|
return this.finalize(node, new Node.VariableDeclarator(id, init));
|
|
};
|
|
Parser.prototype.parseBindingList = function (kind, options) {
|
|
var list = [this.parseLexicalBinding(kind, options)];
|
|
while (this.match(',')) {
|
|
this.nextToken();
|
|
list.push(this.parseLexicalBinding(kind, options));
|
|
}
|
|
return list;
|
|
};
|
|
        Parser.prototype.isLexicalDeclaration = function () {
            var state = this.scanner.saveState();
            this.scanner.scanComments();
            var next = this.scanner.lex();
            this.scanner.restoreState(state);
            return (next.type === 3 /* Identifier */) ||
                (next.type === 7 /* Punctuator */ && next.value === '[') ||
                (next.type === 7 /* Punctuator */ && next.value === '{') ||
                (next.type === 4 /* Keyword */ && next.value === 'let') ||
                (next.type === 4 /* Keyword */ && next.value === 'yield');
        };
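        // One token of lookahead (with the scanner state saved and restored) decides
        // whether `let` starts a lexical declaration: `let x`, `let [a] = b`, and
        // `let {a} = b` do, whereas `let = 1` or `let(x)` fall through and are parsed
        // as ordinary expression statements with `let` as an identifier.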
Parser.prototype.parseLexicalDeclaration = function (options) {
|
|
var node = this.createNode();
|
|
var kind = this.nextToken().value;
|
|
assert_1.assert(kind === 'let' || kind === 'const', 'Lexical declaration must be either let or const');
|
|
var declarations = this.parseBindingList(kind, options);
|
|
this.consumeSemicolon();
|
|
return this.finalize(node, new Node.VariableDeclaration(declarations, kind));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-destructuring-binding-patterns
|
|
Parser.prototype.parseBindingRestElement = function (params, kind) {
|
|
var node = this.createNode();
|
|
this.expect('...');
|
|
var arg = this.parsePattern(params, kind);
|
|
return this.finalize(node, new Node.RestElement(arg));
|
|
};
|
|
Parser.prototype.parseArrayPattern = function (params, kind) {
|
|
var node = this.createNode();
|
|
this.expect('[');
|
|
var elements = [];
|
|
while (!this.match(']')) {
|
|
if (this.match(',')) {
|
|
this.nextToken();
|
|
elements.push(null);
|
|
}
|
|
else {
|
|
if (this.match('...')) {
|
|
elements.push(this.parseBindingRestElement(params, kind));
|
|
break;
|
|
}
|
|
else {
|
|
elements.push(this.parsePatternWithDefault(params, kind));
|
|
}
|
|
if (!this.match(']')) {
|
|
this.expect(',');
|
|
}
|
|
}
|
|
}
|
|
this.expect(']');
|
|
return this.finalize(node, new Node.ArrayPattern(elements));
|
|
};
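        // Destructuring binding patterns mirror the expression forms:
        // `var [a, , ...rest] = xs` produces an ArrayPattern whose elements are an
        // Identifier, a null hole, and a RestElement; the rest element must be last,
        // since parsing of the element list stops once it has been consumed.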
Parser.prototype.parsePropertyPattern = function (params, kind) {
|
|
var node = this.createNode();
|
|
var computed = false;
|
|
var shorthand = false;
|
|
var method = false;
|
|
var key;
|
|
var value;
|
|
if (this.lookahead.type === 3 /* Identifier */) {
|
|
var keyToken = this.lookahead;
|
|
key = this.parseVariableIdentifier();
|
|
var init = this.finalize(node, new Node.Identifier(keyToken.value));
|
|
if (this.match('=')) {
|
|
params.push(keyToken);
|
|
shorthand = true;
|
|
this.nextToken();
|
|
var expr = this.parseAssignmentExpression();
|
|
value = this.finalize(this.startNode(keyToken), new Node.AssignmentPattern(init, expr));
|
|
}
|
|
else if (!this.match(':')) {
|
|
params.push(keyToken);
|
|
shorthand = true;
|
|
value = init;
|
|
}
|
|
else {
|
|
this.expect(':');
|
|
value = this.parsePatternWithDefault(params, kind);
|
|
}
|
|
}
|
|
else {
|
|
computed = this.match('[');
|
|
key = this.parseObjectPropertyKey();
|
|
this.expect(':');
|
|
value = this.parsePatternWithDefault(params, kind);
|
|
}
|
|
return this.finalize(node, new Node.Property('init', key, computed, value, method, shorthand));
|
|
};
|
|
Parser.prototype.parseObjectPattern = function (params, kind) {
|
|
var node = this.createNode();
|
|
var properties = [];
|
|
this.expect('{');
|
|
while (!this.match('}')) {
|
|
properties.push(this.parsePropertyPattern(params, kind));
|
|
if (!this.match('}')) {
|
|
this.expect(',');
|
|
}
|
|
}
|
|
this.expect('}');
|
|
return this.finalize(node, new Node.ObjectPattern(properties));
|
|
};
|
|
Parser.prototype.parsePattern = function (params, kind) {
|
|
var pattern;
|
|
if (this.match('[')) {
|
|
pattern = this.parseArrayPattern(params, kind);
|
|
}
|
|
else if (this.match('{')) {
|
|
pattern = this.parseObjectPattern(params, kind);
|
|
}
|
|
else {
|
|
if (this.matchKeyword('let') && (kind === 'const' || kind === 'let')) {
|
|
this.tolerateUnexpectedToken(this.lookahead, messages_1.Messages.LetInLexicalBinding);
|
|
}
|
|
params.push(this.lookahead);
|
|
pattern = this.parseVariableIdentifier(kind);
|
|
}
|
|
return pattern;
|
|
};
|
|
Parser.prototype.parsePatternWithDefault = function (params, kind) {
|
|
var startToken = this.lookahead;
|
|
var pattern = this.parsePattern(params, kind);
|
|
if (this.match('=')) {
|
|
this.nextToken();
|
|
var previousAllowYield = this.context.allowYield;
|
|
this.context.allowYield = true;
|
|
var right = this.isolateCoverGrammar(this.parseAssignmentExpression);
|
|
this.context.allowYield = previousAllowYield;
|
|
pattern = this.finalize(this.startNode(startToken), new Node.AssignmentPattern(pattern, right));
|
|
}
|
|
return pattern;
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-variable-statement
|
|
Parser.prototype.parseVariableIdentifier = function (kind) {
|
|
var node = this.createNode();
|
|
var token = this.nextToken();
|
|
if (token.type === 4 /* Keyword */ && token.value === 'yield') {
|
|
if (this.context.strict) {
|
|
this.tolerateUnexpectedToken(token, messages_1.Messages.StrictReservedWord);
|
|
}
|
|
else if (!this.context.allowYield) {
|
|
this.throwUnexpectedToken(token);
|
|
}
|
|
}
|
|
else if (token.type !== 3 /* Identifier */) {
|
|
if (this.context.strict && token.type === 4 /* Keyword */ && this.scanner.isStrictModeReservedWord(token.value)) {
|
|
this.tolerateUnexpectedToken(token, messages_1.Messages.StrictReservedWord);
|
|
}
|
|
else {
|
|
if (this.context.strict || token.value !== 'let' || kind !== 'var') {
|
|
this.throwUnexpectedToken(token);
|
|
}
|
|
}
|
|
}
|
|
else if ((this.context.isModule || this.context.await) && token.type === 3 /* Identifier */ && token.value === 'await') {
|
|
this.tolerateUnexpectedToken(token);
|
|
}
|
|
return this.finalize(node, new Node.Identifier(token.value));
|
|
};
|
|
Parser.prototype.parseVariableDeclaration = function (options) {
|
|
var node = this.createNode();
|
|
var params = [];
|
|
var id = this.parsePattern(params, 'var');
|
|
if (this.context.strict && id.type === syntax_1.Syntax.Identifier) {
|
|
if (this.scanner.isRestrictedWord(id.name)) {
|
|
this.tolerateError(messages_1.Messages.StrictVarName);
|
|
}
|
|
}
|
|
var init = null;
|
|
if (this.match('=')) {
|
|
this.nextToken();
|
|
init = this.isolateCoverGrammar(this.parseAssignmentExpression);
|
|
}
|
|
else if (id.type !== syntax_1.Syntax.Identifier && !options.inFor) {
|
|
this.expect('=');
|
|
}
|
|
return this.finalize(node, new Node.VariableDeclarator(id, init));
|
|
};
|
|
Parser.prototype.parseVariableDeclarationList = function (options) {
|
|
var opt = { inFor: options.inFor };
|
|
var list = [];
|
|
list.push(this.parseVariableDeclaration(opt));
|
|
while (this.match(',')) {
|
|
this.nextToken();
|
|
list.push(this.parseVariableDeclaration(opt));
|
|
}
|
|
return list;
|
|
};
|
|
Parser.prototype.parseVariableStatement = function () {
|
|
var node = this.createNode();
|
|
this.expectKeyword('var');
|
|
var declarations = this.parseVariableDeclarationList({ inFor: false });
|
|
this.consumeSemicolon();
|
|
return this.finalize(node, new Node.VariableDeclaration(declarations, 'var'));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-empty-statement
|
|
Parser.prototype.parseEmptyStatement = function () {
|
|
var node = this.createNode();
|
|
this.expect(';');
|
|
return this.finalize(node, new Node.EmptyStatement());
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-expression-statement
|
|
Parser.prototype.parseExpressionStatement = function () {
|
|
var node = this.createNode();
|
|
var expr = this.parseExpression();
|
|
this.consumeSemicolon();
|
|
return this.finalize(node, new Node.ExpressionStatement(expr));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-if-statement
|
|
Parser.prototype.parseIfClause = function () {
|
|
if (this.context.strict && this.matchKeyword('function')) {
|
|
this.tolerateError(messages_1.Messages.StrictFunction);
|
|
}
|
|
return this.parseStatement();
|
|
};
|
|
Parser.prototype.parseIfStatement = function () {
|
|
var node = this.createNode();
|
|
var consequent;
|
|
var alternate = null;
|
|
this.expectKeyword('if');
|
|
this.expect('(');
|
|
var test = this.parseExpression();
|
|
if (!this.match(')') && this.config.tolerant) {
|
|
this.tolerateUnexpectedToken(this.nextToken());
|
|
consequent = this.finalize(this.createNode(), new Node.EmptyStatement());
|
|
}
|
|
else {
|
|
this.expect(')');
|
|
consequent = this.parseIfClause();
|
|
if (this.matchKeyword('else')) {
|
|
this.nextToken();
|
|
alternate = this.parseIfClause();
|
|
}
|
|
}
|
|
return this.finalize(node, new Node.IfStatement(test, consequent, alternate));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-do-while-statement
|
|
Parser.prototype.parseDoWhileStatement = function () {
|
|
var node = this.createNode();
|
|
this.expectKeyword('do');
|
|
var previousInIteration = this.context.inIteration;
|
|
this.context.inIteration = true;
|
|
var body = this.parseStatement();
|
|
this.context.inIteration = previousInIteration;
|
|
this.expectKeyword('while');
|
|
this.expect('(');
|
|
var test = this.parseExpression();
|
|
if (!this.match(')') && this.config.tolerant) {
|
|
this.tolerateUnexpectedToken(this.nextToken());
|
|
}
|
|
else {
|
|
this.expect(')');
|
|
if (this.match(';')) {
|
|
this.nextToken();
|
|
}
|
|
}
|
|
return this.finalize(node, new Node.DoWhileStatement(body, test));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-while-statement
|
|
Parser.prototype.parseWhileStatement = function () {
|
|
var node = this.createNode();
|
|
var body;
|
|
this.expectKeyword('while');
|
|
this.expect('(');
|
|
var test = this.parseExpression();
|
|
if (!this.match(')') && this.config.tolerant) {
|
|
this.tolerateUnexpectedToken(this.nextToken());
|
|
body = this.finalize(this.createNode(), new Node.EmptyStatement());
|
|
}
|
|
else {
|
|
this.expect(')');
|
|
var previousInIteration = this.context.inIteration;
|
|
this.context.inIteration = true;
|
|
body = this.parseStatement();
|
|
this.context.inIteration = previousInIteration;
|
|
}
|
|
return this.finalize(node, new Node.WhileStatement(test, body));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-for-statement
|
|
// https://tc39.github.io/ecma262/#sec-for-in-and-for-of-statements
|
|
Parser.prototype.parseForStatement = function () {
|
|
var init = null;
|
|
var test = null;
|
|
var update = null;
|
|
var forIn = true;
|
|
var left, right;
|
|
var node = this.createNode();
|
|
this.expectKeyword('for');
|
|
this.expect('(');
|
|
if (this.match(';')) {
|
|
this.nextToken();
|
|
}
|
|
else {
|
|
if (this.matchKeyword('var')) {
|
|
init = this.createNode();
|
|
this.nextToken();
|
|
var previousAllowIn = this.context.allowIn;
|
|
this.context.allowIn = false;
|
|
var declarations = this.parseVariableDeclarationList({ inFor: true });
|
|
this.context.allowIn = previousAllowIn;
|
|
if (declarations.length === 1 && this.matchKeyword('in')) {
|
|
var decl = declarations[0];
|
|
if (decl.init && (decl.id.type === syntax_1.Syntax.ArrayPattern || decl.id.type === syntax_1.Syntax.ObjectPattern || this.context.strict)) {
|
|
this.tolerateError(messages_1.Messages.ForInOfLoopInitializer, 'for-in');
|
|
}
|
|
init = this.finalize(init, new Node.VariableDeclaration(declarations, 'var'));
|
|
this.nextToken();
|
|
left = init;
|
|
right = this.parseExpression();
|
|
init = null;
|
|
}
|
|
else if (declarations.length === 1 && declarations[0].init === null && this.matchContextualKeyword('of')) {
|
|
init = this.finalize(init, new Node.VariableDeclaration(declarations, 'var'));
|
|
this.nextToken();
|
|
left = init;
|
|
right = this.parseAssignmentExpression();
|
|
init = null;
|
|
forIn = false;
|
|
}
|
|
else {
|
|
init = this.finalize(init, new Node.VariableDeclaration(declarations, 'var'));
|
|
this.expect(';');
|
|
}
|
|
}
|
|
else if (this.matchKeyword('const') || this.matchKeyword('let')) {
|
|
init = this.createNode();
|
|
var kind = this.nextToken().value;
|
|
if (!this.context.strict && this.lookahead.value === 'in') {
|
|
init = this.finalize(init, new Node.Identifier(kind));
|
|
this.nextToken();
|
|
left = init;
|
|
right = this.parseExpression();
|
|
init = null;
|
|
}
|
|
else {
|
|
var previousAllowIn = this.context.allowIn;
|
|
this.context.allowIn = false;
|
|
var declarations = this.parseBindingList(kind, { inFor: true });
|
|
this.context.allowIn = previousAllowIn;
|
|
if (declarations.length === 1 && declarations[0].init === null && this.matchKeyword('in')) {
|
|
init = this.finalize(init, new Node.VariableDeclaration(declarations, kind));
|
|
this.nextToken();
|
|
left = init;
|
|
right = this.parseExpression();
|
|
init = null;
|
|
}
|
|
else if (declarations.length === 1 && declarations[0].init === null && this.matchContextualKeyword('of')) {
|
|
init = this.finalize(init, new Node.VariableDeclaration(declarations, kind));
|
|
this.nextToken();
|
|
left = init;
|
|
right = this.parseAssignmentExpression();
|
|
init = null;
|
|
forIn = false;
|
|
}
|
|
else {
|
|
this.consumeSemicolon();
|
|
init = this.finalize(init, new Node.VariableDeclaration(declarations, kind));
|
|
}
|
|
}
|
|
}
|
|
else {
|
|
var initStartToken = this.lookahead;
|
|
var previousAllowIn = this.context.allowIn;
|
|
this.context.allowIn = false;
|
|
init = this.inheritCoverGrammar(this.parseAssignmentExpression);
|
|
this.context.allowIn = previousAllowIn;
|
|
if (this.matchKeyword('in')) {
|
|
if (!this.context.isAssignmentTarget || init.type === syntax_1.Syntax.AssignmentExpression) {
|
|
this.tolerateError(messages_1.Messages.InvalidLHSInForIn);
|
|
}
|
|
this.nextToken();
|
|
this.reinterpretExpressionAsPattern(init);
|
|
left = init;
|
|
right = this.parseExpression();
|
|
init = null;
|
|
}
|
|
else if (this.matchContextualKeyword('of')) {
|
|
if (!this.context.isAssignmentTarget || init.type === syntax_1.Syntax.AssignmentExpression) {
|
|
this.tolerateError(messages_1.Messages.InvalidLHSInForLoop);
|
|
}
|
|
this.nextToken();
|
|
this.reinterpretExpressionAsPattern(init);
|
|
left = init;
|
|
right = this.parseAssignmentExpression();
|
|
init = null;
|
|
forIn = false;
|
|
}
|
|
else {
|
|
if (this.match(',')) {
|
|
var initSeq = [init];
|
|
while (this.match(',')) {
|
|
this.nextToken();
|
|
initSeq.push(this.isolateCoverGrammar(this.parseAssignmentExpression));
|
|
}
|
|
init = this.finalize(this.startNode(initStartToken), new Node.SequenceExpression(initSeq));
|
|
}
|
|
this.expect(';');
|
|
}
|
|
}
|
|
}
|
|
if (typeof left === 'undefined') {
|
|
if (!this.match(';')) {
|
|
test = this.parseExpression();
|
|
}
|
|
this.expect(';');
|
|
if (!this.match(')')) {
|
|
update = this.parseExpression();
|
|
}
|
|
}
|
|
var body;
|
|
if (!this.match(')') && this.config.tolerant) {
|
|
this.tolerateUnexpectedToken(this.nextToken());
|
|
body = this.finalize(this.createNode(), new Node.EmptyStatement());
|
|
}
|
|
else {
|
|
this.expect(')');
|
|
var previousInIteration = this.context.inIteration;
|
|
this.context.inIteration = true;
|
|
body = this.isolateCoverGrammar(this.parseStatement);
|
|
this.context.inIteration = previousInIteration;
|
|
}
|
|
return (typeof left === 'undefined') ?
|
|
this.finalize(node, new Node.ForStatement(init, test, update, body)) :
|
|
forIn ? this.finalize(node, new Node.ForInStatement(left, right, body)) :
|
|
this.finalize(node, new Node.ForOfStatement(left, right, body));
|
|
};
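        // parseForStatement above disambiguates the three loop forms after parsing the
        // initializer: a following `in` keyword yields a ForInStatement, the contextual
        // keyword `of` yields a ForOfStatement (forIn is cleared), and otherwise the
        // `;`-separated test and update expressions are parsed for a classic
        // ForStatement.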
// https://tc39.github.io/ecma262/#sec-continue-statement
|
|
Parser.prototype.parseContinueStatement = function () {
|
|
var node = this.createNode();
|
|
this.expectKeyword('continue');
|
|
var label = null;
|
|
if (this.lookahead.type === 3 /* Identifier */ && !this.hasLineTerminator) {
|
|
var id = this.parseVariableIdentifier();
|
|
label = id;
|
|
var key = '$' + id.name;
|
|
if (!Object.prototype.hasOwnProperty.call(this.context.labelSet, key)) {
|
|
this.throwError(messages_1.Messages.UnknownLabel, id.name);
|
|
}
|
|
}
|
|
this.consumeSemicolon();
|
|
if (label === null && !this.context.inIteration) {
|
|
this.throwError(messages_1.Messages.IllegalContinue);
|
|
}
|
|
return this.finalize(node, new Node.ContinueStatement(label));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-break-statement
|
|
Parser.prototype.parseBreakStatement = function () {
|
|
var node = this.createNode();
|
|
this.expectKeyword('break');
|
|
var label = null;
|
|
if (this.lookahead.type === 3 /* Identifier */ && !this.hasLineTerminator) {
|
|
var id = this.parseVariableIdentifier();
|
|
var key = '$' + id.name;
|
|
if (!Object.prototype.hasOwnProperty.call(this.context.labelSet, key)) {
|
|
this.throwError(messages_1.Messages.UnknownLabel, id.name);
|
|
}
|
|
label = id;
|
|
}
|
|
this.consumeSemicolon();
|
|
if (label === null && !this.context.inIteration && !this.context.inSwitch) {
|
|
this.throwError(messages_1.Messages.IllegalBreak);
|
|
}
|
|
return this.finalize(node, new Node.BreakStatement(label));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-return-statement
|
|
Parser.prototype.parseReturnStatement = function () {
|
|
if (!this.context.inFunctionBody) {
|
|
this.tolerateError(messages_1.Messages.IllegalReturn);
|
|
}
|
|
var node = this.createNode();
|
|
this.expectKeyword('return');
|
|
var hasArgument = (!this.match(';') && !this.match('}') &&
|
|
!this.hasLineTerminator && this.lookahead.type !== 2 /* EOF */) ||
|
|
this.lookahead.type === 8 /* StringLiteral */ ||
|
|
this.lookahead.type === 10 /* Template */;
|
|
var argument = hasArgument ? this.parseExpression() : null;
|
|
this.consumeSemicolon();
|
|
return this.finalize(node, new Node.ReturnStatement(argument));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-with-statement
|
|
Parser.prototype.parseWithStatement = function () {
|
|
if (this.context.strict) {
|
|
this.tolerateError(messages_1.Messages.StrictModeWith);
|
|
}
|
|
var node = this.createNode();
|
|
var body;
|
|
this.expectKeyword('with');
|
|
this.expect('(');
|
|
var object = this.parseExpression();
|
|
if (!this.match(')') && this.config.tolerant) {
|
|
this.tolerateUnexpectedToken(this.nextToken());
|
|
body = this.finalize(this.createNode(), new Node.EmptyStatement());
|
|
}
|
|
else {
|
|
this.expect(')');
|
|
body = this.parseStatement();
|
|
}
|
|
return this.finalize(node, new Node.WithStatement(object, body));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-switch-statement
|
|
Parser.prototype.parseSwitchCase = function () {
|
|
var node = this.createNode();
|
|
var test;
|
|
if (this.matchKeyword('default')) {
|
|
this.nextToken();
|
|
test = null;
|
|
}
|
|
else {
|
|
this.expectKeyword('case');
|
|
test = this.parseExpression();
|
|
}
|
|
this.expect(':');
|
|
var consequent = [];
|
|
while (true) {
|
|
if (this.match('}') || this.matchKeyword('default') || this.matchKeyword('case')) {
|
|
break;
|
|
}
|
|
consequent.push(this.parseStatementListItem());
|
|
}
|
|
return this.finalize(node, new Node.SwitchCase(test, consequent));
|
|
};
|
|
Parser.prototype.parseSwitchStatement = function () {
|
|
var node = this.createNode();
|
|
this.expectKeyword('switch');
|
|
this.expect('(');
|
|
var discriminant = this.parseExpression();
|
|
this.expect(')');
|
|
var previousInSwitch = this.context.inSwitch;
|
|
this.context.inSwitch = true;
|
|
var cases = [];
|
|
var defaultFound = false;
|
|
this.expect('{');
|
|
while (true) {
|
|
if (this.match('}')) {
|
|
break;
|
|
}
|
|
var clause = this.parseSwitchCase();
|
|
if (clause.test === null) {
|
|
if (defaultFound) {
|
|
this.throwError(messages_1.Messages.MultipleDefaultsInSwitch);
|
|
}
|
|
defaultFound = true;
|
|
}
|
|
cases.push(clause);
|
|
}
|
|
this.expect('}');
|
|
this.context.inSwitch = previousInSwitch;
|
|
return this.finalize(node, new Node.SwitchStatement(discriminant, cases));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-labelled-statements
|
|
Parser.prototype.parseLabelledStatement = function () {
|
|
var node = this.createNode();
|
|
var expr = this.parseExpression();
|
|
var statement;
|
|
if ((expr.type === syntax_1.Syntax.Identifier) && this.match(':')) {
|
|
this.nextToken();
|
|
var id = expr;
|
|
var key = '$' + id.name;
|
|
if (Object.prototype.hasOwnProperty.call(this.context.labelSet, key)) {
|
|
this.throwError(messages_1.Messages.Redeclaration, 'Label', id.name);
|
|
}
|
|
this.context.labelSet[key] = true;
|
|
var body = void 0;
|
|
if (this.matchKeyword('class')) {
|
|
this.tolerateUnexpectedToken(this.lookahead);
|
|
body = this.parseClassDeclaration();
|
|
}
|
|
else if (this.matchKeyword('function')) {
|
|
var token = this.lookahead;
|
|
var declaration = this.parseFunctionDeclaration();
|
|
if (this.context.strict) {
|
|
this.tolerateUnexpectedToken(token, messages_1.Messages.StrictFunction);
|
|
}
|
|
else if (declaration.generator) {
|
|
this.tolerateUnexpectedToken(token, messages_1.Messages.GeneratorInLegacyContext);
|
|
}
|
|
body = declaration;
|
|
}
|
|
else {
|
|
body = this.parseStatement();
|
|
}
|
|
delete this.context.labelSet[key];
|
|
statement = new Node.LabeledStatement(id, body);
|
|
}
|
|
else {
|
|
this.consumeSemicolon();
|
|
statement = new Node.ExpressionStatement(expr);
|
|
}
|
|
return this.finalize(node, statement);
|
|
};
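        // Labels are tracked in context.labelSet under keys prefixed with '$' so that
        // arbitrary identifier names cannot collide with Object.prototype members;
        // parseBreakStatement and parseContinueStatement consult the same set to reject
        // unknown labels.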
// https://tc39.github.io/ecma262/#sec-throw-statement
|
|
Parser.prototype.parseThrowStatement = function () {
|
|
var node = this.createNode();
|
|
this.expectKeyword('throw');
|
|
if (this.hasLineTerminator) {
|
|
this.throwError(messages_1.Messages.NewlineAfterThrow);
|
|
}
|
|
var argument = this.parseExpression();
|
|
this.consumeSemicolon();
|
|
return this.finalize(node, new Node.ThrowStatement(argument));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-try-statement
|
|
Parser.prototype.parseCatchClause = function () {
|
|
var node = this.createNode();
|
|
this.expectKeyword('catch');
|
|
this.expect('(');
|
|
if (this.match(')')) {
|
|
this.throwUnexpectedToken(this.lookahead);
|
|
}
|
|
var params = [];
|
|
var param = this.parsePattern(params);
|
|
var paramMap = {};
|
|
for (var i = 0; i < params.length; i++) {
|
|
var key = '$' + params[i].value;
|
|
if (Object.prototype.hasOwnProperty.call(paramMap, key)) {
|
|
this.tolerateError(messages_1.Messages.DuplicateBinding, params[i].value);
|
|
}
|
|
paramMap[key] = true;
|
|
}
|
|
if (this.context.strict && param.type === syntax_1.Syntax.Identifier) {
|
|
if (this.scanner.isRestrictedWord(param.name)) {
|
|
this.tolerateError(messages_1.Messages.StrictCatchVariable);
|
|
}
|
|
}
|
|
this.expect(')');
|
|
var body = this.parseBlock();
|
|
return this.finalize(node, new Node.CatchClause(param, body));
|
|
};
|
|
Parser.prototype.parseFinallyClause = function () {
|
|
this.expectKeyword('finally');
|
|
return this.parseBlock();
|
|
};
|
|
Parser.prototype.parseTryStatement = function () {
|
|
var node = this.createNode();
|
|
this.expectKeyword('try');
|
|
var block = this.parseBlock();
|
|
var handler = this.matchKeyword('catch') ? this.parseCatchClause() : null;
|
|
var finalizer = this.matchKeyword('finally') ? this.parseFinallyClause() : null;
|
|
if (!handler && !finalizer) {
|
|
this.throwError(messages_1.Messages.NoCatchOrFinally);
|
|
}
|
|
return this.finalize(node, new Node.TryStatement(block, handler, finalizer));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-debugger-statement
|
|
Parser.prototype.parseDebuggerStatement = function () {
|
|
var node = this.createNode();
|
|
this.expectKeyword('debugger');
|
|
this.consumeSemicolon();
|
|
return this.finalize(node, new Node.DebuggerStatement());
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-ecmascript-language-statements-and-declarations
|
|
Parser.prototype.parseStatement = function () {
|
|
var statement;
|
|
switch (this.lookahead.type) {
|
|
case 1 /* BooleanLiteral */:
|
|
case 5 /* NullLiteral */:
|
|
case 6 /* NumericLiteral */:
|
|
case 8 /* StringLiteral */:
|
|
case 10 /* Template */:
|
|
case 9 /* RegularExpression */:
|
|
statement = this.parseExpressionStatement();
|
|
break;
|
|
case 7 /* Punctuator */:
|
|
var value = this.lookahead.value;
|
|
if (value === '{') {
|
|
statement = this.parseBlock();
|
|
}
|
|
else if (value === '(') {
|
|
statement = this.parseExpressionStatement();
|
|
}
|
|
else if (value === ';') {
|
|
statement = this.parseEmptyStatement();
|
|
}
|
|
else {
|
|
statement = this.parseExpressionStatement();
|
|
}
|
|
break;
|
|
case 3 /* Identifier */:
|
|
statement = this.matchAsyncFunction() ? this.parseFunctionDeclaration() : this.parseLabelledStatement();
|
|
break;
|
|
case 4 /* Keyword */:
|
|
switch (this.lookahead.value) {
|
|
case 'break':
|
|
statement = this.parseBreakStatement();
|
|
break;
|
|
case 'continue':
|
|
statement = this.parseContinueStatement();
|
|
break;
|
|
case 'debugger':
|
|
statement = this.parseDebuggerStatement();
|
|
break;
|
|
case 'do':
|
|
statement = this.parseDoWhileStatement();
|
|
break;
|
|
case 'for':
|
|
statement = this.parseForStatement();
|
|
break;
|
|
case 'function':
|
|
statement = this.parseFunctionDeclaration();
|
|
break;
|
|
case 'if':
|
|
statement = this.parseIfStatement();
|
|
break;
|
|
case 'return':
|
|
statement = this.parseReturnStatement();
|
|
break;
|
|
case 'switch':
|
|
statement = this.parseSwitchStatement();
|
|
break;
|
|
case 'throw':
|
|
statement = this.parseThrowStatement();
|
|
break;
|
|
case 'try':
|
|
statement = this.parseTryStatement();
|
|
break;
|
|
case 'var':
|
|
statement = this.parseVariableStatement();
|
|
break;
|
|
case 'while':
|
|
statement = this.parseWhileStatement();
|
|
break;
|
|
case 'with':
|
|
statement = this.parseWithStatement();
|
|
break;
|
|
default:
|
|
statement = this.parseExpressionStatement();
|
|
break;
|
|
}
|
|
break;
|
|
default:
|
|
statement = this.throwUnexpectedToken(this.lookahead);
|
|
}
|
|
return statement;
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-function-definitions
|
|
Parser.prototype.parseFunctionSourceElements = function () {
|
|
var node = this.createNode();
|
|
this.expect('{');
|
|
var body = this.parseDirectivePrologues();
|
|
var previousLabelSet = this.context.labelSet;
|
|
var previousInIteration = this.context.inIteration;
|
|
var previousInSwitch = this.context.inSwitch;
|
|
var previousInFunctionBody = this.context.inFunctionBody;
|
|
this.context.labelSet = {};
|
|
this.context.inIteration = false;
|
|
this.context.inSwitch = false;
|
|
this.context.inFunctionBody = true;
|
|
while (this.lookahead.type !== 2 /* EOF */) {
|
|
if (this.match('}')) {
|
|
break;
|
|
}
|
|
body.push(this.parseStatementListItem());
|
|
}
|
|
this.expect('}');
|
|
this.context.labelSet = previousLabelSet;
|
|
this.context.inIteration = previousInIteration;
|
|
this.context.inSwitch = previousInSwitch;
|
|
this.context.inFunctionBody = previousInFunctionBody;
|
|
return this.finalize(node, new Node.BlockStatement(body));
|
|
};
|
|
Parser.prototype.validateParam = function (options, param, name) {
|
|
var key = '$' + name;
|
|
if (this.context.strict) {
|
|
if (this.scanner.isRestrictedWord(name)) {
|
|
options.stricted = param;
|
|
options.message = messages_1.Messages.StrictParamName;
|
|
}
|
|
if (Object.prototype.hasOwnProperty.call(options.paramSet, key)) {
|
|
options.stricted = param;
|
|
options.message = messages_1.Messages.StrictParamDupe;
|
|
}
|
|
}
|
|
else if (!options.firstRestricted) {
|
|
if (this.scanner.isRestrictedWord(name)) {
|
|
options.firstRestricted = param;
|
|
options.message = messages_1.Messages.StrictParamName;
|
|
}
|
|
else if (this.scanner.isStrictModeReservedWord(name)) {
|
|
options.firstRestricted = param;
|
|
options.message = messages_1.Messages.StrictReservedWord;
|
|
}
|
|
else if (Object.prototype.hasOwnProperty.call(options.paramSet, key)) {
|
|
options.stricted = param;
|
|
options.message = messages_1.Messages.StrictParamDupe;
|
|
}
|
|
}
|
|
/* istanbul ignore next */
|
|
if (typeof Object.defineProperty === 'function') {
|
|
Object.defineProperty(options.paramSet, key, { value: true, enumerable: true, writable: true, configurable: true });
|
|
}
|
|
else {
|
|
options.paramSet[key] = true;
|
|
}
|
|
};
|
|
Parser.prototype.parseRestElement = function (params) {
|
|
var node = this.createNode();
|
|
this.expect('...');
|
|
var arg = this.parsePattern(params);
|
|
if (this.match('=')) {
|
|
this.throwError(messages_1.Messages.DefaultRestParameter);
|
|
}
|
|
if (!this.match(')')) {
|
|
this.throwError(messages_1.Messages.ParameterAfterRestParameter);
|
|
}
|
|
return this.finalize(node, new Node.RestElement(arg));
|
|
};
|
|
Parser.prototype.parseFormalParameter = function (options) {
|
|
var params = [];
|
|
var param = this.match('...') ? this.parseRestElement(params) : this.parsePatternWithDefault(params);
|
|
for (var i = 0; i < params.length; i++) {
|
|
this.validateParam(options, params[i], params[i].value);
|
|
}
|
|
options.simple = options.simple && (param instanceof Node.Identifier);
|
|
options.params.push(param);
|
|
};
|
|
Parser.prototype.parseFormalParameters = function (firstRestricted) {
|
|
var options;
|
|
options = {
|
|
simple: true,
|
|
params: [],
|
|
firstRestricted: firstRestricted
|
|
};
|
|
this.expect('(');
|
|
if (!this.match(')')) {
|
|
options.paramSet = {};
|
|
while (this.lookahead.type !== 2 /* EOF */) {
|
|
this.parseFormalParameter(options);
|
|
if (this.match(')')) {
|
|
break;
|
|
}
|
|
this.expect(',');
|
|
if (this.match(')')) {
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
this.expect(')');
|
|
return {
|
|
simple: options.simple,
|
|
params: options.params,
|
|
stricted: options.stricted,
|
|
firstRestricted: options.firstRestricted,
|
|
message: options.message
|
|
};
|
|
};
|
|
Parser.prototype.matchAsyncFunction = function () {
|
|
var match = this.matchContextualKeyword('async');
|
|
if (match) {
|
|
var state = this.scanner.saveState();
|
|
this.scanner.scanComments();
|
|
var next = this.scanner.lex();
|
|
this.scanner.restoreState(state);
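// 'async' starts an async function only when the 'function' keyword follows it on the same line; the lookahead check below rejects an intervening line terminator.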
|
|
match = (state.lineNumber === next.lineNumber) && (next.type === 4 /* Keyword */) && (next.value === 'function');
|
|
}
|
|
return match;
|
|
};
|
|
Parser.prototype.parseFunctionDeclaration = function (identifierIsOptional) {
|
|
var node = this.createNode();
|
|
var isAsync = this.matchContextualKeyword('async');
|
|
if (isAsync) {
|
|
this.nextToken();
|
|
}
|
|
this.expectKeyword('function');
|
|
var isGenerator = isAsync ? false : this.match('*');
|
|
if (isGenerator) {
|
|
this.nextToken();
|
|
}
|
|
var message;
|
|
var id = null;
|
|
var firstRestricted = null;
|
|
if (!identifierIsOptional || !this.match('(')) {
|
|
var token = this.lookahead;
|
|
id = this.parseVariableIdentifier();
|
|
if (this.context.strict) {
|
|
if (this.scanner.isRestrictedWord(token.value)) {
|
|
this.tolerateUnexpectedToken(token, messages_1.Messages.StrictFunctionName);
|
|
}
|
|
}
|
|
else {
|
|
if (this.scanner.isRestrictedWord(token.value)) {
|
|
firstRestricted = token;
|
|
message = messages_1.Messages.StrictFunctionName;
|
|
}
|
|
else if (this.scanner.isStrictModeReservedWord(token.value)) {
|
|
firstRestricted = token;
|
|
message = messages_1.Messages.StrictReservedWord;
|
|
}
|
|
}
|
|
}
|
|
var previousAllowAwait = this.context.await;
|
|
var previousAllowYield = this.context.allowYield;
|
|
this.context.await = isAsync;
|
|
this.context.allowYield = !isGenerator;
|
|
var formalParameters = this.parseFormalParameters(firstRestricted);
|
|
var params = formalParameters.params;
|
|
var stricted = formalParameters.stricted;
|
|
firstRestricted = formalParameters.firstRestricted;
|
|
if (formalParameters.message) {
|
|
message = formalParameters.message;
|
|
}
|
|
var previousStrict = this.context.strict;
|
|
var previousAllowStrictDirective = this.context.allowStrictDirective;
|
|
this.context.allowStrictDirective = formalParameters.simple;
|
|
var body = this.parseFunctionSourceElements();
|
|
if (this.context.strict && firstRestricted) {
|
|
this.throwUnexpectedToken(firstRestricted, message);
|
|
}
|
|
if (this.context.strict && stricted) {
|
|
this.tolerateUnexpectedToken(stricted, message);
|
|
}
|
|
this.context.strict = previousStrict;
|
|
this.context.allowStrictDirective = previousAllowStrictDirective;
|
|
this.context.await = previousAllowAwait;
|
|
this.context.allowYield = previousAllowYield;
|
|
return isAsync ? this.finalize(node, new Node.AsyncFunctionDeclaration(id, params, body)) :
|
|
this.finalize(node, new Node.FunctionDeclaration(id, params, body, isGenerator));
|
|
};
|
|
Parser.prototype.parseFunctionExpression = function () {
|
|
var node = this.createNode();
|
|
var isAsync = this.matchContextualKeyword('async');
|
|
if (isAsync) {
|
|
this.nextToken();
|
|
}
|
|
this.expectKeyword('function');
|
|
var isGenerator = isAsync ? false : this.match('*');
|
|
if (isGenerator) {
|
|
this.nextToken();
|
|
}
|
|
var message;
|
|
var id = null;
|
|
var firstRestricted;
|
|
var previousAllowAwait = this.context.await;
|
|
var previousAllowYield = this.context.allowYield;
|
|
this.context.await = isAsync;
|
|
this.context.allowYield = !isGenerator;
|
|
if (!this.match('(')) {
|
|
var token = this.lookahead;
|
|
id = (!this.context.strict && !isGenerator && this.matchKeyword('yield')) ? this.parseIdentifierName() : this.parseVariableIdentifier();
|
|
if (this.context.strict) {
|
|
if (this.scanner.isRestrictedWord(token.value)) {
|
|
this.tolerateUnexpectedToken(token, messages_1.Messages.StrictFunctionName);
|
|
}
|
|
}
|
|
else {
|
|
if (this.scanner.isRestrictedWord(token.value)) {
|
|
firstRestricted = token;
|
|
message = messages_1.Messages.StrictFunctionName;
|
|
}
|
|
else if (this.scanner.isStrictModeReservedWord(token.value)) {
|
|
firstRestricted = token;
|
|
message = messages_1.Messages.StrictReservedWord;
|
|
}
|
|
}
|
|
}
|
|
var formalParameters = this.parseFormalParameters(firstRestricted);
|
|
var params = formalParameters.params;
|
|
var stricted = formalParameters.stricted;
|
|
firstRestricted = formalParameters.firstRestricted;
|
|
if (formalParameters.message) {
|
|
message = formalParameters.message;
|
|
}
|
|
var previousStrict = this.context.strict;
|
|
var previousAllowStrictDirective = this.context.allowStrictDirective;
|
|
this.context.allowStrictDirective = formalParameters.simple;
|
|
var body = this.parseFunctionSourceElements();
|
|
if (this.context.strict && firstRestricted) {
|
|
this.throwUnexpectedToken(firstRestricted, message);
|
|
}
|
|
if (this.context.strict && stricted) {
|
|
this.tolerateUnexpectedToken(stricted, message);
|
|
}
|
|
this.context.strict = previousStrict;
|
|
this.context.allowStrictDirective = previousAllowStrictDirective;
|
|
this.context.await = previousAllowAwait;
|
|
this.context.allowYield = previousAllowYield;
|
|
return isAsync ? this.finalize(node, new Node.AsyncFunctionExpression(id, params, body)) :
|
|
this.finalize(node, new Node.FunctionExpression(id, params, body, isGenerator));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-directive-prologues-and-the-use-strict-directive
|
|
Parser.prototype.parseDirective = function () {
|
|
var token = this.lookahead;
|
|
var node = this.createNode();
|
|
var expr = this.parseExpression();
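// A directive keeps its raw source text (with the quotes stripped) so that escape sequences are not interpreted when matching e.g. 'use strict'.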
|
|
var directive = (expr.type === syntax_1.Syntax.Literal) ? this.getTokenRaw(token).slice(1, -1) : null;
|
|
this.consumeSemicolon();
|
|
return this.finalize(node, directive ? new Node.Directive(expr, directive) : new Node.ExpressionStatement(expr));
|
|
};
|
|
Parser.prototype.parseDirectivePrologues = function () {
|
|
var firstRestricted = null;
|
|
var body = [];
|
|
while (true) {
|
|
var token = this.lookahead;
|
|
if (token.type !== 8 /* StringLiteral */) {
|
|
break;
|
|
}
|
|
var statement = this.parseDirective();
|
|
body.push(statement);
|
|
var directive = statement.directive;
|
|
if (typeof directive !== 'string') {
|
|
break;
|
|
}
|
|
if (directive === 'use strict') {
|
|
this.context.strict = true;
|
|
if (firstRestricted) {
|
|
this.tolerateUnexpectedToken(firstRestricted, messages_1.Messages.StrictOctalLiteral);
|
|
}
|
|
if (!this.context.allowStrictDirective) {
|
|
this.tolerateUnexpectedToken(token, messages_1.Messages.IllegalLanguageModeDirective);
|
|
}
|
|
}
|
|
else {
|
|
if (!firstRestricted && token.octal) {
|
|
firstRestricted = token;
|
|
}
|
|
}
|
|
}
|
|
return body;
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-method-definitions
|
|
Parser.prototype.qualifiedPropertyName = function (token) {
|
|
switch (token.type) {
|
|
case 3 /* Identifier */:
|
|
case 8 /* StringLiteral */:
|
|
case 1 /* BooleanLiteral */:
|
|
case 5 /* NullLiteral */:
|
|
case 6 /* NumericLiteral */:
|
|
case 4 /* Keyword */:
|
|
return true;
|
|
case 7 /* Punctuator */:
|
|
return token.value === '[';
|
|
default:
|
|
break;
|
|
}
|
|
return false;
|
|
};
|
|
Parser.prototype.parseGetterMethod = function () {
|
|
var node = this.createNode();
|
|
var isGenerator = false;
|
|
var previousAllowYield = this.context.allowYield;
|
|
this.context.allowYield = !isGenerator;
|
|
var formalParameters = this.parseFormalParameters();
|
|
if (formalParameters.params.length > 0) {
|
|
this.tolerateError(messages_1.Messages.BadGetterArity);
|
|
}
|
|
var method = this.parsePropertyMethod(formalParameters);
|
|
this.context.allowYield = previousAllowYield;
|
|
return this.finalize(node, new Node.FunctionExpression(null, formalParameters.params, method, isGenerator));
|
|
};
|
|
Parser.prototype.parseSetterMethod = function () {
|
|
var node = this.createNode();
|
|
var isGenerator = false;
|
|
var previousAllowYield = this.context.allowYield;
|
|
this.context.allowYield = !isGenerator;
|
|
var formalParameters = this.parseFormalParameters();
|
|
if (formalParameters.params.length !== 1) {
|
|
this.tolerateError(messages_1.Messages.BadSetterArity);
|
|
}
|
|
else if (formalParameters.params[0] instanceof Node.RestElement) {
|
|
this.tolerateError(messages_1.Messages.BadSetterRestParameter);
|
|
}
|
|
var method = this.parsePropertyMethod(formalParameters);
|
|
this.context.allowYield = previousAllowYield;
|
|
return this.finalize(node, new Node.FunctionExpression(null, formalParameters.params, method, isGenerator));
|
|
};
|
|
Parser.prototype.parseGeneratorMethod = function () {
|
|
var node = this.createNode();
|
|
var isGenerator = true;
|
|
var previousAllowYield = this.context.allowYield;
|
|
this.context.allowYield = true;
|
|
var params = this.parseFormalParameters();
|
|
this.context.allowYield = false;
|
|
var method = this.parsePropertyMethod(params);
|
|
this.context.allowYield = previousAllowYield;
|
|
return this.finalize(node, new Node.FunctionExpression(null, params.params, method, isGenerator));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-generator-function-definitions
|
|
Parser.prototype.isStartOfExpression = function () {
|
|
var start = true;
|
|
var value = this.lookahead.value;
|
|
switch (this.lookahead.type) {
|
|
case 7 /* Punctuator */:
|
|
start = (value === '[') || (value === '(') || (value === '{') ||
|
|
(value === '+') || (value === '-') ||
|
|
(value === '!') || (value === '~') ||
|
|
(value === '++') || (value === '--') ||
|
|
(value === '/') || (value === '/='); // regular expression literal
|
|
break;
|
|
case 4 /* Keyword */:
|
|
start = (value === 'class') || (value === 'delete') ||
|
|
(value === 'function') || (value === 'let') || (value === 'new') ||
|
|
(value === 'super') || (value === 'this') || (value === 'typeof') ||
|
|
(value === 'void') || (value === 'yield');
|
|
break;
|
|
default:
|
|
break;
|
|
}
|
|
return start;
|
|
};
|
|
Parser.prototype.parseYieldExpression = function () {
|
|
var node = this.createNode();
|
|
this.expectKeyword('yield');
|
|
var argument = null;
|
|
var delegate = false;
|
|
if (!this.hasLineTerminator) {
|
|
var previousAllowYield = this.context.allowYield;
|
|
this.context.allowYield = false;
|
|
delegate = this.match('*');
|
|
if (delegate) {
|
|
this.nextToken();
|
|
argument = this.parseAssignmentExpression();
|
|
}
|
|
else if (this.isStartOfExpression()) {
|
|
argument = this.parseAssignmentExpression();
|
|
}
|
|
this.context.allowYield = previousAllowYield;
|
|
}
|
|
return this.finalize(node, new Node.YieldExpression(argument, delegate));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-class-definitions
|
|
Parser.prototype.parseClassElement = function (hasConstructor) {
|
|
var token = this.lookahead;
|
|
var node = this.createNode();
|
|
var kind = '';
|
|
var key = null;
|
|
var value = null;
|
|
var computed = false;
|
|
var method = false;
|
|
var isStatic = false;
|
|
var isAsync = false;
|
|
if (this.match('*')) {
|
|
this.nextToken();
|
|
}
|
|
else {
|
|
computed = this.match('[');
|
|
key = this.parseObjectPropertyKey();
|
|
var id = key;
|
|
if (id.name === 'static' && (this.qualifiedPropertyName(this.lookahead) || this.match('*'))) {
|
|
token = this.lookahead;
|
|
isStatic = true;
|
|
computed = this.match('[');
|
|
if (this.match('*')) {
|
|
this.nextToken();
|
|
}
|
|
else {
|
|
key = this.parseObjectPropertyKey();
|
|
}
|
|
}
|
|
if ((token.type === 3 /* Identifier */) && !this.hasLineTerminator && (token.value === 'async')) {
|
|
var punctuator = this.lookahead.value;
|
|
if (punctuator !== ':' && punctuator !== '(' && punctuator !== '*') {
|
|
isAsync = true;
|
|
token = this.lookahead;
|
|
key = this.parseObjectPropertyKey();
|
|
if (token.type === 3 /* Identifier */ && token.value === 'constructor') {
|
|
this.tolerateUnexpectedToken(token, messages_1.Messages.ConstructorIsAsync);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
var lookaheadPropertyKey = this.qualifiedPropertyName(this.lookahead);
|
|
if (token.type === 3 /* Identifier */) {
|
|
if (token.value === 'get' && lookaheadPropertyKey) {
|
|
kind = 'get';
|
|
computed = this.match('[');
|
|
key = this.parseObjectPropertyKey();
|
|
this.context.allowYield = false;
|
|
value = this.parseGetterMethod();
|
|
}
|
|
else if (token.value === 'set' && lookaheadPropertyKey) {
|
|
kind = 'set';
|
|
computed = this.match('[');
|
|
key = this.parseObjectPropertyKey();
|
|
value = this.parseSetterMethod();
|
|
}
|
|
}
|
|
else if (token.type === 7 /* Punctuator */ && token.value === '*' && lookaheadPropertyKey) {
|
|
kind = 'init';
|
|
computed = this.match('[');
|
|
key = this.parseObjectPropertyKey();
|
|
value = this.parseGeneratorMethod();
|
|
method = true;
|
|
}
|
|
if (!kind && key && this.match('(')) {
|
|
kind = 'init';
|
|
value = isAsync ? this.parsePropertyMethodAsyncFunction() : this.parsePropertyMethodFunction();
|
|
method = true;
|
|
}
|
|
if (!kind) {
|
|
this.throwUnexpectedToken(this.lookahead);
|
|
}
|
|
if (kind === 'init') {
|
|
kind = 'method';
|
|
}
|
|
if (!computed) {
|
|
if (isStatic && this.isPropertyKey(key, 'prototype')) {
|
|
this.throwUnexpectedToken(token, messages_1.Messages.StaticPrototype);
|
|
}
|
|
if (!isStatic && this.isPropertyKey(key, 'constructor')) {
|
|
if (kind !== 'method' || !method || (value && value.generator)) {
|
|
this.throwUnexpectedToken(token, messages_1.Messages.ConstructorSpecialMethod);
|
|
}
|
|
if (hasConstructor.value) {
|
|
this.throwUnexpectedToken(token, messages_1.Messages.DuplicateConstructor);
|
|
}
|
|
else {
|
|
hasConstructor.value = true;
|
|
}
|
|
kind = 'constructor';
|
|
}
|
|
}
|
|
return this.finalize(node, new Node.MethodDefinition(key, computed, value, kind, isStatic));
|
|
};
|
|
Parser.prototype.parseClassElementList = function () {
|
|
var body = [];
|
|
var hasConstructor = { value: false };
|
|
this.expect('{');
|
|
while (!this.match('}')) {
|
|
if (this.match(';')) {
|
|
this.nextToken();
|
|
}
|
|
else {
|
|
body.push(this.parseClassElement(hasConstructor));
|
|
}
|
|
}
|
|
this.expect('}');
|
|
return body;
|
|
};
|
|
Parser.prototype.parseClassBody = function () {
|
|
var node = this.createNode();
|
|
var elementList = this.parseClassElementList();
|
|
return this.finalize(node, new Node.ClassBody(elementList));
|
|
};
|
|
Parser.prototype.parseClassDeclaration = function (identifierIsOptional) {
|
|
var node = this.createNode();
|
|
var previousStrict = this.context.strict;
|
|
this.context.strict = true;
|
|
this.expectKeyword('class');
|
|
var id = (identifierIsOptional && (this.lookahead.type !== 3 /* Identifier */)) ? null : this.parseVariableIdentifier();
|
|
var superClass = null;
|
|
if (this.matchKeyword('extends')) {
|
|
this.nextToken();
|
|
superClass = this.isolateCoverGrammar(this.parseLeftHandSideExpressionAllowCall);
|
|
}
|
|
var classBody = this.parseClassBody();
|
|
this.context.strict = previousStrict;
|
|
return this.finalize(node, new Node.ClassDeclaration(id, superClass, classBody));
|
|
};
|
|
Parser.prototype.parseClassExpression = function () {
|
|
var node = this.createNode();
|
|
var previousStrict = this.context.strict;
|
|
this.context.strict = true;
|
|
this.expectKeyword('class');
|
|
var id = (this.lookahead.type === 3 /* Identifier */) ? this.parseVariableIdentifier() : null;
|
|
var superClass = null;
|
|
if (this.matchKeyword('extends')) {
|
|
this.nextToken();
|
|
superClass = this.isolateCoverGrammar(this.parseLeftHandSideExpressionAllowCall);
|
|
}
|
|
var classBody = this.parseClassBody();
|
|
this.context.strict = previousStrict;
|
|
return this.finalize(node, new Node.ClassExpression(id, superClass, classBody));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-scripts
|
|
// https://tc39.github.io/ecma262/#sec-modules
|
|
Parser.prototype.parseModule = function () {
|
|
this.context.strict = true;
|
|
this.context.isModule = true;
|
|
this.scanner.isModule = true;
|
|
var node = this.createNode();
|
|
var body = this.parseDirectivePrologues();
|
|
while (this.lookahead.type !== 2 /* EOF */) {
|
|
body.push(this.parseStatementListItem());
|
|
}
|
|
return this.finalize(node, new Node.Module(body));
|
|
};
|
|
Parser.prototype.parseScript = function () {
|
|
var node = this.createNode();
|
|
var body = this.parseDirectivePrologues();
|
|
while (this.lookahead.type !== 2 /* EOF */) {
|
|
body.push(this.parseStatementListItem());
|
|
}
|
|
return this.finalize(node, new Node.Script(body));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-imports
|
|
Parser.prototype.parseModuleSpecifier = function () {
|
|
var node = this.createNode();
|
|
if (this.lookahead.type !== 8 /* StringLiteral */) {
|
|
this.throwError(messages_1.Messages.InvalidModuleSpecifier);
|
|
}
|
|
var token = this.nextToken();
|
|
var raw = this.getTokenRaw(token);
|
|
return this.finalize(node, new Node.Literal(token.value, raw));
|
|
};
|
|
// import {<foo as bar>} ...;
|
|
Parser.prototype.parseImportSpecifier = function () {
|
|
var node = this.createNode();
|
|
var imported;
|
|
var local;
|
|
if (this.lookahead.type === 3 /* Identifier */) {
|
|
imported = this.parseVariableIdentifier();
|
|
local = imported;
|
|
if (this.matchContextualKeyword('as')) {
|
|
this.nextToken();
|
|
local = this.parseVariableIdentifier();
|
|
}
|
|
}
|
|
else {
|
|
imported = this.parseIdentifierName();
|
|
local = imported;
|
|
if (this.matchContextualKeyword('as')) {
|
|
this.nextToken();
|
|
local = this.parseVariableIdentifier();
|
|
}
|
|
else {
|
|
this.throwUnexpectedToken(this.nextToken());
|
|
}
|
|
}
|
|
return this.finalize(node, new Node.ImportSpecifier(local, imported));
|
|
};
|
|
// {foo, bar as bas}
|
|
Parser.prototype.parseNamedImports = function () {
|
|
this.expect('{');
|
|
var specifiers = [];
|
|
while (!this.match('}')) {
|
|
specifiers.push(this.parseImportSpecifier());
|
|
if (!this.match('}')) {
|
|
this.expect(',');
|
|
}
|
|
}
|
|
this.expect('}');
|
|
return specifiers;
|
|
};
|
|
// import <foo> ...;
|
|
Parser.prototype.parseImportDefaultSpecifier = function () {
|
|
var node = this.createNode();
|
|
var local = this.parseIdentifierName();
|
|
return this.finalize(node, new Node.ImportDefaultSpecifier(local));
|
|
};
|
|
// import <* as foo> ...;
|
|
Parser.prototype.parseImportNamespaceSpecifier = function () {
|
|
var node = this.createNode();
|
|
this.expect('*');
|
|
if (!this.matchContextualKeyword('as')) {
|
|
this.throwError(messages_1.Messages.NoAsAfterImportNamespace);
|
|
}
|
|
this.nextToken();
|
|
var local = this.parseIdentifierName();
|
|
return this.finalize(node, new Node.ImportNamespaceSpecifier(local));
|
|
};
|
|
Parser.prototype.parseImportDeclaration = function () {
|
|
if (this.context.inFunctionBody) {
|
|
this.throwError(messages_1.Messages.IllegalImportDeclaration);
|
|
}
|
|
var node = this.createNode();
|
|
this.expectKeyword('import');
|
|
var src;
|
|
var specifiers = [];
|
|
if (this.lookahead.type === 8 /* StringLiteral */) {
|
|
// import 'foo';
|
|
src = this.parseModuleSpecifier();
|
|
}
|
|
else {
|
|
if (this.match('{')) {
|
|
// import {bar}
|
|
specifiers = specifiers.concat(this.parseNamedImports());
|
|
}
|
|
else if (this.match('*')) {
|
|
// import * as foo
|
|
specifiers.push(this.parseImportNamespaceSpecifier());
|
|
}
|
|
else if (this.isIdentifierName(this.lookahead) && !this.matchKeyword('default')) {
|
|
// import foo
|
|
specifiers.push(this.parseImportDefaultSpecifier());
|
|
if (this.match(',')) {
|
|
this.nextToken();
|
|
if (this.match('*')) {
|
|
// import foo, * as foo
|
|
specifiers.push(this.parseImportNamespaceSpecifier());
|
|
}
|
|
else if (this.match('{')) {
|
|
// import foo, {bar}
|
|
specifiers = specifiers.concat(this.parseNamedImports());
|
|
}
|
|
else {
|
|
this.throwUnexpectedToken(this.lookahead);
|
|
}
|
|
}
|
|
}
|
|
else {
|
|
this.throwUnexpectedToken(this.nextToken());
|
|
}
|
|
if (!this.matchContextualKeyword('from')) {
|
|
var message = this.lookahead.value ? messages_1.Messages.UnexpectedToken : messages_1.Messages.MissingFromClause;
|
|
this.throwError(message, this.lookahead.value);
|
|
}
|
|
this.nextToken();
|
|
src = this.parseModuleSpecifier();
|
|
}
|
|
this.consumeSemicolon();
|
|
return this.finalize(node, new Node.ImportDeclaration(specifiers, src));
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-exports
|
|
Parser.prototype.parseExportSpecifier = function () {
|
|
var node = this.createNode();
|
|
var local = this.parseIdentifierName();
|
|
var exported = local;
|
|
if (this.matchContextualKeyword('as')) {
|
|
this.nextToken();
|
|
exported = this.parseIdentifierName();
|
|
}
|
|
return this.finalize(node, new Node.ExportSpecifier(local, exported));
|
|
};
|
|
Parser.prototype.parseExportDeclaration = function () {
|
|
if (this.context.inFunctionBody) {
|
|
this.throwError(messages_1.Messages.IllegalExportDeclaration);
|
|
}
|
|
var node = this.createNode();
|
|
this.expectKeyword('export');
|
|
var exportDeclaration;
|
|
if (this.matchKeyword('default')) {
|
|
// export default ...
|
|
this.nextToken();
|
|
if (this.matchKeyword('function')) {
|
|
// export default function foo () {}
|
|
// export default function () {}
|
|
var declaration = this.parseFunctionDeclaration(true);
|
|
exportDeclaration = this.finalize(node, new Node.ExportDefaultDeclaration(declaration));
|
|
}
|
|
else if (this.matchKeyword('class')) {
|
|
// export default class foo {}
|
|
var declaration = this.parseClassDeclaration(true);
|
|
exportDeclaration = this.finalize(node, new Node.ExportDefaultDeclaration(declaration));
|
|
}
|
|
else if (this.matchContextualKeyword('async')) {
|
|
// export default async function f () {}
|
|
// export default async function () {}
|
|
// export default async x => x
|
|
var declaration = this.matchAsyncFunction() ? this.parseFunctionDeclaration(true) : this.parseAssignmentExpression();
|
|
exportDeclaration = this.finalize(node, new Node.ExportDefaultDeclaration(declaration));
|
|
}
|
|
else {
|
|
if (this.matchContextualKeyword('from')) {
|
|
this.throwError(messages_1.Messages.UnexpectedToken, this.lookahead.value);
|
|
}
|
|
// export default {};
|
|
// export default [];
|
|
// export default (1 + 2);
|
|
var declaration = this.match('{') ? this.parseObjectInitializer() :
|
|
this.match('[') ? this.parseArrayInitializer() : this.parseAssignmentExpression();
|
|
this.consumeSemicolon();
|
|
exportDeclaration = this.finalize(node, new Node.ExportDefaultDeclaration(declaration));
|
|
}
|
|
}
|
|
else if (this.match('*')) {
|
|
// export * from 'foo';
|
|
this.nextToken();
|
|
if (!this.matchContextualKeyword('from')) {
|
|
var message = this.lookahead.value ? messages_1.Messages.UnexpectedToken : messages_1.Messages.MissingFromClause;
|
|
this.throwError(message, this.lookahead.value);
|
|
}
|
|
this.nextToken();
|
|
var src = this.parseModuleSpecifier();
|
|
this.consumeSemicolon();
|
|
exportDeclaration = this.finalize(node, new Node.ExportAllDeclaration(src));
|
|
}
|
|
else if (this.lookahead.type === 4 /* Keyword */) {
|
|
// export var f = 1;
|
|
var declaration = void 0;
|
|
switch (this.lookahead.value) {
|
|
case 'let':
|
|
case 'const':
|
|
declaration = this.parseLexicalDeclaration({ inFor: false });
|
|
break;
|
|
case 'var':
|
|
case 'class':
|
|
case 'function':
|
|
declaration = this.parseStatementListItem();
|
|
break;
|
|
default:
|
|
this.throwUnexpectedToken(this.lookahead);
|
|
}
|
|
exportDeclaration = this.finalize(node, new Node.ExportNamedDeclaration(declaration, [], null));
|
|
}
|
|
else if (this.matchAsyncFunction()) {
|
|
var declaration = this.parseFunctionDeclaration();
|
|
exportDeclaration = this.finalize(node, new Node.ExportNamedDeclaration(declaration, [], null));
|
|
}
|
|
else {
|
|
var specifiers = [];
|
|
var source = null;
|
|
var isExportFromIdentifier = false;
|
|
this.expect('{');
|
|
while (!this.match('}')) {
|
|
isExportFromIdentifier = isExportFromIdentifier || this.matchKeyword('default');
|
|
specifiers.push(this.parseExportSpecifier());
|
|
if (!this.match('}')) {
|
|
this.expect(',');
|
|
}
|
|
}
|
|
this.expect('}');
|
|
if (this.matchContextualKeyword('from')) {
|
|
// export {default} from 'foo';
|
|
// export {foo} from 'foo';
|
|
this.nextToken();
|
|
source = this.parseModuleSpecifier();
|
|
this.consumeSemicolon();
|
|
}
|
|
else if (isExportFromIdentifier) {
|
|
// export {default}; // missing fromClause
|
|
var message = this.lookahead.value ? messages_1.Messages.UnexpectedToken : messages_1.Messages.MissingFromClause;
|
|
this.throwError(message, this.lookahead.value);
|
|
}
|
|
else {
|
|
// export {foo};
|
|
this.consumeSemicolon();
|
|
}
|
|
exportDeclaration = this.finalize(node, new Node.ExportNamedDeclaration(null, specifiers, source));
|
|
}
|
|
return exportDeclaration;
|
|
};
|
|
return Parser;
|
|
}());
|
|
exports.Parser = Parser;
|
|
|
|
|
|
/***/ },
|
|
/* 9 */
|
|
/***/ function(module, exports) {
|
|
|
|
"use strict";
|
|
// Ensure the condition is true, otherwise throw an error.
|
|
// This is only to have a better contract semantic, i.e. another safety net
|
|
// to catch a logic error. The condition should hold in the normal case.
|
|
// Do NOT use this to enforce a certain condition on any user input.
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
function assert(condition, message) {
|
|
/* istanbul ignore if */
|
|
if (!condition) {
|
|
throw new Error('ASSERT: ' + message);
|
|
}
|
|
}
|
|
exports.assert = assert;
|
|
|
|
|
|
/***/ },
|
|
/* 10 */
|
|
/***/ function(module, exports) {
|
|
|
|
"use strict";
|
|
/* tslint:disable:max-classes-per-file */
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
var ErrorHandler = (function () {
|
|
function ErrorHandler() {
|
|
this.errors = [];
|
|
this.tolerant = false;
|
|
}
|
|
ErrorHandler.prototype.recordError = function (error) {
|
|
this.errors.push(error);
|
|
};
|
|
ErrorHandler.prototype.tolerate = function (error) {
|
|
if (this.tolerant) {
|
|
this.recordError(error);
|
|
}
|
|
else {
|
|
throw error;
|
|
}
|
|
};
|
|
ErrorHandler.prototype.constructError = function (msg, column) {
|
|
var error = new Error(msg);
|
|
try {
|
|
throw error;
|
|
}
|
|
catch (base) {
|
|
/* istanbul ignore else */
|
|
if (Object.create && Object.defineProperty) {
|
|
error = Object.create(base);
|
|
Object.defineProperty(error, 'column', { value: column });
|
|
}
|
|
}
|
|
/* istanbul ignore next */
|
|
return error;
|
|
};
|
|
ErrorHandler.prototype.createError = function (index, line, col, description) {
|
|
var msg = 'Line ' + line + ': ' + description;
|
|
var error = this.constructError(msg, col);
|
|
error.index = index;
|
|
error.lineNumber = line;
|
|
error.description = description;
|
|
return error;
|
|
};
|
|
ErrorHandler.prototype.throwError = function (index, line, col, description) {
|
|
throw this.createError(index, line, col, description);
|
|
};
|
|
ErrorHandler.prototype.tolerateError = function (index, line, col, description) {
|
|
var error = this.createError(index, line, col, description);
|
|
if (this.tolerant) {
|
|
this.recordError(error);
|
|
}
|
|
else {
|
|
throw error;
|
|
}
|
|
};
|
|
return ErrorHandler;
|
|
}());
|
|
exports.ErrorHandler = ErrorHandler;
|
|
|
|
|
|
/***/ },
|
|
/* 11 */
|
|
/***/ function(module, exports) {
|
|
|
|
"use strict";
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
// Error messages should be identical to V8.
|
|
exports.Messages = {
|
|
BadGetterArity: 'Getter must not have any formal parameters',
|
|
BadSetterArity: 'Setter must have exactly one formal parameter',
|
|
BadSetterRestParameter: 'Setter function argument must not be a rest parameter',
|
|
ConstructorIsAsync: 'Class constructor may not be an async method',
|
|
ConstructorSpecialMethod: 'Class constructor may not be an accessor',
|
|
DeclarationMissingInitializer: 'Missing initializer in %0 declaration',
|
|
DefaultRestParameter: 'Unexpected token =',
|
|
DuplicateBinding: 'Duplicate binding %0',
|
|
DuplicateConstructor: 'A class may only have one constructor',
|
|
DuplicateProtoProperty: 'Duplicate __proto__ fields are not allowed in object literals',
|
|
ForInOfLoopInitializer: '%0 loop variable declaration may not have an initializer',
|
|
GeneratorInLegacyContext: 'Generator declarations are not allowed in legacy contexts',
|
|
IllegalBreak: 'Illegal break statement',
|
|
IllegalContinue: 'Illegal continue statement',
|
|
IllegalExportDeclaration: 'Unexpected token',
|
|
IllegalImportDeclaration: 'Unexpected token',
|
|
IllegalLanguageModeDirective: 'Illegal \'use strict\' directive in function with non-simple parameter list',
|
|
IllegalReturn: 'Illegal return statement',
|
|
InvalidEscapedReservedWord: 'Keyword must not contain escaped characters',
|
|
InvalidHexEscapeSequence: 'Invalid hexadecimal escape sequence',
|
|
InvalidLHSInAssignment: 'Invalid left-hand side in assignment',
|
|
InvalidLHSInForIn: 'Invalid left-hand side in for-in',
|
|
InvalidLHSInForLoop: 'Invalid left-hand side in for-loop',
|
|
InvalidModuleSpecifier: 'Unexpected token',
|
|
InvalidRegExp: 'Invalid regular expression',
|
|
LetInLexicalBinding: 'let is disallowed as a lexically bound name',
|
|
MissingFromClause: 'Unexpected token',
|
|
MultipleDefaultsInSwitch: 'More than one default clause in switch statement',
|
|
NewlineAfterThrow: 'Illegal newline after throw',
|
|
NoAsAfterImportNamespace: 'Unexpected token',
|
|
NoCatchOrFinally: 'Missing catch or finally after try',
|
|
ParameterAfterRestParameter: 'Rest parameter must be last formal parameter',
|
|
Redeclaration: '%0 \'%1\' has already been declared',
|
|
StaticPrototype: 'Classes may not have static property named prototype',
|
|
StrictCatchVariable: 'Catch variable may not be eval or arguments in strict mode',
|
|
StrictDelete: 'Delete of an unqualified identifier in strict mode.',
|
|
StrictFunction: 'In strict mode code, functions can only be declared at top level or inside a block',
|
|
StrictFunctionName: 'Function name may not be eval or arguments in strict mode',
|
|
StrictLHSAssignment: 'Assignment to eval or arguments is not allowed in strict mode',
|
|
StrictLHSPostfix: 'Postfix increment/decrement may not have eval or arguments operand in strict mode',
|
|
StrictLHSPrefix: 'Prefix increment/decrement may not have eval or arguments operand in strict mode',
|
|
StrictModeWith: 'Strict mode code may not include a with statement',
|
|
StrictOctalLiteral: 'Octal literals are not allowed in strict mode.',
|
|
StrictParamDupe: 'Strict mode function may not have duplicate parameter names',
|
|
StrictParamName: 'Parameter name eval or arguments is not allowed in strict mode',
|
|
StrictReservedWord: 'Use of future reserved word in strict mode',
|
|
StrictVarName: 'Variable name may not be eval or arguments in strict mode',
|
|
TemplateOctalLiteral: 'Octal literals are not allowed in template strings.',
|
|
UnexpectedEOS: 'Unexpected end of input',
|
|
UnexpectedIdentifier: 'Unexpected identifier',
|
|
UnexpectedNumber: 'Unexpected number',
|
|
UnexpectedReserved: 'Unexpected reserved word',
|
|
UnexpectedString: 'Unexpected string',
|
|
UnexpectedTemplate: 'Unexpected quasi %0',
|
|
UnexpectedToken: 'Unexpected token %0',
|
|
UnexpectedTokenIllegal: 'Unexpected token ILLEGAL',
|
|
UnknownLabel: 'Undefined label \'%0\'',
|
|
UnterminatedRegExp: 'Invalid regular expression: missing /'
|
|
};
|
|
|
|
|
|
/***/ },
|
|
/* 12 */
|
|
/***/ function(module, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
var assert_1 = __webpack_require__(9);
|
|
var character_1 = __webpack_require__(4);
|
|
var messages_1 = __webpack_require__(11);
|
|
function hexValue(ch) {
|
|
return '0123456789abcdef'.indexOf(ch.toLowerCase());
|
|
}
|
|
function octalValue(ch) {
|
|
return '01234567'.indexOf(ch);
|
|
}
|
|
var Scanner = (function () {
|
|
function Scanner(code, handler) {
|
|
this.source = code;
|
|
this.errorHandler = handler;
|
|
this.trackComment = false;
|
|
this.isModule = false;
|
|
this.length = code.length;
|
|
this.index = 0;
|
|
this.lineNumber = (code.length > 0) ? 1 : 0;
|
|
this.lineStart = 0;
|
|
this.curlyStack = [];
|
|
}
|
|
Scanner.prototype.saveState = function () {
|
|
return {
|
|
index: this.index,
|
|
lineNumber: this.lineNumber,
|
|
lineStart: this.lineStart
|
|
};
|
|
};
|
|
Scanner.prototype.restoreState = function (state) {
|
|
this.index = state.index;
|
|
this.lineNumber = state.lineNumber;
|
|
this.lineStart = state.lineStart;
|
|
};
|
|
Scanner.prototype.eof = function () {
|
|
return this.index >= this.length;
|
|
};
|
|
Scanner.prototype.throwUnexpectedToken = function (message) {
|
|
if (message === void 0) { message = messages_1.Messages.UnexpectedTokenIllegal; }
|
|
return this.errorHandler.throwError(this.index, this.lineNumber, this.index - this.lineStart + 1, message);
|
|
};
|
|
Scanner.prototype.tolerateUnexpectedToken = function (message) {
|
|
if (message === void 0) { message = messages_1.Messages.UnexpectedTokenIllegal; }
|
|
this.errorHandler.tolerateError(this.index, this.lineNumber, this.index - this.lineStart + 1, message);
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-comments
|
|
Scanner.prototype.skipSingleLineComment = function (offset) {
|
|
var comments = [];
|
|
var start, loc;
|
|
if (this.trackComment) {
|
|
comments = [];
|
|
start = this.index - offset;
|
|
loc = {
|
|
start: {
|
|
line: this.lineNumber,
|
|
column: this.index - this.lineStart - offset
|
|
},
|
|
end: {}
|
|
};
|
|
}
|
|
while (!this.eof()) {
|
|
var ch = this.source.charCodeAt(this.index);
|
|
++this.index;
|
|
if (character_1.Character.isLineTerminator(ch)) {
|
|
if (this.trackComment) {
|
|
loc.end = {
|
|
line: this.lineNumber,
|
|
column: this.index - this.lineStart - 1
|
|
};
|
|
var entry = {
|
|
multiLine: false,
|
|
slice: [start + offset, this.index - 1],
|
|
range: [start, this.index - 1],
|
|
loc: loc
|
|
};
|
|
comments.push(entry);
|
|
}
|
|
if (ch === 13 && this.source.charCodeAt(this.index) === 10) {
|
|
++this.index;
|
|
}
|
|
++this.lineNumber;
|
|
this.lineStart = this.index;
|
|
return comments;
|
|
}
|
|
}
|
|
if (this.trackComment) {
|
|
loc.end = {
|
|
line: this.lineNumber,
|
|
column: this.index - this.lineStart
|
|
};
|
|
var entry = {
|
|
multiLine: false,
|
|
slice: [start + offset, this.index],
|
|
range: [start, this.index],
|
|
loc: loc
|
|
};
|
|
comments.push(entry);
|
|
}
|
|
return comments;
|
|
};
|
|
Scanner.prototype.skipMultiLineComment = function () {
|
|
var comments = [];
|
|
var start, loc;
|
|
if (this.trackComment) {
|
|
comments = [];
|
|
start = this.index - 2;
|
|
loc = {
|
|
start: {
|
|
line: this.lineNumber,
|
|
column: this.index - this.lineStart - 2
|
|
},
|
|
end: {}
|
|
};
|
|
}
|
|
while (!this.eof()) {
|
|
var ch = this.source.charCodeAt(this.index);
|
|
if (character_1.Character.isLineTerminator(ch)) {
|
|
if (ch === 0x0D && this.source.charCodeAt(this.index + 1) === 0x0A) {
|
|
++this.index;
|
|
}
|
|
++this.lineNumber;
|
|
++this.index;
|
|
this.lineStart = this.index;
|
|
}
|
|
else if (ch === 0x2A) {
|
|
// Block comment ends with '*/'.
|
|
if (this.source.charCodeAt(this.index + 1) === 0x2F) {
|
|
this.index += 2;
|
|
if (this.trackComment) {
|
|
loc.end = {
|
|
line: this.lineNumber,
|
|
column: this.index - this.lineStart
|
|
};
|
|
var entry = {
|
|
multiLine: true,
|
|
slice: [start + 2, this.index - 2],
|
|
range: [start, this.index],
|
|
loc: loc
|
|
};
|
|
comments.push(entry);
|
|
}
|
|
return comments;
|
|
}
|
|
++this.index;
|
|
}
|
|
else {
|
|
++this.index;
|
|
}
|
|
}
|
|
// Ran off the end of the file - the whole thing is a comment
|
|
if (this.trackComment) {
|
|
loc.end = {
|
|
line: this.lineNumber,
|
|
column: this.index - this.lineStart
|
|
};
|
|
var entry = {
|
|
multiLine: true,
|
|
slice: [start + 2, this.index],
|
|
range: [start, this.index],
|
|
loc: loc
|
|
};
|
|
comments.push(entry);
|
|
}
|
|
this.tolerateUnexpectedToken();
|
|
return comments;
|
|
};
|
|
Scanner.prototype.scanComments = function () {
|
|
var comments;
|
|
if (this.trackComment) {
|
|
comments = [];
|
|
}
|
|
var start = (this.index === 0);
|
|
while (!this.eof()) {
|
|
var ch = this.source.charCodeAt(this.index);
|
|
if (character_1.Character.isWhiteSpace(ch)) {
|
|
++this.index;
|
|
}
|
|
else if (character_1.Character.isLineTerminator(ch)) {
|
|
++this.index;
|
|
if (ch === 0x0D && this.source.charCodeAt(this.index) === 0x0A) {
|
|
++this.index;
|
|
}
|
|
++this.lineNumber;
|
|
this.lineStart = this.index;
|
|
start = true;
|
|
}
|
|
else if (ch === 0x2F) {
|
|
ch = this.source.charCodeAt(this.index + 1);
|
|
if (ch === 0x2F) {
|
|
this.index += 2;
|
|
var comment = this.skipSingleLineComment(2);
|
|
if (this.trackComment) {
|
|
comments = comments.concat(comment);
|
|
}
|
|
start = true;
|
|
}
|
|
else if (ch === 0x2A) {
|
|
this.index += 2;
|
|
var comment = this.skipMultiLineComment();
|
|
if (this.trackComment) {
|
|
comments = comments.concat(comment);
|
|
}
|
|
}
|
|
else {
|
|
break;
|
|
}
|
|
}
|
|
else if (start && ch === 0x2D) {
|
|
// U+003E is '>'
|
|
if ((this.source.charCodeAt(this.index + 1) === 0x2D) && (this.source.charCodeAt(this.index + 2) === 0x3E)) {
|
|
// '-->' is a single-line comment
|
|
this.index += 3;
|
|
var comment = this.skipSingleLineComment(3);
|
|
if (this.trackComment) {
|
|
comments = comments.concat(comment);
|
|
}
|
|
}
|
|
else {
|
|
break;
|
|
}
|
|
}
|
|
else if (ch === 0x3C && !this.isModule) {
|
|
if (this.source.slice(this.index + 1, this.index + 4) === '!--') {
|
|
this.index += 4; // `<!--`
|
|
var comment = this.skipSingleLineComment(4);
|
|
if (this.trackComment) {
|
|
comments = comments.concat(comment);
|
|
}
|
|
}
|
|
else {
|
|
break;
|
|
}
|
|
}
|
|
else {
|
|
break;
|
|
}
|
|
}
|
|
return comments;
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-future-reserved-words
|
|
Scanner.prototype.isFutureReservedWord = function (id) {
|
|
switch (id) {
|
|
case 'enum':
|
|
case 'export':
|
|
case 'import':
|
|
case 'super':
|
|
return true;
|
|
default:
|
|
return false;
|
|
}
|
|
};
|
|
Scanner.prototype.isStrictModeReservedWord = function (id) {
|
|
switch (id) {
|
|
case 'implements':
|
|
case 'interface':
|
|
case 'package':
|
|
case 'private':
|
|
case 'protected':
|
|
case 'public':
|
|
case 'static':
|
|
case 'yield':
|
|
case 'let':
|
|
return true;
|
|
default:
|
|
return false;
|
|
}
|
|
};
|
|
Scanner.prototype.isRestrictedWord = function (id) {
|
|
return id === 'eval' || id === 'arguments';
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-keywords
|
|
Scanner.prototype.isKeyword = function (id) {
|
|
switch (id.length) {
|
|
case 2:
|
|
return (id === 'if') || (id === 'in') || (id === 'do');
|
|
case 3:
|
|
return (id === 'var') || (id === 'for') || (id === 'new') ||
|
|
(id === 'try') || (id === 'let');
|
|
case 4:
|
|
return (id === 'this') || (id === 'else') || (id === 'case') ||
|
|
(id === 'void') || (id === 'with') || (id === 'enum');
|
|
case 5:
|
|
return (id === 'while') || (id === 'break') || (id === 'catch') ||
|
|
(id === 'throw') || (id === 'const') || (id === 'yield') ||
|
|
(id === 'class') || (id === 'super');
|
|
case 6:
|
|
return (id === 'return') || (id === 'typeof') || (id === 'delete') ||
|
|
(id === 'switch') || (id === 'export') || (id === 'import');
|
|
case 7:
|
|
return (id === 'default') || (id === 'finally') || (id === 'extends');
|
|
case 8:
|
|
return (id === 'function') || (id === 'continue') || (id === 'debugger');
|
|
case 10:
|
|
return (id === 'instanceof');
|
|
default:
|
|
return false;
|
|
}
|
|
};
|
|
Scanner.prototype.codePointAt = function (i) {
|
|
var cp = this.source.charCodeAt(i);
|
|
if (cp >= 0xD800 && cp <= 0xDBFF) {
|
|
var second = this.source.charCodeAt(i + 1);
|
|
if (second >= 0xDC00 && second <= 0xDFFF) {
|
|
var first = cp;
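// Combine the surrogate pair into a single code point, e.g. 0xD83D 0xDE00 -> 0x1F600.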
|
|
cp = (first - 0xD800) * 0x400 + second - 0xDC00 + 0x10000;
|
|
}
|
|
}
|
|
return cp;
|
|
};
|
|
Scanner.prototype.scanHexEscape = function (prefix) {
|
|
var len = (prefix === 'u') ? 4 : 2;
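// '\u' escapes consume four hex digits, '\x' escapes consume two.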
|
|
var code = 0;
|
|
for (var i = 0; i < len; ++i) {
|
|
if (!this.eof() && character_1.Character.isHexDigit(this.source.charCodeAt(this.index))) {
|
|
code = code * 16 + hexValue(this.source[this.index++]);
|
|
}
|
|
else {
|
|
return null;
|
|
}
|
|
}
|
|
return String.fromCharCode(code);
|
|
};
|
|
Scanner.prototype.scanUnicodeCodePointEscape = function () {
|
|
var ch = this.source[this.index];
|
|
var code = 0;
|
|
// At least one hex digit is required.
|
|
if (ch === '}') {
|
|
this.throwUnexpectedToken();
|
|
}
|
|
while (!this.eof()) {
|
|
ch = this.source[this.index++];
|
|
if (!character_1.Character.isHexDigit(ch.charCodeAt(0))) {
|
|
break;
|
|
}
|
|
code = code * 16 + hexValue(ch);
|
|
}
|
|
if (code > 0x10FFFF || ch !== '}') {
|
|
this.throwUnexpectedToken();
|
|
}
|
|
return character_1.Character.fromCodePoint(code);
|
|
};
|
|
Scanner.prototype.getIdentifier = function () {
|
|
var start = this.index++;
|
|
while (!this.eof()) {
|
|
var ch = this.source.charCodeAt(this.index);
|
|
if (ch === 0x5C) {
|
|
// Backslash (U+005C) marks a Unicode escape sequence.
|
|
this.index = start;
|
|
return this.getComplexIdentifier();
|
|
}
|
|
else if (ch >= 0xD800 && ch < 0xDFFF) {
|
|
// Need to handle surrogate pairs.
|
|
this.index = start;
|
|
return this.getComplexIdentifier();
|
|
}
|
|
if (character_1.Character.isIdentifierPart(ch)) {
|
|
++this.index;
|
|
}
|
|
else {
|
|
break;
|
|
}
|
|
}
|
|
return this.source.slice(start, this.index);
|
|
};
|
|
Scanner.prototype.getComplexIdentifier = function () {
|
|
var cp = this.codePointAt(this.index);
|
|
var id = character_1.Character.fromCodePoint(cp);
|
|
this.index += id.length;
|
|
// '\u' (U+005C, U+0075) denotes an escaped character.
|
|
var ch;
|
|
if (cp === 0x5C) {
|
|
if (this.source.charCodeAt(this.index) !== 0x75) {
|
|
this.throwUnexpectedToken();
|
|
}
|
|
++this.index;
|
|
if (this.source[this.index] === '{') {
|
|
++this.index;
|
|
ch = this.scanUnicodeCodePointEscape();
|
|
}
|
|
else {
|
|
ch = this.scanHexEscape('u');
|
|
if (ch === null || ch === '\\' || !character_1.Character.isIdentifierStart(ch.charCodeAt(0))) {
|
|
this.throwUnexpectedToken();
|
|
}
|
|
}
|
|
id = ch;
|
|
}
|
|
while (!this.eof()) {
|
|
cp = this.codePointAt(this.index);
|
|
if (!character_1.Character.isIdentifierPart(cp)) {
|
|
break;
|
|
}
|
|
ch = character_1.Character.fromCodePoint(cp);
|
|
id += ch;
|
|
this.index += ch.length;
|
|
// '\u' (U+005C, U+0075) denotes an escaped character.
|
|
if (cp === 0x5C) {
|
|
id = id.substr(0, id.length - 1);
|
|
if (this.source.charCodeAt(this.index) !== 0x75) {
|
|
this.throwUnexpectedToken();
|
|
}
|
|
++this.index;
|
|
if (this.source[this.index] === '{') {
|
|
++this.index;
|
|
ch = this.scanUnicodeCodePointEscape();
|
|
}
|
|
else {
|
|
ch = this.scanHexEscape('u');
|
|
if (ch === null || ch === '\\' || !character_1.Character.isIdentifierPart(ch.charCodeAt(0))) {
|
|
this.throwUnexpectedToken();
|
|
}
|
|
}
|
|
id += ch;
|
|
}
|
|
}
|
|
return id;
|
|
};
|
|
Scanner.prototype.octalToDecimal = function (ch) {
|
|
// \0 is not octal escape sequence
|
|
var octal = (ch !== '0');
|
|
var code = octalValue(ch);
|
|
if (!this.eof() && character_1.Character.isOctalDigit(this.source.charCodeAt(this.index))) {
|
|
octal = true;
|
|
code = code * 8 + octalValue(this.source[this.index++]);
|
|
// 3 digits are only allowed when string starts
|
|
// with 0, 1, 2, 3
|
|
if ('0123'.indexOf(ch) >= 0 && !this.eof() && character_1.Character.isOctalDigit(this.source.charCodeAt(this.index))) {
|
|
code = code * 8 + octalValue(this.source[this.index++]);
|
|
}
|
|
}
|
|
return {
|
|
code: code,
|
|
octal: octal
|
|
};
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-names-and-keywords
|
|
Scanner.prototype.scanIdentifier = function () {
|
|
var type;
|
|
var start = this.index;
|
|
// Backslash (U+005C) starts an escaped character.
|
|
var id = (this.source.charCodeAt(start) === 0x5C) ? this.getComplexIdentifier() : this.getIdentifier();
|
|
// There is no keyword or literal with only one character.
|
|
// Thus, it must be an identifier.
|
|
if (id.length === 1) {
|
|
type = 3 /* Identifier */;
|
|
}
|
|
else if (this.isKeyword(id)) {
|
|
type = 4 /* Keyword */;
|
|
}
|
|
else if (id === 'null') {
|
|
type = 5 /* NullLiteral */;
|
|
}
|
|
else if (id === 'true' || id === 'false') {
|
|
type = 1 /* BooleanLiteral */;
|
|
}
|
|
else {
|
|
type = 3 /* Identifier */;
|
|
}
|
|
if (type !== 3 /* Identifier */ && (start + id.length !== this.index)) {
|
|
var restore = this.index;
|
|
this.index = start;
|
|
this.tolerateUnexpectedToken(messages_1.Messages.InvalidEscapedReservedWord);
|
|
this.index = restore;
|
|
}
|
|
return {
|
|
type: type,
|
|
value: id,
|
|
lineNumber: this.lineNumber,
|
|
lineStart: this.lineStart,
|
|
start: start,
|
|
end: this.index
|
|
};
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-punctuators
|
|
Scanner.prototype.scanPunctuator = function () {
|
|
var start = this.index;
|
|
// Check for most common single-character punctuators.
|
|
var str = this.source[this.index];
|
|
switch (str) {
|
|
case '(':
|
|
case '{':
|
|
if (str === '{') {
|
|
this.curlyStack.push('{');
|
|
}
|
|
++this.index;
|
|
break;
|
|
case '.':
|
|
++this.index;
|
|
if (this.source[this.index] === '.' && this.source[this.index + 1] === '.') {
|
|
// Spread operator: ...
|
|
this.index += 2;
|
|
str = '...';
|
|
}
|
|
break;
|
|
case '}':
|
|
++this.index;
|
|
this.curlyStack.pop();
|
|
break;
|
|
case ')':
|
|
case ';':
|
|
case ',':
|
|
case '[':
|
|
case ']':
|
|
case ':':
|
|
case '?':
|
|
case '~':
|
|
++this.index;
|
|
break;
|
|
default:
|
|
// 4-character punctuator.
|
|
str = this.source.substr(this.index, 4);
|
|
if (str === '>>>=') {
|
|
this.index += 4;
|
|
}
|
|
else {
|
|
// 3-character punctuators.
|
|
str = str.substr(0, 3);
|
|
if (str === '===' || str === '!==' || str === '>>>' ||
|
|
str === '<<=' || str === '>>=' || str === '**=') {
|
|
this.index += 3;
|
|
}
|
|
else {
|
|
// 2-character punctuators.
|
|
str = str.substr(0, 2);
|
|
if (str === '&&' || str === '||' || str === '==' || str === '!=' ||
|
|
str === '+=' || str === '-=' || str === '*=' || str === '/=' ||
|
|
str === '++' || str === '--' || str === '<<' || str === '>>' ||
|
|
str === '&=' || str === '|=' || str === '^=' || str === '%=' ||
|
|
str === '<=' || str === '>=' || str === '=>' || str === '**') {
|
|
this.index += 2;
|
|
}
|
|
else {
|
|
// 1-character punctuators.
|
|
str = this.source[this.index];
|
|
if ('<>=!+-*%&|^/'.indexOf(str) >= 0) {
|
|
++this.index;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
if (this.index === start) {
|
|
this.throwUnexpectedToken();
|
|
}
|
|
return {
|
|
type: 7 /* Punctuator */,
|
|
value: str,
|
|
lineNumber: this.lineNumber,
|
|
lineStart: this.lineStart,
|
|
start: start,
|
|
end: this.index
|
|
};
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-literals-numeric-literals
|
|
Scanner.prototype.scanHexLiteral = function (start) {
|
|
var num = '';
|
|
while (!this.eof()) {
|
|
if (!character_1.Character.isHexDigit(this.source.charCodeAt(this.index))) {
|
|
break;
|
|
}
|
|
num += this.source[this.index++];
|
|
}
|
|
if (num.length === 0) {
|
|
this.throwUnexpectedToken();
|
|
}
|
|
if (character_1.Character.isIdentifierStart(this.source.charCodeAt(this.index))) {
|
|
this.throwUnexpectedToken();
|
|
}
|
|
return {
|
|
type: 6 /* NumericLiteral */,
|
|
value: parseInt('0x' + num, 16),
|
|
lineNumber: this.lineNumber,
|
|
lineStart: this.lineStart,
|
|
start: start,
|
|
end: this.index
|
|
};
|
|
};
|
|
Scanner.prototype.scanBinaryLiteral = function (start) {
|
|
var num = '';
|
|
var ch;
|
|
while (!this.eof()) {
|
|
ch = this.source[this.index];
|
|
if (ch !== '0' && ch !== '1') {
|
|
break;
|
|
}
|
|
num += this.source[this.index++];
|
|
}
|
|
if (num.length === 0) {
|
|
// only 0b or 0B
|
|
this.throwUnexpectedToken();
|
|
}
|
|
if (!this.eof()) {
|
|
ch = this.source.charCodeAt(this.index);
|
|
/* istanbul ignore else */
|
|
if (character_1.Character.isIdentifierStart(ch) || character_1.Character.isDecimalDigit(ch)) {
|
|
this.throwUnexpectedToken();
|
|
}
|
|
}
|
|
return {
|
|
type: 6 /* NumericLiteral */,
|
|
value: parseInt(num, 2),
|
|
lineNumber: this.lineNumber,
|
|
lineStart: this.lineStart,
|
|
start: start,
|
|
end: this.index
|
|
};
|
|
};
|
|
Scanner.prototype.scanOctalLiteral = function (prefix, start) {
|
|
var num = '';
|
|
var octal = false;
|
|
if (character_1.Character.isOctalDigit(prefix.charCodeAt(0))) {
|
|
octal = true;
|
|
num = '0' + this.source[this.index++];
|
|
}
|
|
else {
|
|
++this.index;
|
|
}
|
|
while (!this.eof()) {
|
|
if (!character_1.Character.isOctalDigit(this.source.charCodeAt(this.index))) {
|
|
break;
|
|
}
|
|
num += this.source[this.index++];
|
|
}
|
|
if (!octal && num.length === 0) {
|
|
// only 0o or 0O
|
|
this.throwUnexpectedToken();
|
|
}
|
|
if (character_1.Character.isIdentifierStart(this.source.charCodeAt(this.index)) || character_1.Character.isDecimalDigit(this.source.charCodeAt(this.index))) {
|
|
this.throwUnexpectedToken();
|
|
}
|
|
return {
|
|
type: 6 /* NumericLiteral */,
|
|
value: parseInt(num, 8),
|
|
octal: octal,
|
|
lineNumber: this.lineNumber,
|
|
lineStart: this.lineStart,
|
|
start: start,
|
|
end: this.index
|
|
};
|
|
};
|
|
Scanner.prototype.isImplicitOctalLiteral = function () {
|
|
// Implicit octal, unless there is a non-octal digit.
|
|
// (Annex B.1.1 on Numeric Literals)
|
|
for (var i = this.index + 1; i < this.length; ++i) {
|
|
var ch = this.source[i];
|
|
if (ch === '8' || ch === '9') {
|
|
return false;
|
|
}
|
|
if (!character_1.Character.isOctalDigit(ch.charCodeAt(0))) {
|
|
return true;
|
|
}
|
|
}
|
|
return true;
|
|
};
|
|
Scanner.prototype.scanNumericLiteral = function () {
|
|
var start = this.index;
|
|
var ch = this.source[start];
|
|
assert_1.assert(character_1.Character.isDecimalDigit(ch.charCodeAt(0)) || (ch === '.'), 'Numeric literal must start with a decimal digit or a decimal point');
|
|
var num = '';
|
|
if (ch !== '.') {
|
|
num = this.source[this.index++];
|
|
ch = this.source[this.index];
|
|
// Hex number starts with '0x'.
|
|
// Octal number starts with '0'.
|
|
// Octal number in ES6 starts with '0o'.
|
|
// Binary number in ES6 starts with '0b'.
|
|
if (num === '0') {
|
|
if (ch === 'x' || ch === 'X') {
|
|
++this.index;
|
|
return this.scanHexLiteral(start);
|
|
}
|
|
if (ch === 'b' || ch === 'B') {
|
|
++this.index;
|
|
return this.scanBinaryLiteral(start);
|
|
}
|
|
if (ch === 'o' || ch === 'O') {
|
|
return this.scanOctalLiteral(ch, start);
|
|
}
|
|
if (ch && character_1.Character.isOctalDigit(ch.charCodeAt(0))) {
|
|
if (this.isImplicitOctalLiteral()) {
|
|
return this.scanOctalLiteral(ch, start);
|
|
}
|
|
}
|
|
}
|
|
while (character_1.Character.isDecimalDigit(this.source.charCodeAt(this.index))) {
|
|
num += this.source[this.index++];
|
|
}
|
|
ch = this.source[this.index];
|
|
}
|
|
if (ch === '.') {
|
|
num += this.source[this.index++];
|
|
while (character_1.Character.isDecimalDigit(this.source.charCodeAt(this.index))) {
|
|
num += this.source[this.index++];
|
|
}
|
|
ch = this.source[this.index];
|
|
}
|
|
if (ch === 'e' || ch === 'E') {
|
|
num += this.source[this.index++];
|
|
ch = this.source[this.index];
|
|
if (ch === '+' || ch === '-') {
|
|
num += this.source[this.index++];
|
|
}
|
|
if (character_1.Character.isDecimalDigit(this.source.charCodeAt(this.index))) {
|
|
while (character_1.Character.isDecimalDigit(this.source.charCodeAt(this.index))) {
|
|
num += this.source[this.index++];
|
|
}
|
|
}
|
|
else {
|
|
this.throwUnexpectedToken();
|
|
}
|
|
}
|
|
if (character_1.Character.isIdentifierStart(this.source.charCodeAt(this.index))) {
|
|
this.throwUnexpectedToken();
|
|
}
|
|
return {
|
|
type: 6 /* NumericLiteral */,
|
|
value: parseFloat(num),
|
|
lineNumber: this.lineNumber,
|
|
lineStart: this.lineStart,
|
|
start: start,
|
|
end: this.index
|
|
};
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-literals-string-literals
|
|
Scanner.prototype.scanStringLiteral = function () {
|
|
var start = this.index;
|
|
var quote = this.source[start];
|
|
assert_1.assert((quote === '\'' || quote === '"'), 'String literal must start with a quote');
|
|
++this.index;
|
|
var octal = false;
|
|
var str = '';
|
|
while (!this.eof()) {
|
|
var ch = this.source[this.index++];
|
|
if (ch === quote) {
|
|
quote = '';
|
|
break;
|
|
}
|
|
else if (ch === '\\') {
|
|
ch = this.source[this.index++];
|
|
if (!ch || !character_1.Character.isLineTerminator(ch.charCodeAt(0))) {
|
|
switch (ch) {
|
|
case 'u':
|
|
if (this.source[this.index] === '{') {
|
|
++this.index;
|
|
str += this.scanUnicodeCodePointEscape();
|
|
}
|
|
else {
|
|
var unescaped_1 = this.scanHexEscape(ch);
|
|
if (unescaped_1 === null) {
|
|
this.throwUnexpectedToken();
|
|
}
|
|
str += unescaped_1;
|
|
}
|
|
break;
|
|
case 'x':
|
|
var unescaped = this.scanHexEscape(ch);
|
|
if (unescaped === null) {
|
|
this.throwUnexpectedToken(messages_1.Messages.InvalidHexEscapeSequence);
|
|
}
|
|
str += unescaped;
|
|
break;
|
|
case 'n':
|
|
str += '\n';
|
|
break;
|
|
case 'r':
|
|
str += '\r';
|
|
break;
|
|
case 't':
|
|
str += '\t';
|
|
break;
|
|
case 'b':
|
|
str += '\b';
|
|
break;
|
|
case 'f':
|
|
str += '\f';
|
|
break;
|
|
case 'v':
|
|
str += '\x0B';
|
|
break;
|
|
case '8':
|
|
case '9':
|
|
str += ch;
|
|
this.tolerateUnexpectedToken();
|
|
break;
|
|
default:
|
|
if (ch && character_1.Character.isOctalDigit(ch.charCodeAt(0))) {
|
|
var octToDec = this.octalToDecimal(ch);
|
|
octal = octToDec.octal || octal;
|
|
str += String.fromCharCode(octToDec.code);
|
|
}
|
|
else {
|
|
str += ch;
|
|
}
|
|
break;
|
|
}
|
|
}
|
|
else {
|
|
++this.lineNumber;
|
|
if (ch === '\r' && this.source[this.index] === '\n') {
|
|
++this.index;
|
|
}
|
|
this.lineStart = this.index;
|
|
}
|
|
}
|
|
else if (character_1.Character.isLineTerminator(ch.charCodeAt(0))) {
|
|
break;
|
|
}
|
|
else {
|
|
str += ch;
|
|
}
|
|
}
|
|
if (quote !== '') {
|
|
this.index = start;
|
|
this.throwUnexpectedToken();
|
|
}
|
|
return {
|
|
type: 8 /* StringLiteral */,
|
|
value: str,
|
|
octal: octal,
|
|
lineNumber: this.lineNumber,
|
|
lineStart: this.lineStart,
|
|
start: start,
|
|
end: this.index
|
|
};
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-template-literal-lexical-components
|
|
Scanner.prototype.scanTemplate = function () {
|
|
var cooked = '';
|
|
var terminated = false;
|
|
var start = this.index;
|
|
var head = (this.source[start] === '`');
|
|
var tail = false;
|
|
var rawOffset = 2;
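// rawOffset is the length of the closing delimiter to strip from the raw value: '${' (2) for an interpolation head, '`' (1) for the tail.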
|
|
++this.index;
|
|
while (!this.eof()) {
|
|
var ch = this.source[this.index++];
|
|
if (ch === '`') {
|
|
rawOffset = 1;
|
|
tail = true;
|
|
terminated = true;
|
|
break;
|
|
}
|
|
else if (ch === '$') {
|
|
if (this.source[this.index] === '{') {
|
|
this.curlyStack.push('${');
|
|
++this.index;
|
|
terminated = true;
|
|
break;
|
|
}
|
|
cooked += ch;
|
|
}
|
|
else if (ch === '\\') {
|
|
ch = this.source[this.index++];
|
|
if (!character_1.Character.isLineTerminator(ch.charCodeAt(0))) {
|
|
switch (ch) {
|
|
case 'n':
|
|
cooked += '\n';
|
|
break;
|
|
case 'r':
|
|
cooked += '\r';
|
|
break;
|
|
case 't':
|
|
cooked += '\t';
|
|
break;
|
|
case 'u':
|
|
if (this.source[this.index] === '{') {
|
|
++this.index;
|
|
cooked += this.scanUnicodeCodePointEscape();
|
|
}
|
|
else {
|
|
var restore = this.index;
|
|
var unescaped_2 = this.scanHexEscape(ch);
|
|
if (unescaped_2 !== null) {
|
|
cooked += unescaped_2;
|
|
}
|
|
else {
|
|
this.index = restore;
|
|
cooked += ch;
|
|
}
|
|
}
|
|
break;
|
|
case 'x':
|
|
var unescaped = this.scanHexEscape(ch);
|
|
if (unescaped === null) {
|
|
this.throwUnexpectedToken(messages_1.Messages.InvalidHexEscapeSequence);
|
|
}
|
|
cooked += unescaped;
|
|
break;
|
|
case 'b':
|
|
cooked += '\b';
|
|
break;
|
|
case 'f':
|
|
cooked += '\f';
|
|
break;
|
|
case 'v':
|
|
cooked += '\v';
|
|
break;
|
|
default:
|
|
if (ch === '0') {
|
|
if (character_1.Character.isDecimalDigit(this.source.charCodeAt(this.index))) {
|
|
// Illegal: \01 \02 and so on
|
|
this.throwUnexpectedToken(messages_1.Messages.TemplateOctalLiteral);
|
|
}
|
|
cooked += '\0';
|
|
}
|
|
else if (character_1.Character.isOctalDigit(ch.charCodeAt(0))) {
|
|
// Illegal: \1 \2
|
|
this.throwUnexpectedToken(messages_1.Messages.TemplateOctalLiteral);
|
|
}
|
|
else {
|
|
cooked += ch;
|
|
}
|
|
break;
|
|
}
|
|
}
|
|
else {
|
|
++this.lineNumber;
|
|
if (ch === '\r' && this.source[this.index] === '\n') {
|
|
++this.index;
|
|
}
|
|
this.lineStart = this.index;
|
|
}
|
|
}
|
|
else if (character_1.Character.isLineTerminator(ch.charCodeAt(0))) {
|
|
++this.lineNumber;
|
|
if (ch === '\r' && this.source[this.index] === '\n') {
|
|
++this.index;
|
|
}
|
|
this.lineStart = this.index;
|
|
cooked += '\n';
|
|
}
|
|
else {
|
|
cooked += ch;
|
|
}
|
|
}
|
|
if (!terminated) {
|
|
this.throwUnexpectedToken();
|
|
}
|
|
if (!head) {
|
|
this.curlyStack.pop();
|
|
}
|
|
return {
|
|
type: 10 /* Template */,
|
|
value: this.source.slice(start + 1, this.index - rawOffset),
|
|
cooked: cooked,
|
|
head: head,
|
|
tail: tail,
|
|
lineNumber: this.lineNumber,
|
|
lineStart: this.lineStart,
|
|
start: start,
|
|
end: this.index
|
|
};
|
|
};
|
|
// https://tc39.github.io/ecma262/#sec-literals-regular-expression-literals
|
|
Scanner.prototype.testRegExp = function (pattern, flags) {
|
|
// The BMP character to use as a replacement for astral symbols when
|
|
// translating an ES6 "u"-flagged pattern to an ES5-compatible
|
|
// approximation.
|
|
// Note: replacing with '\uFFFF' enables false positives in unlikely
|
|
// scenarios. For example, `[\u{1044f}-\u{10440}]` is an invalid
|
|
// pattern that would not be detected by this substitution.
|
|
var astralSubstitute = '\uFFFF';
|
|
var tmp = pattern;
|
|
var self = this;
|
|
if (flags.indexOf('u') >= 0) {
|
|
tmp = tmp
|
|
.replace(/\\u\{([0-9a-fA-F]+)\}|\\u([a-fA-F0-9]{4})/g, function ($0, $1, $2) {
|
|
var codePoint = parseInt($1 || $2, 16);
|
|
if (codePoint > 0x10FFFF) {
|
|
self.throwUnexpectedToken(messages_1.Messages.InvalidRegExp);
|
|
}
|
|
if (codePoint <= 0xFFFF) {
|
|
return String.fromCharCode(codePoint);
|
|
}
|
|
return astralSubstitute;
|
|
})
|
|
.replace(/[\uD800-\uDBFF][\uDC00-\uDFFF]/g, astralSubstitute);
|
|
}
|
|
// First, detect invalid regular expressions.
|
|
try {
|
|
RegExp(tmp);
|
|
}
|
|
catch (e) {
|
|
this.throwUnexpectedToken(messages_1.Messages.InvalidRegExp);
|
|
}
|
|
// Return a regular expression object for this pattern-flag pair, or
|
|
// `null` in case the current environment doesn't support the flags it
|
|
// uses.
|
|
try {
|
|
return new RegExp(pattern, flags);
|
|
}
|
|
catch (exception) {
|
|
/* istanbul ignore next */
|
|
return null;
|
|
}
|
|
};
|
|
Scanner.prototype.scanRegExpBody = function () {
|
|
var ch = this.source[this.index];
|
|
assert_1.assert(ch === '/', 'Regular expression literal must start with a slash');
|
|
var str = this.source[this.index++];
|
|
var classMarker = false;
|
|
var terminated = false;
|
|
while (!this.eof()) {
|
|
ch = this.source[this.index++];
|
|
str += ch;
|
|
if (ch === '\\') {
|
|
ch = this.source[this.index++];
|
|
// https://tc39.github.io/ecma262/#sec-literals-regular-expression-literals
|
|
if (character_1.Character.isLineTerminator(ch.charCodeAt(0))) {
|
|
this.throwUnexpectedToken(messages_1.Messages.UnterminatedRegExp);
|
|
}
|
|
str += ch;
|
|
}
|
|
else if (character_1.Character.isLineTerminator(ch.charCodeAt(0))) {
|
|
this.throwUnexpectedToken(messages_1.Messages.UnterminatedRegExp);
|
|
}
|
|
else if (classMarker) {
|
|
if (ch === ']') {
|
|
classMarker = false;
|
|
}
|
|
}
|
|
else {
|
|
if (ch === '/') {
|
|
terminated = true;
|
|
break;
|
|
}
|
|
else if (ch === '[') {
|
|
classMarker = true;
|
|
}
|
|
}
|
|
}
|
|
if (!terminated) {
|
|
this.throwUnexpectedToken(messages_1.Messages.UnterminatedRegExp);
|
|
}
|
|
// Exclude leading and trailing slash.
|
|
return str.substr(1, str.length - 2);
|
|
};
|
|
Scanner.prototype.scanRegExpFlags = function () {
|
|
var str = '';
|
|
var flags = '';
|
|
while (!this.eof()) {
|
|
var ch = this.source[this.index];
|
|
if (!character_1.Character.isIdentifierPart(ch.charCodeAt(0))) {
|
|
break;
|
|
}
|
|
++this.index;
|
|
if (ch === '\\' && !this.eof()) {
|
|
ch = this.source[this.index];
|
|
if (ch === 'u') {
|
|
++this.index;
|
|
var restore = this.index;
|
|
var char = this.scanHexEscape('u');
|
|
if (char !== null) {
|
|
flags += char;
|
|
for (str += '\\u'; restore < this.index; ++restore) {
|
|
str += this.source[restore];
|
|
}
|
|
}
|
|
else {
|
|
this.index = restore;
|
|
flags += 'u';
|
|
str += '\\u';
|
|
}
|
|
this.tolerateUnexpectedToken();
|
|
}
|
|
else {
|
|
str += '\\';
|
|
this.tolerateUnexpectedToken();
|
|
}
|
|
}
|
|
else {
|
|
flags += ch;
|
|
str += ch;
|
|
}
|
|
}
|
|
return flags;
|
|
};
|
|
Scanner.prototype.scanRegExp = function () {
|
|
var start = this.index;
|
|
var pattern = this.scanRegExpBody();
|
|
var flags = this.scanRegExpFlags();
|
|
var value = this.testRegExp(pattern, flags);
|
|
return {
|
|
type: 9 /* RegularExpression */,
|
|
value: '',
|
|
pattern: pattern,
|
|
flags: flags,
|
|
regex: value,
|
|
lineNumber: this.lineNumber,
|
|
lineStart: this.lineStart,
|
|
start: start,
|
|
end: this.index
|
|
};
|
|
};
|
|
    Scanner.prototype.lex = function () {
        if (this.eof()) {
            return {
                type: 2 /* EOF */,
                value: '',
                lineNumber: this.lineNumber,
                lineStart: this.lineStart,
                start: this.index,
                end: this.index
            };
        }
        var cp = this.source.charCodeAt(this.index);
        if (character_1.Character.isIdentifierStart(cp)) {
            return this.scanIdentifier();
        }
        // Very common: ( and ) and ;
        if (cp === 0x28 || cp === 0x29 || cp === 0x3B) {
            return this.scanPunctuator();
        }
        // String literal starts with single quote (U+0027) or double quote (U+0022).
        if (cp === 0x27 || cp === 0x22) {
            return this.scanStringLiteral();
        }
        // Dot (.) U+002E can also start a floating-point number, hence the need
        // to check the next character.
        if (cp === 0x2E) {
            if (character_1.Character.isDecimalDigit(this.source.charCodeAt(this.index + 1))) {
                return this.scanNumericLiteral();
            }
            return this.scanPunctuator();
        }
        if (character_1.Character.isDecimalDigit(cp)) {
            return this.scanNumericLiteral();
        }
        // Template literals start with ` (U+0060) for template head
        // or } (U+007D) for template middle or template tail.
        if (cp === 0x60 || (cp === 0x7D && this.curlyStack[this.curlyStack.length - 1] === '${')) {
            return this.scanTemplate();
        }
        // Possible identifier start in a surrogate pair.
        if (cp >= 0xD800 && cp < 0xDFFF) {
            if (character_1.Character.isIdentifierStart(this.codePointAt(this.index))) {
                return this.scanIdentifier();
            }
        }
        return this.scanPunctuator();
    };
    return Scanner;
}());
exports.Scanner = Scanner;


/***/ },
/* 13 */
/***/ function(module, exports) {

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.TokenName = {};
exports.TokenName[1 /* BooleanLiteral */] = 'Boolean';
exports.TokenName[2 /* EOF */] = '<end>';
exports.TokenName[3 /* Identifier */] = 'Identifier';
exports.TokenName[4 /* Keyword */] = 'Keyword';
exports.TokenName[5 /* NullLiteral */] = 'Null';
exports.TokenName[6 /* NumericLiteral */] = 'Numeric';
exports.TokenName[7 /* Punctuator */] = 'Punctuator';
exports.TokenName[8 /* StringLiteral */] = 'String';
exports.TokenName[9 /* RegularExpression */] = 'RegularExpression';
exports.TokenName[10 /* Template */] = 'Template';

/***/ },
|
|
/* 14 */
|
|
/***/ function(module, exports) {
|
|
|
|
"use strict";
|
|
// Generated by generate-xhtml-entities.js. DO NOT MODIFY!
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
exports.XHTMLEntities = {
|
|
quot: '\u0022',
|
|
amp: '\u0026',
|
|
apos: '\u0027',
|
|
gt: '\u003E',
|
|
nbsp: '\u00A0',
|
|
iexcl: '\u00A1',
|
|
cent: '\u00A2',
|
|
pound: '\u00A3',
|
|
curren: '\u00A4',
|
|
yen: '\u00A5',
|
|
brvbar: '\u00A6',
|
|
sect: '\u00A7',
|
|
uml: '\u00A8',
|
|
copy: '\u00A9',
|
|
ordf: '\u00AA',
|
|
laquo: '\u00AB',
|
|
not: '\u00AC',
|
|
shy: '\u00AD',
|
|
reg: '\u00AE',
|
|
macr: '\u00AF',
|
|
deg: '\u00B0',
|
|
plusmn: '\u00B1',
|
|
sup2: '\u00B2',
|
|
sup3: '\u00B3',
|
|
acute: '\u00B4',
|
|
micro: '\u00B5',
|
|
para: '\u00B6',
|
|
middot: '\u00B7',
|
|
cedil: '\u00B8',
|
|
sup1: '\u00B9',
|
|
ordm: '\u00BA',
|
|
raquo: '\u00BB',
|
|
frac14: '\u00BC',
|
|
frac12: '\u00BD',
|
|
frac34: '\u00BE',
|
|
iquest: '\u00BF',
|
|
Agrave: '\u00C0',
|
|
Aacute: '\u00C1',
|
|
Acirc: '\u00C2',
|
|
Atilde: '\u00C3',
|
|
Auml: '\u00C4',
|
|
Aring: '\u00C5',
|
|
AElig: '\u00C6',
|
|
Ccedil: '\u00C7',
|
|
Egrave: '\u00C8',
|
|
Eacute: '\u00C9',
|
|
Ecirc: '\u00CA',
|
|
Euml: '\u00CB',
|
|
Igrave: '\u00CC',
|
|
Iacute: '\u00CD',
|
|
Icirc: '\u00CE',
|
|
Iuml: '\u00CF',
|
|
ETH: '\u00D0',
|
|
Ntilde: '\u00D1',
|
|
Ograve: '\u00D2',
|
|
Oacute: '\u00D3',
|
|
Ocirc: '\u00D4',
|
|
Otilde: '\u00D5',
|
|
Ouml: '\u00D6',
|
|
times: '\u00D7',
|
|
Oslash: '\u00D8',
|
|
Ugrave: '\u00D9',
|
|
Uacute: '\u00DA',
|
|
Ucirc: '\u00DB',
|
|
Uuml: '\u00DC',
|
|
Yacute: '\u00DD',
|
|
THORN: '\u00DE',
|
|
szlig: '\u00DF',
|
|
agrave: '\u00E0',
|
|
aacute: '\u00E1',
|
|
acirc: '\u00E2',
|
|
atilde: '\u00E3',
|
|
auml: '\u00E4',
|
|
aring: '\u00E5',
|
|
aelig: '\u00E6',
|
|
ccedil: '\u00E7',
|
|
egrave: '\u00E8',
|
|
eacute: '\u00E9',
|
|
ecirc: '\u00EA',
|
|
euml: '\u00EB',
|
|
igrave: '\u00EC',
|
|
iacute: '\u00ED',
|
|
icirc: '\u00EE',
|
|
iuml: '\u00EF',
|
|
eth: '\u00F0',
|
|
ntilde: '\u00F1',
|
|
ograve: '\u00F2',
|
|
oacute: '\u00F3',
|
|
ocirc: '\u00F4',
|
|
otilde: '\u00F5',
|
|
ouml: '\u00F6',
|
|
divide: '\u00F7',
|
|
oslash: '\u00F8',
|
|
ugrave: '\u00F9',
|
|
uacute: '\u00FA',
|
|
ucirc: '\u00FB',
|
|
uuml: '\u00FC',
|
|
yacute: '\u00FD',
|
|
thorn: '\u00FE',
|
|
yuml: '\u00FF',
|
|
OElig: '\u0152',
|
|
oelig: '\u0153',
|
|
Scaron: '\u0160',
|
|
scaron: '\u0161',
|
|
Yuml: '\u0178',
|
|
fnof: '\u0192',
|
|
circ: '\u02C6',
|
|
tilde: '\u02DC',
|
|
Alpha: '\u0391',
|
|
Beta: '\u0392',
|
|
Gamma: '\u0393',
|
|
Delta: '\u0394',
|
|
Epsilon: '\u0395',
|
|
Zeta: '\u0396',
|
|
Eta: '\u0397',
|
|
Theta: '\u0398',
|
|
Iota: '\u0399',
|
|
Kappa: '\u039A',
|
|
Lambda: '\u039B',
|
|
Mu: '\u039C',
|
|
Nu: '\u039D',
|
|
Xi: '\u039E',
|
|
Omicron: '\u039F',
|
|
Pi: '\u03A0',
|
|
Rho: '\u03A1',
|
|
Sigma: '\u03A3',
|
|
Tau: '\u03A4',
|
|
Upsilon: '\u03A5',
|
|
Phi: '\u03A6',
|
|
Chi: '\u03A7',
|
|
Psi: '\u03A8',
|
|
Omega: '\u03A9',
|
|
alpha: '\u03B1',
|
|
beta: '\u03B2',
|
|
gamma: '\u03B3',
|
|
delta: '\u03B4',
|
|
epsilon: '\u03B5',
|
|
zeta: '\u03B6',
|
|
eta: '\u03B7',
|
|
theta: '\u03B8',
|
|
iota: '\u03B9',
|
|
kappa: '\u03BA',
|
|
lambda: '\u03BB',
|
|
mu: '\u03BC',
|
|
nu: '\u03BD',
|
|
xi: '\u03BE',
|
|
omicron: '\u03BF',
|
|
pi: '\u03C0',
|
|
rho: '\u03C1',
|
|
sigmaf: '\u03C2',
|
|
sigma: '\u03C3',
|
|
tau: '\u03C4',
|
|
upsilon: '\u03C5',
|
|
phi: '\u03C6',
|
|
chi: '\u03C7',
|
|
psi: '\u03C8',
|
|
omega: '\u03C9',
|
|
thetasym: '\u03D1',
|
|
upsih: '\u03D2',
|
|
piv: '\u03D6',
|
|
ensp: '\u2002',
|
|
emsp: '\u2003',
|
|
thinsp: '\u2009',
|
|
zwnj: '\u200C',
|
|
zwj: '\u200D',
|
|
lrm: '\u200E',
|
|
rlm: '\u200F',
|
|
ndash: '\u2013',
|
|
mdash: '\u2014',
|
|
lsquo: '\u2018',
|
|
rsquo: '\u2019',
|
|
sbquo: '\u201A',
|
|
ldquo: '\u201C',
|
|
rdquo: '\u201D',
|
|
bdquo: '\u201E',
|
|
dagger: '\u2020',
|
|
Dagger: '\u2021',
|
|
bull: '\u2022',
|
|
hellip: '\u2026',
|
|
permil: '\u2030',
|
|
prime: '\u2032',
|
|
Prime: '\u2033',
|
|
lsaquo: '\u2039',
|
|
rsaquo: '\u203A',
|
|
oline: '\u203E',
|
|
frasl: '\u2044',
|
|
euro: '\u20AC',
|
|
image: '\u2111',
|
|
weierp: '\u2118',
|
|
real: '\u211C',
|
|
trade: '\u2122',
|
|
alefsym: '\u2135',
|
|
larr: '\u2190',
|
|
uarr: '\u2191',
|
|
rarr: '\u2192',
|
|
darr: '\u2193',
|
|
harr: '\u2194',
|
|
crarr: '\u21B5',
|
|
lArr: '\u21D0',
|
|
uArr: '\u21D1',
|
|
rArr: '\u21D2',
|
|
dArr: '\u21D3',
|
|
hArr: '\u21D4',
|
|
forall: '\u2200',
|
|
part: '\u2202',
|
|
exist: '\u2203',
|
|
empty: '\u2205',
|
|
nabla: '\u2207',
|
|
isin: '\u2208',
|
|
notin: '\u2209',
|
|
ni: '\u220B',
|
|
prod: '\u220F',
|
|
sum: '\u2211',
|
|
minus: '\u2212',
|
|
lowast: '\u2217',
|
|
radic: '\u221A',
|
|
prop: '\u221D',
|
|
infin: '\u221E',
|
|
ang: '\u2220',
|
|
and: '\u2227',
|
|
or: '\u2228',
|
|
cap: '\u2229',
|
|
cup: '\u222A',
|
|
int: '\u222B',
|
|
there4: '\u2234',
|
|
sim: '\u223C',
|
|
cong: '\u2245',
|
|
asymp: '\u2248',
|
|
ne: '\u2260',
|
|
equiv: '\u2261',
|
|
le: '\u2264',
|
|
ge: '\u2265',
|
|
sub: '\u2282',
|
|
sup: '\u2283',
|
|
nsub: '\u2284',
|
|
sube: '\u2286',
|
|
supe: '\u2287',
|
|
oplus: '\u2295',
|
|
otimes: '\u2297',
|
|
perp: '\u22A5',
|
|
sdot: '\u22C5',
|
|
lceil: '\u2308',
|
|
rceil: '\u2309',
|
|
lfloor: '\u230A',
|
|
rfloor: '\u230B',
|
|
loz: '\u25CA',
|
|
spades: '\u2660',
|
|
clubs: '\u2663',
|
|
hearts: '\u2665',
|
|
diams: '\u2666',
|
|
lang: '\u27E8',
|
|
rang: '\u27E9'
|
|
};
/***/ },
|
|
/* 15 */
|
|
/***/ function(module, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
var error_handler_1 = __webpack_require__(10);
|
|
var scanner_1 = __webpack_require__(12);
|
|
var token_1 = __webpack_require__(13);
|
|
var Reader = (function () {
|
|
function Reader() {
|
|
this.values = [];
|
|
this.curly = this.paren = -1;
|
|
}
|
|
// A function following one of those tokens is an expression.
|
|
Reader.prototype.beforeFunctionExpression = function (t) {
|
|
return ['(', '{', '[', 'in', 'typeof', 'instanceof', 'new',
|
|
'return', 'case', 'delete', 'throw', 'void',
|
|
// assignment operators
|
|
'=', '+=', '-=', '*=', '**=', '/=', '%=', '<<=', '>>=', '>>>=',
|
|
'&=', '|=', '^=', ',',
|
|
// binary/unary operators
|
|
'+', '-', '*', '**', '/', '%', '++', '--', '<<', '>>', '>>>', '&',
|
|
'|', '^', '!', '~', '&&', '||', '?', ':', '===', '==', '>=',
|
|
'<=', '<', '>', '!=', '!=='].indexOf(t) >= 0;
|
|
};
|
|
// Determine if forward slash (/) is an operator or part of a regular expression
|
|
// https://github.com/mozilla/sweet.js/wiki/design
|
|
Reader.prototype.isRegexStart = function () {
|
|
var previous = this.values[this.values.length - 1];
|
|
var regex = (previous !== null);
|
|
switch (previous) {
|
|
case 'this':
|
|
case ']':
|
|
regex = false;
|
|
break;
|
|
case ')':
|
|
var keyword = this.values[this.paren - 1];
|
|
regex = (keyword === 'if' || keyword === 'while' || keyword === 'for' || keyword === 'with');
|
|
break;
|
|
case '}':
|
|
// Dividing a function by anything makes little sense,
|
|
// but we have to check for that.
|
|
regex = false;
|
|
if (this.values[this.curly - 3] === 'function') {
|
|
// Anonymous function, e.g. function(){} /42
|
|
var check = this.values[this.curly - 4];
|
|
regex = check ? !this.beforeFunctionExpression(check) : false;
|
|
}
|
|
else if (this.values[this.curly - 4] === 'function') {
|
|
// Named function, e.g. function f(){} /42/
|
|
var check = this.values[this.curly - 5];
|
|
regex = check ? !this.beforeFunctionExpression(check) : true;
|
|
}
|
|
break;
|
|
default:
|
|
break;
|
|
}
|
|
return regex;
|
|
};
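    // Illustrative cases for the heuristic above (comment only, not part of the
    // original esprima code; the snippets are hypothetical):
    //   'a = b / c'           previous token is an identifier -> '/' is division
    //   'if (x) /y/.test(x)'  previous token is ')' opened by 'if' -> '/' starts a regex
    //   'x[0] / 2'            previous token is ']' -> '/' is division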
|
|
Reader.prototype.push = function (token) {
|
|
if (token.type === 7 /* Punctuator */ || token.type === 4 /* Keyword */) {
|
|
if (token.value === '{') {
|
|
this.curly = this.values.length;
|
|
}
|
|
else if (token.value === '(') {
|
|
this.paren = this.values.length;
|
|
}
|
|
this.values.push(token.value);
|
|
}
|
|
else {
|
|
this.values.push(null);
|
|
}
|
|
};
|
|
return Reader;
|
|
}());
|
|
var Tokenizer = (function () {
|
|
function Tokenizer(code, config) {
|
|
this.errorHandler = new error_handler_1.ErrorHandler();
|
|
this.errorHandler.tolerant = config ? (typeof config.tolerant === 'boolean' && config.tolerant) : false;
|
|
this.scanner = new scanner_1.Scanner(code, this.errorHandler);
|
|
this.scanner.trackComment = config ? (typeof config.comment === 'boolean' && config.comment) : false;
|
|
this.trackRange = config ? (typeof config.range === 'boolean' && config.range) : false;
|
|
this.trackLoc = config ? (typeof config.loc === 'boolean' && config.loc) : false;
|
|
this.buffer = [];
|
|
this.reader = new Reader();
|
|
}
|
|
Tokenizer.prototype.errors = function () {
|
|
return this.errorHandler.errors;
|
|
};
|
|
Tokenizer.prototype.getNextToken = function () {
|
|
if (this.buffer.length === 0) {
|
|
var comments = this.scanner.scanComments();
|
|
if (this.scanner.trackComment) {
|
|
for (var i = 0; i < comments.length; ++i) {
|
|
var e = comments[i];
|
|
var value = this.scanner.source.slice(e.slice[0], e.slice[1]);
|
|
var comment = {
|
|
type: e.multiLine ? 'BlockComment' : 'LineComment',
|
|
value: value
|
|
};
|
|
if (this.trackRange) {
|
|
comment.range = e.range;
|
|
}
|
|
if (this.trackLoc) {
|
|
comment.loc = e.loc;
|
|
}
|
|
this.buffer.push(comment);
|
|
}
|
|
}
|
|
if (!this.scanner.eof()) {
|
|
var loc = void 0;
|
|
if (this.trackLoc) {
|
|
loc = {
|
|
start: {
|
|
line: this.scanner.lineNumber,
|
|
column: this.scanner.index - this.scanner.lineStart
|
|
},
|
|
end: {}
|
|
};
|
|
}
|
|
var startRegex = (this.scanner.source[this.scanner.index] === '/') && this.reader.isRegexStart();
|
|
var token = startRegex ? this.scanner.scanRegExp() : this.scanner.lex();
|
|
this.reader.push(token);
|
|
var entry = {
|
|
type: token_1.TokenName[token.type],
|
|
value: this.scanner.source.slice(token.start, token.end)
|
|
};
|
|
if (this.trackRange) {
|
|
entry.range = [token.start, token.end];
|
|
}
|
|
if (this.trackLoc) {
|
|
loc.end = {
|
|
line: this.scanner.lineNumber,
|
|
column: this.scanner.index - this.scanner.lineStart
|
|
};
|
|
entry.loc = loc;
|
|
}
|
|
if (token.type === 9 /* RegularExpression */) {
|
|
var pattern = token.pattern;
|
|
var flags = token.flags;
|
|
entry.regex = { pattern: pattern, flags: flags };
|
|
}
|
|
this.buffer.push(entry);
|
|
}
|
|
}
|
|
return this.buffer.shift();
|
|
};
|
|
return Tokenizer;
|
|
}());
|
|
exports.Tokenizer = Tokenizer;

/***/ }
/******/ ])
});
;
/*global define, Reflect */

/*
 * xpcshell has a smaller stack on linux and windows (1MB vs 9MB on mac),
 * and the recursive nature of esprima can cause it to overflow pretty
 * quickly. So favor its built-in Reflect parser:
 * https://developer.mozilla.org/en-US/docs/SpiderMonkey/Parser_API
 */
define('esprimaAdapter', ['./esprima', 'env'], function (esprima, env) {
    if (env.get() === 'xpconnect' && typeof Reflect !== 'undefined') {
        return Reflect;
    } else {
        return esprima;
    }
});

(function webpackUniversalModuleDefinition(root, factory) {
    var exports, module;
    if(typeof exports === 'object' && typeof module === 'object')
        module.exports = factory();
    else if(typeof define === 'function' && define.amd)
        define('source-map', [], factory);
    else if(typeof exports === 'object')
        exports["sourceMap"] = factory();
    else
        root["sourceMap"] = factory();
})(this, function() {
return /******/ (function(modules) { // webpackBootstrap
/******/     // The module cache
/******/     var installedModules = {};

/******/     // The require function
/******/     function __webpack_require__(moduleId) {

/******/         // Check if module is in cache
/******/         if(installedModules[moduleId])
/******/             return installedModules[moduleId].exports;

/******/         // Create a new module (and put it into the cache)
/******/         var module = installedModules[moduleId] = {
/******/             exports: {},
/******/             id: moduleId,
/******/             loaded: false
/******/         };

/******/         // Execute the module function
/******/         modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);

/******/         // Flag the module as loaded
/******/         module.loaded = true;

/******/         // Return the exports of the module
/******/         return module.exports;
/******/     }


/******/     // expose the modules object (__webpack_modules__)
/******/     __webpack_require__.m = modules;

/******/     // expose the module cache
/******/     __webpack_require__.c = installedModules;

/******/     // __webpack_public_path__
/******/     __webpack_require__.p = "";

/******/     // Load entry module and return exports
/******/     return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */
/***/ function(module, exports, __webpack_require__) {

/*
 * Copyright 2009-2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE.txt or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
exports.SourceMapGenerator = __webpack_require__(1).SourceMapGenerator;
exports.SourceMapConsumer = __webpack_require__(7).SourceMapConsumer;
exports.SourceNode = __webpack_require__(10).SourceNode;


/***/ },
|
|
/* 1 */
|
|
/***/ function(module, exports, __webpack_require__) {
|
|
|
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
|
/*
|
|
* Copyright 2011 Mozilla Foundation and contributors
|
|
* Licensed under the New BSD license. See LICENSE or:
|
|
* http://opensource.org/licenses/BSD-3-Clause
|
|
*/
|
|
|
|
var base64VLQ = __webpack_require__(2);
|
|
var util = __webpack_require__(4);
|
|
var ArraySet = __webpack_require__(5).ArraySet;
|
|
var MappingList = __webpack_require__(6).MappingList;
|
|
|
|
/**
|
|
* An instance of the SourceMapGenerator represents a source map which is
|
|
* being built incrementally. You may pass an object with the following
|
|
* properties:
|
|
*
|
|
* - file: The filename of the generated source.
|
|
* - sourceRoot: A root for all relative URLs in this source map.
|
|
*/
|
|
function SourceMapGenerator(aArgs) {
|
|
if (!aArgs) {
|
|
aArgs = {};
|
|
}
|
|
this._file = util.getArg(aArgs, 'file', null);
|
|
this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null);
|
|
this._skipValidation = util.getArg(aArgs, 'skipValidation', false);
|
|
this._sources = new ArraySet();
|
|
this._names = new ArraySet();
|
|
this._mappings = new MappingList();
|
|
this._sourcesContents = null;
|
|
}
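// Illustrative usage (comment only, not part of the original source-map code;
// the file names and URL are hypothetical):
//   var generator = new SourceMapGenerator({
//     file: 'bundle.js',
//     sourceRoot: 'http://example.com/src/'
//   });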
|
|
|
|
SourceMapGenerator.prototype._version = 3;
|
|
|
|
/**
|
|
* Creates a new SourceMapGenerator based on a SourceMapConsumer
|
|
*
|
|
* @param aSourceMapConsumer The SourceMap.
|
|
*/
|
|
SourceMapGenerator.fromSourceMap =
|
|
function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) {
|
|
var sourceRoot = aSourceMapConsumer.sourceRoot;
|
|
var generator = new SourceMapGenerator({
|
|
file: aSourceMapConsumer.file,
|
|
sourceRoot: sourceRoot
|
|
});
|
|
aSourceMapConsumer.eachMapping(function (mapping) {
|
|
var newMapping = {
|
|
generated: {
|
|
line: mapping.generatedLine,
|
|
column: mapping.generatedColumn
|
|
}
|
|
};
|
|
|
|
if (mapping.source != null) {
|
|
newMapping.source = mapping.source;
|
|
if (sourceRoot != null) {
|
|
newMapping.source = util.relative(sourceRoot, newMapping.source);
|
|
}
|
|
|
|
newMapping.original = {
|
|
line: mapping.originalLine,
|
|
column: mapping.originalColumn
|
|
};
|
|
|
|
if (mapping.name != null) {
|
|
newMapping.name = mapping.name;
|
|
}
|
|
}
|
|
|
|
generator.addMapping(newMapping);
|
|
});
|
|
aSourceMapConsumer.sources.forEach(function (sourceFile) {
|
|
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
|
|
if (content != null) {
|
|
generator.setSourceContent(sourceFile, content);
|
|
}
|
|
});
|
|
return generator;
|
|
};
|
|
|
|
/**
|
|
* Add a single mapping from original source line and column to the generated
|
|
* source's line and column for this source map being created. The mapping
|
|
* object should have the following properties:
|
|
*
|
|
* - generated: An object with the generated line and column positions.
|
|
* - original: An object with the original line and column positions.
|
|
* - source: The original source file (relative to the sourceRoot).
|
|
* - name: An optional original token name for this mapping.
|
|
*/
|
|
SourceMapGenerator.prototype.addMapping =
|
|
function SourceMapGenerator_addMapping(aArgs) {
|
|
var generated = util.getArg(aArgs, 'generated');
|
|
var original = util.getArg(aArgs, 'original', null);
|
|
var source = util.getArg(aArgs, 'source', null);
|
|
var name = util.getArg(aArgs, 'name', null);
|
|
|
|
if (!this._skipValidation) {
|
|
this._validateMapping(generated, original, source, name);
|
|
}
|
|
|
|
if (source != null) {
|
|
source = String(source);
|
|
if (!this._sources.has(source)) {
|
|
this._sources.add(source);
|
|
}
|
|
}
|
|
|
|
if (name != null) {
|
|
name = String(name);
|
|
if (!this._names.has(name)) {
|
|
this._names.add(name);
|
|
}
|
|
}
|
|
|
|
this._mappings.add({
|
|
generatedLine: generated.line,
|
|
generatedColumn: generated.column,
|
|
originalLine: original != null && original.line,
|
|
originalColumn: original != null && original.column,
|
|
source: source,
|
|
name: name
|
|
});
|
|
};
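// Illustrative call (comment only, not part of the original source-map code;
// the positions and names are hypothetical): map line 1, column 0 of 'a.js'
// onto line 4, column 2 of the generated file, keeping the original name 'foo'.
//   generator.addMapping({
//     generated: { line: 4, column: 2 },
//     original: { line: 1, column: 0 },
//     source: 'a.js',
//     name: 'foo'
//   });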
|
|
|
|
/**
|
|
* Set the source content for a source file.
|
|
*/
|
|
SourceMapGenerator.prototype.setSourceContent =
|
|
function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) {
|
|
var source = aSourceFile;
|
|
if (this._sourceRoot != null) {
|
|
source = util.relative(this._sourceRoot, source);
|
|
}
|
|
|
|
if (aSourceContent != null) {
|
|
// Add the source content to the _sourcesContents map.
|
|
// Create a new _sourcesContents map if the property is null.
|
|
if (!this._sourcesContents) {
|
|
this._sourcesContents = Object.create(null);
|
|
}
|
|
this._sourcesContents[util.toSetString(source)] = aSourceContent;
|
|
} else if (this._sourcesContents) {
|
|
// Remove the source file from the _sourcesContents map.
|
|
// If the _sourcesContents map is empty, set the property to null.
|
|
delete this._sourcesContents[util.toSetString(source)];
|
|
if (Object.keys(this._sourcesContents).length === 0) {
|
|
this._sourcesContents = null;
|
|
}
|
|
}
|
|
};
|
|
|
|
/**
|
|
* Applies the mappings of a sub-source-map for a specific source file to the
|
|
* source map being generated. Each mapping to the supplied source file is
|
|
* rewritten using the supplied source map. Note: The resolution for the
|
|
 * resulting mappings is the minimum of this map and the supplied map.
|
|
*
|
|
* @param aSourceMapConsumer The source map to be applied.
|
|
* @param aSourceFile Optional. The filename of the source file.
|
|
* If omitted, SourceMapConsumer's file property will be used.
|
|
* @param aSourceMapPath Optional. The dirname of the path to the source map
|
|
* to be applied. If relative, it is relative to the SourceMapConsumer.
|
|
* This parameter is needed when the two source maps aren't in the same
|
|
* directory, and the source map to be applied contains relative source
|
|
* paths. If so, those relative source paths need to be rewritten
|
|
* relative to the SourceMapGenerator.
|
|
*/
|
|
SourceMapGenerator.prototype.applySourceMap =
|
|
function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) {
|
|
var sourceFile = aSourceFile;
|
|
// If aSourceFile is omitted, we will use the file property of the SourceMap
|
|
if (aSourceFile == null) {
|
|
if (aSourceMapConsumer.file == null) {
|
|
throw new Error(
|
|
'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' +
|
|
'or the source map\'s "file" property. Both were omitted.'
|
|
);
|
|
}
|
|
sourceFile = aSourceMapConsumer.file;
|
|
}
|
|
var sourceRoot = this._sourceRoot;
|
|
// Make "sourceFile" relative if an absolute Url is passed.
|
|
if (sourceRoot != null) {
|
|
sourceFile = util.relative(sourceRoot, sourceFile);
|
|
}
|
|
// Applying the SourceMap can add and remove items from the sources and
|
|
// the names array.
|
|
var newSources = new ArraySet();
|
|
var newNames = new ArraySet();
|
|
|
|
// Find mappings for the "sourceFile"
|
|
this._mappings.unsortedForEach(function (mapping) {
|
|
if (mapping.source === sourceFile && mapping.originalLine != null) {
|
|
// Check if it can be mapped by the source map, then update the mapping.
|
|
var original = aSourceMapConsumer.originalPositionFor({
|
|
line: mapping.originalLine,
|
|
column: mapping.originalColumn
|
|
});
|
|
if (original.source != null) {
|
|
// Copy mapping
|
|
mapping.source = original.source;
|
|
if (aSourceMapPath != null) {
|
|
mapping.source = util.join(aSourceMapPath, mapping.source)
|
|
}
|
|
if (sourceRoot != null) {
|
|
mapping.source = util.relative(sourceRoot, mapping.source);
|
|
}
|
|
mapping.originalLine = original.line;
|
|
mapping.originalColumn = original.column;
|
|
if (original.name != null) {
|
|
mapping.name = original.name;
|
|
}
|
|
}
|
|
}
|
|
|
|
var source = mapping.source;
|
|
if (source != null && !newSources.has(source)) {
|
|
newSources.add(source);
|
|
}
|
|
|
|
var name = mapping.name;
|
|
if (name != null && !newNames.has(name)) {
|
|
newNames.add(name);
|
|
}
|
|
|
|
}, this);
|
|
this._sources = newSources;
|
|
this._names = newNames;
|
|
|
|
// Copy sourcesContents of applied map.
|
|
aSourceMapConsumer.sources.forEach(function (sourceFile) {
|
|
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
|
|
if (content != null) {
|
|
if (aSourceMapPath != null) {
|
|
sourceFile = util.join(aSourceMapPath, sourceFile);
|
|
}
|
|
if (sourceRoot != null) {
|
|
sourceFile = util.relative(sourceRoot, sourceFile);
|
|
}
|
|
this.setSourceContent(sourceFile, content);
|
|
}
|
|
}, this);
|
|
};
|
|
|
|
/**
|
|
* A mapping can have one of the three levels of data:
|
|
*
|
|
* 1. Just the generated position.
|
|
* 2. The Generated position, original position, and original source.
|
|
* 3. Generated and original position, original source, as well as a name
|
|
* token.
|
|
*
|
|
* To maintain consistency, we validate that any new mapping being added falls
|
|
* in to one of these categories.
|
|
*/
|
|
SourceMapGenerator.prototype._validateMapping =
|
|
function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource,
|
|
aName) {
|
|
if (aGenerated && 'line' in aGenerated && 'column' in aGenerated
|
|
&& aGenerated.line > 0 && aGenerated.column >= 0
|
|
&& !aOriginal && !aSource && !aName) {
|
|
// Case 1.
|
|
return;
|
|
}
|
|
else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated
|
|
&& aOriginal && 'line' in aOriginal && 'column' in aOriginal
|
|
&& aGenerated.line > 0 && aGenerated.column >= 0
|
|
&& aOriginal.line > 0 && aOriginal.column >= 0
|
|
&& aSource) {
|
|
// Cases 2 and 3.
|
|
return;
|
|
}
|
|
else {
|
|
throw new Error('Invalid mapping: ' + JSON.stringify({
|
|
generated: aGenerated,
|
|
source: aSource,
|
|
original: aOriginal,
|
|
name: aName
|
|
}));
|
|
}
|
|
};
|
|
|
|
/**
|
|
* Serialize the accumulated mappings in to the stream of base 64 VLQs
|
|
* specified by the source map format.
|
|
*/
|
|
SourceMapGenerator.prototype._serializeMappings =
|
|
function SourceMapGenerator_serializeMappings() {
|
|
var previousGeneratedColumn = 0;
|
|
var previousGeneratedLine = 1;
|
|
var previousOriginalColumn = 0;
|
|
var previousOriginalLine = 0;
|
|
var previousName = 0;
|
|
var previousSource = 0;
|
|
var result = '';
|
|
var next;
|
|
var mapping;
|
|
var nameIdx;
|
|
var sourceIdx;
|
|
|
|
var mappings = this._mappings.toArray();
|
|
for (var i = 0, len = mappings.length; i < len; i++) {
|
|
mapping = mappings[i];
|
|
next = ''
|
|
|
|
if (mapping.generatedLine !== previousGeneratedLine) {
|
|
previousGeneratedColumn = 0;
|
|
while (mapping.generatedLine !== previousGeneratedLine) {
|
|
next += ';';
|
|
previousGeneratedLine++;
|
|
}
|
|
}
|
|
else {
|
|
if (i > 0) {
|
|
if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) {
|
|
continue;
|
|
}
|
|
next += ',';
|
|
}
|
|
}
|
|
|
|
next += base64VLQ.encode(mapping.generatedColumn
|
|
- previousGeneratedColumn);
|
|
previousGeneratedColumn = mapping.generatedColumn;
|
|
|
|
if (mapping.source != null) {
|
|
sourceIdx = this._sources.indexOf(mapping.source);
|
|
next += base64VLQ.encode(sourceIdx - previousSource);
|
|
previousSource = sourceIdx;
|
|
|
|
// lines are stored 0-based in SourceMap spec version 3
|
|
next += base64VLQ.encode(mapping.originalLine - 1
|
|
- previousOriginalLine);
|
|
previousOriginalLine = mapping.originalLine - 1;
|
|
|
|
next += base64VLQ.encode(mapping.originalColumn
|
|
- previousOriginalColumn);
|
|
previousOriginalColumn = mapping.originalColumn;
|
|
|
|
if (mapping.name != null) {
|
|
nameIdx = this._names.indexOf(mapping.name);
|
|
next += base64VLQ.encode(nameIdx - previousName);
|
|
previousName = nameIdx;
|
|
}
|
|
}
|
|
|
|
result += next;
|
|
}
|
|
|
|
return result;
|
|
};
|
|
|
|
SourceMapGenerator.prototype._generateSourcesContent =
|
|
function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) {
|
|
return aSources.map(function (source) {
|
|
if (!this._sourcesContents) {
|
|
return null;
|
|
}
|
|
if (aSourceRoot != null) {
|
|
source = util.relative(aSourceRoot, source);
|
|
}
|
|
var key = util.toSetString(source);
|
|
return Object.prototype.hasOwnProperty.call(this._sourcesContents, key)
|
|
? this._sourcesContents[key]
|
|
: null;
|
|
}, this);
|
|
};
|
|
|
|
/**
|
|
* Externalize the source map.
|
|
*/
|
|
SourceMapGenerator.prototype.toJSON =
|
|
function SourceMapGenerator_toJSON() {
|
|
var map = {
|
|
version: this._version,
|
|
sources: this._sources.toArray(),
|
|
names: this._names.toArray(),
|
|
mappings: this._serializeMappings()
|
|
};
|
|
if (this._file != null) {
|
|
map.file = this._file;
|
|
}
|
|
if (this._sourceRoot != null) {
|
|
map.sourceRoot = this._sourceRoot;
|
|
}
|
|
if (this._sourcesContents) {
|
|
map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot);
|
|
}
|
|
|
|
return map;
|
|
};
|
|
|
|
/**
|
|
* Render the source map being generated to a string.
|
|
*/
|
|
SourceMapGenerator.prototype.toString =
|
|
function SourceMapGenerator_toString() {
|
|
return JSON.stringify(this.toJSON());
|
|
};
|
|
|
|
exports.SourceMapGenerator = SourceMapGenerator;
|
|
|
|
|
|
/***/ },
|
|
/* 2 */
|
|
/***/ function(module, exports, __webpack_require__) {
|
|
|
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
|
/*
|
|
* Copyright 2011 Mozilla Foundation and contributors
|
|
* Licensed under the New BSD license. See LICENSE or:
|
|
* http://opensource.org/licenses/BSD-3-Clause
|
|
*
|
|
* Based on the Base 64 VLQ implementation in Closure Compiler:
|
|
* https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java
|
|
*
|
|
* Copyright 2011 The Closure Compiler Authors. All rights reserved.
|
|
* Redistribution and use in source and binary forms, with or without
|
|
* modification, are permitted provided that the following conditions are
|
|
* met:
|
|
*
|
|
* * Redistributions of source code must retain the above copyright
|
|
* notice, this list of conditions and the following disclaimer.
|
|
* * Redistributions in binary form must reproduce the above
|
|
* copyright notice, this list of conditions and the following
|
|
* disclaimer in the documentation and/or other materials provided
|
|
* with the distribution.
|
|
* * Neither the name of Google Inc. nor the names of its
|
|
* contributors may be used to endorse or promote products derived
|
|
* from this software without specific prior written permission.
|
|
*
|
|
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
*/
|
|
|
|
var base64 = __webpack_require__(3);
|
|
|
|
// A single base 64 digit can contain 6 bits of data. For the base 64 variable
|
|
// length quantities we use in the source map spec, the first bit is the sign,
|
|
// the next four bits are the actual value, and the 6th bit is the
|
|
// continuation bit. The continuation bit tells us whether there are more
|
|
// digits in this value following this digit.
|
|
//
|
|
// Continuation
|
|
// | Sign
|
|
// | |
|
|
// V V
|
|
// 101011
|
|
|
|
var VLQ_BASE_SHIFT = 5;
|
|
|
|
// binary: 100000
|
|
var VLQ_BASE = 1 << VLQ_BASE_SHIFT;
|
|
|
|
// binary: 011111
|
|
var VLQ_BASE_MASK = VLQ_BASE - 1;
|
|
|
|
// binary: 100000
|
|
var VLQ_CONTINUATION_BIT = VLQ_BASE;
|
|
|
|
/**
 * Converts from a two's-complement value to a value where the sign bit is
 * placed in the least significant bit. For example, as decimals:
 *   1 becomes 2 (10 binary), -1 becomes 3 (11 binary)
 *   2 becomes 4 (100 binary), -2 becomes 5 (101 binary)
 */
function toVLQSigned(aValue) {
  return aValue < 0
    ? ((-aValue) << 1) + 1
    : (aValue << 1) + 0;
}

/**
 * Converts to a two's-complement value from a value where the sign bit is
 * placed in the least significant bit. For example, as decimals:
 *   2 (10 binary) becomes 1, 3 (11 binary) becomes -1
 *   4 (100 binary) becomes 2, 5 (101 binary) becomes -2
 */
function fromVLQSigned(aValue) {
  var isNegative = (aValue & 1) === 1;
  var shifted = aValue >> 1;
  return isNegative
    ? -shifted
    : shifted;
}

/**
 * Returns the base 64 VLQ encoded value.
 */
exports.encode = function base64VLQ_encode(aValue) {
  var encoded = "";
  var digit;

  var vlq = toVLQSigned(aValue);

  do {
    digit = vlq & VLQ_BASE_MASK;
    vlq >>>= VLQ_BASE_SHIFT;
    if (vlq > 0) {
      // There are still more digits in this value, so we must make sure the
      // continuation bit is marked.
      digit |= VLQ_CONTINUATION_BIT;
    }
    encoded += base64.encode(digit);
  } while (vlq > 0);

  return encoded;
};
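// Illustrative worked example (comment only, not part of the original
// source-map code, and never executed): encoding the value 16 with the
// digit layout described above.
//   toVLQSigned(16) === 32                  // binary 100000, sign bit 0
//   1st digit: 32 & VLQ_BASE_MASK === 0, remaining vlq is 1, so the
//              continuation bit is set -> base64.encode(0 | VLQ_CONTINUATION_BIT) === 'g'
//   2nd digit: 1 & VLQ_BASE_MASK === 1, nothing remains, no continuation
//              -> base64.encode(1) === 'B'
//   exports.encode(16) === 'gB'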
|
|
|
|
/**
|
|
* Decodes the next base 64 VLQ value from the given string and returns the
|
|
* value and the rest of the string via the out parameter.
|
|
*/
|
|
exports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) {
|
|
var strLen = aStr.length;
|
|
var result = 0;
|
|
var shift = 0;
|
|
var continuation, digit;
|
|
|
|
do {
|
|
if (aIndex >= strLen) {
|
|
throw new Error("Expected more digits in base 64 VLQ value.");
|
|
}
|
|
|
|
digit = base64.decode(aStr.charCodeAt(aIndex++));
|
|
if (digit === -1) {
|
|
throw new Error("Invalid base64 digit: " + aStr.charAt(aIndex - 1));
|
|
}
|
|
|
|
continuation = !!(digit & VLQ_CONTINUATION_BIT);
|
|
digit &= VLQ_BASE_MASK;
|
|
result = result + (digit << shift);
|
|
shift += VLQ_BASE_SHIFT;
|
|
} while (continuation);
|
|
|
|
aOutParam.value = fromVLQSigned(result);
|
|
aOutParam.rest = aIndex;
|
|
};
|
|
|
|
|
|
/***/ },
|
|
/* 3 */
|
|
/***/ function(module, exports) {
|
|
|
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
|
/*
|
|
* Copyright 2011 Mozilla Foundation and contributors
|
|
* Licensed under the New BSD license. See LICENSE or:
|
|
* http://opensource.org/licenses/BSD-3-Clause
|
|
*/
|
|
|
|
var intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split('');
|
|
|
|
/**
|
|
* Encode an integer in the range of 0 to 63 to a single base 64 digit.
|
|
*/
|
|
exports.encode = function (number) {
|
|
if (0 <= number && number < intToCharMap.length) {
|
|
return intToCharMap[number];
|
|
}
|
|
throw new TypeError("Must be between 0 and 63: " + number);
|
|
};
|
|
|
|
/**
|
|
* Decode a single base 64 character code digit to an integer. Returns -1 on
|
|
* failure.
|
|
*/
|
|
exports.decode = function (charCode) {
|
|
var bigA = 65; // 'A'
|
|
var bigZ = 90; // 'Z'
|
|
|
|
var littleA = 97; // 'a'
|
|
var littleZ = 122; // 'z'
|
|
|
|
var zero = 48; // '0'
|
|
var nine = 57; // '9'
|
|
|
|
var plus = 43; // '+'
|
|
var slash = 47; // '/'
|
|
|
|
var littleOffset = 26;
|
|
var numberOffset = 52;
|
|
|
|
// 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ
|
|
if (bigA <= charCode && charCode <= bigZ) {
|
|
return (charCode - bigA);
|
|
}
|
|
|
|
// 26 - 51: abcdefghijklmnopqrstuvwxyz
|
|
if (littleA <= charCode && charCode <= littleZ) {
|
|
return (charCode - littleA + littleOffset);
|
|
}
|
|
|
|
// 52 - 61: 0123456789
|
|
if (zero <= charCode && charCode <= nine) {
|
|
return (charCode - zero + numberOffset);
|
|
}
|
|
|
|
// 62: +
|
|
if (charCode == plus) {
|
|
return 62;
|
|
}
|
|
|
|
// 63: /
|
|
if (charCode == slash) {
|
|
return 63;
|
|
}
|
|
|
|
// Invalid base64 digit.
|
|
return -1;
|
|
};
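// Illustrative round trips (comment only, not part of the original source-map code):
//   exports.encode(0)  === 'A'   and   exports.decode('A'.charCodeAt(0)) === 0
//   exports.encode(26) === 'a'   and   exports.decode('a'.charCodeAt(0)) === 26
//   exports.decode('*'.charCodeAt(0)) === -1   // '*' is not a base64 digit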
|
|
|
|
|
|
/***/ },
|
|
/* 4 */
|
|
/***/ function(module, exports) {
|
|
|
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
|
/*
|
|
* Copyright 2011 Mozilla Foundation and contributors
|
|
* Licensed under the New BSD license. See LICENSE or:
|
|
* http://opensource.org/licenses/BSD-3-Clause
|
|
*/
|
|
|
|
/**
 * This is a helper function for getting values from parameter/options
 * objects.
 *
 * @param args The object we are extracting values from
 * @param name The name of the property we are getting.
 * @param defaultValue An optional value to return if the property is missing
 * from the object. If this is not specified and the property is missing, an
 * error will be thrown.
 */
function getArg(aArgs, aName, aDefaultValue) {
  if (aName in aArgs) {
    return aArgs[aName];
  } else if (arguments.length === 3) {
    return aDefaultValue;
  } else {
    throw new Error('"' + aName + '" is a required argument.');
  }
}
exports.getArg = getArg;
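// Illustrative behaviour (comment only, not part of the original source-map
// code; the option names and values are hypothetical):
//   getArg({ file: 'a.js' }, 'file')        -> 'a.js'
//   getArg({ file: 'a.js' }, 'name', null)  -> null
//   getArg({ file: 'a.js' }, 'name')        -> throws '"name" is a required argument.'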
|
|
|
|
var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.]*)(?::(\d+))?(\S*)$/;
|
|
var dataUrlRegexp = /^data:.+\,.+$/;
|
|
|
|
function urlParse(aUrl) {
|
|
var match = aUrl.match(urlRegexp);
|
|
if (!match) {
|
|
return null;
|
|
}
|
|
return {
|
|
scheme: match[1],
|
|
auth: match[2],
|
|
host: match[3],
|
|
port: match[4],
|
|
path: match[5]
|
|
};
|
|
}
|
|
exports.urlParse = urlParse;
|
|
|
|
function urlGenerate(aParsedUrl) {
|
|
var url = '';
|
|
if (aParsedUrl.scheme) {
|
|
url += aParsedUrl.scheme + ':';
|
|
}
|
|
url += '//';
|
|
if (aParsedUrl.auth) {
|
|
url += aParsedUrl.auth + '@';
|
|
}
|
|
if (aParsedUrl.host) {
|
|
url += aParsedUrl.host;
|
|
}
|
|
if (aParsedUrl.port) {
|
|
url += ":" + aParsedUrl.port
|
|
}
|
|
if (aParsedUrl.path) {
|
|
url += aParsedUrl.path;
|
|
}
|
|
return url;
|
|
}
|
|
exports.urlGenerate = urlGenerate;
|
|
|
|
/**
|
|
* Normalizes a path, or the path portion of a URL:
|
|
*
|
|
 * - Replaces consecutive slashes with one slash.
|
|
* - Removes unnecessary '.' parts.
|
|
* - Removes unnecessary '<dir>/..' parts.
|
|
*
|
|
* Based on code in the Node.js 'path' core module.
|
|
*
|
|
* @param aPath The path or url to normalize.
|
|
*/
|
|
function normalize(aPath) {
|
|
var path = aPath;
|
|
var url = urlParse(aPath);
|
|
if (url) {
|
|
if (!url.path) {
|
|
return aPath;
|
|
}
|
|
path = url.path;
|
|
}
|
|
var isAbsolute = exports.isAbsolute(path);
|
|
|
|
var parts = path.split(/\/+/);
|
|
for (var part, up = 0, i = parts.length - 1; i >= 0; i--) {
|
|
part = parts[i];
|
|
if (part === '.') {
|
|
parts.splice(i, 1);
|
|
} else if (part === '..') {
|
|
up++;
|
|
} else if (up > 0) {
|
|
if (part === '') {
|
|
// The first part is blank if the path is absolute. Trying to go
|
|
// above the root is a no-op. Therefore we can remove all '..' parts
|
|
// directly after the root.
|
|
parts.splice(i + 1, up);
|
|
up = 0;
|
|
} else {
|
|
parts.splice(i, 2);
|
|
up--;
|
|
}
|
|
}
|
|
}
|
|
path = parts.join('/');
|
|
|
|
if (path === '') {
|
|
path = isAbsolute ? '/' : '.';
|
|
}
|
|
|
|
if (url) {
|
|
url.path = path;
|
|
return urlGenerate(url);
|
|
}
|
|
return path;
|
|
}
|
|
exports.normalize = normalize;
|
|
|
|
/**
|
|
* Joins two paths/URLs.
|
|
*
|
|
* @param aRoot The root path or URL.
|
|
* @param aPath The path or URL to be joined with the root.
|
|
*
|
|
* - If aPath is a URL or a data URI, aPath is returned, unless aPath is a
|
|
* scheme-relative URL: Then the scheme of aRoot, if any, is prepended
|
|
* first.
|
|
* - Otherwise aPath is a path. If aRoot is a URL, then its path portion
|
|
* is updated with the result and aRoot is returned. Otherwise the result
|
|
* is returned.
|
|
* - If aPath is absolute, the result is aPath.
|
|
* - Otherwise the two paths are joined with a slash.
|
|
* - Joining for example 'http://' and 'www.example.com' is also supported.
|
|
*/
|
|
function join(aRoot, aPath) {
|
|
if (aRoot === "") {
|
|
aRoot = ".";
|
|
}
|
|
if (aPath === "") {
|
|
aPath = ".";
|
|
}
|
|
var aPathUrl = urlParse(aPath);
|
|
var aRootUrl = urlParse(aRoot);
|
|
if (aRootUrl) {
|
|
aRoot = aRootUrl.path || '/';
|
|
}
|
|
|
|
// `join(foo, '//www.example.org')`
|
|
if (aPathUrl && !aPathUrl.scheme) {
|
|
if (aRootUrl) {
|
|
aPathUrl.scheme = aRootUrl.scheme;
|
|
}
|
|
return urlGenerate(aPathUrl);
|
|
}
|
|
|
|
if (aPathUrl || aPath.match(dataUrlRegexp)) {
|
|
return aPath;
|
|
}
|
|
|
|
// `join('http://', 'www.example.com')`
|
|
if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {
|
|
aRootUrl.host = aPath;
|
|
return urlGenerate(aRootUrl);
|
|
}
|
|
|
|
var joined = aPath.charAt(0) === '/'
|
|
? aPath
|
|
: normalize(aRoot.replace(/\/+$/, '') + '/' + aPath);
|
|
|
|
if (aRootUrl) {
|
|
aRootUrl.path = joined;
|
|
return urlGenerate(aRootUrl);
|
|
}
|
|
return joined;
|
|
}
|
|
exports.join = join;
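// Illustrative behaviour of join() (comment only, not part of the original
// source-map code; the paths and URLs are hypothetical):
//   join('a/b', 'c')                         -> 'a/b/c'
//   join('http://example.com/dir', 'x.js')   -> 'http://example.com/dir/x.js'
//   join('a/b', 'http://cdn.example.com/x')  -> 'http://cdn.example.com/x'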
|
|
|
|
exports.isAbsolute = function (aPath) {
|
|
return aPath.charAt(0) === '/' || !!aPath.match(urlRegexp);
|
|
};
|
|
|
|
/**
|
|
* Make a path relative to a URL or another path.
|
|
*
|
|
* @param aRoot The root path or URL.
|
|
* @param aPath The path or URL to be made relative to aRoot.
|
|
*/
|
|
function relative(aRoot, aPath) {
|
|
if (aRoot === "") {
|
|
aRoot = ".";
|
|
}
|
|
|
|
aRoot = aRoot.replace(/\/$/, '');
|
|
|
|
// It is possible for the path to be above the root. In this case, simply
|
|
// checking whether the root is a prefix of the path won't work. Instead, we
|
|
// need to remove components from the root one by one, until either we find
|
|
// a prefix that fits, or we run out of components to remove.
|
|
var level = 0;
|
|
while (aPath.indexOf(aRoot + '/') !== 0) {
|
|
var index = aRoot.lastIndexOf("/");
|
|
if (index < 0) {
|
|
return aPath;
|
|
}
|
|
|
|
// If the only part of the root that is left is the scheme (i.e. http://,
|
|
// file:///, etc.), one or more slashes (/), or simply nothing at all, we
|
|
// have exhausted all components, so the path is not relative to the root.
|
|
aRoot = aRoot.slice(0, index);
|
|
if (aRoot.match(/^([^\/]+:\/)?\/*$/)) {
|
|
return aPath;
|
|
}
|
|
|
|
++level;
|
|
}
|
|
|
|
// Make sure we add a "../" for each component we removed from the root.
|
|
return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1);
|
|
}
|
|
exports.relative = relative;
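// Illustrative behaviour of relative() (comment only, not part of the original
// source-map code; the paths are hypothetical):
//   relative('/the/root', '/the/root/one.js')  -> 'one.js'
//   relative('/the/root', '/another/file.js')  -> '/another/file.js'
//     (no component of the root survives as a prefix, so the path is
//      returned unchanged)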
|
|
|
|
var supportsNullProto = (function () {
|
|
var obj = Object.create(null);
|
|
return !('__proto__' in obj);
|
|
}());
|
|
|
|
function identity (s) {
|
|
return s;
|
|
}
|
|
|
|
/**
|
|
* Because behavior goes wacky when you set `__proto__` on objects, we
|
|
* have to prefix all the strings in our set with an arbitrary character.
|
|
*
|
|
* See https://github.com/mozilla/source-map/pull/31 and
|
|
* https://github.com/mozilla/source-map/issues/30
|
|
*
|
|
* @param String aStr
|
|
*/
|
|
function toSetString(aStr) {
|
|
if (isProtoString(aStr)) {
|
|
return '$' + aStr;
|
|
}
|
|
|
|
return aStr;
|
|
}
|
|
exports.toSetString = supportsNullProto ? identity : toSetString;
|
|
|
|
function fromSetString(aStr) {
|
|
if (isProtoString(aStr)) {
|
|
return aStr.slice(1);
|
|
}
|
|
|
|
return aStr;
|
|
}
|
|
exports.fromSetString = supportsNullProto ? identity : fromSetString;
|
|
|
|
function isProtoString(s) {
|
|
if (!s) {
|
|
return false;
|
|
}
|
|
|
|
var length = s.length;
|
|
|
|
if (length < 9 /* "__proto__".length */) {
|
|
return false;
|
|
}
|
|
|
|
if (s.charCodeAt(length - 1) !== 95 /* '_' */ ||
|
|
s.charCodeAt(length - 2) !== 95 /* '_' */ ||
|
|
s.charCodeAt(length - 3) !== 111 /* 'o' */ ||
|
|
s.charCodeAt(length - 4) !== 116 /* 't' */ ||
|
|
s.charCodeAt(length - 5) !== 111 /* 'o' */ ||
|
|
s.charCodeAt(length - 6) !== 114 /* 'r' */ ||
|
|
s.charCodeAt(length - 7) !== 112 /* 'p' */ ||
|
|
s.charCodeAt(length - 8) !== 95 /* '_' */ ||
|
|
s.charCodeAt(length - 9) !== 95 /* '_' */) {
|
|
return false;
|
|
}
|
|
|
|
for (var i = length - 10; i >= 0; i--) {
|
|
if (s.charCodeAt(i) !== 36 /* '$' */) {
|
|
return false;
|
|
}
|
|
}
|
|
|
|
return true;
|
|
}
|
|
|
|
/**
|
|
* Comparator between two mappings where the original positions are compared.
|
|
*
|
|
* Optionally pass in `true` as `onlyCompareGenerated` to consider two
|
|
* mappings with the same original source/line/column, but different generated
|
|
* line and column the same. Useful when searching for a mapping with a
|
|
* stubbed out mapping.
|
|
*/
|
|
function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {
|
|
var cmp = mappingA.source - mappingB.source;
|
|
if (cmp !== 0) {
|
|
return cmp;
|
|
}
|
|
|
|
cmp = mappingA.originalLine - mappingB.originalLine;
|
|
if (cmp !== 0) {
|
|
return cmp;
|
|
}
|
|
|
|
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
|
if (cmp !== 0 || onlyCompareOriginal) {
|
|
return cmp;
|
|
}
|
|
|
|
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
|
|
if (cmp !== 0) {
|
|
return cmp;
|
|
}
|
|
|
|
cmp = mappingA.generatedLine - mappingB.generatedLine;
|
|
if (cmp !== 0) {
|
|
return cmp;
|
|
}
|
|
|
|
return mappingA.name - mappingB.name;
|
|
}
|
|
exports.compareByOriginalPositions = compareByOriginalPositions;
|
|
|
|
/**
|
|
* Comparator between two mappings with deflated source and name indices where
|
|
* the generated positions are compared.
|
|
*
|
|
* Optionally pass in `true` as `onlyCompareGenerated` to consider two
|
|
* mappings with the same generated line and column, but different
|
|
* source/name/original line and column the same. Useful when searching for a
|
|
* mapping with a stubbed out mapping.
|
|
*/
|
|
function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) {
|
|
var cmp = mappingA.generatedLine - mappingB.generatedLine;
|
|
if (cmp !== 0) {
|
|
return cmp;
|
|
}
|
|
|
|
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
|
|
if (cmp !== 0 || onlyCompareGenerated) {
|
|
return cmp;
|
|
}
|
|
|
|
cmp = mappingA.source - mappingB.source;
|
|
if (cmp !== 0) {
|
|
return cmp;
|
|
}
|
|
|
|
cmp = mappingA.originalLine - mappingB.originalLine;
|
|
if (cmp !== 0) {
|
|
return cmp;
|
|
}
|
|
|
|
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
|
if (cmp !== 0) {
|
|
return cmp;
|
|
}
|
|
|
|
return mappingA.name - mappingB.name;
|
|
}
|
|
exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated;
|
|
|
|
function strcmp(aStr1, aStr2) {
|
|
if (aStr1 === aStr2) {
|
|
return 0;
|
|
}
|
|
|
|
if (aStr1 > aStr2) {
|
|
return 1;
|
|
}
|
|
|
|
return -1;
|
|
}
|
|
|
|
/**
|
|
* Comparator between two mappings with inflated source and name strings where
|
|
* the generated positions are compared.
|
|
*/
|
|
function compareByGeneratedPositionsInflated(mappingA, mappingB) {
|
|
var cmp = mappingA.generatedLine - mappingB.generatedLine;
|
|
if (cmp !== 0) {
|
|
return cmp;
|
|
}
|
|
|
|
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
|
|
if (cmp !== 0) {
|
|
return cmp;
|
|
}
|
|
|
|
cmp = strcmp(mappingA.source, mappingB.source);
|
|
if (cmp !== 0) {
|
|
return cmp;
|
|
}
|
|
|
|
cmp = mappingA.originalLine - mappingB.originalLine;
|
|
if (cmp !== 0) {
|
|
return cmp;
|
|
}
|
|
|
|
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
|
if (cmp !== 0) {
|
|
return cmp;
|
|
}
|
|
|
|
return strcmp(mappingA.name, mappingB.name);
|
|
}
|
|
exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated;
/***/ },
|
|
/* 5 */
|
|
/***/ function(module, exports, __webpack_require__) {
|
|
|
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
|
/*
|
|
* Copyright 2011 Mozilla Foundation and contributors
|
|
* Licensed under the New BSD license. See LICENSE or:
|
|
* http://opensource.org/licenses/BSD-3-Clause
|
|
*/
|
|
|
|
var util = __webpack_require__(4);
|
|
var has = Object.prototype.hasOwnProperty;
|
|
|
|
/**
|
|
* A data structure which is a combination of an array and a set. Adding a new
|
|
* member is O(1), testing for membership is O(1), and finding the index of an
|
|
* element is O(1). Removing elements from the set is not supported. Only
|
|
* strings are supported for membership.
|
|
*/
|
|
function ArraySet() {
|
|
this._array = [];
|
|
this._set = Object.create(null);
|
|
}
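// Illustrative usage (comment only, not part of the original source-map code;
// the file names are hypothetical):
//   var set = ArraySet.fromArray(['a.js', 'b.js']);
//   set.has('a.js')     -> true
//   set.indexOf('b.js') -> 1
//   set.at(0)           -> 'a.js'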
|
|
|
|
/**
|
|
* Static method for creating ArraySet instances from an existing array.
|
|
*/
|
|
ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) {
|
|
var set = new ArraySet();
|
|
for (var i = 0, len = aArray.length; i < len; i++) {
|
|
set.add(aArray[i], aAllowDuplicates);
|
|
}
|
|
return set;
|
|
};
|
|
|
|
/**
|
|
 * Return how many unique items are in this ArraySet. If duplicates have been
 * added, then those do not count towards the size.
|
|
*
|
|
* @returns Number
|
|
*/
|
|
ArraySet.prototype.size = function ArraySet_size() {
|
|
return Object.getOwnPropertyNames(this._set).length;
|
|
};
|
|
|
|
/**
|
|
* Add the given string to this set.
|
|
*
|
|
* @param String aStr
|
|
*/
|
|
ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) {
|
|
var sStr = util.toSetString(aStr);
|
|
var isDuplicate = has.call(this._set, sStr);
|
|
var idx = this._array.length;
|
|
if (!isDuplicate || aAllowDuplicates) {
|
|
this._array.push(aStr);
|
|
}
|
|
if (!isDuplicate) {
|
|
this._set[sStr] = idx;
|
|
}
|
|
};
|
|
|
|
/**
|
|
* Is the given string a member of this set?
|
|
*
|
|
* @param String aStr
|
|
*/
|
|
ArraySet.prototype.has = function ArraySet_has(aStr) {
|
|
var sStr = util.toSetString(aStr);
|
|
return has.call(this._set, sStr);
|
|
};
|
|
|
|
/**
|
|
* What is the index of the given string in the array?
|
|
*
|
|
* @param String aStr
|
|
*/
|
|
ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) {
|
|
var sStr = util.toSetString(aStr);
|
|
if (has.call(this._set, sStr)) {
|
|
return this._set[sStr];
|
|
}
|
|
throw new Error('"' + aStr + '" is not in the set.');
|
|
};
|
|
|
|
/**
|
|
* What is the element at the given index?
|
|
*
|
|
* @param Number aIdx
|
|
*/
|
|
ArraySet.prototype.at = function ArraySet_at(aIdx) {
|
|
if (aIdx >= 0 && aIdx < this._array.length) {
|
|
return this._array[aIdx];
|
|
}
|
|
throw new Error('No element indexed by ' + aIdx);
|
|
};
|
|
|
|
/**
|
|
* Returns the array representation of this set (which has the proper indices
|
|
* indicated by indexOf). Note that this is a copy of the internal array used
|
|
* for storing the members so that no one can mess with internal state.
|
|
*/
|
|
ArraySet.prototype.toArray = function ArraySet_toArray() {
|
|
return this._array.slice();
|
|
};
|
|
|
|
exports.ArraySet = ArraySet;
|
|
|
|
|
|
/***/ },
|
|
/* 6 */
|
|
/***/ function(module, exports, __webpack_require__) {
|
|
|
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
|
/*
|
|
* Copyright 2014 Mozilla Foundation and contributors
|
|
* Licensed under the New BSD license. See LICENSE or:
|
|
* http://opensource.org/licenses/BSD-3-Clause
|
|
*/
|
|
|
|
var util = __webpack_require__(4);
|
|
|
|
/**
|
|
* Determine whether mappingB is after mappingA with respect to generated
|
|
* position.
|
|
*/
|
|
function generatedPositionAfter(mappingA, mappingB) {
|
|
// Optimized for most common case
|
|
var lineA = mappingA.generatedLine;
|
|
var lineB = mappingB.generatedLine;
|
|
var columnA = mappingA.generatedColumn;
|
|
var columnB = mappingB.generatedColumn;
|
|
return lineB > lineA || lineB == lineA && columnB >= columnA ||
|
|
util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0;
|
|
}
|
|
|
|
/**
|
|
* A data structure to provide a sorted view of accumulated mappings in a
|
|
 * performance-conscious manner. It trades a negligible overhead in the general
 * case for a large speedup in case of mappings being added in order.
|
|
*/
|
|
function MappingList() {
|
|
this._array = [];
|
|
this._sorted = true;
|
|
// Serves as infimum
|
|
this._last = {generatedLine: -1, generatedColumn: 0};
|
|
}
|
|
|
|
/**
|
|
* Iterate through internal items. This method takes the same arguments that
|
|
* `Array.prototype.forEach` takes.
|
|
*
|
|
* NOTE: The order of the mappings is NOT guaranteed.
|
|
*/
|
|
MappingList.prototype.unsortedForEach =
|
|
function MappingList_forEach(aCallback, aThisArg) {
|
|
this._array.forEach(aCallback, aThisArg);
|
|
};
|
|
|
|
/**
|
|
* Add the given source mapping.
|
|
*
|
|
* @param Object aMapping
|
|
*/
|
|
MappingList.prototype.add = function MappingList_add(aMapping) {
|
|
if (generatedPositionAfter(this._last, aMapping)) {
|
|
this._last = aMapping;
|
|
this._array.push(aMapping);
|
|
} else {
|
|
this._sorted = false;
|
|
this._array.push(aMapping);
|
|
}
|
|
};
|
|
|
|
/**
|
|
* Returns the flat, sorted array of mappings. The mappings are sorted by
|
|
* generated position.
|
|
*
|
|
* WARNING: This method returns internal data without copying, for
|
|
* performance. The return value must NOT be mutated, and should be treated as
|
|
* an immutable borrow. If you want to take ownership, you must make your own
|
|
* copy.
|
|
*/
|
|
MappingList.prototype.toArray = function MappingList_toArray() {
|
|
if (!this._sorted) {
|
|
this._array.sort(util.compareByGeneratedPositionsInflated);
|
|
this._sorted = true;
|
|
}
|
|
return this._array;
|
|
};
|
|
|
|
exports.MappingList = MappingList;
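// Illustrative sketch (hypothetical values, not part of the library): MappingList
// appends in O(1) while input stays in generated order, and only falls back to a
// sort inside toArray() once an out-of-order mapping has been added.
//
//   var list = new MappingList();
//   list.add({ generatedLine: 1, generatedColumn: 0 });
//   list.add({ generatedLine: 2, generatedColumn: 4 });   // still in order, no flag set
//   list.add({ generatedLine: 1, generatedColumn: 9 });   // out of order: _sorted = false
//   list.toArray();   // sorts once by generated position, then returns the
//                     // internal (now sorted) array without copying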
|
|
|
|
|
|
/***/ },
|
|
/* 7 */
|
|
/***/ function(module, exports, __webpack_require__) {
|
|
|
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
|
/*
|
|
* Copyright 2011 Mozilla Foundation and contributors
|
|
* Licensed under the New BSD license. See LICENSE or:
|
|
* http://opensource.org/licenses/BSD-3-Clause
|
|
*/
|
|
|
|
var util = __webpack_require__(4);
|
|
var binarySearch = __webpack_require__(8);
|
|
var ArraySet = __webpack_require__(5).ArraySet;
|
|
var base64VLQ = __webpack_require__(2);
|
|
var quickSort = __webpack_require__(9).quickSort;
|
|
|
|
function SourceMapConsumer(aSourceMap) {
|
|
var sourceMap = aSourceMap;
|
|
if (typeof aSourceMap === 'string') {
|
|
sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, ''));
|
|
}
|
|
|
|
return sourceMap.sections != null
|
|
? new IndexedSourceMapConsumer(sourceMap)
|
|
: new BasicSourceMapConsumer(sourceMap);
|
|
}
|
|
|
|
SourceMapConsumer.fromSourceMap = function(aSourceMap) {
|
|
return BasicSourceMapConsumer.fromSourceMap(aSourceMap);
|
|
};
|
|
|
|
/**
|
|
* The version of the source mapping spec that we are consuming.
|
|
*/
|
|
SourceMapConsumer.prototype._version = 3;
|
|
|
|
// `__generatedMappings` and `__originalMappings` are arrays that hold the
|
|
// parsed mapping coordinates from the source map's "mappings" attribute. They
|
|
// are lazily instantiated, accessed via the `_generatedMappings` and
|
|
// `_originalMappings` getters respectively, and we only parse the mappings
|
|
// and create these arrays once queried for a source location. We jump through
|
|
// these hoops because there can be many thousands of mappings, and parsing
|
|
// them is expensive, so we only want to do it if we must.
|
|
//
|
|
// Each object in the arrays is of the form:
|
|
//
|
|
// {
|
|
// generatedLine: The line number in the generated code,
|
|
// generatedColumn: The column number in the generated code,
|
|
// source: The path to the original source file that generated this
|
|
// chunk of code,
|
|
// originalLine: The line number in the original source that
|
|
// corresponds to this chunk of generated code,
|
|
// originalColumn: The column number in the original source that
|
|
// corresponds to this chunk of generated code,
|
|
// name: The name of the original symbol which generated this chunk of
|
|
// code.
|
|
// }
|
|
//
|
|
// All properties except for `generatedLine` and `generatedColumn` can be
|
|
// `null`.
|
|
//
|
|
// `_generatedMappings` is ordered by the generated positions.
|
|
//
|
|
// `_originalMappings` is ordered by the original positions.
|
|
|
|
SourceMapConsumer.prototype.__generatedMappings = null;
|
|
Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', {
|
|
get: function () {
|
|
if (!this.__generatedMappings) {
|
|
this._parseMappings(this._mappings, this.sourceRoot);
|
|
}
|
|
|
|
return this.__generatedMappings;
|
|
}
|
|
});
|
|
|
|
SourceMapConsumer.prototype.__originalMappings = null;
|
|
Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', {
|
|
get: function () {
|
|
if (!this.__originalMappings) {
|
|
this._parseMappings(this._mappings, this.sourceRoot);
|
|
}
|
|
|
|
return this.__originalMappings;
|
|
}
|
|
});
|
|
|
|
SourceMapConsumer.prototype._charIsMappingSeparator =
|
|
function SourceMapConsumer_charIsMappingSeparator(aStr, index) {
|
|
var c = aStr.charAt(index);
|
|
return c === ";" || c === ",";
|
|
};
|
|
|
|
/**
|
|
* Parse the mappings in a string into a data structure which we can easily
|
|
* query (the ordered arrays in the `this.__generatedMappings` and
|
|
* `this.__originalMappings` properties).
|
|
*/
|
|
SourceMapConsumer.prototype._parseMappings =
|
|
function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {
|
|
throw new Error("Subclasses must implement _parseMappings");
|
|
};
|
|
|
|
SourceMapConsumer.GENERATED_ORDER = 1;
|
|
SourceMapConsumer.ORIGINAL_ORDER = 2;
|
|
|
|
SourceMapConsumer.GREATEST_LOWER_BOUND = 1;
|
|
SourceMapConsumer.LEAST_UPPER_BOUND = 2;
|
|
|
|
/**
|
|
* Iterate over each mapping between an original source/line/column and a
|
|
* generated line/column in this source map.
|
|
*
|
|
* @param Function aCallback
|
|
* The function that is called with each mapping.
|
|
* @param Object aContext
|
|
* Optional. If specified, this object will be the value of `this` every
|
|
* time that `aCallback` is called.
|
|
* @param aOrder
|
|
* Either `SourceMapConsumer.GENERATED_ORDER` or
|
|
* `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to
|
|
* iterate over the mappings sorted by the generated file's line/column
|
|
* order or the original's source/line/column order, respectively. Defaults to
|
|
* `SourceMapConsumer.GENERATED_ORDER`.
|
|
*/
|
|
SourceMapConsumer.prototype.eachMapping =
|
|
function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) {
|
|
var context = aContext || null;
|
|
var order = aOrder || SourceMapConsumer.GENERATED_ORDER;
|
|
|
|
var mappings;
|
|
switch (order) {
|
|
case SourceMapConsumer.GENERATED_ORDER:
|
|
mappings = this._generatedMappings;
|
|
break;
|
|
case SourceMapConsumer.ORIGINAL_ORDER:
|
|
mappings = this._originalMappings;
|
|
break;
|
|
default:
|
|
throw new Error("Unknown order of iteration.");
|
|
}
|
|
|
|
var sourceRoot = this.sourceRoot;
|
|
mappings.map(function (mapping) {
|
|
var source = mapping.source === null ? null : this._sources.at(mapping.source);
|
|
if (source != null && sourceRoot != null) {
|
|
source = util.join(sourceRoot, source);
|
|
}
|
|
return {
|
|
source: source,
|
|
generatedLine: mapping.generatedLine,
|
|
generatedColumn: mapping.generatedColumn,
|
|
originalLine: mapping.originalLine,
|
|
originalColumn: mapping.originalColumn,
|
|
name: mapping.name === null ? null : this._names.at(mapping.name)
|
|
};
|
|
}, this).forEach(aCallback, context);
|
|
};
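// Illustrative sketch (hypothetical consumer and handler, not part of the
// library): eachMapping hands the callback fully "inflated" mappings, with
// source and name resolved back to strings and sourceRoot already joined in.
//
//   consumer.eachMapping(function (m) {
//     console.log(m.source + ':' + m.originalLine + ':' + m.originalColumn +
//                 ' -> ' + m.generatedLine + ':' + m.generatedColumn);
//   }, null, SourceMapConsumer.ORIGINAL_ORDER);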
|
|
|
|
/**
|
|
* Returns all generated line and column information for the original source,
|
|
* line, and column provided. If no column is provided, returns all mappings
|
|
* corresponding to either the line we are searching for or the next
|
|
* closest line that has any mappings. Otherwise, returns all mappings
|
|
* corresponding to the given line and either the column we are searching for
|
|
* or the next closest column that has any offsets.
|
|
*
|
|
* The only argument is an object with the following properties:
|
|
*
|
|
* - source: The filename of the original source.
|
|
* - line: The line number in the original source.
|
|
* - column: Optional. the column number in the original source.
|
|
*
|
|
* and an array of objects is returned, each with the following properties:
|
|
*
|
|
* - line: The line number in the generated source, or null.
|
|
* - column: The column number in the generated source, or null.
|
|
*/
|
|
SourceMapConsumer.prototype.allGeneratedPositionsFor =
|
|
function SourceMapConsumer_allGeneratedPositionsFor(aArgs) {
|
|
var line = util.getArg(aArgs, 'line');
|
|
|
|
// When there is no exact match, BasicSourceMapConsumer.prototype._findMapping
|
|
// returns the index of the closest mapping less than the needle. By
|
|
// setting needle.originalColumn to 0, we thus find the last mapping for
|
|
// the given line, provided such a mapping exists.
|
|
var needle = {
|
|
source: util.getArg(aArgs, 'source'),
|
|
originalLine: line,
|
|
originalColumn: util.getArg(aArgs, 'column', 0)
|
|
};
|
|
|
|
if (this.sourceRoot != null) {
|
|
needle.source = util.relative(this.sourceRoot, needle.source);
|
|
}
|
|
if (!this._sources.has(needle.source)) {
|
|
return [];
|
|
}
|
|
needle.source = this._sources.indexOf(needle.source);
|
|
|
|
var mappings = [];
|
|
|
|
var index = this._findMapping(needle,
|
|
this._originalMappings,
|
|
"originalLine",
|
|
"originalColumn",
|
|
util.compareByOriginalPositions,
|
|
binarySearch.LEAST_UPPER_BOUND);
|
|
if (index >= 0) {
|
|
var mapping = this._originalMappings[index];
|
|
|
|
if (aArgs.column === undefined) {
|
|
var originalLine = mapping.originalLine;
|
|
|
|
// Iterate until either we run out of mappings, or we run into
|
|
// a mapping for a different line than the one we found. Since
|
|
// mappings are sorted, this is guaranteed to find all mappings for
|
|
// the line we found.
|
|
while (mapping && mapping.originalLine === originalLine) {
|
|
mappings.push({
|
|
line: util.getArg(mapping, 'generatedLine', null),
|
|
column: util.getArg(mapping, 'generatedColumn', null),
|
|
lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)
|
|
});
|
|
|
|
mapping = this._originalMappings[++index];
|
|
}
|
|
} else {
|
|
var originalColumn = mapping.originalColumn;
|
|
|
|
// Iterate until either we run out of mappings, or we run into
|
|
// a mapping for a different line than the one we were searching for.
|
|
// Since mappings are sorted, this is guaranteed to find all mappings for
|
|
// the line we are searching for.
|
|
while (mapping &&
|
|
mapping.originalLine === line &&
|
|
mapping.originalColumn == originalColumn) {
|
|
mappings.push({
|
|
line: util.getArg(mapping, 'generatedLine', null),
|
|
column: util.getArg(mapping, 'generatedColumn', null),
|
|
lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)
|
|
});
|
|
|
|
mapping = this._originalMappings[++index];
|
|
}
|
|
}
|
|
}
|
|
|
|
return mappings;
|
|
};
|
|
|
|
exports.SourceMapConsumer = SourceMapConsumer;
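// Illustrative sketch (hypothetical consumer, values made up; not part of the
// library): when no column is given, allGeneratedPositionsFor returns every
// generated position mapped from that original line (or from the next line
// that has mappings).
//
//   consumer.allGeneratedPositionsFor({ source: 'foo.js', line: 2 });
//   // => [ { line: 10, column: 4, lastColumn: null }, ... ]
//   // lastColumn stays null until computeColumnSpans() has been called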
|
|
|
|
/**
|
|
* A BasicSourceMapConsumer instance represents a parsed source map which we can
|
|
* query for information about the original file positions by giving it a file
|
|
* position in the generated source.
|
|
*
|
|
* The only parameter is the raw source map (either as a JSON string, or
|
|
* already parsed to an object). According to the spec, source maps have the
|
|
* following attributes:
|
|
*
|
|
* - version: Which version of the source map spec this map is following.
|
|
* - sources: An array of URLs to the original source files.
|
|
* - names: An array of identifiers which can be referenced by individual mappings.
|
|
* - sourceRoot: Optional. The URL root from which all sources are relative.
|
|
* - sourcesContent: Optional. An array of contents of the original source files.
|
|
* - mappings: A string of base64 VLQs which contain the actual mappings.
|
|
* - file: Optional. The generated file this source map is associated with.
|
|
*
|
|
* Here is an example source map, taken from the source map spec[0]:
|
|
*
|
|
* {
|
|
* version : 3,
|
|
* file: "out.js",
|
|
* sourceRoot : "",
|
|
* sources: ["foo.js", "bar.js"],
|
|
* names: ["src", "maps", "are", "fun"],
|
|
* mappings: "AA,AB;;ABCDE;"
|
|
* }
|
|
*
|
|
* [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1#
|
|
*/
|
|
function BasicSourceMapConsumer(aSourceMap) {
|
|
var sourceMap = aSourceMap;
|
|
if (typeof aSourceMap === 'string') {
|
|
sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, ''));
|
|
}
|
|
|
|
var version = util.getArg(sourceMap, 'version');
|
|
var sources = util.getArg(sourceMap, 'sources');
|
|
// Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which
|
|
// requires the array) to play nice here.
|
|
var names = util.getArg(sourceMap, 'names', []);
|
|
var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null);
|
|
var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null);
|
|
var mappings = util.getArg(sourceMap, 'mappings');
|
|
var file = util.getArg(sourceMap, 'file', null);
|
|
|
|
// Once again, Sass deviates from the spec and supplies the version as a
|
|
// string rather than a number, so we use loose equality checking here.
|
|
if (version != this._version) {
|
|
throw new Error('Unsupported version: ' + version);
|
|
}
|
|
|
|
sources = sources
|
|
.map(String)
|
|
// Some source maps produce relative source paths like "./foo.js" instead of
|
|
// "foo.js". Normalize these first so that future comparisons will succeed.
|
|
// See bugzil.la/1090768.
|
|
.map(util.normalize)
|
|
// Always ensure that absolute sources are internally stored relative to
|
|
// the source root, if the source root is absolute. Not doing this would
|
|
// be particularly problematic when the source root is a prefix of the
|
|
// source (valid, but why??). See github issue #199 and bugzil.la/1188982.
|
|
.map(function (source) {
|
|
return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source)
|
|
? util.relative(sourceRoot, source)
|
|
: source;
|
|
});
|
|
|
|
// Pass `true` below to allow duplicate names and sources. While source maps
|
|
// are intended to be compressed and deduplicated, the TypeScript compiler
|
|
// sometimes generates source maps with duplicates in them. See Github issue
|
|
// #72 and bugzil.la/889492.
|
|
this._names = ArraySet.fromArray(names.map(String), true);
|
|
this._sources = ArraySet.fromArray(sources, true);
|
|
|
|
this.sourceRoot = sourceRoot;
|
|
this.sourcesContent = sourcesContent;
|
|
this._mappings = mappings;
|
|
this.file = file;
|
|
}
|
|
|
|
BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);
|
|
BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer;
|
|
|
|
/**
|
|
* Create a BasicSourceMapConsumer from a SourceMapGenerator.
|
|
*
|
|
* @param SourceMapGenerator aSourceMap
|
|
* The source map that will be consumed.
|
|
* @returns BasicSourceMapConsumer
|
|
*/
|
|
BasicSourceMapConsumer.fromSourceMap =
|
|
function SourceMapConsumer_fromSourceMap(aSourceMap) {
|
|
var smc = Object.create(BasicSourceMapConsumer.prototype);
|
|
|
|
var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true);
|
|
var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true);
|
|
smc.sourceRoot = aSourceMap._sourceRoot;
|
|
smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(),
|
|
smc.sourceRoot);
|
|
smc.file = aSourceMap._file;
|
|
|
|
// Because we are modifying the entries (by converting string sources and
|
|
// names to indices into the sources and names ArraySets), we have to make
|
|
// a copy of the entry or else bad things happen. Shared mutable state
|
|
// strikes again! See github issue #191.
|
|
|
|
var generatedMappings = aSourceMap._mappings.toArray().slice();
|
|
var destGeneratedMappings = smc.__generatedMappings = [];
|
|
var destOriginalMappings = smc.__originalMappings = [];
|
|
|
|
for (var i = 0, length = generatedMappings.length; i < length; i++) {
|
|
var srcMapping = generatedMappings[i];
|
|
var destMapping = new Mapping;
|
|
destMapping.generatedLine = srcMapping.generatedLine;
|
|
destMapping.generatedColumn = srcMapping.generatedColumn;
|
|
|
|
if (srcMapping.source) {
|
|
destMapping.source = sources.indexOf(srcMapping.source);
|
|
destMapping.originalLine = srcMapping.originalLine;
|
|
destMapping.originalColumn = srcMapping.originalColumn;
|
|
|
|
if (srcMapping.name) {
|
|
destMapping.name = names.indexOf(srcMapping.name);
|
|
}
|
|
|
|
destOriginalMappings.push(destMapping);
|
|
}
|
|
|
|
destGeneratedMappings.push(destMapping);
|
|
}
|
|
|
|
quickSort(smc.__originalMappings, util.compareByOriginalPositions);
|
|
|
|
return smc;
|
|
};
|
|
|
|
/**
|
|
* The version of the source mapping spec that we are consuming.
|
|
*/
|
|
BasicSourceMapConsumer.prototype._version = 3;
|
|
|
|
/**
|
|
* The list of original sources.
|
|
*/
|
|
Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', {
|
|
get: function () {
|
|
return this._sources.toArray().map(function (s) {
|
|
return this.sourceRoot != null ? util.join(this.sourceRoot, s) : s;
|
|
}, this);
|
|
}
|
|
});
|
|
|
|
/**
|
|
* Provide the JIT with a nice shape / hidden class.
|
|
*/
|
|
function Mapping() {
|
|
this.generatedLine = 0;
|
|
this.generatedColumn = 0;
|
|
this.source = null;
|
|
this.originalLine = null;
|
|
this.originalColumn = null;
|
|
this.name = null;
|
|
}
|
|
|
|
/**
|
|
* Parse the mappings in a string into a data structure which we can easily
|
|
* query (the ordered arrays in the `this.__generatedMappings` and
|
|
* `this.__originalMappings` properties).
|
|
*/
|
|
BasicSourceMapConsumer.prototype._parseMappings =
|
|
function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {
|
|
var generatedLine = 1;
|
|
var previousGeneratedColumn = 0;
|
|
var previousOriginalLine = 0;
|
|
var previousOriginalColumn = 0;
|
|
var previousSource = 0;
|
|
var previousName = 0;
|
|
var length = aStr.length;
|
|
var index = 0;
|
|
var cachedSegments = {};
|
|
var temp = {};
|
|
var originalMappings = [];
|
|
var generatedMappings = [];
|
|
var mapping, str, segment, end, value;
|
|
|
|
while (index < length) {
|
|
if (aStr.charAt(index) === ';') {
|
|
generatedLine++;
|
|
index++;
|
|
previousGeneratedColumn = 0;
|
|
}
|
|
else if (aStr.charAt(index) === ',') {
|
|
index++;
|
|
}
|
|
else {
|
|
mapping = new Mapping();
|
|
mapping.generatedLine = generatedLine;
|
|
|
|
// Because each offset is encoded relative to the previous one,
|
|
// many segments often have the same encoding. We can exploit this
|
|
// fact by caching the parsed variable length fields of each segment,
|
|
// allowing us to avoid a second parse if we encounter the same
|
|
// segment again.
|
|
for (end = index; end < length; end++) {
|
|
if (this._charIsMappingSeparator(aStr, end)) {
|
|
break;
|
|
}
|
|
}
|
|
str = aStr.slice(index, end);
|
|
|
|
segment = cachedSegments[str];
|
|
if (segment) {
|
|
index += str.length;
|
|
} else {
|
|
segment = [];
|
|
while (index < end) {
|
|
base64VLQ.decode(aStr, index, temp);
|
|
value = temp.value;
|
|
index = temp.rest;
|
|
segment.push(value);
|
|
}
|
|
|
|
if (segment.length === 2) {
|
|
throw new Error('Found a source, but no line and column');
|
|
}
|
|
|
|
if (segment.length === 3) {
|
|
throw new Error('Found a source and line, but no column');
|
|
}
|
|
|
|
cachedSegments[str] = segment;
|
|
}
|
|
|
|
// Generated column.
|
|
mapping.generatedColumn = previousGeneratedColumn + segment[0];
|
|
previousGeneratedColumn = mapping.generatedColumn;
|
|
|
|
if (segment.length > 1) {
|
|
// Original source.
|
|
mapping.source = previousSource + segment[1];
|
|
previousSource += segment[1];
|
|
|
|
// Original line.
|
|
mapping.originalLine = previousOriginalLine + segment[2];
|
|
previousOriginalLine = mapping.originalLine;
|
|
// Lines are stored 0-based
|
|
mapping.originalLine += 1;
|
|
|
|
// Original column.
|
|
mapping.originalColumn = previousOriginalColumn + segment[3];
|
|
previousOriginalColumn = mapping.originalColumn;
|
|
|
|
if (segment.length > 4) {
|
|
// Original name.
|
|
mapping.name = previousName + segment[4];
|
|
previousName += segment[4];
|
|
}
|
|
}
|
|
|
|
generatedMappings.push(mapping);
|
|
if (typeof mapping.originalLine === 'number') {
|
|
originalMappings.push(mapping);
|
|
}
|
|
}
|
|
}
|
|
|
|
quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated);
|
|
this.__generatedMappings = generatedMappings;
|
|
|
|
quickSort(originalMappings, util.compareByOriginalPositions);
|
|
this.__originalMappings = originalMappings;
|
|
};
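// Decoding sketch (conceptual, values hypothetical; not part of the library):
// in the "mappings" string each ';' starts a new generated line and each ','
// separates segments within a line. A decoded segment holds up to five VLQ
// fields, all deltas against the previous segment:
//
//   [ generatedColumnDelta (reset to 0 at each ';'), sourceIndexDelta,
//     originalLineDelta, originalColumnDelta, nameIndexDelta ]
//
// so a segment decoded as [4, 0, 1, 2] means: the generated column advances by
// 4, the source is unchanged, the original line advances by 1 (stored 0-based,
// hence the "+= 1" above), and the original column advances by 2.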
|
|
|
|
/**
|
|
* Find the mapping that best matches the hypothetical "needle" mapping that
|
|
* we are searching for in the given "haystack" of mappings.
|
|
*/
|
|
BasicSourceMapConsumer.prototype._findMapping =
|
|
function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName,
|
|
aColumnName, aComparator, aBias) {
|
|
// To return the position we are searching for, we must first find the
|
|
// mapping for the given position and then return the opposite position it
|
|
// points to. Because the mappings are sorted, we can use binary search to
|
|
// find the best mapping.
|
|
|
|
if (aNeedle[aLineName] <= 0) {
|
|
throw new TypeError('Line must be greater than or equal to 1, got '
|
|
+ aNeedle[aLineName]);
|
|
}
|
|
if (aNeedle[aColumnName] < 0) {
|
|
throw new TypeError('Column must be greater than or equal to 0, got '
|
|
+ aNeedle[aColumnName]);
|
|
}
|
|
|
|
return binarySearch.search(aNeedle, aMappings, aComparator, aBias);
|
|
};
|
|
|
|
/**
|
|
* Compute the last column for each generated mapping. The last column is
|
|
* inclusive.
|
|
*/
|
|
BasicSourceMapConsumer.prototype.computeColumnSpans =
|
|
function SourceMapConsumer_computeColumnSpans() {
|
|
for (var index = 0; index < this._generatedMappings.length; ++index) {
|
|
var mapping = this._generatedMappings[index];
|
|
|
|
// Mappings do not contain a field for the last generated column. We
|
|
// can come up with an optimistic estimate, however, by assuming that
|
|
// mappings are contiguous (i.e. given two consecutive mappings, the
|
|
// first mapping ends where the second one starts).
|
|
if (index + 1 < this._generatedMappings.length) {
|
|
var nextMapping = this._generatedMappings[index + 1];
|
|
|
|
if (mapping.generatedLine === nextMapping.generatedLine) {
|
|
mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1;
|
|
continue;
|
|
}
|
|
}
|
|
|
|
// The last mapping for each line spans the entire line.
|
|
mapping.lastGeneratedColumn = Infinity;
|
|
}
|
|
};
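// Illustrative note (values made up, not part of the library): after
// computeColumnSpans() each generated mapping gains an inclusive
// lastGeneratedColumn, so the lastColumn values returned by
// allGeneratedPositionsFor / generatedPositionFor stop being null; the last
// mapping on a line reports Infinity.
//
//   consumer.computeColumnSpans();
//   consumer.allGeneratedPositionsFor({ source: 'foo.js', line: 2 });
//   // => e.g. [ { line: 10, column: 4, lastColumn: 11 }, ... ]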
|
|
|
|
/**
|
|
* Returns the original source, line, and column information for the generated
|
|
* source's line and column positions provided. The only argument is an object
|
|
* with the following properties:
|
|
*
|
|
* - line: The line number in the generated source.
|
|
* - column: The column number in the generated source.
|
|
* - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or
|
|
* 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the
|
|
* closest element that is smaller than or greater than the one we are
|
|
* searching for, respectively, if the exact element cannot be found.
|
|
* Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.
|
|
*
|
|
* and an object is returned with the following properties:
|
|
*
|
|
* - source: The original source file, or null.
|
|
* - line: The line number in the original source, or null.
|
|
* - column: The column number in the original source, or null.
|
|
* - name: The original identifier, or null.
|
|
*/
|
|
BasicSourceMapConsumer.prototype.originalPositionFor =
|
|
function SourceMapConsumer_originalPositionFor(aArgs) {
|
|
var needle = {
|
|
generatedLine: util.getArg(aArgs, 'line'),
|
|
generatedColumn: util.getArg(aArgs, 'column')
|
|
};
|
|
|
|
var index = this._findMapping(
|
|
needle,
|
|
this._generatedMappings,
|
|
"generatedLine",
|
|
"generatedColumn",
|
|
util.compareByGeneratedPositionsDeflated,
|
|
util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)
|
|
);
|
|
|
|
if (index >= 0) {
|
|
var mapping = this._generatedMappings[index];
|
|
|
|
if (mapping.generatedLine === needle.generatedLine) {
|
|
var source = util.getArg(mapping, 'source', null);
|
|
if (source !== null) {
|
|
source = this._sources.at(source);
|
|
if (this.sourceRoot != null) {
|
|
source = util.join(this.sourceRoot, source);
|
|
}
|
|
}
|
|
var name = util.getArg(mapping, 'name', null);
|
|
if (name !== null) {
|
|
name = this._names.at(name);
|
|
}
|
|
return {
|
|
source: source,
|
|
line: util.getArg(mapping, 'originalLine', null),
|
|
column: util.getArg(mapping, 'originalColumn', null),
|
|
name: name
|
|
};
|
|
}
|
|
}
|
|
|
|
return {
|
|
source: null,
|
|
line: null,
|
|
column: null,
|
|
name: null
|
|
};
|
|
};
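// Illustrative sketch (hypothetical positions, not part of the library):
//
//   consumer.originalPositionFor({ line: 10, column: 35 });
//   // => { source: 'foo.js', line: 2, column: 8, name: 'doThing' }   (values made up)
//
//   // With LEAST_UPPER_BOUND a miss resolves to the closest mapping after the
//   // requested column instead of the closest one before it:
//   consumer.originalPositionFor({ line: 10, column: 35,
//                                  bias: SourceMapConsumer.LEAST_UPPER_BOUND });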
|
|
|
|
/**
|
|
* Return true if we have the source content for every source in the source
|
|
* map, false otherwise.
|
|
*/
|
|
BasicSourceMapConsumer.prototype.hasContentsOfAllSources =
|
|
function BasicSourceMapConsumer_hasContentsOfAllSources() {
|
|
if (!this.sourcesContent) {
|
|
return false;
|
|
}
|
|
return this.sourcesContent.length >= this._sources.size() &&
|
|
!this.sourcesContent.some(function (sc) { return sc == null; });
|
|
};
|
|
|
|
/**
|
|
* Returns the original source content. The only argument is the url of the
|
|
* original source file. Returns null if no original source content is
|
|
* available.
|
|
*/
|
|
BasicSourceMapConsumer.prototype.sourceContentFor =
|
|
function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {
|
|
if (!this.sourcesContent) {
|
|
return null;
|
|
}
|
|
|
|
if (this.sourceRoot != null) {
|
|
aSource = util.relative(this.sourceRoot, aSource);
|
|
}
|
|
|
|
if (this._sources.has(aSource)) {
|
|
return this.sourcesContent[this._sources.indexOf(aSource)];
|
|
}
|
|
|
|
var url;
|
|
if (this.sourceRoot != null
|
|
&& (url = util.urlParse(this.sourceRoot))) {
|
|
// XXX: file:// URIs and absolute paths lead to unexpected behavior for
|
|
// many users. We can help them out when they expect file:// URIs to
|
|
// behave like they would if served by a local HTTP server. See
|
|
// https://bugzilla.mozilla.org/show_bug.cgi?id=885597.
|
|
var fileUriAbsPath = aSource.replace(/^file:\/\//, "");
|
|
if (url.scheme == "file"
|
|
&& this._sources.has(fileUriAbsPath)) {
|
|
return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)];
|
|
}
|
|
|
|
if ((!url.path || url.path == "/")
|
|
&& this._sources.has("/" + aSource)) {
|
|
return this.sourcesContent[this._sources.indexOf("/" + aSource)];
|
|
}
|
|
}
|
|
|
|
// This function is used recursively from
|
|
// IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we
|
|
// don't want to throw if we can't find the source - we just want to
|
|
// return null, so we provide a flag to exit gracefully.
|
|
if (nullOnMissing) {
|
|
return null;
|
|
}
|
|
else {
|
|
throw new Error('"' + aSource + '" is not in the SourceMap.');
|
|
}
|
|
};
|
|
|
|
/**
|
|
* Returns the generated line and column information for the original source,
|
|
* line, and column positions provided. The only argument is an object with
|
|
* the following properties:
|
|
*
|
|
* - source: The filename of the original source.
|
|
* - line: The line number in the original source.
|
|
* - column: The column number in the original source.
|
|
* - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or
|
|
* 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the
|
|
* closest element that is smaller than or greater than the one we are
|
|
* searching for, respectively, if the exact element cannot be found.
|
|
* Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.
|
|
*
|
|
* and an object is returned with the following properties:
|
|
*
|
|
* - line: The line number in the generated source, or null.
|
|
* - column: The column number in the generated source, or null.
|
|
*/
|
|
BasicSourceMapConsumer.prototype.generatedPositionFor =
|
|
function SourceMapConsumer_generatedPositionFor(aArgs) {
|
|
var source = util.getArg(aArgs, 'source');
|
|
if (this.sourceRoot != null) {
|
|
source = util.relative(this.sourceRoot, source);
|
|
}
|
|
if (!this._sources.has(source)) {
|
|
return {
|
|
line: null,
|
|
column: null,
|
|
lastColumn: null
|
|
};
|
|
}
|
|
source = this._sources.indexOf(source);
|
|
|
|
var needle = {
|
|
source: source,
|
|
originalLine: util.getArg(aArgs, 'line'),
|
|
originalColumn: util.getArg(aArgs, 'column')
|
|
};
|
|
|
|
var index = this._findMapping(
|
|
needle,
|
|
this._originalMappings,
|
|
"originalLine",
|
|
"originalColumn",
|
|
util.compareByOriginalPositions,
|
|
util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)
|
|
);
|
|
|
|
if (index >= 0) {
|
|
var mapping = this._originalMappings[index];
|
|
|
|
if (mapping.source === needle.source) {
|
|
return {
|
|
line: util.getArg(mapping, 'generatedLine', null),
|
|
column: util.getArg(mapping, 'generatedColumn', null),
|
|
lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)
|
|
};
|
|
}
|
|
}
|
|
|
|
return {
|
|
line: null,
|
|
column: null,
|
|
lastColumn: null
|
|
};
|
|
};
|
|
|
|
exports.BasicSourceMapConsumer = BasicSourceMapConsumer;
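// Illustrative sketch (hypothetical inputs, values made up; not part of the
// library): the reverse lookup plus embedded source retrieval exposed above.
//
//   consumer.generatedPositionFor({ source: 'foo.js', line: 2, column: 8 });
//   // => { line: 10, column: 35, lastColumn: null }
//
//   if (consumer.hasContentsOfAllSources()) {
//     var text = consumer.sourceContentFor('foo.js');   // embedded sourcesContent entry
//   }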
|
|
|
|
/**
|
|
* An IndexedSourceMapConsumer instance represents a parsed source map which
|
|
* we can query for information. It differs from BasicSourceMapConsumer in
|
|
* that it takes "indexed" source maps (i.e. ones with a "sections" field) as
|
|
* input.
|
|
*
|
|
* The only parameter is a raw source map (either as a JSON string, or already
|
|
* parsed to an object). According to the spec for indexed source maps, they
|
|
* have the following attributes:
|
|
*
|
|
* - version: Which version of the source map spec this map is following.
|
|
* - file: Optional. The generated file this source map is associated with.
|
|
* - sections: A list of section definitions.
|
|
*
|
|
* Each value under the "sections" field has two fields:
|
|
* - offset: The offset into the original specified at which this section
|
|
* begins to apply, defined as an object with a "line" and "column"
|
|
* field.
|
|
* - map: A source map definition. This source map could also be indexed,
|
|
* but doesn't have to be.
|
|
*
|
|
* Instead of the "map" field, it's also possible to have a "url" field
|
|
* specifying a URL to retrieve a source map from, but that's currently
|
|
* unsupported.
|
|
*
|
|
* Here's an example source map, taken from the source map spec[0], but
|
|
* modified to omit a section which uses the "url" field.
|
|
*
|
|
* {
|
|
* version : 3,
|
|
* file: "app.js",
|
|
* sections: [{
|
|
* offset: {line:100, column:10},
|
|
* map: {
|
|
* version : 3,
|
|
* file: "section.js",
|
|
* sources: ["foo.js", "bar.js"],
|
|
* names: ["src", "maps", "are", "fun"],
|
|
* mappings: "AAAA,E;;ABCDE;"
|
|
* }
|
|
* }],
|
|
* }
|
|
*
|
|
* [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt
|
|
*/
|
|
function IndexedSourceMapConsumer(aSourceMap) {
|
|
var sourceMap = aSourceMap;
|
|
if (typeof aSourceMap === 'string') {
|
|
sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, ''));
|
|
}
|
|
|
|
var version = util.getArg(sourceMap, 'version');
|
|
var sections = util.getArg(sourceMap, 'sections');
|
|
|
|
if (version != this._version) {
|
|
throw new Error('Unsupported version: ' + version);
|
|
}
|
|
|
|
this._sources = new ArraySet();
|
|
this._names = new ArraySet();
|
|
|
|
var lastOffset = {
|
|
line: -1,
|
|
column: 0
|
|
};
|
|
this._sections = sections.map(function (s) {
|
|
if (s.url) {
|
|
// The url field will require support for asynchronicity.
|
|
// See https://github.com/mozilla/source-map/issues/16
|
|
throw new Error('Support for url field in sections not implemented.');
|
|
}
|
|
var offset = util.getArg(s, 'offset');
|
|
var offsetLine = util.getArg(offset, 'line');
|
|
var offsetColumn = util.getArg(offset, 'column');
|
|
|
|
if (offsetLine < lastOffset.line ||
|
|
(offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) {
|
|
throw new Error('Section offsets must be ordered and non-overlapping.');
|
|
}
|
|
lastOffset = offset;
|
|
|
|
return {
|
|
generatedOffset: {
|
|
// The offset fields are 0-based, but we use 1-based indices when
|
|
// encoding/decoding from VLQ.
|
|
generatedLine: offsetLine + 1,
|
|
generatedColumn: offsetColumn + 1
|
|
},
|
|
consumer: new SourceMapConsumer(util.getArg(s, 'map'))
|
|
}
|
|
});
|
|
}
|
|
|
|
IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);
|
|
IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer;
|
|
|
|
/**
|
|
* The version of the source mapping spec that we are consuming.
|
|
*/
|
|
IndexedSourceMapConsumer.prototype._version = 3;
|
|
|
|
/**
|
|
* The list of original sources.
|
|
*/
|
|
Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', {
|
|
get: function () {
|
|
var sources = [];
|
|
for (var i = 0; i < this._sections.length; i++) {
|
|
for (var j = 0; j < this._sections[i].consumer.sources.length; j++) {
|
|
sources.push(this._sections[i].consumer.sources[j]);
|
|
}
|
|
}
|
|
return sources;
|
|
}
|
|
});
|
|
|
|
/**
|
|
* Returns the original source, line, and column information for the generated
|
|
* source's line and column positions provided. The only argument is an object
|
|
* with the following properties:
|
|
*
|
|
* - line: The line number in the generated source.
|
|
* - column: The column number in the generated source.
|
|
*
|
|
* and an object is returned with the following properties:
|
|
*
|
|
* - source: The original source file, or null.
|
|
* - line: The line number in the original source, or null.
|
|
* - column: The column number in the original source, or null.
|
|
* - name: The original identifier, or null.
|
|
*/
|
|
IndexedSourceMapConsumer.prototype.originalPositionFor =
|
|
function IndexedSourceMapConsumer_originalPositionFor(aArgs) {
|
|
var needle = {
|
|
generatedLine: util.getArg(aArgs, 'line'),
|
|
generatedColumn: util.getArg(aArgs, 'column')
|
|
};
|
|
|
|
// Find the section containing the generated position we're trying to map
|
|
// to an original position.
|
|
var sectionIndex = binarySearch.search(needle, this._sections,
|
|
function(needle, section) {
|
|
var cmp = needle.generatedLine - section.generatedOffset.generatedLine;
|
|
if (cmp) {
|
|
return cmp;
|
|
}
|
|
|
|
return (needle.generatedColumn -
|
|
section.generatedOffset.generatedColumn);
|
|
});
|
|
var section = this._sections[sectionIndex];
|
|
|
|
if (!section) {
|
|
return {
|
|
source: null,
|
|
line: null,
|
|
column: null,
|
|
name: null
|
|
};
|
|
}
|
|
|
|
return section.consumer.originalPositionFor({
|
|
line: needle.generatedLine -
|
|
(section.generatedOffset.generatedLine - 1),
|
|
column: needle.generatedColumn -
|
|
(section.generatedOffset.generatedLine === needle.generatedLine
|
|
? section.generatedOffset.generatedColumn - 1
|
|
: 0),
|
|
bias: aArgs.bias
|
|
});
|
|
};
|
|
|
|
/**
|
|
* Return true if we have the source content for every source in the source
|
|
* map, false otherwise.
|
|
*/
|
|
IndexedSourceMapConsumer.prototype.hasContentsOfAllSources =
|
|
function IndexedSourceMapConsumer_hasContentsOfAllSources() {
|
|
return this._sections.every(function (s) {
|
|
return s.consumer.hasContentsOfAllSources();
|
|
});
|
|
};
|
|
|
|
/**
|
|
* Returns the original source content. The only argument is the url of the
|
|
* original source file. Returns null if no original source content is
|
|
* available.
|
|
*/
|
|
IndexedSourceMapConsumer.prototype.sourceContentFor =
|
|
function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {
|
|
for (var i = 0; i < this._sections.length; i++) {
|
|
var section = this._sections[i];
|
|
|
|
var content = section.consumer.sourceContentFor(aSource, true);
|
|
if (content) {
|
|
return content;
|
|
}
|
|
}
|
|
if (nullOnMissing) {
|
|
return null;
|
|
}
|
|
else {
|
|
throw new Error('"' + aSource + '" is not in the SourceMap.');
|
|
}
|
|
};
|
|
|
|
/**
|
|
* Returns the generated line and column information for the original source,
|
|
* line, and column positions provided. The only argument is an object with
|
|
* the following properties:
|
|
*
|
|
* - source: The filename of the original source.
|
|
* - line: The line number in the original source.
|
|
* - column: The column number in the original source.
|
|
*
|
|
* and an object is returned with the following properties:
|
|
*
|
|
* - line: The line number in the generated source, or null.
|
|
* - column: The column number in the generated source, or null.
|
|
*/
|
|
IndexedSourceMapConsumer.prototype.generatedPositionFor =
|
|
function IndexedSourceMapConsumer_generatedPositionFor(aArgs) {
|
|
for (var i = 0; i < this._sections.length; i++) {
|
|
var section = this._sections[i];
|
|
|
|
// Only consider this section if the requested source is in the list of
|
|
// sources of the consumer.
|
|
if (section.consumer.sources.indexOf(util.getArg(aArgs, 'source')) === -1) {
|
|
continue;
|
|
}
|
|
var generatedPosition = section.consumer.generatedPositionFor(aArgs);
|
|
if (generatedPosition) {
|
|
var ret = {
|
|
line: generatedPosition.line +
|
|
(section.generatedOffset.generatedLine - 1),
|
|
column: generatedPosition.column +
|
|
(section.generatedOffset.generatedLine === generatedPosition.line
|
|
? section.generatedOffset.generatedColumn - 1
|
|
: 0)
|
|
};
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
return {
|
|
line: null,
|
|
column: null
|
|
};
|
|
};
|
|
|
|
/**
|
|
* Parse the mappings in a string into a data structure which we can easily
|
|
* query (the ordered arrays in the `this.__generatedMappings` and
|
|
* `this.__originalMappings` properties).
|
|
*/
|
|
IndexedSourceMapConsumer.prototype._parseMappings =
|
|
function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) {
|
|
this.__generatedMappings = [];
|
|
this.__originalMappings = [];
|
|
for (var i = 0; i < this._sections.length; i++) {
|
|
var section = this._sections[i];
|
|
var sectionMappings = section.consumer._generatedMappings;
|
|
for (var j = 0; j < sectionMappings.length; j++) {
|
|
var mapping = sectionMappings[j];
|
|
|
|
var source = section.consumer._sources.at(mapping.source);
|
|
if (section.consumer.sourceRoot !== null) {
|
|
source = util.join(section.consumer.sourceRoot, source);
|
|
}
|
|
this._sources.add(source);
|
|
source = this._sources.indexOf(source);
|
|
|
|
var name = section.consumer._names.at(mapping.name);
|
|
this._names.add(name);
|
|
name = this._names.indexOf(name);
|
|
|
|
// The mappings coming from the consumer for the section have
|
|
// generated positions relative to the start of the section, so we
|
|
// need to offset them to be relative to the start of the concatenated
|
|
// generated file.
|
|
var adjustedMapping = {
|
|
source: source,
|
|
generatedLine: mapping.generatedLine +
|
|
(section.generatedOffset.generatedLine - 1),
|
|
generatedColumn: mapping.generatedColumn +
|
|
(section.generatedOffset.generatedLine === mapping.generatedLine
|
|
? section.generatedOffset.generatedColumn - 1
|
|
: 0),
|
|
originalLine: mapping.originalLine,
|
|
originalColumn: mapping.originalColumn,
|
|
name: name
|
|
};
|
|
|
|
this.__generatedMappings.push(adjustedMapping);
|
|
if (typeof adjustedMapping.originalLine === 'number') {
|
|
this.__originalMappings.push(adjustedMapping);
|
|
}
|
|
}
|
|
}
|
|
|
|
quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated);
|
|
quickSort(this.__originalMappings, util.compareByOriginalPositions);
|
|
};
|
|
|
|
exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer;
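// Offset sketch (conceptual, numbers hypothetical; not part of the library):
// lookups against an indexed map first binary-search the section containing
// the generated position, then delegate to that section's inner consumer with
// the section offset removed. With a section at offset { line: 100, column: 10 },
// a query for generated line 101 is forwarded to the inner consumer as line
// 101 - 100 = 1, and the column offset of 10 is subtracted only when the query
// sits on the section's first generated line.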
|
|
|
|
|
|
/***/ },
|
|
/* 8 */
|
|
/***/ function(module, exports) {
|
|
|
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
|
/*
|
|
* Copyright 2011 Mozilla Foundation and contributors
|
|
* Licensed under the New BSD license. See LICENSE or:
|
|
* http://opensource.org/licenses/BSD-3-Clause
|
|
*/
|
|
|
|
exports.GREATEST_LOWER_BOUND = 1;
|
|
exports.LEAST_UPPER_BOUND = 2;
|
|
|
|
/**
|
|
* Recursive implementation of binary search.
|
|
*
|
|
* @param aLow Indices here and lower do not contain the needle.
|
|
* @param aHigh Indices here and higher do not contain the needle.
|
|
* @param aNeedle The element being searched for.
|
|
* @param aHaystack The non-empty array being searched.
|
|
* @param aCompare Function which takes two elements and returns -1, 0, or 1.
|
|
* @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or
|
|
* 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the
|
|
* closest element that is smaller than or greater than the one we are
|
|
* searching for, respectively, if the exact element cannot be found.
|
|
*/
|
|
function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) {
|
|
// This function terminates when one of the following is true:
|
|
//
|
|
// 1. We find the exact element we are looking for.
|
|
//
|
|
// 2. We did not find the exact element, but we can return the index of
|
|
// the next-closest element.
|
|
//
|
|
// 3. We did not find the exact element, and there is no next-closest
|
|
// element than the one we are searching for, so we return -1.
|
|
var mid = Math.floor((aHigh - aLow) / 2) + aLow;
|
|
var cmp = aCompare(aNeedle, aHaystack[mid], true);
|
|
if (cmp === 0) {
|
|
// Found the element we are looking for.
|
|
return mid;
|
|
}
|
|
else if (cmp > 0) {
|
|
// Our needle is greater than aHaystack[mid].
|
|
if (aHigh - mid > 1) {
|
|
// The element is in the upper half.
|
|
return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias);
|
|
}
|
|
|
|
// The exact needle element was not found in this haystack. Determine if
|
|
// we are in termination case (3) or (2) and return the appropriate thing.
|
|
if (aBias == exports.LEAST_UPPER_BOUND) {
|
|
return aHigh < aHaystack.length ? aHigh : -1;
|
|
} else {
|
|
return mid;
|
|
}
|
|
}
|
|
else {
|
|
// Our needle is less than aHaystack[mid].
|
|
if (mid - aLow > 1) {
|
|
// The element is in the lower half.
|
|
return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias);
|
|
}
|
|
|
|
// The exact needle element was not found in this haystack. Determine if
// we are in termination case (3) or (2) and return the appropriate thing.
|
|
if (aBias == exports.LEAST_UPPER_BOUND) {
|
|
return mid;
|
|
} else {
|
|
return aLow < 0 ? -1 : aLow;
|
|
}
|
|
}
|
|
}
|
|
|
|
/**
|
|
* This is an implementation of binary search which will always try and return
|
|
* the index of the closest element if there is no exact hit. This is because
|
|
* mappings between original and generated line/col pairs are single points,
|
|
* and there is an implicit region between each of them, so a miss just means
|
|
* that you aren't on the very start of a region.
|
|
*
|
|
* @param aNeedle The element you are looking for.
|
|
* @param aHaystack The array that is being searched.
|
|
* @param aCompare A function which takes the needle and an element in the
|
|
* array and returns -1, 0, or 1 depending on whether the needle is less
|
|
* than, equal to, or greater than the element, respectively.
|
|
* @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or
|
|
* 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the
|
|
* closest element that is smaller than or greater than the one we are
|
|
* searching for, respectively, if the exact element cannot be found.
|
|
* Defaults to 'binarySearch.GREATEST_LOWER_BOUND'.
|
|
*/
|
|
exports.search = function search(aNeedle, aHaystack, aCompare, aBias) {
|
|
if (aHaystack.length === 0) {
|
|
return -1;
|
|
}
|
|
|
|
var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack,
|
|
aCompare, aBias || exports.GREATEST_LOWER_BOUND);
|
|
if (index < 0) {
|
|
return -1;
|
|
}
|
|
|
|
// We have found either the exact element, or the next-closest element than
|
|
// the one we are searching for. However, there may be more than one such
|
|
// element. Make sure we always return the smallest of these.
|
|
while (index - 1 >= 0) {
|
|
if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) {
|
|
break;
|
|
}
|
|
--index;
|
|
}
|
|
|
|
return index;
|
|
};
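// Illustrative sketch (hypothetical haystack, not part of the library): when
// there is no exact hit, the bias decides which neighbour is returned, and
// equal elements always resolve to the smallest index.
//
//   var haystack = [2, 4, 4, 8];
//   var cmp = function (a, b) { return a - b; };
//   exports.search(5, haystack, cmp);                              // => 1: closest value <= 5 is 4,
//                                                                  //    first of the equal 4s wins
//   exports.search(5, haystack, cmp, exports.LEAST_UPPER_BOUND);   // => 3: closest value >= 5 is 8
//   exports.search(1, haystack, cmp);                              // => -1: nothing at or below 1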
|
|
|
|
|
|
/***/ },
|
|
/* 9 */
|
|
/***/ function(module, exports) {
|
|
|
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
|
/*
|
|
* Copyright 2011 Mozilla Foundation and contributors
|
|
* Licensed under the New BSD license. See LICENSE or:
|
|
* http://opensource.org/licenses/BSD-3-Clause
|
|
*/
|
|
|
|
// It turns out that some (most?) JavaScript engines don't self-host
|
|
// `Array.prototype.sort`. This makes sense because C++ will likely remain
|
|
// faster than JS when doing raw CPU-intensive sorting. However, when using a
|
|
// custom comparator function, calling back and forth between the VM's C++ and
|
|
// JIT'd JS is rather slow *and* loses JIT type information, resulting in
|
|
// worse generated code for the comparator function than would be optimal. In
|
|
// fact, when sorting with a comparator, these costs outweigh the benefits of
|
|
// sorting in C++. By using our own JS-implemented Quick Sort (below), we get
|
|
// a ~3500ms mean speed-up in `bench/bench.html`.
|
|
|
|
/**
|
|
* Swap the elements indexed by `x` and `y` in the array `ary`.
|
|
*
|
|
* @param {Array} ary
|
|
* The array.
|
|
* @param {Number} x
|
|
* The index of the first item.
|
|
* @param {Number} y
|
|
* The index of the second item.
|
|
*/
|
|
function swap(ary, x, y) {
|
|
var temp = ary[x];
|
|
ary[x] = ary[y];
|
|
ary[y] = temp;
|
|
}
|
|
|
|
/**
|
|
* Returns a random integer within the range `low .. high` inclusive.
|
|
*
|
|
* @param {Number} low
|
|
* The lower bound on the range.
|
|
* @param {Number} high
|
|
* The upper bound on the range.
|
|
*/
|
|
function randomIntInRange(low, high) {
|
|
return Math.round(low + (Math.random() * (high - low)));
|
|
}
|
|
|
|
/**
|
|
* The Quick Sort algorithm.
|
|
*
|
|
* @param {Array} ary
|
|
* An array to sort.
|
|
* @param {function} comparator
|
|
* Function to use to compare two items.
|
|
* @param {Number} p
|
|
* Start index of the array
|
|
* @param {Number} r
|
|
* End index of the array
|
|
*/
|
|
function doQuickSort(ary, comparator, p, r) {
|
|
// If our lower bound is less than our upper bound, we (1) partition the
|
|
// array into two pieces and (2) recurse on each half. If it is not, this is
|
|
// the empty array and our base case.
|
|
|
|
if (p < r) {
|
|
// (1) Partitioning.
|
|
//
|
|
// The partitioning chooses a pivot between `p` and `r` and moves all
|
|
// elements that are less than or equal to the pivot before it, and
|
|
// all the elements that are greater than it after it. The effect is that
|
|
// once partition is done, the pivot is in the exact place it will be when
|
|
// the array is put in sorted order, and it will not need to be moved
|
|
// again. This runs in O(n) time.
|
|
|
|
// Always choose a random pivot so that an input array which is reverse
|
|
// sorted does not cause O(n^2) running time.
|
|
var pivotIndex = randomIntInRange(p, r);
|
|
var i = p - 1;
|
|
|
|
swap(ary, pivotIndex, r);
|
|
var pivot = ary[r];
|
|
|
|
// Immediately after `j` is incremented in this loop, the following hold
|
|
// true:
|
|
//
|
|
// * Every element in `ary[p .. i]` is less than or equal to the pivot.
|
|
//
|
|
// * Every element in `ary[i+1 .. j-1]` is greater than the pivot.
|
|
for (var j = p; j < r; j++) {
|
|
if (comparator(ary[j], pivot) <= 0) {
|
|
i += 1;
|
|
swap(ary, i, j);
|
|
}
|
|
}
|
|
|
|
swap(ary, i + 1, j);
|
|
var q = i + 1;
|
|
|
|
// (2) Recurse on each half.
|
|
|
|
doQuickSort(ary, comparator, p, q - 1);
|
|
doQuickSort(ary, comparator, q + 1, r);
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Sort the given array in-place with the given comparator function.
|
|
*
|
|
* @param {Array} ary
|
|
* An array to sort.
|
|
* @param {function} comparator
|
|
* Function to use to compare two items.
|
|
*/
|
|
exports.quickSort = function (ary, comparator) {
|
|
doQuickSort(ary, comparator, 0, ary.length - 1);
|
|
};
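// Illustrative sketch (hypothetical data, not part of the library): quickSort
// sorts in place using the supplied comparator, which is how the consumers
// above order their mapping arrays.
//
//   var mappings = [{ generatedLine: 2 }, { generatedLine: 1 }];
//   exports.quickSort(mappings, function (a, b) {
//     return a.generatedLine - b.generatedLine;
//   });
//   // mappings is now ordered by generatedLine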
|
|
|
|
|
|
/***/ },
|
|
/* 10 */
|
|
/***/ function(module, exports, __webpack_require__) {
|
|
|
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
|
/*
|
|
* Copyright 2011 Mozilla Foundation and contributors
|
|
* Licensed under the New BSD license. See LICENSE or:
|
|
* http://opensource.org/licenses/BSD-3-Clause
|
|
*/
|
|
|
|
var SourceMapGenerator = __webpack_require__(1).SourceMapGenerator;
|
|
var util = __webpack_require__(4);
|
|
|
|
// Matches a Windows-style `\r\n` newline or a `\n` newline used by all other
|
|
// operating systems these days (capturing the result).
|
|
var REGEX_NEWLINE = /(\r?\n)/;
|
|
|
|
// Newline character code for charCodeAt() comparisons
|
|
var NEWLINE_CODE = 10;
|
|
|
|
// Private symbol for identifying `SourceNode`s when multiple versions of
|
|
// the source-map library are loaded. This MUST NOT CHANGE across
|
|
// versions!
|
|
var isSourceNode = "$$$isSourceNode$$$";
|
|
|
|
/**
|
|
* SourceNodes provide a way to abstract over interpolating/concatenating
|
|
* snippets of generated JavaScript source code while maintaining the line and
|
|
* column information associated with the original source code.
|
|
*
|
|
* @param aLine The original line number.
|
|
* @param aColumn The original column number.
|
|
* @param aSource The original source's filename.
|
|
* @param aChunks Optional. An array of strings which are snippets of
|
|
* generated JS, or other SourceNodes.
|
|
* @param aName The original identifier.
|
|
*/
|
|
function SourceNode(aLine, aColumn, aSource, aChunks, aName) {
|
|
this.children = [];
|
|
this.sourceContents = {};
|
|
this.line = aLine == null ? null : aLine;
|
|
this.column = aColumn == null ? null : aColumn;
|
|
this.source = aSource == null ? null : aSource;
|
|
this.name = aName == null ? null : aName;
|
|
this[isSourceNode] = true;
|
|
if (aChunks != null) this.add(aChunks);
|
|
}
|
|
|
|
/**
|
|
* Creates a SourceNode from generated code and a SourceMapConsumer.
|
|
*
|
|
* @param aGeneratedCode The generated code
|
|
* @param aSourceMapConsumer The SourceMap for the generated code
|
|
* @param aRelativePath Optional. The path that relative sources in the
|
|
* SourceMapConsumer should be relative to.
|
|
*/
|
|
SourceNode.fromStringWithSourceMap =
|
|
function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) {
|
|
// The SourceNode we want to fill with the generated code
|
|
// and the SourceMap
|
|
var node = new SourceNode();
|
|
|
|
// All even indices of this array are one line of the generated code,
|
|
// while all odd indices are the newlines between two adjacent lines
|
|
// (since `REGEX_NEWLINE` captures its match).
|
|
// Processed fragments are removed from this array, by calling `shiftNextLine`.
|
|
var remainingLines = aGeneratedCode.split(REGEX_NEWLINE);
|
|
var shiftNextLine = function() {
|
|
var lineContents = remainingLines.shift();
|
|
// The last line of a file might not have a newline.
|
|
var newLine = remainingLines.shift() || "";
|
|
return lineContents + newLine;
|
|
};
|
|
|
|
// We need to remember the position of "remainingLines"
|
|
var lastGeneratedLine = 1, lastGeneratedColumn = 0;
|
|
|
|
// To generate SourceNodes we need a code range.
// To extract it, the current and the last mapping are used.
|
|
// Here we store the last mapping.
|
|
var lastMapping = null;
|
|
|
|
aSourceMapConsumer.eachMapping(function (mapping) {
|
|
if (lastMapping !== null) {
|
|
// We add the code from "lastMapping" to "mapping":
|
|
// First check if there is a new line in between.
|
|
if (lastGeneratedLine < mapping.generatedLine) {
|
|
// Associate first line with "lastMapping"
|
|
addMappingWithCode(lastMapping, shiftNextLine());
|
|
lastGeneratedLine++;
|
|
lastGeneratedColumn = 0;
|
|
// The remaining code is added without mapping
|
|
} else {
|
|
// There is no new line in between.
|
|
// Associate the code between "lastGeneratedColumn" and
|
|
// "mapping.generatedColumn" with "lastMapping"
|
|
var nextLine = remainingLines[0];
|
|
var code = nextLine.substr(0, mapping.generatedColumn -
|
|
lastGeneratedColumn);
|
|
remainingLines[0] = nextLine.substr(mapping.generatedColumn -
|
|
lastGeneratedColumn);
|
|
lastGeneratedColumn = mapping.generatedColumn;
|
|
addMappingWithCode(lastMapping, code);
|
|
// No more remaining code, continue
|
|
lastMapping = mapping;
|
|
return;
|
|
}
|
|
}
|
|
// We add the generated code until the first mapping
|
|
// to the SourceNode without any mapping.
|
|
// Each line is added as separate string.
|
|
while (lastGeneratedLine < mapping.generatedLine) {
|
|
node.add(shiftNextLine());
|
|
lastGeneratedLine++;
|
|
}
|
|
if (lastGeneratedColumn < mapping.generatedColumn) {
|
|
var nextLine = remainingLines[0];
|
|
node.add(nextLine.substr(0, mapping.generatedColumn));
|
|
remainingLines[0] = nextLine.substr(mapping.generatedColumn);
|
|
lastGeneratedColumn = mapping.generatedColumn;
|
|
}
|
|
lastMapping = mapping;
|
|
}, this);
|
|
// We have processed all mappings.
|
|
if (remainingLines.length > 0) {
|
|
if (lastMapping) {
|
|
// Associate the remaining code in the current line with "lastMapping"
|
|
addMappingWithCode(lastMapping, shiftNextLine());
|
|
}
|
|
// and add the remaining lines without any mapping
|
|
node.add(remainingLines.join(""));
|
|
}
|
|
|
|
// Copy sourcesContent into SourceNode
|
|
aSourceMapConsumer.sources.forEach(function (sourceFile) {
|
|
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
|
|
if (content != null) {
|
|
if (aRelativePath != null) {
|
|
sourceFile = util.join(aRelativePath, sourceFile);
|
|
}
|
|
node.setSourceContent(sourceFile, content);
|
|
}
|
|
});
|
|
|
|
return node;
|
|
|
|
function addMappingWithCode(mapping, code) {
|
|
if (mapping === null || mapping.source === undefined) {
|
|
node.add(code);
|
|
} else {
|
|
var source = aRelativePath
|
|
? util.join(aRelativePath, mapping.source)
|
|
: mapping.source;
|
|
node.add(new SourceNode(mapping.originalLine,
|
|
mapping.originalColumn,
|
|
source,
|
|
code,
|
|
mapping.name));
|
|
}
|
|
}
|
|
};
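// Illustrative sketch (hypothetical inputs and names, not part of the library):
// rebuild a SourceNode tree from already-generated code plus its map, then
// re-emit both the code and a fresh map.
//
//   var node = SourceNode.fromStringWithSourceMap(generatedJs,
//                                                 new SourceMapConsumer(rawMap));
//   var out = node.toStringWithSourceMap({ file: 'bundle.js' });
//   // out.code is the concatenated source, out.map a SourceMapGenerator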
|
|
|
|
/**
|
|
* Add a chunk of generated JS to this source node.
|
|
*
|
|
* @param aChunk A string snippet of generated JS code, another instance of
|
|
* SourceNode, or an array where each member is one of those things.
|
|
*/
|
|
SourceNode.prototype.add = function SourceNode_add(aChunk) {
|
|
if (Array.isArray(aChunk)) {
|
|
aChunk.forEach(function (chunk) {
|
|
this.add(chunk);
|
|
}, this);
|
|
}
|
|
else if (aChunk[isSourceNode] || typeof aChunk === "string") {
|
|
if (aChunk) {
|
|
this.children.push(aChunk);
|
|
}
|
|
}
|
|
else {
|
|
throw new TypeError(
|
|
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
|
|
);
|
|
}
|
|
return this;
|
|
};
|
|
|
|
/**
|
|
* Add a chunk of generated JS to the beginning of this source node.
|
|
*
|
|
* @param aChunk A string snippet of generated JS code, another instance of
|
|
* SourceNode, or an array where each member is one of those things.
|
|
*/
|
|
SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) {
|
|
if (Array.isArray(aChunk)) {
|
|
for (var i = aChunk.length-1; i >= 0; i--) {
|
|
this.prepend(aChunk[i]);
|
|
}
|
|
}
|
|
else if (aChunk[isSourceNode] || typeof aChunk === "string") {
|
|
this.children.unshift(aChunk);
|
|
}
|
|
else {
|
|
throw new TypeError(
|
|
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
|
|
);
|
|
}
|
|
return this;
|
|
};
|
|
|
|
/**
|
|
* Walk over the tree of JS snippets in this node and its children. The
|
|
* walking function is called once for each snippet of JS and is passed that
|
|
* snippet and its original associated source's line/column location.
|
|
*
|
|
* @param aFn The traversal function.
|
|
*/
|
|
SourceNode.prototype.walk = function SourceNode_walk(aFn) {
|
|
var chunk;
|
|
for (var i = 0, len = this.children.length; i < len; i++) {
|
|
chunk = this.children[i];
|
|
if (chunk[isSourceNode]) {
|
|
chunk.walk(aFn);
|
|
}
|
|
else {
|
|
if (chunk !== '') {
|
|
aFn(chunk, { source: this.source,
|
|
line: this.line,
|
|
column: this.column,
|
|
name: this.name });
|
|
}
|
|
}
|
|
}
|
|
};
|
|
|
|
/**
|
|
* Like `Array.prototype.join` except for SourceNodes. Inserts `aSep` between
|
|
* each of `this.children`.
|
|
*
|
|
* @param aSep The separator.
|
|
*/
|
|
SourceNode.prototype.join = function SourceNode_join(aSep) {
|
|
var newChildren;
|
|
var i;
|
|
var len = this.children.length;
|
|
if (len > 0) {
|
|
newChildren = [];
|
|
for (i = 0; i < len-1; i++) {
|
|
newChildren.push(this.children[i]);
|
|
newChildren.push(aSep);
|
|
}
|
|
newChildren.push(this.children[i]);
|
|
this.children = newChildren;
|
|
}
|
|
return this;
|
|
};
|
|
|
|
/**
|
|
* Call String.prototype.replace on the very right-most source snippet. Useful
|
|
* for trimming whitespace from the end of a source node, etc.
|
|
*
|
|
* @param aPattern The pattern to replace.
|
|
* @param aReplacement The thing to replace the pattern with.
|
|
*/
|
|
SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) {
|
|
var lastChild = this.children[this.children.length - 1];
|
|
if (lastChild[isSourceNode]) {
|
|
lastChild.replaceRight(aPattern, aReplacement);
|
|
}
|
|
else if (typeof lastChild === 'string') {
|
|
this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);
|
|
}
|
|
else {
|
|
this.children.push(''.replace(aPattern, aReplacement));
|
|
}
|
|
return this;
|
|
};
|
|
|
|
/**
|
|
* Set the source content for a source file. This will be added to the SourceMapGenerator
|
|
* in the sourcesContent field.
|
|
*
|
|
* @param aSourceFile The filename of the source file
|
|
* @param aSourceContent The content of the source file
|
|
*/
|
|
SourceNode.prototype.setSourceContent =
|
|
function SourceNode_setSourceContent(aSourceFile, aSourceContent) {
|
|
this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;
|
|
};
|
|
|
|
/**
|
|
* Walk over the tree of SourceNodes. The walking function is called for each
|
|
* source file content and is passed the filename and source content.
|
|
*
|
|
* @param aFn The traversal function.
|
|
*/
|
|
SourceNode.prototype.walkSourceContents =
|
|
function SourceNode_walkSourceContents(aFn) {
|
|
for (var i = 0, len = this.children.length; i < len; i++) {
|
|
if (this.children[i][isSourceNode]) {
|
|
this.children[i].walkSourceContents(aFn);
|
|
}
|
|
}
|
|
|
|
var sources = Object.keys(this.sourceContents);
|
|
for (var i = 0, len = sources.length; i < len; i++) {
|
|
aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]);
|
|
}
|
|
};
|
|
|
|
/**
|
|
* Return the string representation of this source node. Walks over the tree
|
|
* and concatenates all the various snippets together to one string.
|
|
*/
|
|
SourceNode.prototype.toString = function SourceNode_toString() {
|
|
var str = "";
|
|
this.walk(function (chunk) {
|
|
str += chunk;
|
|
});
|
|
return str;
|
|
};
|
|
|
|
/**
|
|
* Returns the string representation of this source node along with a source
|
|
* map.
|
|
*/
|
|
SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) {
|
|
var generated = {
|
|
code: "",
|
|
line: 1,
|
|
column: 0
|
|
};
|
|
var map = new SourceMapGenerator(aArgs);
|
|
var sourceMappingActive = false;
|
|
var lastOriginalSource = null;
|
|
var lastOriginalLine = null;
|
|
var lastOriginalColumn = null;
|
|
var lastOriginalName = null;
|
|
this.walk(function (chunk, original) {
|
|
generated.code += chunk;
|
|
if (original.source !== null
|
|
&& original.line !== null
|
|
&& original.column !== null) {
|
|
if(lastOriginalSource !== original.source
|
|
|| lastOriginalLine !== original.line
|
|
|| lastOriginalColumn !== original.column
|
|
|| lastOriginalName !== original.name) {
|
|
map.addMapping({
|
|
source: original.source,
|
|
original: {
|
|
line: original.line,
|
|
column: original.column
|
|
},
|
|
generated: {
|
|
line: generated.line,
|
|
column: generated.column
|
|
},
|
|
name: original.name
|
|
});
|
|
}
|
|
lastOriginalSource = original.source;
|
|
lastOriginalLine = original.line;
|
|
lastOriginalColumn = original.column;
|
|
lastOriginalName = original.name;
|
|
sourceMappingActive = true;
|
|
} else if (sourceMappingActive) {
|
|
map.addMapping({
|
|
generated: {
|
|
line: generated.line,
|
|
column: generated.column
|
|
}
|
|
});
|
|
lastOriginalSource = null;
|
|
sourceMappingActive = false;
|
|
}
|
|
for (var idx = 0, length = chunk.length; idx < length; idx++) {
|
|
if (chunk.charCodeAt(idx) === NEWLINE_CODE) {
|
|
generated.line++;
|
|
generated.column = 0;
|
|
// Mappings end at eol
|
|
if (idx + 1 === length) {
|
|
lastOriginalSource = null;
|
|
sourceMappingActive = false;
|
|
} else if (sourceMappingActive) {
|
|
map.addMapping({
|
|
source: original.source,
|
|
original: {
|
|
line: original.line,
|
|
column: original.column
|
|
},
|
|
generated: {
|
|
line: generated.line,
|
|
column: generated.column
|
|
},
|
|
name: original.name
|
|
});
|
|
}
|
|
} else {
|
|
generated.column++;
|
|
}
|
|
}
|
|
});
|
|
this.walkSourceContents(function (sourceFile, sourceContent) {
|
|
map.setSourceContent(sourceFile, sourceContent);
|
|
});
|
|
|
|
return { code: generated.code, map: map };
|
|
};
|
|
|
|
exports.SourceNode = SourceNode;
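// Example (illustrative sketch, not part of the library): building a node
// tree and emitting code together with its source map. This assumes the
// SourceNode(line, column, source, chunk[, name]) constructor defined
// earlier in this bundle.
//
//   var node = new SourceNode(1, 0, "a.js", "var answer = ");
//   node.add(new SourceNode(1, 13, "a.js", "40 + 2"));
//   node.add(";\n");
//   node.prepend("// generated\n");
//   var out = node.toStringWithSourceMap({ file: "bundle.js" });
//   // out.code -> the concatenated string, out.map -> a SourceMapGenerator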
/***/ }
|
|
/******/ ])
|
|
});
|
|
;//Distributed under the BSD license:
|
|
//Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
|
define('uglifyjs', ['exports', 'source-map', 'logger', 'env!env/file'], function (exports, MOZ_SourceMap, logger, rjsFile) {
|
|
|
|
/***********************************************************************
|
|
|
|
A JavaScript tokenizer / parser / beautifier / compressor.
|
|
https://github.com/mishoo/UglifyJS2
|
|
|
|
-------------------------------- (C) ---------------------------------
|
|
|
|
Author: Mihai Bazon
|
|
<mihai.bazon@gmail.com>
|
|
http://mihai.bazon.net/blog
|
|
|
|
Distributed under the BSD license:
|
|
|
|
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
|
|
|
Redistribution and use in source and binary forms, with or without
|
|
modification, are permitted provided that the following conditions
|
|
are met:
|
|
|
|
* Redistributions of source code must retain the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer.
|
|
|
|
* Redistributions in binary form must reproduce the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer in the documentation and/or other materials
|
|
provided with the distribution.
|
|
|
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
|
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
|
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
|
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
|
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
|
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
|
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
|
SUCH DAMAGE.
|
|
|
|
***********************************************************************/
|
|
|
|
"use strict";
|
|
|
|
function array_to_hash(a) {
|
|
var ret = Object.create(null);
|
|
for (var i = 0; i < a.length; ++i)
|
|
ret[a[i]] = true;
|
|
return ret;
|
|
};
|
|
|
|
function slice(a, start) {
|
|
return Array.prototype.slice.call(a, start || 0);
|
|
};
|
|
|
|
function characters(str) {
|
|
return str.split("");
|
|
};
|
|
|
|
function member(name, array) {
|
|
return array.indexOf(name) >= 0;
|
|
};
|
|
|
|
function find_if(func, array) {
|
|
for (var i = 0, n = array.length; i < n; ++i) {
|
|
if (func(array[i]))
|
|
return array[i];
|
|
}
|
|
};
|
|
|
|
function repeat_string(str, i) {
|
|
if (i <= 0) return "";
|
|
if (i == 1) return str;
|
|
var d = repeat_string(str, i >> 1);
|
|
d += d;
|
|
if (i & 1) d += str;
|
|
return d;
|
|
};
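// Example (illustrative): the halving recursion doubles the partial result,
// so only O(log i) string concatenations are performed.
//
//   repeat_string("ab", 3);  // "ababab"
//   repeat_string("-", 0);   // ""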
function configure_error_stack(fn) {
|
|
Object.defineProperty(fn.prototype, "stack", {
|
|
get: function() {
|
|
var err = new Error(this.message);
|
|
err.name = this.name;
|
|
try {
|
|
throw err;
|
|
} catch(e) {
|
|
return e.stack;
|
|
}
|
|
}
|
|
});
|
|
}
|
|
|
|
function DefaultsError(msg, defs) {
|
|
this.message = msg;
|
|
this.defs = defs;
|
|
};
|
|
DefaultsError.prototype = Object.create(Error.prototype);
|
|
DefaultsError.prototype.constructor = DefaultsError;
|
|
DefaultsError.prototype.name = "DefaultsError";
|
|
configure_error_stack(DefaultsError);
|
|
|
|
DefaultsError.croak = function(msg, defs) {
|
|
throw new DefaultsError(msg, defs);
|
|
};
|
|
|
|
function defaults(args, defs, croak) {
|
|
if (args === true)
|
|
args = {};
|
|
var ret = args || {};
|
|
if (croak) for (var i in ret) if (HOP(ret, i) && !HOP(defs, i))
|
|
DefaultsError.croak("`" + i + "` is not a supported option", defs);
|
|
for (var i in defs) if (HOP(defs, i)) {
|
|
ret[i] = (args && HOP(args, i)) ? args[i] : defs[i];
|
|
}
|
|
return ret;
|
|
};
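// Example (illustrative): fill unspecified options from `defs`, and croak on
// unknown keys when the third argument is truthy. The option names shown are
// only placeholders.
//
//   defaults({ max_line_len: 80 }, { max_line_len: 32000, beautify: false });
//   // -> { max_line_len: 80, beautify: false }
//   defaults({ beutify: true }, { beautify: false }, true);
//   // -> throws DefaultsError: "`beutify` is not a supported option"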
function merge(obj, ext) {
|
|
var count = 0;
|
|
for (var i in ext) if (HOP(ext, i)) {
|
|
obj[i] = ext[i];
|
|
count++;
|
|
}
|
|
return count;
|
|
};
|
|
|
|
function noop() {}
|
|
function return_false() { return false; }
|
|
function return_true() { return true; }
|
|
function return_this() { return this; }
|
|
function return_null() { return null; }
|
|
|
|
var MAP = (function(){
|
|
function MAP(a, f, backwards) {
|
|
var ret = [], top = [], i;
|
|
function doit() {
|
|
var val = f(a[i], i);
|
|
var is_last = val instanceof Last;
|
|
if (is_last) val = val.v;
|
|
if (val instanceof AtTop) {
|
|
val = val.v;
|
|
if (val instanceof Splice) {
|
|
top.push.apply(top, backwards ? val.v.slice().reverse() : val.v);
|
|
} else {
|
|
top.push(val);
|
|
}
|
|
}
|
|
else if (val !== skip) {
|
|
if (val instanceof Splice) {
|
|
ret.push.apply(ret, backwards ? val.v.slice().reverse() : val.v);
|
|
} else {
|
|
ret.push(val);
|
|
}
|
|
}
|
|
return is_last;
|
|
};
|
|
if (a instanceof Array) {
|
|
if (backwards) {
|
|
for (i = a.length; --i >= 0;) if (doit()) break;
|
|
ret.reverse();
|
|
top.reverse();
|
|
} else {
|
|
for (i = 0; i < a.length; ++i) if (doit()) break;
|
|
}
|
|
}
|
|
else {
|
|
for (i in a) if (HOP(a, i)) if (doit()) break;
|
|
}
|
|
return top.concat(ret);
|
|
};
|
|
MAP.at_top = function(val) { return new AtTop(val) };
|
|
MAP.splice = function(val) { return new Splice(val) };
|
|
MAP.last = function(val) { return new Last(val) };
|
|
var skip = MAP.skip = {};
|
|
function AtTop(val) { this.v = val };
|
|
function Splice(val) { this.v = val };
|
|
function Last(val) { this.v = val };
|
|
return MAP;
|
|
})();
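// Example (illustrative): MAP applies `f` over an array (or the own
// properties of an object) and honours a few special return values:
//   MAP.skip          drop this element from the result
//   MAP.splice(list)  replace it with several elements
//   MAP.at_top(v)     move the value to the front of the result
//   MAP.last(v)       keep the value and stop iterating
//
//   MAP([1, 2, 3, 4], function(x) {
//       return x % 2 ? MAP.skip : MAP.splice([x, x * 10]);
//   });
//   // -> [2, 20, 4, 40]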
function push_uniq(array, el) {
|
|
if (array.indexOf(el) < 0)
|
|
array.push(el);
|
|
};
|
|
|
|
function string_template(text, props) {
|
|
return text.replace(/\{(.+?)\}/g, function(str, p){
|
|
return props && props[p];
|
|
});
|
|
};
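// Example (illustrative):
//   string_template("{file}:{line},{col}", { file: "a.js", line: 3, col: 1 });
//   // -> "a.js:3,1"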
function remove(array, el) {
|
|
for (var i = array.length; --i >= 0;) {
|
|
if (array[i] === el) array.splice(i, 1);
|
|
}
|
|
};
|
|
|
|
function mergeSort(array, cmp) {
|
|
if (array.length < 2) return array.slice();
|
|
function merge(a, b) {
|
|
var r = [], ai = 0, bi = 0, i = 0;
|
|
while (ai < a.length && bi < b.length) {
|
|
cmp(a[ai], b[bi]) <= 0
|
|
? r[i++] = a[ai++]
|
|
: r[i++] = b[bi++];
|
|
}
|
|
if (ai < a.length) r.push.apply(r, a.slice(ai));
|
|
if (bi < b.length) r.push.apply(r, b.slice(bi));
|
|
return r;
|
|
};
|
|
function _ms(a) {
|
|
if (a.length <= 1)
|
|
return a;
|
|
var m = Math.floor(a.length / 2), left = a.slice(0, m), right = a.slice(m);
|
|
left = _ms(left);
|
|
right = _ms(right);
|
|
return merge(left, right);
|
|
};
|
|
return _ms(array);
|
|
};
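// Example (illustrative): a stable merge sort that returns a new array and
// leaves its input untouched.
//
//   mergeSort([3, 1, 2], function(a, b) { return a - b; });
//   // -> [1, 2, 3]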
function set_difference(a, b) {
|
|
return a.filter(function(el){
|
|
return b.indexOf(el) < 0;
|
|
});
|
|
};
|
|
|
|
function set_intersection(a, b) {
|
|
return a.filter(function(el){
|
|
return b.indexOf(el) >= 0;
|
|
});
|
|
};
|
|
|
|
// this function is taken from Acorn [1], written by Marijn Haverbeke
|
|
// [1] https://github.com/marijnh/acorn
|
|
function makePredicate(words) {
|
|
if (!(words instanceof Array)) words = words.split(" ");
|
|
var f = "", cats = [];
|
|
out: for (var i = 0; i < words.length; ++i) {
|
|
for (var j = 0; j < cats.length; ++j)
|
|
if (cats[j][0].length == words[i].length) {
|
|
cats[j].push(words[i]);
|
|
continue out;
|
|
}
|
|
cats.push([words[i]]);
|
|
}
|
|
function quote(word) {
|
|
return JSON.stringify(word).replace(/[\u2028\u2029]/g, function(s) {
|
|
switch (s) {
|
|
case "\u2028": return "\\u2028";
|
|
case "\u2029": return "\\u2029";
|
|
}
|
|
return s;
|
|
});
|
|
}
|
|
function compareTo(arr) {
|
|
if (arr.length == 1) return f += "return str === " + quote(arr[0]) + ";";
|
|
f += "switch(str){";
|
|
for (var i = 0; i < arr.length; ++i) f += "case " + quote(arr[i]) + ":";
|
|
f += "return true}return false;";
|
|
}
|
|
// When there are more than three length categories, an outer
|
|
// switch first dispatches on the lengths, to save on comparisons.
|
|
if (cats.length > 3) {
|
|
cats.sort(function(a, b) {return b.length - a.length;});
|
|
f += "switch(str.length){";
|
|
for (var i = 0; i < cats.length; ++i) {
|
|
var cat = cats[i];
|
|
f += "case " + cat[0].length + ":";
|
|
compareTo(cat);
|
|
}
|
|
f += "}";
|
|
// Otherwise, simply generate a flat `switch` statement.
|
|
} else {
|
|
compareTo(words);
|
|
}
|
|
return new Function("str", f);
|
|
};
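// Example (illustrative): compiles a word list into a fast membership test
// (a generated function built around `switch`), as done below for KEYWORDS,
// OPERATORS and friends.
//
//   var is_primary = makePredicate("red green blue");
//   is_primary("green");   // true
//   is_primary("purple");  // false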
function all(array, predicate) {
|
|
for (var i = array.length; --i >= 0;)
|
|
if (!predicate(array[i]))
|
|
return false;
|
|
return true;
|
|
};
|
|
|
|
function Dictionary() {
|
|
this._values = Object.create(null);
|
|
this._size = 0;
|
|
};
|
|
Dictionary.prototype = {
|
|
set: function(key, val) {
|
|
if (!this.has(key)) ++this._size;
|
|
this._values["$" + key] = val;
|
|
return this;
|
|
},
|
|
add: function(key, val) {
|
|
if (this.has(key)) {
|
|
this.get(key).push(val);
|
|
} else {
|
|
this.set(key, [ val ]);
|
|
}
|
|
return this;
|
|
},
|
|
get: function(key) { return this._values["$" + key] },
|
|
del: function(key) {
|
|
if (this.has(key)) {
|
|
--this._size;
|
|
delete this._values["$" + key];
|
|
}
|
|
return this;
|
|
},
|
|
has: function(key) { return ("$" + key) in this._values },
|
|
each: function(f) {
|
|
for (var i in this._values)
|
|
f(this._values[i], i.substr(1));
|
|
},
|
|
size: function() {
|
|
return this._size;
|
|
},
|
|
map: function(f) {
|
|
var ret = [];
|
|
for (var i in this._values)
|
|
ret.push(f(this._values[i], i.substr(1)));
|
|
return ret;
|
|
},
|
|
toObject: function() { return this._values }
|
|
};
|
|
Dictionary.fromObject = function(obj) {
|
|
var dict = new Dictionary();
|
|
dict._size = merge(dict._values, obj);
|
|
return dict;
|
|
};
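// Example (illustrative): a string-keyed map whose keys are stored with a
// "$" prefix, so names like "__proto__" or "hasOwnProperty" are safe.
//
//   var d = new Dictionary();
//   d.add("names", "x").add("names", "y").set("__proto__", 1);
//   d.get("names");      // ["x", "y"]
//   d.has("__proto__");  // true
//   d.size();            // 2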
function HOP(obj, prop) {
|
|
return Object.prototype.hasOwnProperty.call(obj, prop);
|
|
}
|
|
|
|
// return true if the node at the top of the stack (that means the
|
|
// innermost node in the current output) is lexically the first in
|
|
// a statement.
|
|
function first_in_statement(stack) {
|
|
var node = stack.parent(-1);
|
|
for (var i = 0, p; p = stack.parent(i); i++) {
|
|
if (p instanceof AST_Statement && p.body === node)
|
|
return true;
|
|
if ((p instanceof AST_Seq && p.car === node ) ||
|
|
(p instanceof AST_Call && p.expression === node && !(p instanceof AST_New) ) ||
|
|
(p instanceof AST_Dot && p.expression === node ) ||
|
|
(p instanceof AST_Sub && p.expression === node ) ||
|
|
(p instanceof AST_Conditional && p.condition === node ) ||
|
|
(p instanceof AST_Binary && p.left === node ) ||
|
|
(p instanceof AST_UnaryPostfix && p.expression === node ))
|
|
{
|
|
node = p;
|
|
} else {
|
|
return false;
|
|
}
|
|
}
|
|
}
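// Example (illustrative): in `({}).x = 1;` the object literal is lexically
// the first thing in its statement (Object -> Dot -> Assign -> statement
// body), so first_in_statement() returns true and the code generator knows
// the literal must be wrapped in parentheses.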
/***********************************************************************
|
|
|
|
A JavaScript tokenizer / parser / beautifier / compressor.
|
|
https://github.com/mishoo/UglifyJS2
|
|
|
|
-------------------------------- (C) ---------------------------------
|
|
|
|
Author: Mihai Bazon
|
|
<mihai.bazon@gmail.com>
|
|
http://mihai.bazon.net/blog
|
|
|
|
Distributed under the BSD license:
|
|
|
|
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
|
|
|
Redistribution and use in source and binary forms, with or without
|
|
modification, are permitted provided that the following conditions
|
|
are met:
|
|
|
|
* Redistributions of source code must retain the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer.
|
|
|
|
* Redistributions in binary form must reproduce the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer in the documentation and/or other materials
|
|
provided with the distribution.
|
|
|
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
|
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
|
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
|
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
|
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
|
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
|
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
|
SUCH DAMAGE.
|
|
|
|
***********************************************************************/
|
|
|
|
"use strict";
|
|
|
|
function DEFNODE(type, props, methods, base) {
|
|
if (arguments.length < 4) base = AST_Node;
|
|
if (!props) props = [];
|
|
else props = props.split(/\s+/);
|
|
var self_props = props;
|
|
if (base && base.PROPS)
|
|
props = props.concat(base.PROPS);
|
|
var code = "return function AST_" + type + "(props){ if (props) { ";
|
|
for (var i = props.length; --i >= 0;) {
|
|
code += "this." + props[i] + " = props." + props[i] + ";";
|
|
}
|
|
var proto = base && new base;
|
|
if (proto && proto.initialize || (methods && methods.initialize))
|
|
code += "this.initialize();";
|
|
code += "}}";
|
|
var ctor = new Function(code)();
|
|
if (proto) {
|
|
ctor.prototype = proto;
|
|
ctor.BASE = base;
|
|
}
|
|
if (base) base.SUBCLASSES.push(ctor);
|
|
ctor.prototype.CTOR = ctor;
|
|
ctor.PROPS = props || null;
|
|
ctor.SELF_PROPS = self_props;
|
|
ctor.SUBCLASSES = [];
|
|
if (type) {
|
|
ctor.prototype.TYPE = ctor.TYPE = type;
|
|
}
|
|
if (methods) for (i in methods) if (HOP(methods, i)) {
|
|
if (/^\$/.test(i)) {
|
|
ctor[i.substr(1)] = methods[i];
|
|
} else {
|
|
ctor.prototype[i] = methods[i];
|
|
}
|
|
}
|
|
ctor.DEFMETHOD = function(name, method) {
|
|
this.prototype[name] = method;
|
|
};
|
|
if (typeof exports !== "undefined") {
|
|
exports["AST_" + type] = ctor;
|
|
}
|
|
return ctor;
|
|
};
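// Example (illustrative): DEFNODE generates the AST node constructors used
// throughout the rest of this file. A hypothetical definition such as
//
//   var AST_Demo = DEFNODE("Demo", "foo", {
//       $documentation: "Hypothetical node with one extra property"
//   }, AST_Node);
//
// yields a constructor whose instances copy `foo` plus the inherited
// `start`/`end` properties from the object passed to it, and which can be
// extended later via AST_Demo.DEFMETHOD(name, fn). Keys with a leading "$"
// (like $documentation) become statics rather than prototype members.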
var AST_Token = DEFNODE("Token", "type value line col pos endline endcol endpos nlb comments_before file raw", {
|
|
}, null);
|
|
|
|
var AST_Node = DEFNODE("Node", "start end", {
|
|
_clone: function(deep) {
|
|
if (deep) {
|
|
var self = this.clone();
|
|
return self.transform(new TreeTransformer(function(node) {
|
|
if (node !== self) {
|
|
return node.clone(true);
|
|
}
|
|
}));
|
|
}
|
|
return new this.CTOR(this);
|
|
},
|
|
clone: function(deep) {
|
|
return this._clone(deep);
|
|
},
|
|
$documentation: "Base class of all AST nodes",
|
|
$propdoc: {
|
|
start: "[AST_Token] The first token of this node",
|
|
end: "[AST_Token] The last token of this node"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this);
|
|
},
|
|
walk: function(visitor) {
|
|
return this._walk(visitor); // not sure the indirection will be any help
|
|
}
|
|
}, null);
|
|
|
|
AST_Node.warn_function = null;
|
|
AST_Node.warn = function(txt, props) {
|
|
if (AST_Node.warn_function)
|
|
AST_Node.warn_function(string_template(txt, props));
|
|
};
|
|
|
|
/* -----[ statements ]----- */
|
|
|
|
var AST_Statement = DEFNODE("Statement", null, {
|
|
$documentation: "Base class of all statements",
|
|
});
|
|
|
|
var AST_Debugger = DEFNODE("Debugger", null, {
|
|
$documentation: "Represents a debugger statement",
|
|
}, AST_Statement);
|
|
|
|
var AST_Directive = DEFNODE("Directive", "value scope quote", {
|
|
$documentation: "Represents a directive, like \"use strict\";",
|
|
$propdoc: {
|
|
value: "[string] The value of this directive as a plain string (it's not an AST_String!)",
|
|
scope: "[AST_Scope/S] The scope that this directive affects",
|
|
quote: "[string] the original quote character"
|
|
},
|
|
}, AST_Statement);
|
|
|
|
var AST_SimpleStatement = DEFNODE("SimpleStatement", "body", {
|
|
$documentation: "A statement consisting of an expression, i.e. a = 1 + 2",
|
|
$propdoc: {
|
|
body: "[AST_Node] an expression node (should not be instanceof AST_Statement)"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.body._walk(visitor);
|
|
});
|
|
}
|
|
}, AST_Statement);
|
|
|
|
function walk_body(node, visitor) {
|
|
var body = node.body;
|
|
if (body instanceof AST_Statement) {
|
|
body._walk(visitor);
|
|
}
|
|
else for (var i = 0, len = body.length; i < len; i++) {
|
|
body[i]._walk(visitor);
|
|
}
|
|
};
|
|
|
|
var AST_Block = DEFNODE("Block", "body", {
|
|
$documentation: "A body of statements (usually bracketed)",
|
|
$propdoc: {
|
|
body: "[AST_Statement*] an array of statements"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
walk_body(this, visitor);
|
|
});
|
|
}
|
|
}, AST_Statement);
|
|
|
|
var AST_BlockStatement = DEFNODE("BlockStatement", null, {
|
|
$documentation: "A block statement",
|
|
}, AST_Block);
|
|
|
|
var AST_EmptyStatement = DEFNODE("EmptyStatement", null, {
|
|
$documentation: "The empty statement (empty block or simply a semicolon)",
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this);
|
|
}
|
|
}, AST_Statement);
|
|
|
|
var AST_StatementWithBody = DEFNODE("StatementWithBody", "body", {
|
|
$documentation: "Base class for all statements that contain one nested body: `For`, `ForIn`, `Do`, `While`, `With`",
|
|
$propdoc: {
|
|
body: "[AST_Statement] the body; this should always be present, even if it's an AST_EmptyStatement"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.body._walk(visitor);
|
|
});
|
|
}
|
|
}, AST_Statement);
|
|
|
|
var AST_LabeledStatement = DEFNODE("LabeledStatement", "label", {
|
|
$documentation: "Statement with a label",
|
|
$propdoc: {
|
|
label: "[AST_Label] a label definition"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.label._walk(visitor);
|
|
this.body._walk(visitor);
|
|
});
|
|
},
|
|
clone: function(deep) {
|
|
var node = this._clone(deep);
|
|
if (deep) {
|
|
var label = node.label;
|
|
var def = this.label;
|
|
node.walk(new TreeWalker(function(node) {
|
|
if (node instanceof AST_LoopControl
|
|
&& node.label && node.label.thedef === def) {
|
|
node.label.thedef = label;
|
|
label.references.push(node);
|
|
}
|
|
}));
|
|
}
|
|
return node;
|
|
}
|
|
}, AST_StatementWithBody);
|
|
|
|
var AST_IterationStatement = DEFNODE("IterationStatement", null, {
|
|
$documentation: "Internal class. All loops inherit from it."
|
|
}, AST_StatementWithBody);
|
|
|
|
var AST_DWLoop = DEFNODE("DWLoop", "condition", {
|
|
$documentation: "Base class for do/while statements",
|
|
$propdoc: {
|
|
condition: "[AST_Node] the loop condition. Should not be instanceof AST_Statement"
|
|
}
|
|
}, AST_IterationStatement);
|
|
|
|
var AST_Do = DEFNODE("Do", null, {
|
|
$documentation: "A `do` statement",
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.body._walk(visitor);
|
|
this.condition._walk(visitor);
|
|
});
|
|
}
|
|
}, AST_DWLoop);
|
|
|
|
var AST_While = DEFNODE("While", null, {
|
|
$documentation: "A `while` statement",
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.condition._walk(visitor);
|
|
this.body._walk(visitor);
|
|
});
|
|
}
|
|
}, AST_DWLoop);
|
|
|
|
var AST_For = DEFNODE("For", "init condition step", {
|
|
$documentation: "A `for` statement",
|
|
$propdoc: {
|
|
init: "[AST_Node?] the `for` initialization code, or null if empty",
|
|
condition: "[AST_Node?] the `for` termination clause, or null if empty",
|
|
step: "[AST_Node?] the `for` update clause, or null if empty"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
if (this.init) this.init._walk(visitor);
|
|
if (this.condition) this.condition._walk(visitor);
|
|
if (this.step) this.step._walk(visitor);
|
|
this.body._walk(visitor);
|
|
});
|
|
}
|
|
}, AST_IterationStatement);
|
|
|
|
var AST_ForIn = DEFNODE("ForIn", "init name object", {
|
|
$documentation: "A `for ... in` statement",
|
|
$propdoc: {
|
|
init: "[AST_Node] the `for/in` initialization code",
|
|
name: "[AST_SymbolRef?] the loop variable, only if `init` is AST_Var",
|
|
object: "[AST_Node] the object that we're looping through"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.init._walk(visitor);
|
|
this.object._walk(visitor);
|
|
this.body._walk(visitor);
|
|
});
|
|
}
|
|
}, AST_IterationStatement);
|
|
|
|
var AST_With = DEFNODE("With", "expression", {
|
|
$documentation: "A `with` statement",
|
|
$propdoc: {
|
|
expression: "[AST_Node] the `with` expression"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.expression._walk(visitor);
|
|
this.body._walk(visitor);
|
|
});
|
|
}
|
|
}, AST_StatementWithBody);
|
|
|
|
/* -----[ scope and functions ]----- */
|
|
|
|
var AST_Scope = DEFNODE("Scope", "directives variables functions uses_with uses_eval parent_scope enclosed cname", {
|
|
$documentation: "Base class for all statements introducing a lexical scope",
|
|
$propdoc: {
|
|
directives: "[string*/S] an array of directives declared in this scope",
|
|
variables: "[Object/S] a map of name -> SymbolDef for all variables/functions defined in this scope",
|
|
functions: "[Object/S] like `variables`, but only lists function declarations",
|
|
uses_with: "[boolean/S] tells whether this scope uses the `with` statement",
|
|
uses_eval: "[boolean/S] tells whether this scope contains a direct call to the global `eval`",
|
|
parent_scope: "[AST_Scope?/S] link to the parent scope",
|
|
enclosed: "[SymbolDef*/S] a list of all symbol definitions that are accessed from this scope or any subscopes",
|
|
cname: "[integer/S] current index for mangling variables (used internally by the mangler)",
|
|
},
|
|
}, AST_Block);
|
|
|
|
var AST_Toplevel = DEFNODE("Toplevel", "globals", {
|
|
$documentation: "The toplevel scope",
|
|
$propdoc: {
|
|
globals: "[Object/S] a map of name -> SymbolDef for all undeclared names",
|
|
},
|
|
wrap_enclose: function(arg_parameter_pairs) {
|
|
var self = this;
|
|
var args = [];
|
|
var parameters = [];
|
|
|
|
arg_parameter_pairs.forEach(function(pair) {
|
|
var splitAt = pair.lastIndexOf(":");
|
|
|
|
args.push(pair.substr(0, splitAt));
|
|
parameters.push(pair.substr(splitAt + 1));
|
|
});
|
|
|
|
var wrapped_tl = "(function(" + parameters.join(",") + "){ '$ORIG'; })(" + args.join(",") + ")";
|
|
wrapped_tl = parse(wrapped_tl);
|
|
wrapped_tl = wrapped_tl.transform(new TreeTransformer(function before(node){
|
|
if (node instanceof AST_Directive && node.value == "$ORIG") {
|
|
return MAP.splice(self.body);
|
|
}
|
|
}));
|
|
return wrapped_tl;
|
|
},
|
|
wrap_commonjs: function(name, export_all) {
|
|
var self = this;
|
|
var to_export = [];
|
|
if (export_all) {
|
|
self.figure_out_scope();
|
|
self.walk(new TreeWalker(function(node){
|
|
if (node instanceof AST_SymbolDeclaration && node.definition().global) {
|
|
if (!find_if(function(n){ return n.name == node.name }, to_export))
|
|
to_export.push(node);
|
|
}
|
|
}));
|
|
}
|
|
var wrapped_tl = "(function(exports, global){ '$ORIG'; '$EXPORTS'; global['" + name + "'] = exports; }({}, (function(){return this}())))";
|
|
wrapped_tl = parse(wrapped_tl);
|
|
wrapped_tl = wrapped_tl.transform(new TreeTransformer(function before(node){
|
|
if (node instanceof AST_Directive) {
|
|
switch (node.value) {
|
|
case "$ORIG":
|
|
return MAP.splice(self.body);
|
|
case "$EXPORTS":
|
|
var body = [];
|
|
to_export.forEach(function(sym){
|
|
body.push(new AST_SimpleStatement({
|
|
body: new AST_Assign({
|
|
left: new AST_Sub({
|
|
expression: new AST_SymbolRef({ name: "exports" }),
|
|
property: new AST_String({ value: sym.name }),
|
|
}),
|
|
operator: "=",
|
|
right: new AST_SymbolRef(sym),
|
|
}),
|
|
}));
|
|
});
|
|
return MAP.splice(body);
|
|
}
|
|
}
|
|
}));
|
|
return wrapped_tl;
|
|
}
|
|
}, AST_Scope);
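// Example (illustrative): wrap_commonjs("mylib", true) rewrites the toplevel
// roughly into
//
//   (function(exports, global){
//       /* original toplevel body, spliced in place of the '$ORIG' directive */
//       /* exports["<name>"] = <name>; for each global, in place of '$EXPORTS' */
//       global['mylib'] = exports;
//   }({}, (function(){return this}())));
//
// The '$ORIG' and '$EXPORTS' strings in the template are only placeholder
// directives that the TreeTransformer replaces; "mylib" is a hypothetical
// module name.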
var AST_Lambda = DEFNODE("Lambda", "name argnames uses_arguments", {
|
|
$documentation: "Base class for functions",
|
|
$propdoc: {
|
|
name: "[AST_SymbolDeclaration?] the name of this function",
|
|
argnames: "[AST_SymbolFunarg*] array of function arguments",
|
|
uses_arguments: "[boolean/S] tells whether this function accesses the arguments array"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
if (this.name) this.name._walk(visitor);
|
|
var argnames = this.argnames;
|
|
for (var i = 0, len = argnames.length; i < len; i++) {
|
|
argnames[i]._walk(visitor);
|
|
}
|
|
walk_body(this, visitor);
|
|
});
|
|
}
|
|
}, AST_Scope);
|
|
|
|
var AST_Accessor = DEFNODE("Accessor", null, {
|
|
$documentation: "A setter/getter function. The `name` property is always null."
|
|
}, AST_Lambda);
|
|
|
|
var AST_Function = DEFNODE("Function", null, {
|
|
$documentation: "A function expression"
|
|
}, AST_Lambda);
|
|
|
|
var AST_Defun = DEFNODE("Defun", null, {
|
|
$documentation: "A function definition"
|
|
}, AST_Lambda);
|
|
|
|
/* -----[ JUMPS ]----- */
|
|
|
|
var AST_Jump = DEFNODE("Jump", null, {
|
|
$documentation: "Base class for “jumps” (for now that's `return`, `throw`, `break` and `continue`)"
|
|
}, AST_Statement);
|
|
|
|
var AST_Exit = DEFNODE("Exit", "value", {
|
|
$documentation: "Base class for “exits” (`return` and `throw`)",
|
|
$propdoc: {
|
|
value: "[AST_Node?] the value returned or thrown by this statement; could be null for AST_Return"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, this.value && function(){
|
|
this.value._walk(visitor);
|
|
});
|
|
}
|
|
}, AST_Jump);
|
|
|
|
var AST_Return = DEFNODE("Return", null, {
|
|
$documentation: "A `return` statement"
|
|
}, AST_Exit);
|
|
|
|
var AST_Throw = DEFNODE("Throw", null, {
|
|
$documentation: "A `throw` statement"
|
|
}, AST_Exit);
|
|
|
|
var AST_LoopControl = DEFNODE("LoopControl", "label", {
|
|
$documentation: "Base class for loop control statements (`break` and `continue`)",
|
|
$propdoc: {
|
|
label: "[AST_LabelRef?] the label, or null if none",
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, this.label && function(){
|
|
this.label._walk(visitor);
|
|
});
|
|
}
|
|
}, AST_Jump);
|
|
|
|
var AST_Break = DEFNODE("Break", null, {
|
|
$documentation: "A `break` statement"
|
|
}, AST_LoopControl);
|
|
|
|
var AST_Continue = DEFNODE("Continue", null, {
|
|
$documentation: "A `continue` statement"
|
|
}, AST_LoopControl);
|
|
|
|
/* -----[ IF ]----- */
|
|
|
|
var AST_If = DEFNODE("If", "condition alternative", {
|
|
    $documentation: "An `if` statement",
|
|
$propdoc: {
|
|
condition: "[AST_Node] the `if` condition",
|
|
alternative: "[AST_Statement?] the `else` part, or null if not present"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.condition._walk(visitor);
|
|
this.body._walk(visitor);
|
|
if (this.alternative) this.alternative._walk(visitor);
|
|
});
|
|
}
|
|
}, AST_StatementWithBody);
|
|
|
|
/* -----[ SWITCH ]----- */
|
|
|
|
var AST_Switch = DEFNODE("Switch", "expression", {
|
|
$documentation: "A `switch` statement",
|
|
$propdoc: {
|
|
expression: "[AST_Node] the `switch` “discriminant”"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.expression._walk(visitor);
|
|
walk_body(this, visitor);
|
|
});
|
|
}
|
|
}, AST_Block);
|
|
|
|
var AST_SwitchBranch = DEFNODE("SwitchBranch", null, {
|
|
$documentation: "Base class for `switch` branches",
|
|
}, AST_Block);
|
|
|
|
var AST_Default = DEFNODE("Default", null, {
|
|
$documentation: "A `default` switch branch",
|
|
}, AST_SwitchBranch);
|
|
|
|
var AST_Case = DEFNODE("Case", "expression", {
|
|
$documentation: "A `case` switch branch",
|
|
$propdoc: {
|
|
expression: "[AST_Node] the `case` expression"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.expression._walk(visitor);
|
|
walk_body(this, visitor);
|
|
});
|
|
}
|
|
}, AST_SwitchBranch);
|
|
|
|
/* -----[ EXCEPTIONS ]----- */
|
|
|
|
var AST_Try = DEFNODE("Try", "bcatch bfinally", {
|
|
$documentation: "A `try` statement",
|
|
$propdoc: {
|
|
bcatch: "[AST_Catch?] the catch block, or null if not present",
|
|
bfinally: "[AST_Finally?] the finally block, or null if not present"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
walk_body(this, visitor);
|
|
if (this.bcatch) this.bcatch._walk(visitor);
|
|
if (this.bfinally) this.bfinally._walk(visitor);
|
|
});
|
|
}
|
|
}, AST_Block);
|
|
|
|
var AST_Catch = DEFNODE("Catch", "argname", {
|
|
$documentation: "A `catch` node; only makes sense as part of a `try` statement",
|
|
$propdoc: {
|
|
argname: "[AST_SymbolCatch] symbol for the exception"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.argname._walk(visitor);
|
|
walk_body(this, visitor);
|
|
});
|
|
}
|
|
}, AST_Block);
|
|
|
|
var AST_Finally = DEFNODE("Finally", null, {
|
|
$documentation: "A `finally` node; only makes sense as part of a `try` statement"
|
|
}, AST_Block);
|
|
|
|
/* -----[ VAR/CONST ]----- */
|
|
|
|
var AST_Definitions = DEFNODE("Definitions", "definitions", {
|
|
$documentation: "Base class for `var` or `const` nodes (variable declarations/initializations)",
|
|
$propdoc: {
|
|
definitions: "[AST_VarDef*] array of variable definitions"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
var definitions = this.definitions;
|
|
for (var i = 0, len = definitions.length; i < len; i++) {
|
|
definitions[i]._walk(visitor);
|
|
}
|
|
});
|
|
}
|
|
}, AST_Statement);
|
|
|
|
var AST_Var = DEFNODE("Var", null, {
|
|
$documentation: "A `var` statement"
|
|
}, AST_Definitions);
|
|
|
|
var AST_Const = DEFNODE("Const", null, {
|
|
$documentation: "A `const` statement"
|
|
}, AST_Definitions);
|
|
|
|
var AST_VarDef = DEFNODE("VarDef", "name value", {
|
|
    $documentation: "A variable declaration; only appears in an AST_Definitions node",
|
|
$propdoc: {
|
|
name: "[AST_SymbolVar|AST_SymbolConst] name of the variable",
|
|
        value: "[AST_Node?] initializer, or null if there's no initializer"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.name._walk(visitor);
|
|
if (this.value) this.value._walk(visitor);
|
|
});
|
|
}
|
|
});
|
|
|
|
/* -----[ OTHER ]----- */
|
|
|
|
var AST_Call = DEFNODE("Call", "expression args", {
|
|
$documentation: "A function call expression",
|
|
$propdoc: {
|
|
expression: "[AST_Node] expression to invoke as function",
|
|
args: "[AST_Node*] array of arguments"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.expression._walk(visitor);
|
|
var args = this.args;
|
|
for (var i = 0, len = args.length; i < len; i++) {
|
|
args[i]._walk(visitor);
|
|
}
|
|
});
|
|
}
|
|
});
|
|
|
|
var AST_New = DEFNODE("New", null, {
|
|
$documentation: "An object instantiation. Derives from a function call since it has exactly the same properties"
|
|
}, AST_Call);
|
|
|
|
var AST_Seq = DEFNODE("Seq", "car cdr", {
|
|
$documentation: "A sequence expression (two comma-separated expressions)",
|
|
$propdoc: {
|
|
car: "[AST_Node] first element in sequence",
|
|
cdr: "[AST_Node] second element in sequence"
|
|
},
|
|
$cons: function(x, y) {
|
|
var seq = new AST_Seq(x);
|
|
seq.car = x;
|
|
seq.cdr = y;
|
|
return seq;
|
|
},
|
|
$from_array: function(array) {
|
|
if (array.length == 0) return null;
|
|
if (array.length == 1) return array[0].clone();
|
|
var list = null;
|
|
for (var i = array.length; --i >= 0;) {
|
|
list = AST_Seq.cons(array[i], list);
|
|
}
|
|
var p = list;
|
|
while (p) {
|
|
if (p.cdr && !p.cdr.cdr) {
|
|
p.cdr = p.cdr.car;
|
|
break;
|
|
}
|
|
p = p.cdr;
|
|
}
|
|
return list;
|
|
},
|
|
to_array: function() {
|
|
var p = this, a = [];
|
|
while (p) {
|
|
a.push(p.car);
|
|
if (p.cdr && !(p.cdr instanceof AST_Seq)) {
|
|
a.push(p.cdr);
|
|
break;
|
|
}
|
|
p = p.cdr;
|
|
}
|
|
return a;
|
|
},
|
|
add: function(node) {
|
|
var p = this;
|
|
while (p) {
|
|
if (!(p.cdr instanceof AST_Seq)) {
|
|
var cell = AST_Seq.cons(p.cdr, node);
|
|
return p.cdr = cell;
|
|
}
|
|
p = p.cdr;
|
|
}
|
|
},
|
|
len: function() {
|
|
if (this.cdr instanceof AST_Seq) {
|
|
return this.cdr.len() + 1;
|
|
} else {
|
|
return 2;
|
|
}
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.car._walk(visitor);
|
|
if (this.cdr) this.cdr._walk(visitor);
|
|
});
|
|
}
|
|
});
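// Example (illustrative): sequences are stored as cons cells, so `a, b, c`
// becomes AST_Seq(car: a, cdr: AST_Seq(car: b, cdr: c)). With a, b and c
// standing for arbitrary AST nodes:
//
//   var seq = AST_Seq.from_array([a, b, c]);
//   seq.len();       // 3
//   seq.to_array();  // [a, b, c]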
var AST_PropAccess = DEFNODE("PropAccess", "expression property", {
|
|
$documentation: "Base class for property access expressions, i.e. `a.foo` or `a[\"foo\"]`",
|
|
$propdoc: {
|
|
expression: "[AST_Node] the “container” expression",
|
|
property: "[AST_Node|string] the property to access. For AST_Dot this is always a plain string, while for AST_Sub it's an arbitrary AST_Node"
|
|
}
|
|
});
|
|
|
|
var AST_Dot = DEFNODE("Dot", null, {
|
|
$documentation: "A dotted property access expression",
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.expression._walk(visitor);
|
|
});
|
|
}
|
|
}, AST_PropAccess);
|
|
|
|
var AST_Sub = DEFNODE("Sub", null, {
|
|
$documentation: "Index-style property access, i.e. `a[\"foo\"]`",
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.expression._walk(visitor);
|
|
this.property._walk(visitor);
|
|
});
|
|
}
|
|
}, AST_PropAccess);
|
|
|
|
var AST_Unary = DEFNODE("Unary", "operator expression", {
|
|
$documentation: "Base class for unary expressions",
|
|
$propdoc: {
|
|
operator: "[string] the operator",
|
|
expression: "[AST_Node] expression that this unary operator applies to"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.expression._walk(visitor);
|
|
});
|
|
}
|
|
});
|
|
|
|
var AST_UnaryPrefix = DEFNODE("UnaryPrefix", null, {
|
|
$documentation: "Unary prefix expression, i.e. `typeof i` or `++i`"
|
|
}, AST_Unary);
|
|
|
|
var AST_UnaryPostfix = DEFNODE("UnaryPostfix", null, {
|
|
$documentation: "Unary postfix expression, i.e. `i++`"
|
|
}, AST_Unary);
|
|
|
|
var AST_Binary = DEFNODE("Binary", "left operator right", {
|
|
$documentation: "Binary expression, i.e. `a + b`",
|
|
$propdoc: {
|
|
left: "[AST_Node] left-hand side expression",
|
|
operator: "[string] the operator",
|
|
right: "[AST_Node] right-hand side expression"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.left._walk(visitor);
|
|
this.right._walk(visitor);
|
|
});
|
|
}
|
|
});
|
|
|
|
var AST_Conditional = DEFNODE("Conditional", "condition consequent alternative", {
|
|
$documentation: "Conditional expression using the ternary operator, i.e. `a ? b : c`",
|
|
$propdoc: {
|
|
condition: "[AST_Node]",
|
|
consequent: "[AST_Node]",
|
|
alternative: "[AST_Node]"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.condition._walk(visitor);
|
|
this.consequent._walk(visitor);
|
|
this.alternative._walk(visitor);
|
|
});
|
|
}
|
|
});
|
|
|
|
var AST_Assign = DEFNODE("Assign", null, {
|
|
$documentation: "An assignment expression — `a = b + 5`",
|
|
}, AST_Binary);
|
|
|
|
/* -----[ LITERALS ]----- */
|
|
|
|
var AST_Array = DEFNODE("Array", "elements", {
|
|
$documentation: "An array literal",
|
|
$propdoc: {
|
|
elements: "[AST_Node*] array of elements"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
var elements = this.elements;
|
|
for (var i = 0, len = elements.length; i < len; i++) {
|
|
elements[i]._walk(visitor);
|
|
}
|
|
});
|
|
}
|
|
});
|
|
|
|
var AST_Object = DEFNODE("Object", "properties", {
|
|
$documentation: "An object literal",
|
|
$propdoc: {
|
|
properties: "[AST_ObjectProperty*] array of properties"
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
var properties = this.properties;
|
|
for (var i = 0, len = properties.length; i < len; i++) {
|
|
properties[i]._walk(visitor);
|
|
}
|
|
});
|
|
}
|
|
});
|
|
|
|
var AST_ObjectProperty = DEFNODE("ObjectProperty", "key value", {
|
|
$documentation: "Base class for literal object properties",
|
|
$propdoc: {
|
|
key: "[string] the property name converted to a string for ObjectKeyVal. For setters and getters this is an AST_SymbolAccessor.",
|
|
value: "[AST_Node] property value. For setters and getters this is an AST_Accessor."
|
|
},
|
|
_walk: function(visitor) {
|
|
return visitor._visit(this, function(){
|
|
this.value._walk(visitor);
|
|
});
|
|
}
|
|
});
|
|
|
|
var AST_ObjectKeyVal = DEFNODE("ObjectKeyVal", "quote", {
|
|
$documentation: "A key: value object property",
|
|
$propdoc: {
|
|
quote: "[string] the original quote character"
|
|
}
|
|
}, AST_ObjectProperty);
|
|
|
|
var AST_ObjectSetter = DEFNODE("ObjectSetter", null, {
|
|
$documentation: "An object setter property",
|
|
}, AST_ObjectProperty);
|
|
|
|
var AST_ObjectGetter = DEFNODE("ObjectGetter", null, {
|
|
$documentation: "An object getter property",
|
|
}, AST_ObjectProperty);
|
|
|
|
var AST_Symbol = DEFNODE("Symbol", "scope name thedef", {
|
|
$propdoc: {
|
|
name: "[string] name of this symbol",
|
|
scope: "[AST_Scope/S] the current scope (not necessarily the definition scope)",
|
|
thedef: "[SymbolDef/S] the definition of this symbol"
|
|
},
|
|
$documentation: "Base class for all symbols",
|
|
});
|
|
|
|
var AST_SymbolAccessor = DEFNODE("SymbolAccessor", null, {
|
|
$documentation: "The name of a property accessor (setter/getter function)"
|
|
}, AST_Symbol);
|
|
|
|
var AST_SymbolDeclaration = DEFNODE("SymbolDeclaration", "init", {
|
|
$documentation: "A declaration symbol (symbol in var/const, function name or argument, symbol in catch)",
|
|
}, AST_Symbol);
|
|
|
|
var AST_SymbolVar = DEFNODE("SymbolVar", null, {
|
|
$documentation: "Symbol defining a variable",
|
|
}, AST_SymbolDeclaration);
|
|
|
|
var AST_SymbolConst = DEFNODE("SymbolConst", null, {
|
|
$documentation: "A constant declaration"
|
|
}, AST_SymbolDeclaration);
|
|
|
|
var AST_SymbolFunarg = DEFNODE("SymbolFunarg", null, {
|
|
$documentation: "Symbol naming a function argument",
|
|
}, AST_SymbolVar);
|
|
|
|
var AST_SymbolDefun = DEFNODE("SymbolDefun", null, {
|
|
$documentation: "Symbol defining a function",
|
|
}, AST_SymbolDeclaration);
|
|
|
|
var AST_SymbolLambda = DEFNODE("SymbolLambda", null, {
|
|
$documentation: "Symbol naming a function expression",
|
|
}, AST_SymbolDeclaration);
|
|
|
|
var AST_SymbolCatch = DEFNODE("SymbolCatch", null, {
|
|
$documentation: "Symbol naming the exception in catch",
|
|
}, AST_SymbolDeclaration);
|
|
|
|
var AST_Label = DEFNODE("Label", "references", {
|
|
$documentation: "Symbol naming a label (declaration)",
|
|
$propdoc: {
|
|
references: "[AST_LoopControl*] a list of nodes referring to this label"
|
|
},
|
|
initialize: function() {
|
|
this.references = [];
|
|
this.thedef = this;
|
|
}
|
|
}, AST_Symbol);
|
|
|
|
var AST_SymbolRef = DEFNODE("SymbolRef", null, {
|
|
$documentation: "Reference to some symbol (not definition/declaration)",
|
|
}, AST_Symbol);
|
|
|
|
var AST_LabelRef = DEFNODE("LabelRef", null, {
|
|
$documentation: "Reference to a label symbol",
|
|
}, AST_Symbol);
|
|
|
|
var AST_This = DEFNODE("This", null, {
|
|
$documentation: "The `this` symbol",
|
|
}, AST_Symbol);
|
|
|
|
var AST_Constant = DEFNODE("Constant", null, {
|
|
$documentation: "Base class for all constants",
|
|
getValue: function() {
|
|
return this.value;
|
|
}
|
|
});
|
|
|
|
var AST_String = DEFNODE("String", "value quote", {
|
|
$documentation: "A string literal",
|
|
$propdoc: {
|
|
value: "[string] the contents of this string",
|
|
quote: "[string] the original quote character"
|
|
}
|
|
}, AST_Constant);
|
|
|
|
var AST_Number = DEFNODE("Number", "value literal", {
|
|
$documentation: "A number literal",
|
|
$propdoc: {
|
|
value: "[number] the numeric value",
|
|
literal: "[string] numeric value as string (optional)"
|
|
}
|
|
}, AST_Constant);
|
|
|
|
var AST_RegExp = DEFNODE("RegExp", "value", {
|
|
$documentation: "A regexp literal",
|
|
$propdoc: {
|
|
value: "[RegExp] the actual regexp"
|
|
}
|
|
}, AST_Constant);
|
|
|
|
var AST_Atom = DEFNODE("Atom", null, {
|
|
$documentation: "Base class for atoms",
|
|
}, AST_Constant);
|
|
|
|
var AST_Null = DEFNODE("Null", null, {
|
|
$documentation: "The `null` atom",
|
|
value: null
|
|
}, AST_Atom);
|
|
|
|
var AST_NaN = DEFNODE("NaN", null, {
|
|
$documentation: "The impossible value",
|
|
value: 0/0
|
|
}, AST_Atom);
|
|
|
|
var AST_Undefined = DEFNODE("Undefined", null, {
|
|
$documentation: "The `undefined` value",
|
|
value: (function(){}())
|
|
}, AST_Atom);
|
|
|
|
var AST_Hole = DEFNODE("Hole", null, {
|
|
$documentation: "A hole in an array",
|
|
value: (function(){}())
|
|
}, AST_Atom);
|
|
|
|
var AST_Infinity = DEFNODE("Infinity", null, {
|
|
$documentation: "The `Infinity` value",
|
|
value: 1/0
|
|
}, AST_Atom);
|
|
|
|
var AST_Boolean = DEFNODE("Boolean", null, {
|
|
$documentation: "Base class for booleans",
|
|
}, AST_Atom);
|
|
|
|
var AST_False = DEFNODE("False", null, {
|
|
$documentation: "The `false` atom",
|
|
value: false
|
|
}, AST_Boolean);
|
|
|
|
var AST_True = DEFNODE("True", null, {
|
|
$documentation: "The `true` atom",
|
|
value: true
|
|
}, AST_Boolean);
|
|
|
|
/* -----[ TreeWalker ]----- */
|
|
|
|
function TreeWalker(callback) {
|
|
this.visit = callback;
|
|
this.stack = [];
|
|
this.directives = Object.create(null);
|
|
};
|
|
TreeWalker.prototype = {
|
|
_visit: function(node, descend) {
|
|
this.push(node);
|
|
var ret = this.visit(node, descend ? function(){
|
|
descend.call(node);
|
|
} : noop);
|
|
if (!ret && descend) {
|
|
descend.call(node);
|
|
}
|
|
this.pop(node);
|
|
return ret;
|
|
},
|
|
parent: function(n) {
|
|
return this.stack[this.stack.length - 2 - (n || 0)];
|
|
},
|
|
push: function (node) {
|
|
if (node instanceof AST_Lambda) {
|
|
this.directives = Object.create(this.directives);
|
|
} else if (node instanceof AST_Directive && !this.directives[node.value]) {
|
|
this.directives[node.value] = node;
|
|
}
|
|
this.stack.push(node);
|
|
},
|
|
pop: function(node) {
|
|
this.stack.pop();
|
|
if (node instanceof AST_Lambda) {
|
|
this.directives = Object.getPrototypeOf(this.directives);
|
|
}
|
|
},
|
|
self: function() {
|
|
return this.stack[this.stack.length - 1];
|
|
},
|
|
find_parent: function(type) {
|
|
var stack = this.stack;
|
|
for (var i = stack.length; --i >= 0;) {
|
|
var x = stack[i];
|
|
if (x instanceof type) return x;
|
|
}
|
|
},
|
|
has_directive: function(type) {
|
|
var dir = this.directives[type];
|
|
if (dir) return dir;
|
|
var node = this.stack[this.stack.length - 1];
|
|
if (node instanceof AST_Scope) {
|
|
for (var i = 0; i < node.body.length; ++i) {
|
|
var st = node.body[i];
|
|
if (!(st instanceof AST_Directive)) break;
|
|
if (st.value == type) return st;
|
|
}
|
|
}
|
|
},
|
|
in_boolean_context: function() {
|
|
var stack = this.stack;
|
|
var i = stack.length, self = stack[--i];
|
|
while (i > 0) {
|
|
var p = stack[--i];
|
|
if ((p instanceof AST_If && p.condition === self) ||
|
|
(p instanceof AST_Conditional && p.condition === self) ||
|
|
(p instanceof AST_DWLoop && p.condition === self) ||
|
|
(p instanceof AST_For && p.condition === self) ||
|
|
(p instanceof AST_UnaryPrefix && p.operator == "!" && p.expression === self))
|
|
{
|
|
return true;
|
|
}
|
|
if (!(p instanceof AST_Binary && (p.operator == "&&" || p.operator == "||")))
|
|
return false;
|
|
self = p;
|
|
}
|
|
},
|
|
loopcontrol_target: function(node) {
|
|
var stack = this.stack;
|
|
if (node.label) for (var i = stack.length; --i >= 0;) {
|
|
var x = stack[i];
|
|
if (x instanceof AST_LabeledStatement && x.label.name == node.label.name)
|
|
return x.body;
|
|
} else for (var i = stack.length; --i >= 0;) {
|
|
var x = stack[i];
|
|
if (x instanceof AST_IterationStatement
|
|
|| node instanceof AST_Break && x instanceof AST_Switch)
|
|
return x;
|
|
}
|
|
}
|
|
};
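// Example (illustrative): counting every symbol reference in a parsed AST.
// Inside the callback, `this` is the TreeWalker, so this.parent() and
// this.find_parent() give access to the path to the current node; returning
// a truthy value suppresses the automatic descent into children. `toplevel`
// stands for any AST_Toplevel produced by parse() and is not defined here.
//
//   var count = 0;
//   toplevel.walk(new TreeWalker(function(node) {
//       if (node instanceof AST_SymbolRef) count++;
//   }));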
/***********************************************************************
|
|
|
|
A JavaScript tokenizer / parser / beautifier / compressor.
|
|
https://github.com/mishoo/UglifyJS2
|
|
|
|
-------------------------------- (C) ---------------------------------
|
|
|
|
Author: Mihai Bazon
|
|
<mihai.bazon@gmail.com>
|
|
http://mihai.bazon.net/blog
|
|
|
|
Distributed under the BSD license:
|
|
|
|
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
|
Parser based on parse-js (http://marijn.haverbeke.nl/parse-js/).
|
|
|
|
Redistribution and use in source and binary forms, with or without
|
|
modification, are permitted provided that the following conditions
|
|
are met:
|
|
|
|
* Redistributions of source code must retain the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer.
|
|
|
|
* Redistributions in binary form must reproduce the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer in the documentation and/or other materials
|
|
provided with the distribution.
|
|
|
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
|
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
|
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
|
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
|
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
|
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
|
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
|
SUCH DAMAGE.
|
|
|
|
***********************************************************************/
|
|
|
|
"use strict";
|
|
|
|
var KEYWORDS = 'break case catch const continue debugger default delete do else finally for function if in instanceof new return switch throw try typeof var void while with';
|
|
var KEYWORDS_ATOM = 'false null true';
|
|
var RESERVED_WORDS = 'abstract boolean byte char class double enum export extends final float goto implements import int interface let long native package private protected public short static super synchronized this throws transient volatile yield'
|
|
+ " " + KEYWORDS_ATOM + " " + KEYWORDS;
|
|
var KEYWORDS_BEFORE_EXPRESSION = 'return new delete throw else case';
|
|
|
|
KEYWORDS = makePredicate(KEYWORDS);
|
|
RESERVED_WORDS = makePredicate(RESERVED_WORDS);
|
|
KEYWORDS_BEFORE_EXPRESSION = makePredicate(KEYWORDS_BEFORE_EXPRESSION);
|
|
KEYWORDS_ATOM = makePredicate(KEYWORDS_ATOM);
|
|
|
|
var OPERATOR_CHARS = makePredicate(characters("+-*&%=<>!?|~^"));
|
|
|
|
var RE_HEX_NUMBER = /^0x[0-9a-f]+$/i;
|
|
var RE_OCT_NUMBER = /^0[0-7]+$/;
|
|
|
|
var OPERATORS = makePredicate([
|
|
"in",
|
|
"instanceof",
|
|
"typeof",
|
|
"new",
|
|
"void",
|
|
"delete",
|
|
"++",
|
|
"--",
|
|
"+",
|
|
"-",
|
|
"!",
|
|
"~",
|
|
"&",
|
|
"|",
|
|
"^",
|
|
"*",
|
|
"/",
|
|
"%",
|
|
">>",
|
|
"<<",
|
|
">>>",
|
|
"<",
|
|
">",
|
|
"<=",
|
|
">=",
|
|
"==",
|
|
"===",
|
|
"!=",
|
|
"!==",
|
|
"?",
|
|
"=",
|
|
"+=",
|
|
"-=",
|
|
"/=",
|
|
"*=",
|
|
"%=",
|
|
">>=",
|
|
"<<=",
|
|
">>>=",
|
|
"|=",
|
|
"^=",
|
|
"&=",
|
|
"&&",
|
|
"||"
|
|
]);
|
|
|
|
var WHITESPACE_CHARS = makePredicate(characters(" \u00a0\n\r\t\f\u000b\u200b\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u2028\u2029\u202f\u205f\u3000\uFEFF"));
|
|
|
|
var NEWLINE_CHARS = makePredicate(characters("\n\r\u2028\u2029"));
|
|
|
|
var PUNC_BEFORE_EXPRESSION = makePredicate(characters("[{(,;:"));
|
|
|
|
var PUNC_CHARS = makePredicate(characters("[]{}(),;:"));
|
|
|
|
var REGEXP_MODIFIERS = makePredicate(characters("gmsiy"));
|
|
|
|
/* -----[ Tokenizer ]----- */
|
|
|
|
// regexps adapted from http://xregexp.com/plugins/#unicode
|
|
var UNICODE = {
|
|
letter: new RegExp("[\\u0041-\\u005A\\u0061-\\u007A\\u00AA\\u00B5\\u00BA\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02C1\\u02C6-\\u02D1\\u02E0-\\u02E4\\u02EC\\u02EE\\u0370-\\u0374\\u0376\\u0377\\u037A-\\u037D\\u037F\\u0386\\u0388-\\u038A\\u038C\\u038E-\\u03A1\\u03A3-\\u03F5\\u03F7-\\u0481\\u048A-\\u052F\\u0531-\\u0556\\u0559\\u0561-\\u0587\\u05D0-\\u05EA\\u05F0-\\u05F2\\u0620-\\u064A\\u066E\\u066F\\u0671-\\u06D3\\u06D5\\u06E5\\u06E6\\u06EE\\u06EF\\u06FA-\\u06FC\\u06FF\\u0710\\u0712-\\u072F\\u074D-\\u07A5\\u07B1\\u07CA-\\u07EA\\u07F4\\u07F5\\u07FA\\u0800-\\u0815\\u081A\\u0824\\u0828\\u0840-\\u0858\\u08A0-\\u08B2\\u0904-\\u0939\\u093D\\u0950\\u0958-\\u0961\\u0971-\\u0980\\u0985-\\u098C\\u098F\\u0990\\u0993-\\u09A8\\u09AA-\\u09B0\\u09B2\\u09B6-\\u09B9\\u09BD\\u09CE\\u09DC\\u09DD\\u09DF-\\u09E1\\u09F0\\u09F1\\u0A05-\\u0A0A\\u0A0F\\u0A10\\u0A13-\\u0A28\\u0A2A-\\u0A30\\u0A32\\u0A33\\u0A35\\u0A36\\u0A38\\u0A39\\u0A59-\\u0A5C\\u0A5E\\u0A72-\\u0A74\\u0A85-\\u0A8D\\u0A8F-\\u0A91\\u0A93-\\u0AA8\\u0AAA-\\u0AB0\\u0AB2\\u0AB3\\u0AB5-\\u0AB9\\u0ABD\\u0AD0\\u0AE0\\u0AE1\\u0B05-\\u0B0C\\u0B0F\\u0B10\\u0B13-\\u0B28\\u0B2A-\\u0B30\\u0B32\\u0B33\\u0B35-\\u0B39\\u0B3D\\u0B5C\\u0B5D\\u0B5F-\\u0B61\\u0B71\\u0B83\\u0B85-\\u0B8A\\u0B8E-\\u0B90\\u0B92-\\u0B95\\u0B99\\u0B9A\\u0B9C\\u0B9E\\u0B9F\\u0BA3\\u0BA4\\u0BA8-\\u0BAA\\u0BAE-\\u0BB9\\u0BD0\\u0C05-\\u0C0C\\u0C0E-\\u0C10\\u0C12-\\u0C28\\u0C2A-\\u0C39\\u0C3D\\u0C58\\u0C59\\u0C60\\u0C61\\u0C85-\\u0C8C\\u0C8E-\\u0C90\\u0C92-\\u0CA8\\u0CAA-\\u0CB3\\u0CB5-\\u0CB9\\u0CBD\\u0CDE\\u0CE0\\u0CE1\\u0CF1\\u0CF2\\u0D05-\\u0D0C\\u0D0E-\\u0D10\\u0D12-\\u0D3A\\u0D3D\\u0D4E\\u0D60\\u0D61\\u0D7A-\\u0D7F\\u0D85-\\u0D96\\u0D9A-\\u0DB1\\u0DB3-\\u0DBB\\u0DBD\\u0DC0-\\u0DC6\\u0E01-\\u0E30\\u0E32\\u0E33\\u0E40-\\u0E46\\u0E81\\u0E82\\u0E84\\u0E87\\u0E88\\u0E8A\\u0E8D\\u0E94-\\u0E97\\u0E99-\\u0E9F\\u0EA1-\\u0EA3\\u0EA5\\u0EA7\\u0EAA\\u0EAB\\u0EAD-\\u0EB0\\u0EB2\\u0EB3\\u0EBD\\u0EC0-\\u0EC4\\u0EC6\\u0EDC-\\u0EDF\\u0F00\\u0F40-\\u0F47\\u0F49-\\u0F6C\\u0F88-\\u0F8C\\u1000-\\u102A\\u103F\\u1050-\\u1055\\u105A-\\u105D\\u1061\\u1065\\u1066\\u106E-\\u1070\\u1075-\\u1081\\u108E\\u10A0-\\u10C5\\u10C7\\u10CD\\u10D0-\\u10FA\\u10FC-\\u1248\\u124A-\\u124D\\u1250-\\u1256\\u1258\\u125A-\\u125D\\u1260-\\u1288\\u128A-\\u128D\\u1290-\\u12B0\\u12B2-\\u12B5\\u12B8-\\u12BE\\u12C0\\u12C2-\\u12C5\\u12C8-\\u12D6\\u12D8-\\u1310\\u1312-\\u1315\\u1318-\\u135A\\u1380-\\u138F\\u13A0-\\u13F4\\u1401-\\u166C\\u166F-\\u167F\\u1681-\\u169A\\u16A0-\\u16EA\\u16EE-\\u16F8\\u1700-\\u170C\\u170E-\\u1711\\u1720-\\u1731\\u1740-\\u1751\\u1760-\\u176C\\u176E-\\u1770\\u1780-\\u17B3\\u17D7\\u17DC\\u1820-\\u1877\\u1880-\\u18A8\\u18AA\\u18B0-\\u18F5\\u1900-\\u191E\\u1950-\\u196D\\u1970-\\u1974\\u1980-\\u19AB\\u19C1-\\u19C7\\u1A00-\\u1A16\\u1A20-\\u1A54\\u1AA7\\u1B05-\\u1B33\\u1B45-\\u1B4B\\u1B83-\\u1BA0\\u1BAE\\u1BAF\\u1BBA-\\u1BE5\\u1C00-\\u1C23\\u1C4D-\\u1C4F\\u1C5A-\\u1C7D\\u1CE9-\\u1CEC\\u1CEE-\\u1CF1\\u1CF5\\u1CF6\\u1D00-\\u1DBF\\u1E00-\\u1F15\\u1F18-\\u1F1D\\u1F20-\\u1F45\\u1F48-\\u1F4D\\u1F50-\\u1F57\\u1F59\\u1F5B\\u1F5D\\u1F5F-\\u1F7D\\u1F80-\\u1FB4\\u1FB6-\\u1FBC\\u1FBE\\u1FC2-\\u1FC4\\u1FC6-\\u1FCC\\u1FD0-\\u1FD3\\u1FD6-\\u1FDB\\u1FE0-\\u1FEC\\u1FF2-\\u1FF4\\u1FF6-\\u1FFC\\u2071\\u207F\\u2090-\\u209C\\u2102\\u2107\\u210A-\\u2113\\u2115\\u2119-\\u211D\\u2124\\u2126\\u2128\\u212A-\\u212D\\u212F-\\u2139\\u213C-\\u213F\\u2145-\\u2149\\u214E\\u2160-\\u2188\\u2C00-\\u2C2E\\u2C30-\\u2C5E\\u2C60-\\u2CE4\\u2CEB-\\u2CEE\\u2CF2\\u2CF3\\u2D00-\\u2D25\\u2D27\\u2D2D\\u2D30-\\u2D67\\u2D6F\\u2D80-\\u2D96\\u2DA0-\\u2DA6\\u2DA8-\\u2DAE\\u2DB0-\\u
2DB6\\u2DB8-\\u2DBE\\u2DC0-\\u2DC6\\u2DC8-\\u2DCE\\u2DD0-\\u2DD6\\u2DD8-\\u2DDE\\u2E2F\\u3005-\\u3007\\u3021-\\u3029\\u3031-\\u3035\\u3038-\\u303C\\u3041-\\u3096\\u309D-\\u309F\\u30A1-\\u30FA\\u30FC-\\u30FF\\u3105-\\u312D\\u3131-\\u318E\\u31A0-\\u31BA\\u31F0-\\u31FF\\u3400-\\u4DB5\\u4E00-\\u9FCC\\uA000-\\uA48C\\uA4D0-\\uA4FD\\uA500-\\uA60C\\uA610-\\uA61F\\uA62A\\uA62B\\uA640-\\uA66E\\uA67F-\\uA69D\\uA6A0-\\uA6EF\\uA717-\\uA71F\\uA722-\\uA788\\uA78B-\\uA78E\\uA790-\\uA7AD\\uA7B0\\uA7B1\\uA7F7-\\uA801\\uA803-\\uA805\\uA807-\\uA80A\\uA80C-\\uA822\\uA840-\\uA873\\uA882-\\uA8B3\\uA8F2-\\uA8F7\\uA8FB\\uA90A-\\uA925\\uA930-\\uA946\\uA960-\\uA97C\\uA984-\\uA9B2\\uA9CF\\uA9E0-\\uA9E4\\uA9E6-\\uA9EF\\uA9FA-\\uA9FE\\uAA00-\\uAA28\\uAA40-\\uAA42\\uAA44-\\uAA4B\\uAA60-\\uAA76\\uAA7A\\uAA7E-\\uAAAF\\uAAB1\\uAAB5\\uAAB6\\uAAB9-\\uAABD\\uAAC0\\uAAC2\\uAADB-\\uAADD\\uAAE0-\\uAAEA\\uAAF2-\\uAAF4\\uAB01-\\uAB06\\uAB09-\\uAB0E\\uAB11-\\uAB16\\uAB20-\\uAB26\\uAB28-\\uAB2E\\uAB30-\\uAB5A\\uAB5C-\\uAB5F\\uAB64\\uAB65\\uABC0-\\uABE2\\uAC00-\\uD7A3\\uD7B0-\\uD7C6\\uD7CB-\\uD7FB\\uF900-\\uFA6D\\uFA70-\\uFAD9\\uFB00-\\uFB06\\uFB13-\\uFB17\\uFB1D\\uFB1F-\\uFB28\\uFB2A-\\uFB36\\uFB38-\\uFB3C\\uFB3E\\uFB40\\uFB41\\uFB43\\uFB44\\uFB46-\\uFBB1\\uFBD3-\\uFD3D\\uFD50-\\uFD8F\\uFD92-\\uFDC7\\uFDF0-\\uFDFB\\uFE70-\\uFE74\\uFE76-\\uFEFC\\uFF21-\\uFF3A\\uFF41-\\uFF5A\\uFF66-\\uFFBE\\uFFC2-\\uFFC7\\uFFCA-\\uFFCF\\uFFD2-\\uFFD7\\uFFDA-\\uFFDC]"),
|
|
digit: new RegExp("[\\u0030-\\u0039\\u0660-\\u0669\\u06F0-\\u06F9\\u07C0-\\u07C9\\u0966-\\u096F\\u09E6-\\u09EF\\u0A66-\\u0A6F\\u0AE6-\\u0AEF\\u0B66-\\u0B6F\\u0BE6-\\u0BEF\\u0C66-\\u0C6F\\u0CE6-\\u0CEF\\u0D66-\\u0D6F\\u0DE6-\\u0DEF\\u0E50-\\u0E59\\u0ED0-\\u0ED9\\u0F20-\\u0F29\\u1040-\\u1049\\u1090-\\u1099\\u17E0-\\u17E9\\u1810-\\u1819\\u1946-\\u194F\\u19D0-\\u19D9\\u1A80-\\u1A89\\u1A90-\\u1A99\\u1B50-\\u1B59\\u1BB0-\\u1BB9\\u1C40-\\u1C49\\u1C50-\\u1C59\\uA620-\\uA629\\uA8D0-\\uA8D9\\uA900-\\uA909\\uA9D0-\\uA9D9\\uA9F0-\\uA9F9\\uAA50-\\uAA59\\uABF0-\\uABF9\\uFF10-\\uFF19]"),
non_spacing_mark: new RegExp("[\\u0300-\\u036F\\u0483-\\u0487\\u0591-\\u05BD\\u05BF\\u05C1\\u05C2\\u05C4\\u05C5\\u05C7\\u0610-\\u061A\\u064B-\\u065E\\u0670\\u06D6-\\u06DC\\u06DF-\\u06E4\\u06E7\\u06E8\\u06EA-\\u06ED\\u0711\\u0730-\\u074A\\u07A6-\\u07B0\\u07EB-\\u07F3\\u0816-\\u0819\\u081B-\\u0823\\u0825-\\u0827\\u0829-\\u082D\\u0900-\\u0902\\u093C\\u0941-\\u0948\\u094D\\u0951-\\u0955\\u0962\\u0963\\u0981\\u09BC\\u09C1-\\u09C4\\u09CD\\u09E2\\u09E3\\u0A01\\u0A02\\u0A3C\\u0A41\\u0A42\\u0A47\\u0A48\\u0A4B-\\u0A4D\\u0A51\\u0A70\\u0A71\\u0A75\\u0A81\\u0A82\\u0ABC\\u0AC1-\\u0AC5\\u0AC7\\u0AC8\\u0ACD\\u0AE2\\u0AE3\\u0B01\\u0B3C\\u0B3F\\u0B41-\\u0B44\\u0B4D\\u0B56\\u0B62\\u0B63\\u0B82\\u0BC0\\u0BCD\\u0C3E-\\u0C40\\u0C46-\\u0C48\\u0C4A-\\u0C4D\\u0C55\\u0C56\\u0C62\\u0C63\\u0CBC\\u0CBF\\u0CC6\\u0CCC\\u0CCD\\u0CE2\\u0CE3\\u0D41-\\u0D44\\u0D4D\\u0D62\\u0D63\\u0DCA\\u0DD2-\\u0DD4\\u0DD6\\u0E31\\u0E34-\\u0E3A\\u0E47-\\u0E4E\\u0EB1\\u0EB4-\\u0EB9\\u0EBB\\u0EBC\\u0EC8-\\u0ECD\\u0F18\\u0F19\\u0F35\\u0F37\\u0F39\\u0F71-\\u0F7E\\u0F80-\\u0F84\\u0F86\\u0F87\\u0F90-\\u0F97\\u0F99-\\u0FBC\\u0FC6\\u102D-\\u1030\\u1032-\\u1037\\u1039\\u103A\\u103D\\u103E\\u1058\\u1059\\u105E-\\u1060\\u1071-\\u1074\\u1082\\u1085\\u1086\\u108D\\u109D\\u135F\\u1712-\\u1714\\u1732-\\u1734\\u1752\\u1753\\u1772\\u1773\\u17B7-\\u17BD\\u17C6\\u17C9-\\u17D3\\u17DD\\u180B-\\u180D\\u18A9\\u1920-\\u1922\\u1927\\u1928\\u1932\\u1939-\\u193B\\u1A17\\u1A18\\u1A56\\u1A58-\\u1A5E\\u1A60\\u1A62\\u1A65-\\u1A6C\\u1A73-\\u1A7C\\u1A7F\\u1B00-\\u1B03\\u1B34\\u1B36-\\u1B3A\\u1B3C\\u1B42\\u1B6B-\\u1B73\\u1B80\\u1B81\\u1BA2-\\u1BA5\\u1BA8\\u1BA9\\u1C2C-\\u1C33\\u1C36\\u1C37\\u1CD0-\\u1CD2\\u1CD4-\\u1CE0\\u1CE2-\\u1CE8\\u1CED\\u1DC0-\\u1DE6\\u1DFD-\\u1DFF\\u20D0-\\u20DC\\u20E1\\u20E5-\\u20F0\\u2CEF-\\u2CF1\\u2DE0-\\u2DFF\\u302A-\\u302F\\u3099\\u309A\\uA66F\\uA67C\\uA67D\\uA6F0\\uA6F1\\uA802\\uA806\\uA80B\\uA825\\uA826\\uA8C4\\uA8E0-\\uA8F1\\uA926-\\uA92D\\uA947-\\uA951\\uA980-\\uA982\\uA9B3\\uA9B6-\\uA9B9\\uA9BC\\uAA29-\\uAA2E\\uAA31\\uAA32\\uAA35\\uAA36\\uAA43\\uAA4C\\uAAB0\\uAAB2-\\uAAB4\\uAAB7\\uAAB8\\uAABE\\uAABF\\uAAC1\\uABE5\\uABE8\\uABED\\uFB1E\\uFE00-\\uFE0F\\uFE20-\\uFE26]"),
space_combining_mark: new RegExp("[\\u0903\\u093E-\\u0940\\u0949-\\u094C\\u094E\\u0982\\u0983\\u09BE-\\u09C0\\u09C7\\u09C8\\u09CB\\u09CC\\u09D7\\u0A03\\u0A3E-\\u0A40\\u0A83\\u0ABE-\\u0AC0\\u0AC9\\u0ACB\\u0ACC\\u0B02\\u0B03\\u0B3E\\u0B40\\u0B47\\u0B48\\u0B4B\\u0B4C\\u0B57\\u0BBE\\u0BBF\\u0BC1\\u0BC2\\u0BC6-\\u0BC8\\u0BCA-\\u0BCC\\u0BD7\\u0C01-\\u0C03\\u0C41-\\u0C44\\u0C82\\u0C83\\u0CBE\\u0CC0-\\u0CC4\\u0CC7\\u0CC8\\u0CCA\\u0CCB\\u0CD5\\u0CD6\\u0D02\\u0D03\\u0D3E-\\u0D40\\u0D46-\\u0D48\\u0D4A-\\u0D4C\\u0D57\\u0D82\\u0D83\\u0DCF-\\u0DD1\\u0DD8-\\u0DDF\\u0DF2\\u0DF3\\u0F3E\\u0F3F\\u0F7F\\u102B\\u102C\\u1031\\u1038\\u103B\\u103C\\u1056\\u1057\\u1062-\\u1064\\u1067-\\u106D\\u1083\\u1084\\u1087-\\u108C\\u108F\\u109A-\\u109C\\u17B6\\u17BE-\\u17C5\\u17C7\\u17C8\\u1923-\\u1926\\u1929-\\u192B\\u1930\\u1931\\u1933-\\u1938\\u19B0-\\u19C0\\u19C8\\u19C9\\u1A19-\\u1A1B\\u1A55\\u1A57\\u1A61\\u1A63\\u1A64\\u1A6D-\\u1A72\\u1B04\\u1B35\\u1B3B\\u1B3D-\\u1B41\\u1B43\\u1B44\\u1B82\\u1BA1\\u1BA6\\u1BA7\\u1BAA\\u1C24-\\u1C2B\\u1C34\\u1C35\\u1CE1\\u1CF2\\uA823\\uA824\\uA827\\uA880\\uA881\\uA8B4-\\uA8C3\\uA952\\uA953\\uA983\\uA9B4\\uA9B5\\uA9BA\\uA9BB\\uA9BD-\\uA9C0\\uAA2F\\uAA30\\uAA33\\uAA34\\uAA4D\\uAA7B\\uABE3\\uABE4\\uABE6\\uABE7\\uABE9\\uABEA\\uABEC]"),
connector_punctuation: new RegExp("[\\u005F\\u203F\\u2040\\u2054\\uFE33\\uFE34\\uFE4D-\\uFE4F\\uFF3F]")
};
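
// The helpers below implement the ES5 identifier grammar: an IdentifierStart
// is a letter, "$" or "_", and an IdentifierPart additionally allows digits,
// ZWNJ/ZWJ and the Unicode combining-mark / connector-punctuation classes
// matched by the UNICODE tables above.  ASCII ranges are checked inline as a
// fast path before consulting the (much larger) Unicode regexps.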
function is_letter(code) {
    return (code >= 97 && code <= 122)
        || (code >= 65 && code <= 90)
        || (code >= 0xaa && UNICODE.letter.test(String.fromCharCode(code)));
};

function is_digit(code) {
    return code >= 48 && code <= 57;
};

function is_alphanumeric_char(code) {
    return is_digit(code) || is_letter(code);
};

function is_unicode_digit(code) {
    return UNICODE.digit.test(String.fromCharCode(code));
}

function is_unicode_combining_mark(ch) {
    return UNICODE.non_spacing_mark.test(ch) || UNICODE.space_combining_mark.test(ch);
};

function is_unicode_connector_punctuation(ch) {
    return UNICODE.connector_punctuation.test(ch);
};

function is_identifier(name) {
    return !RESERVED_WORDS(name) && /^[a-z_$][a-z0-9_$]*$/i.test(name);
};

function is_identifier_start(code) {
    return code == 36 || code == 95 || is_letter(code);
};

function is_identifier_char(ch) {
    var code = ch.charCodeAt(0);
    return is_identifier_start(code)
        || is_digit(code)
        || code == 8204 // \u200c: zero-width non-joiner <ZWNJ>
        || code == 8205 // \u200d: zero-width joiner <ZWJ> (in my ECMA-262 PDF, this is also 200c)
        || is_unicode_combining_mark(ch)
        || is_unicode_connector_punctuation(ch)
        || is_unicode_digit(code)
    ;
};

function is_identifier_string(str) {
    return /^[a-z_$][a-z0-9_$]*$/i.test(str);
};
|
|
|
|
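// parse_js_number: convert the raw text of a numeric literal into its value.
// Hex ("0x...") and legacy octal ("0...") literals are handled explicitly;
// everything else goes through parseFloat().  Malformed input yields
// undefined, which read_num() below reports as a parse error.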
function parse_js_number(num) {
    if (RE_HEX_NUMBER.test(num)) {
        return parseInt(num.substr(2), 16);
    } else if (RE_OCT_NUMBER.test(num)) {
        return parseInt(num.substr(1), 8);
    } else {
        var val = parseFloat(num);
        if (val == num) return val;
    }
};
|
|
|
|
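// Errors thrown by the parser.  JS_Parse_Error carries the file name and the
// line/column/character position of the offending token and reports itself
// as a "SyntaxError"; js_error() is the helper that actually throws it.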
function JS_Parse_Error(message, filename, line, col, pos) {
    this.message = message;
    this.filename = filename;
    this.line = line;
    this.col = col;
    this.pos = pos;
};
JS_Parse_Error.prototype = Object.create(Error.prototype);
JS_Parse_Error.prototype.constructor = JS_Parse_Error;
JS_Parse_Error.prototype.name = "SyntaxError";
configure_error_stack(JS_Parse_Error);

function js_error(message, filename, line, col, pos) {
    throw new JS_Parse_Error(message, filename, line, col, pos);
};

function is_token(token, type, val) {
    return token.type == type && (val == null || token.value == val);
};
|
|
|
|
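// tokenizer() returns the next_token() closure defined below.  S is the
// scanner state: the current position, line/column bookkeeping for the token
// being read, comments collected since the last real token, and the
// directive ("use strict") bookkeeping.  EX_EOF is thrown by next() when the
// input ends unexpectedly and is translated into a parse error by
// with_eof_error().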
var EX_EOF = {};

function tokenizer($TEXT, filename, html5_comments, shebang) {

    var S = {
        text            : $TEXT,
        filename        : filename,
        pos             : 0,
        tokpos          : 0,
        line            : 1,
        tokline         : 0,
        col             : 0,
        tokcol          : 0,
        newline_before  : false,
        regex_allowed   : false,
        comments_before : [],
        directives      : {},
        directive_stack : []
    };

    function peek() { return S.text.charAt(S.pos); };
|
|
|
|
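    // Consume one character, keeping line/column counters up to date and
    // folding a "\r\n" pair into a single "\n" outside of string literals.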
    function next(signal_eof, in_string) {
        var ch = S.text.charAt(S.pos++);
        if (signal_eof && !ch)
            throw EX_EOF;
        if (NEWLINE_CHARS(ch)) {
            S.newline_before = S.newline_before || !in_string;
            ++S.line;
            S.col = 0;
            if (!in_string && ch == "\r" && peek() == "\n") {
                // treat a \r\n sequence as a single \n
                ++S.pos;
                ch = "\n";
            }
        } else {
            ++S.col;
        }
        return ch;
    };
|
|
|
|
function forward(i) {
|
|
while (i-- > 0) next();
|
|
};
|
|
|
|
function looking_at(str) {
|
|
return S.text.substr(S.pos, str.length) == str;
|
|
};
|
|
|
|
function find_eol() {
|
|
var text = S.text;
|
|
for (var i = S.pos, n = S.text.length; i < n; ++i) {
|
|
var ch = text[i];
|
|
if (NEWLINE_CHARS(ch))
|
|
return i;
|
|
}
|
|
return -1;
|
|
};
|
|
|
|
function find(what, signal_eof) {
|
|
var pos = S.text.indexOf(what, S.pos);
|
|
if (signal_eof && pos == -1) throw EX_EOF;
|
|
return pos;
|
|
};
|
|
|
|
function start_token() {
|
|
S.tokline = S.line;
|
|
S.tokcol = S.col;
|
|
S.tokpos = S.pos;
|
|
};
|
|
|
|
var prev_was_dot = false;
|
|
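    // Build an AST_Token.  regex_allowed records whether a following "/"
    // should start a regexp literal (true after operators, keywords and
    // punctuation that precede an expression); comments gathered since the
    // previous token are attached to the new one via comments_before.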
function token(type, value, is_comment) {
|
|
S.regex_allowed = ((type == "operator" && !UNARY_POSTFIX(value)) ||
|
|
(type == "keyword" && KEYWORDS_BEFORE_EXPRESSION(value)) ||
|
|
(type == "punc" && PUNC_BEFORE_EXPRESSION(value)));
|
|
if (type == "punc" && value == ".") {
|
|
prev_was_dot = true;
|
|
} else if (!is_comment) {
|
|
prev_was_dot = false;
|
|
}
|
|
var ret = {
|
|
type : type,
|
|
value : value,
|
|
line : S.tokline,
|
|
col : S.tokcol,
|
|
pos : S.tokpos,
|
|
endline : S.line,
|
|
endcol : S.col,
|
|
endpos : S.pos,
|
|
nlb : S.newline_before,
|
|
file : filename
|
|
};
|
|
if (/^(?:num|string|regexp)$/i.test(type)) {
|
|
ret.raw = $TEXT.substring(ret.pos, ret.endpos);
|
|
}
|
|
if (!is_comment) {
|
|
ret.comments_before = S.comments_before;
|
|
S.comments_before = [];
|
|
// make note of any newlines in the comments that came before
|
|
for (var i = 0, len = ret.comments_before.length; i < len; i++) {
|
|
ret.nlb = ret.nlb || ret.comments_before[i].nlb;
|
|
}
|
|
}
|
|
S.newline_before = false;
|
|
return new AST_Token(ret);
|
|
};
|
|
|
|
function skip_whitespace() {
|
|
while (WHITESPACE_CHARS(peek()))
|
|
next();
|
|
};
|
|
|
|
function read_while(pred) {
|
|
var ret = "", ch, i = 0;
|
|
while ((ch = peek()) && pred(ch, i++))
|
|
ret += next();
|
|
return ret;
|
|
};
|
|
|
|
function parse_error(err) {
|
|
js_error(err, filename, S.tokline, S.tokcol, S.tokpos);
|
|
};
|
|
|
|
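    // Scan the characters that may appear in a numeric literal (digits, a
    // single ".", an "e"/"E" exponent with optional sign, or an "x"/"X" hex
    // prefix) and validate the result with parse_js_number().  Legacy octal
    // literals are rejected when a "use strict" directive is in force.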
function read_num(prefix) {
|
|
var has_e = false, after_e = false, has_x = false, has_dot = prefix == ".";
|
|
var num = read_while(function(ch, i){
|
|
var code = ch.charCodeAt(0);
|
|
switch (code) {
|
|
case 120: case 88: // xX
|
|
return has_x ? false : (has_x = true);
|
|
case 101: case 69: // eE
|
|
return has_x ? true : has_e ? false : (has_e = after_e = true);
|
|
case 45: // -
|
|
return after_e || (i == 0 && !prefix);
|
|
case 43: // +
|
|
return after_e;
|
|
case (after_e = false, 46): // .
|
|
return (!has_dot && !has_x && !has_e) ? (has_dot = true) : false;
|
|
}
|
|
return is_alphanumeric_char(code);
|
|
});
|
|
if (prefix) num = prefix + num;
|
|
if (RE_OCT_NUMBER.test(num) && next_token.has_directive("use strict")) {
|
|
parse_error("Legacy octal literals are not allowed in strict mode");
|
|
}
|
|
var valid = parse_js_number(num);
|
|
if (!isNaN(valid)) {
|
|
return token("num", valid);
|
|
} else {
|
|
parse_error("Invalid syntax: " + num);
|
|
}
|
|
};
|
|
|
|
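    // Decode one escape sequence following a backslash: the single-letter
    // escapes, \xHH and \uHHHH (via hex_bytes), line continuations (an
    // escaped newline produces the empty string) and legacy octal escapes,
    // which are only allowed outside strict mode (except for "\0").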
function read_escaped_char(in_string) {
|
|
var ch = next(true, in_string);
|
|
switch (ch.charCodeAt(0)) {
|
|
case 110 : return "\n";
|
|
case 114 : return "\r";
|
|
case 116 : return "\t";
|
|
case 98 : return "\b";
|
|
case 118 : return "\u000b"; // \v
|
|
case 102 : return "\f";
|
|
case 120 : return String.fromCharCode(hex_bytes(2)); // \x
|
|
case 117 : return String.fromCharCode(hex_bytes(4)); // \u
|
|
case 10 : return ""; // newline
|
|
case 13 : // \r
|
|
if (peek() == "\n") { // DOS newline
|
|
next(true, in_string);
|
|
return "";
|
|
}
|
|
}
|
|
if (ch >= "0" && ch <= "7")
|
|
return read_octal_escape_sequence(ch);
|
|
return ch;
|
|
};
|
|
|
|
function read_octal_escape_sequence(ch) {
|
|
// Read
|
|
var p = peek();
|
|
if (p >= "0" && p <= "7") {
|
|
ch += next(true);
|
|
if (ch[0] <= "3" && (p = peek()) >= "0" && p <= "7")
|
|
ch += next(true);
|
|
}
|
|
|
|
// Parse
|
|
if (ch === "0") return "\0";
|
|
if (ch.length > 0 && next_token.has_directive("use strict"))
|
|
parse_error("Legacy octal escape sequences are not allowed in strict mode");
|
|
return String.fromCharCode(parseInt(ch, 8));
|
|
}
|
|
|
|
function hex_bytes(n) {
|
|
var num = 0;
|
|
for (; n > 0; --n) {
|
|
var digit = parseInt(next(true), 16);
|
|
if (isNaN(digit))
|
|
parse_error("Invalid hex-character pattern in string");
|
|
num = (num << 4) | digit;
|
|
}
|
|
return num;
|
|
};
|
|
|
|
var read_string = with_eof_error("Unterminated string constant", function(quote_char){
|
|
var quote = next(), ret = "";
|
|
for (;;) {
|
|
var ch = next(true, true);
|
|
if (ch == "\\") ch = read_escaped_char(true);
|
|
else if (NEWLINE_CHARS(ch)) parse_error("Unterminated string constant");
|
|
else if (ch == quote) break;
|
|
ret += ch;
|
|
}
|
|
var tok = token("string", ret);
|
|
tok.quote = quote_char;
|
|
return tok;
|
|
});
|
|
|
|
function skip_line_comment(type) {
|
|
var regex_allowed = S.regex_allowed;
|
|
var i = find_eol(), ret;
|
|
if (i == -1) {
|
|
ret = S.text.substr(S.pos);
|
|
S.pos = S.text.length;
|
|
} else {
|
|
ret = S.text.substring(S.pos, i);
|
|
S.pos = i;
|
|
}
|
|
S.col = S.tokcol + (S.pos - S.tokpos);
|
|
S.comments_before.push(token(type, ret, true));
|
|
S.regex_allowed = regex_allowed;
|
|
return next_token;
|
|
};
|
|
|
|
var skip_multiline_comment = with_eof_error("Unterminated multiline comment", function(){
|
|
var regex_allowed = S.regex_allowed;
|
|
var i = find("*/", true);
|
|
var text = S.text.substring(S.pos, i).replace(/\r\n|\r|\u2028|\u2029/g, '\n');
|
|
// update stream position
|
|
forward(text.length /* doesn't count \r\n as 2 char while S.pos - i does */ + 2);
|
|
S.comments_before.push(token("comment2", text, true));
|
|
S.regex_allowed = regex_allowed;
|
|
return next_token;
|
|
});
|
|
|
|
function read_name() {
|
|
var backslash = false, name = "", ch, escaped = false, hex;
|
|
while ((ch = peek()) != null) {
|
|
if (!backslash) {
|
|
if (ch == "\\") escaped = backslash = true, next();
|
|
else if (is_identifier_char(ch)) name += next();
|
|
else break;
|
|
}
|
|
else {
|
|
if (ch != "u") parse_error("Expecting UnicodeEscapeSequence -- uXXXX");
|
|
ch = read_escaped_char();
|
|
if (!is_identifier_char(ch)) parse_error("Unicode char: " + ch.charCodeAt(0) + " is not valid in identifier");
|
|
name += ch;
|
|
backslash = false;
|
|
}
|
|
}
|
|
if (KEYWORDS(name) && escaped) {
|
|
hex = name.charCodeAt(0).toString(16).toUpperCase();
|
|
name = "\\u" + "0000".substr(hex.length) + hex + name.slice(1);
|
|
}
|
|
return name;
|
|
};
|
|
|
|
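    // Scan the body of a regexp literal started by handle_slash().  Escapes
    // and "[...]" character classes are tracked so that a "/" inside a class
    // does not terminate the literal; the flags are read with read_name()
    // and the result is compiled with the native RegExp constructor.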
var read_regexp = with_eof_error("Unterminated regular expression", function(regexp){
|
|
var prev_backslash = false, ch, in_class = false;
|
|
while ((ch = next(true))) if (NEWLINE_CHARS(ch)) {
|
|
parse_error("Unexpected line terminator");
|
|
} else if (prev_backslash) {
|
|
regexp += "\\" + ch;
|
|
prev_backslash = false;
|
|
} else if (ch == "[") {
|
|
in_class = true;
|
|
regexp += ch;
|
|
} else if (ch == "]" && in_class) {
|
|
in_class = false;
|
|
regexp += ch;
|
|
} else if (ch == "/" && !in_class) {
|
|
break;
|
|
} else if (ch == "\\") {
|
|
prev_backslash = true;
|
|
} else {
|
|
regexp += ch;
|
|
}
|
|
var mods = read_name();
|
|
try {
|
|
return token("regexp", new RegExp(regexp, mods));
|
|
} catch(e) {
|
|
parse_error(e.message);
|
|
}
|
|
});
|
|
|
|
function read_operator(prefix) {
|
|
function grow(op) {
|
|
if (!peek()) return op;
|
|
var bigger = op + peek();
|
|
if (OPERATORS(bigger)) {
|
|
next();
|
|
return grow(bigger);
|
|
} else {
|
|
return op;
|
|
}
|
|
};
|
|
return token("operator", grow(prefix || next()));
|
|
};
|
|
|
|
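    // A "/" can start a line comment, a block comment, a regexp literal or a
    // division operator; the last two are disambiguated by S.regex_allowed,
    // which token() maintains based on the previous token.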
function handle_slash() {
|
|
next();
|
|
switch (peek()) {
|
|
case "/":
|
|
next();
|
|
return skip_line_comment("comment1");
|
|
case "*":
|
|
next();
|
|
return skip_multiline_comment();
|
|
}
|
|
return S.regex_allowed ? read_regexp("") : read_operator("/");
|
|
};
|
|
|
|
function handle_dot() {
|
|
next();
|
|
return is_digit(peek().charCodeAt(0))
|
|
? read_num(".")
|
|
: token("punc", ".");
|
|
};
|
|
|
|
function read_word() {
|
|
var word = read_name();
|
|
if (prev_was_dot) return token("name", word);
|
|
return KEYWORDS_ATOM(word) ? token("atom", word)
|
|
: !KEYWORDS(word) ? token("name", word)
|
|
: OPERATORS(word) ? token("operator", word)
|
|
: token("keyword", word);
|
|
};
|
|
|
|
function with_eof_error(eof_error, cont) {
|
|
return function(x) {
|
|
try {
|
|
return cont(x);
|
|
} catch(ex) {
|
|
if (ex === EX_EOF) parse_error(eof_error);
|
|
else throw ex;
|
|
}
|
|
};
|
|
};
|
|
|
|
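    // The tokenizer's entry point: skip whitespace, an optional shebang line
    // and HTML-style comments, then dispatch on the first character (quote,
    // dot, slash, digit, punctuation, operator or identifier start).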
function next_token(force_regexp) {
|
|
if (force_regexp != null)
|
|
return read_regexp(force_regexp);
|
|
if (shebang && S.pos == 0 && looking_at("#!")) {
|
|
start_token();
|
|
forward(2);
|
|
skip_line_comment("comment5");
|
|
}
|
|
for (;;) {
|
|
skip_whitespace();
|
|
start_token();
|
|
if (html5_comments) {
|
|
if (looking_at("<!--")) {
|
|
forward(4);
|
|
skip_line_comment("comment3");
|
|
continue;
|
|
}
|
|
if (looking_at("-->") && S.newline_before) {
|
|
forward(3);
|
|
skip_line_comment("comment4");
|
|
continue;
|
|
}
|
|
}
|
|
var ch = peek();
|
|
if (!ch) return token("eof");
|
|
var code = ch.charCodeAt(0);
|
|
switch (code) {
|
|
case 34: case 39: return read_string(ch);
|
|
case 46: return handle_dot();
|
|
case 47: {
|
|
var tok = handle_slash();
|
|
if (tok === next_token) continue;
|
|
return tok;
|
|
}
|
|
}
|
|
if (is_digit(code)) return read_num();
|
|
if (PUNC_CHARS(ch)) return token("punc", next());
|
|
if (OPERATOR_CHARS(ch)) return read_operator();
|
|
if (code == 92 || is_identifier_start(code)) return read_word();
|
|
break;
|
|
}
|
|
parse_error("Unexpected character '" + ch + "'");
|
|
};
|
|
|
|
next_token.context = function(nc) {
|
|
if (nc) S = nc;
|
|
return S;
|
|
};
|
|
|
|
next_token.add_directive = function(directive) {
|
|
S.directive_stack[S.directive_stack.length - 1].push(directive);
|
|
|
|
if (S.directives[directive] === undefined) {
|
|
S.directives[directive] = 1;
|
|
} else {
|
|
S.directives[directive]++;
|
|
}
|
|
}
|
|
|
|
next_token.push_directives_stack = function() {
|
|
S.directive_stack.push([]);
|
|
}
|
|
|
|
next_token.pop_directives_stack = function() {
|
|
var directives = S.directive_stack[S.directive_stack.length - 1];
|
|
|
|
for (var i = 0; i < directives.length; i++) {
|
|
S.directives[directives[i]]--;
|
|
}
|
|
|
|
S.directive_stack.pop();
|
|
}
|
|
|
|
next_token.has_directive = function(directive) {
|
|
return S.directives[directive] !== undefined &&
|
|
S.directives[directive] > 0;
|
|
}
|
|
|
|
return next_token;
|
|
|
|
};
|
|
|
|
/* -----[ Parser (constants) ]----- */
|
|
|
|
var UNARY_PREFIX = makePredicate([
    "typeof",
    "void",
    "delete",
    "--",
    "++",
    "!",
    "~",
    "-",
    "+"
]);

var UNARY_POSTFIX = makePredicate([ "--", "++" ]);

var ASSIGNMENT = makePredicate([ "=", "+=", "-=", "/=", "*=", "%=", ">>=", "<<=", ">>>=", "|=", "^=", "&=" ]);
|
|
|
|
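// Build the operator-precedence table: each operator is mapped to the
// 1-based index of its row, so "||" binds loosest (1) and "*", "/", "%" bind
// tightest (10).  expr_op() below compares these levels while parsing binary
// expressions.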
var PRECEDENCE = (function(a, ret){
    for (var i = 0; i < a.length; ++i) {
        var b = a[i];
        for (var j = 0; j < b.length; ++j) {
            ret[b[j]] = i + 1;
        }
    }
    return ret;
})(
    [
        ["||"],
        ["&&"],
        ["|"],
        ["^"],
        ["&"],
        ["==", "===", "!=", "!=="],
        ["<", ">", "<=", ">=", "in", "instanceof"],
        [">>", "<<", ">>>"],
        ["+", "-"],
        ["*", "/", "%"]
    ],
    {}
);
|
|
|
|
var STATEMENTS_WITH_LABELS = array_to_hash([ "for", "do", "while", "switch" ]);

var ATOMIC_START_TOKEN = array_to_hash([ "atom", "num", "string", "regexp", "name" ]);
|
|
|
|
/* -----[ Parser ]----- */
|
|
|
|
function parse($TEXT, options) {
|
|
|
|
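    // Parser options: bare_returns permits `return` outside of a function,
    // expression parses a single expression instead of a whole program,
    // strict disallows automatic semicolon insertion and trailing commas,
    // html5_comments/shebang toggle those comment forms, and toplevel lets
    // the caller append the parsed statements to an existing AST_Toplevel.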
    options = defaults(options, {
        bare_returns   : false,
        cli            : false,
        expression     : false,
        filename       : null,
        html5_comments : true,
        shebang        : true,
        strict         : false,
        toplevel       : null,
    });
|
|
|
|
var S = {
|
|
input : (typeof $TEXT == "string"
|
|
? tokenizer($TEXT, options.filename,
|
|
options.html5_comments, options.shebang)
|
|
: $TEXT),
|
|
token : null,
|
|
prev : null,
|
|
peeked : null,
|
|
in_function : 0,
|
|
in_directives : true,
|
|
in_loop : 0,
|
|
labels : []
|
|
};
|
|
|
|
S.token = next();
|
|
|
|
function is(type, value) {
|
|
return is_token(S.token, type, value);
|
|
};
|
|
|
|
function peek() { return S.peeked || (S.peeked = S.input()); };
|
|
|
|
function next() {
|
|
S.prev = S.token;
|
|
if (S.peeked) {
|
|
S.token = S.peeked;
|
|
S.peeked = null;
|
|
} else {
|
|
S.token = S.input();
|
|
}
|
|
S.in_directives = S.in_directives && (
|
|
S.token.type == "string" || is("punc", ";")
|
|
);
|
|
return S.token;
|
|
};
|
|
|
|
function prev() {
|
|
return S.prev;
|
|
};
|
|
|
|
function croak(msg, line, col, pos) {
|
|
var ctx = S.input.context();
|
|
js_error(msg,
|
|
ctx.filename,
|
|
line != null ? line : ctx.tokline,
|
|
col != null ? col : ctx.tokcol,
|
|
pos != null ? pos : ctx.tokpos);
|
|
};
|
|
|
|
function token_error(token, msg) {
|
|
croak(msg, token.line, token.col);
|
|
};
|
|
|
|
function unexpected(token) {
|
|
if (token == null)
|
|
token = S.token;
|
|
token_error(token, "Unexpected token: " + token.type + " (" + token.value + ")");
|
|
};
|
|
|
|
function expect_token(type, val) {
|
|
if (is(type, val)) {
|
|
return next();
|
|
}
|
|
token_error(S.token, "Unexpected token " + S.token.type + " «" + S.token.value + "»" + ", expected " + type + " «" + val + "»");
|
|
};
|
|
|
|
function expect(punc) { return expect_token("punc", punc); };
|
|
|
|
function can_insert_semicolon() {
|
|
return !options.strict && (
|
|
S.token.nlb || is("eof") || is("punc", "}")
|
|
);
|
|
};
|
|
|
|
function semicolon(optional) {
|
|
if (is("punc", ";")) next();
|
|
else if (!optional && !can_insert_semicolon()) unexpected();
|
|
};
|
|
|
|
function parenthesised() {
|
|
expect("(");
|
|
var exp = expression(true);
|
|
expect(")");
|
|
return exp;
|
|
};
|
|
|
|
function embed_tokens(parser) {
|
|
return function() {
|
|
var start = S.token;
|
|
var expr = parser();
|
|
var end = prev();
|
|
expr.start = start;
|
|
expr.end = end;
|
|
return expr;
|
|
};
|
|
};
|
|
|
|
function handle_regexp() {
|
|
if (is("operator", "/") || is("operator", "/=")) {
|
|
S.peeked = null;
|
|
S.token = S.input(S.token.value.substr(1)); // force regexp
|
|
}
|
|
};
|
|
|
|
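    // Main statement dispatcher, wrapped in embed_tokens() so that every
    // node gets start/end tokens.  While S.in_directives is true, string
    // statements that qualify as directives ("use strict", ...) are
    // registered with the tokenizer via add_directive().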
var statement = embed_tokens(function() {
|
|
handle_regexp();
|
|
switch (S.token.type) {
|
|
case "string":
|
|
if (S.in_directives) {
|
|
var token = peek();
|
|
if (S.token.raw.indexOf("\\") == -1
|
|
&& (token.nlb
|
|
|| is_token(token, "eof")
|
|
|| is_token(token, "punc", ";")
|
|
|| is_token(token, "punc", "}"))) {
|
|
S.input.add_directive(S.token.value);
|
|
} else {
|
|
S.in_directives = false;
|
|
}
|
|
}
|
|
var dir = S.in_directives, stat = simple_statement();
|
|
return dir ? new AST_Directive(stat.body) : stat;
|
|
case "num":
|
|
case "regexp":
|
|
case "operator":
|
|
case "atom":
|
|
return simple_statement();
|
|
|
|
case "name":
|
|
return is_token(peek(), "punc", ":")
|
|
? labeled_statement()
|
|
: simple_statement();
|
|
|
|
case "punc":
|
|
switch (S.token.value) {
|
|
case "{":
|
|
return new AST_BlockStatement({
|
|
start : S.token,
|
|
body : block_(),
|
|
end : prev()
|
|
});
|
|
case "[":
|
|
case "(":
|
|
return simple_statement();
|
|
case ";":
|
|
S.in_directives = false;
|
|
next();
|
|
return new AST_EmptyStatement();
|
|
default:
|
|
unexpected();
|
|
}
|
|
|
|
case "keyword":
|
|
switch (S.token.value) {
|
|
case "break":
|
|
next();
|
|
return break_cont(AST_Break);
|
|
|
|
case "continue":
|
|
next();
|
|
return break_cont(AST_Continue);
|
|
|
|
case "debugger":
|
|
next();
|
|
semicolon();
|
|
return new AST_Debugger();
|
|
|
|
case "do":
|
|
next();
|
|
var body = in_loop(statement);
|
|
expect_token("keyword", "while");
|
|
var condition = parenthesised();
|
|
semicolon(true);
|
|
return new AST_Do({
|
|
body : body,
|
|
condition : condition
|
|
});
|
|
|
|
case "while":
|
|
next();
|
|
return new AST_While({
|
|
condition : parenthesised(),
|
|
body : in_loop(statement)
|
|
});
|
|
|
|
case "for":
|
|
next();
|
|
return for_();
|
|
|
|
case "function":
|
|
next();
|
|
return function_(AST_Defun);
|
|
|
|
case "if":
|
|
next();
|
|
return if_();
|
|
|
|
case "return":
|
|
if (S.in_function == 0 && !options.bare_returns)
|
|
croak("'return' outside of function");
|
|
next();
|
|
var value = null;
|
|
if (is("punc", ";")) {
|
|
next();
|
|
} else if (!can_insert_semicolon()) {
|
|
value = expression(true);
|
|
semicolon();
|
|
}
|
|
return new AST_Return({
|
|
value: value
|
|
});
|
|
|
|
case "switch":
|
|
next();
|
|
return new AST_Switch({
|
|
expression : parenthesised(),
|
|
body : in_loop(switch_body_)
|
|
});
|
|
|
|
case "throw":
|
|
next();
|
|
if (S.token.nlb)
|
|
croak("Illegal newline after 'throw'");
|
|
var value = expression(true);
|
|
semicolon();
|
|
return new AST_Throw({
|
|
value: value
|
|
});
|
|
|
|
case "try":
|
|
next();
|
|
return try_();
|
|
|
|
case "var":
|
|
next();
|
|
var node = var_();
|
|
semicolon();
|
|
return node;
|
|
|
|
case "const":
|
|
next();
|
|
var node = const_();
|
|
semicolon();
|
|
return node;
|
|
|
|
case "with":
|
|
if (S.input.has_directive("use strict")) {
|
|
croak("Strict mode may not include a with statement");
|
|
}
|
|
next();
|
|
return new AST_With({
|
|
expression : parenthesised(),
|
|
body : statement()
|
|
});
|
|
}
|
|
}
|
|
unexpected();
|
|
});
|
|
|
|
function labeled_statement() {
|
|
var label = as_symbol(AST_Label);
|
|
if (find_if(function(l){ return l.name == label.name }, S.labels)) {
|
|
// ECMA-262, 12.12: An ECMAScript program is considered
|
|
// syntactically incorrect if it contains a
|
|
// LabelledStatement that is enclosed by a
|
|
// LabelledStatement with the same Identifier as label.
|
|
croak("Label " + label.name + " defined twice");
|
|
}
|
|
expect(":");
|
|
S.labels.push(label);
|
|
var stat = statement();
|
|
S.labels.pop();
|
|
if (!(stat instanceof AST_IterationStatement)) {
|
|
// check for `continue` that refers to this label.
|
|
// those should be reported as syntax errors.
|
|
// https://github.com/mishoo/UglifyJS2/issues/287
|
|
label.references.forEach(function(ref){
|
|
if (ref instanceof AST_Continue) {
|
|
ref = ref.label.start;
|
|
croak("Continue label `" + label.name + "` refers to non-IterationStatement.",
|
|
ref.line, ref.col, ref.pos);
|
|
}
|
|
});
|
|
}
|
|
return new AST_LabeledStatement({ body: stat, label: label });
|
|
};
|
|
|
|
function simple_statement(tmp) {
|
|
return new AST_SimpleStatement({ body: (tmp = expression(true), semicolon(), tmp) });
|
|
};
|
|
|
|
function break_cont(type) {
|
|
var label = null, ldef;
|
|
if (!can_insert_semicolon()) {
|
|
label = as_symbol(AST_LabelRef, true);
|
|
}
|
|
if (label != null) {
|
|
ldef = find_if(function(l){ return l.name == label.name }, S.labels);
|
|
if (!ldef)
|
|
croak("Undefined label " + label.name);
|
|
label.thedef = ldef;
|
|
}
|
|
else if (S.in_loop == 0)
|
|
croak(type.TYPE + " not inside a loop or switch");
|
|
semicolon();
|
|
var stat = new type({ label: label });
|
|
if (ldef) ldef.references.push(stat);
|
|
return stat;
|
|
};
|
|
|
|
function for_() {
|
|
expect("(");
|
|
var init = null;
|
|
if (!is("punc", ";")) {
|
|
init = is("keyword", "var")
|
|
? (next(), var_(true))
|
|
: expression(true, true);
|
|
if (is("operator", "in")) {
|
|
if (init instanceof AST_Var && init.definitions.length > 1)
|
|
croak("Only one variable declaration allowed in for..in loop");
|
|
next();
|
|
return for_in(init);
|
|
}
|
|
}
|
|
return regular_for(init);
|
|
};
|
|
|
|
function regular_for(init) {
|
|
expect(";");
|
|
var test = is("punc", ";") ? null : expression(true);
|
|
expect(";");
|
|
var step = is("punc", ")") ? null : expression(true);
|
|
expect(")");
|
|
return new AST_For({
|
|
init : init,
|
|
condition : test,
|
|
step : step,
|
|
body : in_loop(statement)
|
|
});
|
|
};
|
|
|
|
function for_in(init) {
|
|
var lhs = init instanceof AST_Var ? init.definitions[0].name : null;
|
|
var obj = expression(true);
|
|
expect(")");
|
|
return new AST_ForIn({
|
|
init : init,
|
|
name : lhs,
|
|
object : obj,
|
|
body : in_loop(statement)
|
|
});
|
|
};
|
|
|
|
var function_ = function(ctor) {
|
|
var in_statement = ctor === AST_Defun;
|
|
var name = is("name") ? as_symbol(in_statement ? AST_SymbolDefun : AST_SymbolLambda) : null;
|
|
if (in_statement && !name)
|
|
unexpected();
|
|
expect("(");
|
|
return new ctor({
|
|
name: name,
|
|
argnames: (function(first, a){
|
|
while (!is("punc", ")")) {
|
|
if (first) first = false; else expect(",");
|
|
a.push(as_symbol(AST_SymbolFunarg));
|
|
}
|
|
next();
|
|
return a;
|
|
})(true, []),
|
|
body: (function(loop, labels){
|
|
++S.in_function;
|
|
S.in_directives = true;
|
|
S.input.push_directives_stack();
|
|
S.in_loop = 0;
|
|
S.labels = [];
|
|
var a = block_();
|
|
S.input.pop_directives_stack();
|
|
--S.in_function;
|
|
S.in_loop = loop;
|
|
S.labels = labels;
|
|
return a;
|
|
})(S.in_loop, S.labels)
|
|
});
|
|
};
|
|
|
|
function if_() {
|
|
var cond = parenthesised(), body = statement(), belse = null;
|
|
if (is("keyword", "else")) {
|
|
next();
|
|
belse = statement();
|
|
}
|
|
return new AST_If({
|
|
condition : cond,
|
|
body : body,
|
|
alternative : belse
|
|
});
|
|
};
|
|
|
|
function block_() {
|
|
expect("{");
|
|
var a = [];
|
|
while (!is("punc", "}")) {
|
|
if (is("eof")) unexpected();
|
|
a.push(statement());
|
|
}
|
|
next();
|
|
return a;
|
|
};
|
|
|
|
function switch_body_() {
|
|
expect("{");
|
|
var a = [], cur = null, branch = null, tmp;
|
|
while (!is("punc", "}")) {
|
|
if (is("eof")) unexpected();
|
|
if (is("keyword", "case")) {
|
|
if (branch) branch.end = prev();
|
|
cur = [];
|
|
branch = new AST_Case({
|
|
start : (tmp = S.token, next(), tmp),
|
|
expression : expression(true),
|
|
body : cur
|
|
});
|
|
a.push(branch);
|
|
expect(":");
|
|
}
|
|
else if (is("keyword", "default")) {
|
|
if (branch) branch.end = prev();
|
|
cur = [];
|
|
branch = new AST_Default({
|
|
start : (tmp = S.token, next(), expect(":"), tmp),
|
|
body : cur
|
|
});
|
|
a.push(branch);
|
|
}
|
|
else {
|
|
if (!cur) unexpected();
|
|
cur.push(statement());
|
|
}
|
|
}
|
|
if (branch) branch.end = prev();
|
|
next();
|
|
return a;
|
|
};
|
|
|
|
function try_() {
|
|
var body = block_(), bcatch = null, bfinally = null;
|
|
if (is("keyword", "catch")) {
|
|
var start = S.token;
|
|
next();
|
|
expect("(");
|
|
var name = as_symbol(AST_SymbolCatch);
|
|
expect(")");
|
|
bcatch = new AST_Catch({
|
|
start : start,
|
|
argname : name,
|
|
body : block_(),
|
|
end : prev()
|
|
});
|
|
}
|
|
if (is("keyword", "finally")) {
|
|
var start = S.token;
|
|
next();
|
|
bfinally = new AST_Finally({
|
|
start : start,
|
|
body : block_(),
|
|
end : prev()
|
|
});
|
|
}
|
|
if (!bcatch && !bfinally)
|
|
croak("Missing catch/finally blocks");
|
|
return new AST_Try({
|
|
body : body,
|
|
bcatch : bcatch,
|
|
bfinally : bfinally
|
|
});
|
|
};
|
|
|
|
function vardefs(no_in, in_const) {
|
|
var a = [];
|
|
for (;;) {
|
|
a.push(new AST_VarDef({
|
|
start : S.token,
|
|
name : as_symbol(in_const ? AST_SymbolConst : AST_SymbolVar),
|
|
value : is("operator", "=") ? (next(), expression(false, no_in)) : null,
|
|
end : prev()
|
|
}));
|
|
if (!is("punc", ","))
|
|
break;
|
|
next();
|
|
}
|
|
return a;
|
|
};
|
|
|
|
var var_ = function(no_in) {
|
|
return new AST_Var({
|
|
start : prev(),
|
|
definitions : vardefs(no_in, false),
|
|
end : prev()
|
|
});
|
|
};
|
|
|
|
var const_ = function() {
|
|
return new AST_Const({
|
|
start : prev(),
|
|
definitions : vardefs(false, true),
|
|
end : prev()
|
|
});
|
|
};
|
|
|
|
var new_ = function(allow_calls) {
|
|
var start = S.token;
|
|
expect_token("operator", "new");
|
|
var newexp = expr_atom(false), args;
|
|
if (is("punc", "(")) {
|
|
next();
|
|
args = expr_list(")");
|
|
} else {
|
|
args = [];
|
|
}
|
|
return subscripts(new AST_New({
|
|
start : start,
|
|
expression : newexp,
|
|
args : args,
|
|
end : prev()
|
|
}), allow_calls);
|
|
};
|
|
|
|
function as_atom_node() {
|
|
var tok = S.token, ret;
|
|
switch (tok.type) {
|
|
case "name":
|
|
case "keyword":
|
|
ret = _make_symbol(AST_SymbolRef);
|
|
break;
|
|
case "num":
|
|
ret = new AST_Number({ start: tok, end: tok, value: tok.value });
|
|
break;
|
|
case "string":
|
|
ret = new AST_String({
|
|
start : tok,
|
|
end : tok,
|
|
value : tok.value,
|
|
quote : tok.quote
|
|
});
|
|
break;
|
|
case "regexp":
|
|
ret = new AST_RegExp({ start: tok, end: tok, value: tok.value });
|
|
break;
|
|
case "atom":
|
|
switch (tok.value) {
|
|
case "false":
|
|
ret = new AST_False({ start: tok, end: tok });
|
|
break;
|
|
case "true":
|
|
ret = new AST_True({ start: tok, end: tok });
|
|
break;
|
|
case "null":
|
|
ret = new AST_Null({ start: tok, end: tok });
|
|
break;
|
|
}
|
|
break;
|
|
case "operator":
|
|
if (!is_identifier_string(tok.value)) {
|
|
croak("Invalid getter/setter name: " + tok.value,
|
|
tok.line, tok.col, tok.pos);
|
|
}
|
|
ret = _make_symbol(AST_SymbolRef);
|
|
break;
|
|
}
|
|
next();
|
|
return ret;
|
|
};
|
|
|
|
var expr_atom = function(allow_calls) {
|
|
if (is("operator", "new")) {
|
|
return new_(allow_calls);
|
|
}
|
|
var start = S.token;
|
|
if (is("punc")) {
|
|
switch (start.value) {
|
|
case "(":
|
|
next();
|
|
var ex = expression(true);
|
|
ex.start = start;
|
|
ex.end = S.token;
|
|
expect(")");
|
|
return subscripts(ex, allow_calls);
|
|
case "[":
|
|
return subscripts(array_(), allow_calls);
|
|
case "{":
|
|
return subscripts(object_(), allow_calls);
|
|
}
|
|
unexpected();
|
|
}
|
|
if (is("keyword", "function")) {
|
|
next();
|
|
var func = function_(AST_Function);
|
|
func.start = start;
|
|
func.end = prev();
|
|
return subscripts(func, allow_calls);
|
|
}
|
|
if (ATOMIC_START_TOKEN[S.token.type]) {
|
|
return subscripts(as_atom_node(), allow_calls);
|
|
}
|
|
unexpected();
|
|
};
|
|
|
|
function expr_list(closing, allow_trailing_comma, allow_empty) {
|
|
var first = true, a = [];
|
|
while (!is("punc", closing)) {
|
|
if (first) first = false; else expect(",");
|
|
if (allow_trailing_comma && is("punc", closing)) break;
|
|
if (is("punc", ",") && allow_empty) {
|
|
a.push(new AST_Hole({ start: S.token, end: S.token }));
|
|
} else {
|
|
a.push(expression(false));
|
|
}
|
|
}
|
|
next();
|
|
return a;
|
|
};
|
|
|
|
var array_ = embed_tokens(function() {
|
|
expect("[");
|
|
return new AST_Array({
|
|
elements: expr_list("]", !options.strict, true)
|
|
});
|
|
});
|
|
|
|
var create_accessor = embed_tokens(function() {
|
|
return function_(AST_Accessor);
|
|
});
|
|
|
|
var object_ = embed_tokens(function() {
|
|
expect("{");
|
|
var first = true, a = [];
|
|
while (!is("punc", "}")) {
|
|
if (first) first = false; else expect(",");
|
|
if (!options.strict && is("punc", "}"))
|
|
// allow trailing comma
|
|
break;
|
|
var start = S.token;
|
|
var type = start.type;
|
|
var name = as_property_name();
|
|
if (type == "name" && !is("punc", ":")) {
|
|
var key = new AST_SymbolAccessor({
|
|
start: S.token,
|
|
name: as_property_name(),
|
|
end: prev()
|
|
});
|
|
if (name == "get") {
|
|
a.push(new AST_ObjectGetter({
|
|
start : start,
|
|
key : key,
|
|
value : create_accessor(),
|
|
end : prev()
|
|
}));
|
|
continue;
|
|
}
|
|
if (name == "set") {
|
|
a.push(new AST_ObjectSetter({
|
|
start : start,
|
|
key : key,
|
|
value : create_accessor(),
|
|
end : prev()
|
|
}));
|
|
continue;
|
|
}
|
|
}
|
|
expect(":");
|
|
a.push(new AST_ObjectKeyVal({
|
|
start : start,
|
|
quote : start.quote,
|
|
key : name,
|
|
value : expression(false),
|
|
end : prev()
|
|
}));
|
|
}
|
|
next();
|
|
return new AST_Object({ properties: a });
|
|
});
|
|
|
|
function as_property_name() {
|
|
var tmp = S.token;
|
|
switch (tmp.type) {
|
|
case "operator":
|
|
if (!KEYWORDS(tmp.value)) unexpected();
|
|
case "num":
|
|
case "string":
|
|
case "name":
|
|
case "keyword":
|
|
case "atom":
|
|
next();
|
|
return tmp.value;
|
|
default:
|
|
unexpected();
|
|
}
|
|
};
|
|
|
|
function as_name() {
|
|
var tmp = S.token;
|
|
if (tmp.type != "name") unexpected();
|
|
next();
|
|
return tmp.value;
|
|
};
|
|
|
|
function _make_symbol(type) {
|
|
var name = S.token.value;
|
|
return new (name == "this" ? AST_This : type)({
|
|
name : String(name),
|
|
start : S.token,
|
|
end : S.token
|
|
});
|
|
};
|
|
|
|
function as_symbol(type, noerror) {
|
|
if (!is("name")) {
|
|
if (!noerror) croak("Name expected");
|
|
return null;
|
|
}
|
|
var sym = _make_symbol(type);
|
|
next();
|
|
return sym;
|
|
};
|
|
|
|
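    // Repeatedly extend an atom with property access (`.name`, `[expr]`)
    // and, when allow_calls is set, call expressions, so that chained member
    // and call expressions nest left to right.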
var subscripts = function(expr, allow_calls) {
|
|
var start = expr.start;
|
|
if (is("punc", ".")) {
|
|
next();
|
|
return subscripts(new AST_Dot({
|
|
start : start,
|
|
expression : expr,
|
|
property : as_name(),
|
|
end : prev()
|
|
}), allow_calls);
|
|
}
|
|
if (is("punc", "[")) {
|
|
next();
|
|
var prop = expression(true);
|
|
expect("]");
|
|
return subscripts(new AST_Sub({
|
|
start : start,
|
|
expression : expr,
|
|
property : prop,
|
|
end : prev()
|
|
}), allow_calls);
|
|
}
|
|
if (allow_calls && is("punc", "(")) {
|
|
next();
|
|
return subscripts(new AST_Call({
|
|
start : start,
|
|
expression : expr,
|
|
args : expr_list(")"),
|
|
end : prev()
|
|
}), true);
|
|
}
|
|
return expr;
|
|
};
|
|
|
|
var maybe_unary = function(allow_calls) {
|
|
var start = S.token;
|
|
if (is("operator") && UNARY_PREFIX(start.value)) {
|
|
next();
|
|
handle_regexp();
|
|
var ex = make_unary(AST_UnaryPrefix, start, maybe_unary(allow_calls));
|
|
ex.start = start;
|
|
ex.end = prev();
|
|
return ex;
|
|
}
|
|
var val = expr_atom(allow_calls);
|
|
while (is("operator") && UNARY_POSTFIX(S.token.value) && !S.token.nlb) {
|
|
val = make_unary(AST_UnaryPostfix, S.token, val);
|
|
val.start = start;
|
|
val.end = S.token;
|
|
next();
|
|
}
|
|
return val;
|
|
};
|
|
|
|
function make_unary(ctor, token, expr) {
|
|
var op = token.value;
|
|
if ((op == "++" || op == "--") && !is_assignable(expr))
|
|
croak("Invalid use of " + op + " operator", token.line, token.col, token.pos);
|
|
return new ctor({ operator: op, expression: expr });
|
|
};
|
|
|
|
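    // Precedence climbing: `left` is the expression parsed so far and
    // min_prec the binding power of the operator above it.  We keep
    // consuming operators whose PRECEDENCE is strictly higher than min_prec,
    // parsing each right-hand side recursively at the new operator's level.
    // For example, in `a + b * c` the "+" (level 9) is consumed at level 0
    // and the "*" (level 10) then groups `b * c` first, giving a + (b * c);
    // equal levels do not recurse, which keeps binary operators
    // left-associative.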
    var expr_op = function(left, min_prec, no_in) {
        var op = is("operator") ? S.token.value : null;
        if (op == "in" && no_in) op = null;
        var prec = op != null ? PRECEDENCE[op] : null;
        if (prec != null && prec > min_prec) {
            next();
            var right = expr_op(maybe_unary(true), prec, no_in);
            return expr_op(new AST_Binary({
                start    : left.start,
                left     : left,
                operator : op,
                right    : right,
                end      : right.end
            }), min_prec, no_in);
        }
        return left;
    };

    function expr_ops(no_in) {
        return expr_op(maybe_unary(true), 0, no_in);
    };
|
|
|
|
var maybe_conditional = function(no_in) {
|
|
var start = S.token;
|
|
var expr = expr_ops(no_in);
|
|
if (is("operator", "?")) {
|
|
next();
|
|
var yes = expression(false);
|
|
expect(":");
|
|
return new AST_Conditional({
|
|
start : start,
|
|
condition : expr,
|
|
consequent : yes,
|
|
alternative : expression(false, no_in),
|
|
end : prev()
|
|
});
|
|
}
|
|
return expr;
|
|
};
|
|
|
|
function is_assignable(expr) {
|
|
if (options.cli) return true;
|
|
return expr instanceof AST_PropAccess || expr instanceof AST_SymbolRef;
|
|
};
|
|
|
|
var maybe_assign = function(no_in) {
|
|
var start = S.token;
|
|
var left = maybe_conditional(no_in), val = S.token.value;
|
|
if (is("operator") && ASSIGNMENT(val)) {
|
|
if (is_assignable(left)) {
|
|
next();
|
|
return new AST_Assign({
|
|
start : start,
|
|
left : left,
|
|
operator : val,
|
|
right : maybe_assign(no_in),
|
|
end : prev()
|
|
});
|
|
}
|
|
croak("Invalid assignment");
|
|
}
|
|
return left;
|
|
};
|
|
|
|
var expression = function(commas, no_in) {
|
|
var start = S.token;
|
|
var expr = maybe_assign(no_in);
|
|
if (commas && is("punc", ",")) {
|
|
next();
|
|
return new AST_Seq({
|
|
start : start,
|
|
car : expr,
|
|
cdr : expression(true, no_in),
|
|
end : peek()
|
|
});
|
|
}
|
|
return expr;
|
|
};
|
|
|
|
function in_loop(cont) {
|
|
++S.in_loop;
|
|
var ret = cont();
|
|
--S.in_loop;
|
|
return ret;
|
|
};
|
|
|
|
if (options.expression) {
|
|
return expression(true);
|
|
}
|
|
|
|
return (function(){
|
|
var start = S.token;
|
|
var body = [];
|
|
S.input.push_directives_stack();
|
|
while (!is("eof"))
|
|
body.push(statement());
|
|
S.input.pop_directives_stack();
|
|
var end = prev();
|
|
var toplevel = options.toplevel;
|
|
if (toplevel) {
|
|
toplevel.body = toplevel.body.concat(body);
|
|
toplevel.end = end;
|
|
} else {
|
|
toplevel = new AST_Toplevel({ start: start, body: body, end: end });
|
|
}
|
|
return toplevel;
|
|
})();
|
|
|
|
};
|
|
|
|
/***********************************************************************
|
|
|
|
A JavaScript tokenizer / parser / beautifier / compressor.
|
|
https://github.com/mishoo/UglifyJS2
|
|
|
|
-------------------------------- (C) ---------------------------------
|
|
|
|
Author: Mihai Bazon
|
|
<mihai.bazon@gmail.com>
|
|
http://mihai.bazon.net/blog
|
|
|
|
Distributed under the BSD license:
|
|
|
|
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
|
|
|
Redistribution and use in source and binary forms, with or without
|
|
modification, are permitted provided that the following conditions
|
|
are met:
|
|
|
|
* Redistributions of source code must retain the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer.
|
|
|
|
* Redistributions in binary form must reproduce the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer in the documentation and/or other materials
|
|
provided with the distribution.
|
|
|
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
|
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
|
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
|
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
|
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
|
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
|
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
|
SUCH DAMAGE.
|
|
|
|
***********************************************************************/
|
|
|
|
"use strict";
|
|
|
|
// Tree transformer helpers.
|
|
|
|
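// A TreeTransformer walks the AST like a TreeWalker, but its before/after
// hooks may return a replacement node: if before() returns a value, the
// children are not descended and that value is used in place of the node.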
function TreeTransformer(before, after) {
|
|
TreeWalker.call(this);
|
|
this.before = before;
|
|
this.after = after;
|
|
}
|
|
TreeTransformer.prototype = new TreeWalker;
|
|
|
|
(function(undefined){
|
|
|
|
function _(node, descend) {
|
|
node.DEFMETHOD("transform", function(tw, in_list){
|
|
var x, y;
|
|
tw.push(this);
|
|
if (tw.before) x = tw.before(this, descend, in_list);
|
|
if (x === undefined) {
|
|
if (!tw.after) {
|
|
x = this;
|
|
descend(x, tw);
|
|
} else {
|
|
tw.stack[tw.stack.length - 1] = x = this;
|
|
descend(x, tw);
|
|
y = tw.after(x, in_list);
|
|
if (y !== undefined) x = y;
|
|
}
|
|
}
|
|
tw.pop(this);
|
|
return x;
|
|
});
|
|
};
|
|
|
|
function do_list(list, tw) {
|
|
return MAP(list, function(node){
|
|
return node.transform(tw, true);
|
|
});
|
|
};
|
|
|
|
_(AST_Node, noop);
|
|
|
|
_(AST_LabeledStatement, function(self, tw){
|
|
self.label = self.label.transform(tw);
|
|
self.body = self.body.transform(tw);
|
|
});
|
|
|
|
_(AST_SimpleStatement, function(self, tw){
|
|
self.body = self.body.transform(tw);
|
|
});
|
|
|
|
_(AST_Block, function(self, tw){
|
|
self.body = do_list(self.body, tw);
|
|
});
|
|
|
|
_(AST_DWLoop, function(self, tw){
|
|
self.condition = self.condition.transform(tw);
|
|
self.body = self.body.transform(tw);
|
|
});
|
|
|
|
_(AST_For, function(self, tw){
|
|
if (self.init) self.init = self.init.transform(tw);
|
|
if (self.condition) self.condition = self.condition.transform(tw);
|
|
if (self.step) self.step = self.step.transform(tw);
|
|
self.body = self.body.transform(tw);
|
|
});
|
|
|
|
_(AST_ForIn, function(self, tw){
|
|
self.init = self.init.transform(tw);
|
|
self.object = self.object.transform(tw);
|
|
self.body = self.body.transform(tw);
|
|
});
|
|
|
|
_(AST_With, function(self, tw){
|
|
self.expression = self.expression.transform(tw);
|
|
self.body = self.body.transform(tw);
|
|
});
|
|
|
|
_(AST_Exit, function(self, tw){
|
|
if (self.value) self.value = self.value.transform(tw);
|
|
});
|
|
|
|
_(AST_LoopControl, function(self, tw){
|
|
if (self.label) self.label = self.label.transform(tw);
|
|
});
|
|
|
|
_(AST_If, function(self, tw){
|
|
self.condition = self.condition.transform(tw);
|
|
self.body = self.body.transform(tw);
|
|
if (self.alternative) self.alternative = self.alternative.transform(tw);
|
|
});
|
|
|
|
_(AST_Switch, function(self, tw){
|
|
self.expression = self.expression.transform(tw);
|
|
self.body = do_list(self.body, tw);
|
|
});
|
|
|
|
_(AST_Case, function(self, tw){
|
|
self.expression = self.expression.transform(tw);
|
|
self.body = do_list(self.body, tw);
|
|
});
|
|
|
|
_(AST_Try, function(self, tw){
|
|
self.body = do_list(self.body, tw);
|
|
if (self.bcatch) self.bcatch = self.bcatch.transform(tw);
|
|
if (self.bfinally) self.bfinally = self.bfinally.transform(tw);
|
|
});
|
|
|
|
_(AST_Catch, function(self, tw){
|
|
self.argname = self.argname.transform(tw);
|
|
self.body = do_list(self.body, tw);
|
|
});
|
|
|
|
_(AST_Definitions, function(self, tw){
|
|
self.definitions = do_list(self.definitions, tw);
|
|
});
|
|
|
|
_(AST_VarDef, function(self, tw){
|
|
self.name = self.name.transform(tw);
|
|
if (self.value) self.value = self.value.transform(tw);
|
|
});
|
|
|
|
_(AST_Lambda, function(self, tw){
|
|
if (self.name) self.name = self.name.transform(tw);
|
|
self.argnames = do_list(self.argnames, tw);
|
|
self.body = do_list(self.body, tw);
|
|
});
|
|
|
|
_(AST_Call, function(self, tw){
|
|
self.expression = self.expression.transform(tw);
|
|
self.args = do_list(self.args, tw);
|
|
});
|
|
|
|
_(AST_Seq, function(self, tw){
|
|
self.car = self.car.transform(tw);
|
|
self.cdr = self.cdr.transform(tw);
|
|
});
|
|
|
|
_(AST_Dot, function(self, tw){
|
|
self.expression = self.expression.transform(tw);
|
|
});
|
|
|
|
_(AST_Sub, function(self, tw){
|
|
self.expression = self.expression.transform(tw);
|
|
self.property = self.property.transform(tw);
|
|
});
|
|
|
|
_(AST_Unary, function(self, tw){
|
|
self.expression = self.expression.transform(tw);
|
|
});
|
|
|
|
_(AST_Binary, function(self, tw){
|
|
self.left = self.left.transform(tw);
|
|
self.right = self.right.transform(tw);
|
|
});
|
|
|
|
_(AST_Conditional, function(self, tw){
|
|
self.condition = self.condition.transform(tw);
|
|
self.consequent = self.consequent.transform(tw);
|
|
self.alternative = self.alternative.transform(tw);
|
|
});
|
|
|
|
_(AST_Array, function(self, tw){
|
|
self.elements = do_list(self.elements, tw);
|
|
});
|
|
|
|
_(AST_Object, function(self, tw){
|
|
self.properties = do_list(self.properties, tw);
|
|
});
|
|
|
|
_(AST_ObjectProperty, function(self, tw){
|
|
self.value = self.value.transform(tw);
|
|
});
|
|
|
|
})();
|
|
|
|
/***********************************************************************
|
|
|
|
A JavaScript tokenizer / parser / beautifier / compressor.
|
|
https://github.com/mishoo/UglifyJS2
|
|
|
|
-------------------------------- (C) ---------------------------------
|
|
|
|
Author: Mihai Bazon
|
|
<mihai.bazon@gmail.com>
|
|
http://mihai.bazon.net/blog
|
|
|
|
Distributed under the BSD license:
|
|
|
|
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
|
|
|
Redistribution and use in source and binary forms, with or without
|
|
modification, are permitted provided that the following conditions
|
|
are met:
|
|
|
|
* Redistributions of source code must retain the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer.
|
|
|
|
* Redistributions in binary form must reproduce the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer in the documentation and/or other materials
|
|
provided with the distribution.
|
|
|
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
|
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
|
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
|
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
|
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
|
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
|
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
|
SUCH DAMAGE.
|
|
|
|
***********************************************************************/
|
|
|
|
"use strict";
|
|
|
|
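// A SymbolDef records everything known about one declared name: the scope
// that owns it, every AST_Symbol that declares it (orig) or reads it
// (references), whether it is global or undeclared, and the mangled name
// once mangle() has been run.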
function SymbolDef(scope, index, orig) {
    this.name = orig.name;
    this.orig = [ orig ];
    this.scope = scope;
    this.references = [];
    this.global = false;
    this.mangled_name = null;
    this.undeclared = false;
    this.index = index;
    this.id = SymbolDef.next_id++;
};

SymbolDef.next_id = 1;
|
|
|
|
SymbolDef.prototype = {
|
|
unmangleable: function(options) {
|
|
if (!options) options = {};
|
|
|
|
return (this.global && !options.toplevel)
|
|
|| this.undeclared
|
|
|| (!options.eval && (this.scope.uses_eval || this.scope.uses_with))
|
|
|| (options.keep_fnames
|
|
&& (this.orig[0] instanceof AST_SymbolLambda
|
|
|| this.orig[0] instanceof AST_SymbolDefun));
|
|
},
|
|
mangle: function(options) {
|
|
var cache = options.cache && options.cache.props;
|
|
if (this.global && cache && cache.has(this.name)) {
|
|
this.mangled_name = cache.get(this.name);
|
|
}
|
|
else if (!this.mangled_name && !this.unmangleable(options)) {
|
|
var s = this.scope;
|
|
var sym = this.orig[0];
|
|
if (!options.screw_ie8 && sym instanceof AST_SymbolLambda)
|
|
s = s.parent_scope;
|
|
var def;
|
|
if (this.defun && (def = this.defun.variables.get(this.name))) {
|
|
this.mangled_name = def.mangled_name || def.name;
|
|
} else
|
|
this.mangled_name = s.next_mangled(options, this);
|
|
if (this.global && cache) {
|
|
cache.set(this.name, this.mangled_name);
|
|
}
|
|
}
|
|
}
|
|
};
|
|
|
|
AST_Toplevel.DEFMETHOD("figure_out_scope", function(options){
|
|
options = defaults(options, {
|
|
cache: null,
|
|
screw_ie8: true,
|
|
});
|
|
|
|
// pass 1: setup scope chaining and handle definitions
|
|
var self = this;
|
|
var scope = self.parent_scope = null;
|
|
var labels = new Dictionary();
|
|
var defun = null;
|
|
var tw = new TreeWalker(function(node, descend){
|
|
if (node instanceof AST_Catch) {
|
|
var save_scope = scope;
|
|
scope = new AST_Scope(node);
|
|
scope.init_scope_vars(save_scope);
|
|
descend();
|
|
scope = save_scope;
|
|
return true;
|
|
}
|
|
if (node instanceof AST_Scope) {
|
|
node.init_scope_vars(scope);
|
|
var save_scope = scope;
|
|
var save_defun = defun;
|
|
var save_labels = labels;
|
|
defun = scope = node;
|
|
labels = new Dictionary();
|
|
descend();
|
|
scope = save_scope;
|
|
defun = save_defun;
|
|
labels = save_labels;
|
|
return true; // don't descend again in TreeWalker
|
|
}
|
|
if (node instanceof AST_LabeledStatement) {
|
|
var l = node.label;
|
|
if (labels.has(l.name)) {
|
|
throw new Error(string_template("Label {name} defined twice", l));
|
|
}
|
|
labels.set(l.name, l);
|
|
descend();
|
|
labels.del(l.name);
|
|
return true; // no descend again
|
|
}
|
|
if (node instanceof AST_With) {
|
|
for (var s = scope; s; s = s.parent_scope)
|
|
s.uses_with = true;
|
|
return;
|
|
}
|
|
if (node instanceof AST_Symbol) {
|
|
node.scope = scope;
|
|
}
|
|
if (node instanceof AST_Label) {
|
|
node.thedef = node;
|
|
node.references = [];
|
|
}
|
|
if (node instanceof AST_SymbolLambda) {
|
|
defun.def_function(node);
|
|
}
|
|
else if (node instanceof AST_SymbolDefun) {
|
|
// Careful here, the scope where this should be defined is
|
|
// the parent scope. The reason is that we enter a new
|
|
// scope when we encounter the AST_Defun node (which is
|
|
// instanceof AST_Scope) but we get to the symbol a bit
|
|
// later.
|
|
(node.scope = defun.parent_scope).def_function(node);
|
|
}
|
|
else if (node instanceof AST_SymbolVar
|
|
|| node instanceof AST_SymbolConst) {
|
|
defun.def_variable(node);
|
|
if (defun !== scope) {
|
|
node.mark_enclosed(options);
|
|
var def = scope.find_variable(node);
|
|
if (node.thedef !== def) {
|
|
node.thedef = def;
|
|
node.reference(options);
|
|
}
|
|
}
|
|
}
|
|
else if (node instanceof AST_SymbolCatch) {
|
|
scope.def_variable(node).defun = defun;
|
|
}
|
|
else if (node instanceof AST_LabelRef) {
|
|
var sym = labels.get(node.name);
|
|
if (!sym) throw new Error(string_template("Undefined label {name} [{line},{col}]", {
|
|
name: node.name,
|
|
line: node.start.line,
|
|
col: node.start.col
|
|
}));
|
|
node.thedef = sym;
|
|
}
|
|
});
|
|
self.walk(tw);
|
|
|
|
// pass 2: find back references and eval
|
|
var func = null;
|
|
var globals = self.globals = new Dictionary();
|
|
var tw = new TreeWalker(function(node, descend){
|
|
if (node instanceof AST_Lambda) {
|
|
var prev_func = func;
|
|
func = node;
|
|
descend();
|
|
func = prev_func;
|
|
return true;
|
|
}
|
|
if (node instanceof AST_LoopControl && node.label) {
|
|
node.label.thedef.references.push(node);
|
|
return true;
|
|
}
|
|
if (node instanceof AST_SymbolRef) {
|
|
var name = node.name;
|
|
if (name == "eval" && tw.parent() instanceof AST_Call) {
|
|
for (var s = node.scope; s && !s.uses_eval; s = s.parent_scope) {
|
|
s.uses_eval = true;
|
|
}
|
|
}
|
|
var sym = node.scope.find_variable(name);
|
|
if (node.scope instanceof AST_Lambda && name == "arguments") {
|
|
node.scope.uses_arguments = true;
|
|
}
|
|
if (!sym) {
|
|
sym = self.def_global(node);
|
|
}
|
|
node.thedef = sym;
|
|
node.reference(options);
|
|
return true;
|
|
}
|
|
});
|
|
self.walk(tw);
|
|
|
|
// pass 3: fix up any scoping issue with IE8
|
|
if (!options.screw_ie8) {
|
|
self.walk(new TreeWalker(function(node, descend) {
|
|
if (node instanceof AST_SymbolCatch) {
|
|
var name = node.name;
|
|
var refs = node.thedef.references;
|
|
var scope = node.thedef.defun;
|
|
var def = scope.find_variable(name) || self.globals.get(name) || scope.def_variable(node);
|
|
refs.forEach(function(ref) {
|
|
ref.thedef = def;
|
|
ref.reference(options);
|
|
});
|
|
node.thedef = def;
|
|
return true;
|
|
}
|
|
}));
|
|
}
|
|
|
|
if (options.cache) {
|
|
this.cname = options.cache.cname;
|
|
}
|
|
});
|
|
|
|
AST_Toplevel.DEFMETHOD("def_global", function(node){
|
|
var globals = this.globals, name = node.name;
|
|
if (globals.has(name)) {
|
|
return globals.get(name);
|
|
} else {
|
|
var g = new SymbolDef(this, globals.size(), node);
|
|
g.undeclared = true;
|
|
g.global = true;
|
|
globals.set(name, g);
|
|
return g;
|
|
}
|
|
});
|
|
|
|
AST_Scope.DEFMETHOD("init_scope_vars", function(parent_scope){
|
|
this.variables = new Dictionary(); // map name to AST_SymbolVar (variables defined in this scope; includes functions)
|
|
this.functions = new Dictionary(); // map name to AST_SymbolDefun (functions defined in this scope)
|
|
this.uses_with = false; // will be set to true if this or some nested scope uses the `with` statement
|
|
this.uses_eval = false; // will be set to true if this or nested scope uses the global `eval`
|
|
this.parent_scope = parent_scope; // the parent scope
|
|
this.enclosed = []; // a list of variables from this or outer scope(s) that are referenced from this or inner scopes
|
|
this.cname = -1; // the current index for mangling functions/variables
|
|
});
|
|
|
|
AST_Lambda.DEFMETHOD("init_scope_vars", function(){
|
|
AST_Scope.prototype.init_scope_vars.apply(this, arguments);
|
|
this.uses_arguments = false;
|
|
this.def_variable(new AST_SymbolVar({
|
|
name: "arguments",
|
|
start: this.start,
|
|
end: this.end
|
|
}));
|
|
});
|
|
|
|
AST_Symbol.DEFMETHOD("mark_enclosed", function(options) {
|
|
var def = this.definition();
|
|
var s = this.scope;
|
|
while (s) {
|
|
push_uniq(s.enclosed, def);
|
|
if (options.keep_fnames) {
|
|
s.functions.each(function(d) {
|
|
push_uniq(def.scope.enclosed, d);
|
|
});
|
|
}
|
|
if (s === def.scope) break;
|
|
s = s.parent_scope;
|
|
}
|
|
});
|
|
|
|
AST_Symbol.DEFMETHOD("reference", function(options) {
|
|
this.definition().references.push(this);
|
|
this.mark_enclosed(options);
|
|
});
|
|
|
|
AST_Scope.DEFMETHOD("find_variable", function(name){
|
|
if (name instanceof AST_Symbol) name = name.name;
|
|
return this.variables.get(name)
|
|
|| (this.parent_scope && this.parent_scope.find_variable(name));
|
|
});
|
|
|
|
AST_Scope.DEFMETHOD("def_function", function(symbol){
|
|
this.functions.set(symbol.name, this.def_variable(symbol));
|
|
});
|
|
|
|
AST_Scope.DEFMETHOD("def_variable", function(symbol){
|
|
var def;
|
|
if (!this.variables.has(symbol.name)) {
|
|
def = new SymbolDef(this, this.variables.size(), symbol);
|
|
this.variables.set(symbol.name, def);
|
|
def.global = !this.parent_scope;
|
|
} else {
|
|
def = this.variables.get(symbol.name);
|
|
def.orig.push(symbol);
|
|
}
|
|
return symbol.thedef = def;
|
|
});
|
|
|
|
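// Generate the next short name for this scope using base54 ("a", "b", ...),
// skipping candidates that are not valid identifiers (e.g. "do"), appear in
// options.except, or would shadow a name from an enclosing scope that is
// still referenced from here or from an inner scope.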
AST_Scope.DEFMETHOD("next_mangled", function(options){
|
|
var ext = this.enclosed;
|
|
out: while (true) {
|
|
var m = base54(++this.cname);
|
|
if (!is_identifier(m)) continue; // skip over "do"
|
|
|
|
// https://github.com/mishoo/UglifyJS2/issues/242 -- do not
|
|
// shadow a name excepted from mangling.
|
|
if (options.except.indexOf(m) >= 0) continue;
|
|
|
|
// we must ensure that the mangled name does not shadow a name
|
|
// from some parent scope that is referenced in this or in
|
|
// inner scopes.
|
|
for (var i = ext.length; --i >= 0;) {
|
|
var sym = ext[i];
|
|
var name = sym.mangled_name || (sym.unmangleable(options) && sym.name);
|
|
if (m == name) continue out;
|
|
}
|
|
return m;
|
|
}
|
|
});
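
// Illustrative sketch (added for explanation; not part of UglifyJS): the shadow-avoidance
// loop above, restated over plain data. `enclosedNames`, `except` and `state` are
// hypothetical stand-ins for this.enclosed, options.except and the scope's cname counter.
function example_next_mangled_sketch(enclosedNames, except, state) {
    while (true) {
        var candidate = base54(++state.cname);
        if (!is_identifier(candidate)) continue;              // reject reserved words such as "do"
        if (except.indexOf(candidate) >= 0) continue;         // never reuse an excepted name
        if (enclosedNames.indexOf(candidate) >= 0) continue;  // never shadow a name visible to inner scopes
        return candidate;
    }
}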
AST_Function.DEFMETHOD("next_mangled", function(options, def){
|
|
// #179, #326
|
|
// in Safari strict mode, something like (function x(x){...}) is a syntax error;
|
|
// a function expression's argument cannot shadow the function expression's name
|
|
|
|
var tricky_def = def.orig[0] instanceof AST_SymbolFunarg && this.name && this.name.definition();
|
|
|
|
// the function's mangled_name is null when keep_fnames is true
|
|
var tricky_name = tricky_def ? tricky_def.mangled_name || tricky_def.name : null;
|
|
|
|
while (true) {
|
|
var name = AST_Lambda.prototype.next_mangled.call(this, options, def);
|
|
if (!tricky_name || tricky_name != name)
|
|
return name;
|
|
}
|
|
});
|
|
|
|
AST_Symbol.DEFMETHOD("unmangleable", function(options){
|
|
return this.definition().unmangleable(options);
|
|
});
|
|
|
|
// labels are always mangleable
|
|
AST_Label.DEFMETHOD("unmangleable", function(){
|
|
return false;
|
|
});
|
|
|
|
AST_Symbol.DEFMETHOD("unreferenced", function(){
|
|
return this.definition().references.length == 0
|
|
&& !(this.scope.uses_eval || this.scope.uses_with);
|
|
});
|
|
|
|
AST_Symbol.DEFMETHOD("undeclared", function(){
|
|
return this.definition().undeclared;
|
|
});
|
|
|
|
AST_LabelRef.DEFMETHOD("undeclared", function(){
|
|
return false;
|
|
});
|
|
|
|
AST_Label.DEFMETHOD("undeclared", function(){
|
|
return false;
|
|
});
|
|
|
|
AST_Symbol.DEFMETHOD("definition", function(){
|
|
return this.thedef;
|
|
});
|
|
|
|
AST_Symbol.DEFMETHOD("global", function(){
|
|
return this.definition().global;
|
|
});

AST_Toplevel.DEFMETHOD("_default_mangler_options", function(options){
    return defaults(options, {
        eval        : false,
        except      : [],
        keep_fnames : false,
        screw_ie8   : true,
        sort        : false, // Ignored. Flag retained for backwards compatibility.
        toplevel    : false,
    });
});
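
// Example (added for illustration; not part of UglifyJS): a typical options object for the
// mangler methods below. Only the keys listed in _default_mangler_options above are
// meaningful; the sample values here are assumptions, not defaults.
function example_mangler_options() {
    return {
        except      : ["$", "require", "exports"], // identifiers that must keep their names (sample values)
        keep_fnames : false,                       // set true to preserve function names for stack traces
        toplevel    : false,                       // leave top-level (global) names untouched
        screw_ie8   : true                         // allow mangling of catch parameters (breaks IE8)
    };
}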
AST_Toplevel.DEFMETHOD("mangle_names", function(options){
|
|
options = this._default_mangler_options(options);
|
|
|
|
// Never mangle arguments
|
|
options.except.push('arguments');
|
|
|
|
// We only need to mangle declaration nodes. Special logic wired
|
|
// into the code generator will display the mangled name if it's
|
|
// present (and for AST_SymbolRef-s it'll use the mangled name of
|
|
// the AST_SymbolDeclaration that it points to).
|
|
var lname = -1;
|
|
var to_mangle = [];
|
|
|
|
if (options.cache) {
|
|
this.globals.each(function(symbol){
|
|
if (options.except.indexOf(symbol.name) < 0) {
|
|
to_mangle.push(symbol);
|
|
}
|
|
});
|
|
}
|
|
|
|
var tw = new TreeWalker(function(node, descend){
|
|
if (node instanceof AST_LabeledStatement) {
|
|
// lname is incremented when we get to the AST_Label
|
|
var save_nesting = lname;
|
|
descend();
|
|
lname = save_nesting;
|
|
return true; // don't descend again in TreeWalker
|
|
}
|
|
if (node instanceof AST_Scope) {
|
|
var p = tw.parent(), a = [];
|
|
node.variables.each(function(symbol){
|
|
if (options.except.indexOf(symbol.name) < 0) {
|
|
a.push(symbol);
|
|
}
|
|
});
|
|
to_mangle.push.apply(to_mangle, a);
|
|
return;
|
|
}
|
|
if (node instanceof AST_Label) {
|
|
var name;
|
|
do name = base54(++lname); while (!is_identifier(name));
|
|
node.mangled_name = name;
|
|
return true;
|
|
}
|
|
if (options.screw_ie8 && node instanceof AST_SymbolCatch) {
|
|
to_mangle.push(node.definition());
|
|
return;
|
|
}
|
|
});
|
|
this.walk(tw);
|
|
to_mangle.forEach(function(def){ def.mangle(options) });
|
|
|
|
if (options.cache) {
|
|
options.cache.cname = this.cname;
|
|
}
|
|
});
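
// Usage sketch (illustration only): how a caller typically drives the mangler. `toplevel_ast`
// is a hypothetical AST_Toplevel; figure_out_scope() must run first so every symbol has a
// SymbolDef, and compute_char_frequency() optionally biases base54 toward frequent characters.
function example_mangle_pipeline(toplevel_ast) {
    toplevel_ast.figure_out_scope({}); // scope options omitted for brevity
    var mangler_options = { except: [], keep_fnames: false };
    toplevel_ast.compute_char_frequency(mangler_options);
    toplevel_ast.mangle_names(mangler_options);
    return toplevel_ast;
}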
AST_Toplevel.DEFMETHOD("compute_char_frequency", function(options){
|
|
options = this._default_mangler_options(options);
|
|
var tw = new TreeWalker(function(node){
|
|
if (node instanceof AST_Constant)
|
|
base54.consider(node.print_to_string());
|
|
else if (node instanceof AST_Return)
|
|
base54.consider("return");
|
|
else if (node instanceof AST_Throw)
|
|
base54.consider("throw");
|
|
else if (node instanceof AST_Continue)
|
|
base54.consider("continue");
|
|
else if (node instanceof AST_Break)
|
|
base54.consider("break");
|
|
else if (node instanceof AST_Debugger)
|
|
base54.consider("debugger");
|
|
else if (node instanceof AST_Directive)
|
|
base54.consider(node.value);
|
|
else if (node instanceof AST_While)
|
|
base54.consider("while");
|
|
else if (node instanceof AST_Do)
|
|
base54.consider("do while");
|
|
else if (node instanceof AST_If) {
|
|
base54.consider("if");
|
|
if (node.alternative) base54.consider("else");
|
|
}
|
|
else if (node instanceof AST_Var)
|
|
base54.consider("var");
|
|
else if (node instanceof AST_Const)
|
|
base54.consider("const");
|
|
else if (node instanceof AST_Lambda)
|
|
base54.consider("function");
|
|
else if (node instanceof AST_For)
|
|
base54.consider("for");
|
|
else if (node instanceof AST_ForIn)
|
|
base54.consider("for in");
|
|
else if (node instanceof AST_Switch)
|
|
base54.consider("switch");
|
|
else if (node instanceof AST_Case)
|
|
base54.consider("case");
|
|
else if (node instanceof AST_Default)
|
|
base54.consider("default");
|
|
else if (node instanceof AST_With)
|
|
base54.consider("with");
|
|
else if (node instanceof AST_ObjectSetter)
|
|
base54.consider("set" + node.key);
|
|
else if (node instanceof AST_ObjectGetter)
|
|
base54.consider("get" + node.key);
|
|
else if (node instanceof AST_ObjectKeyVal)
|
|
base54.consider(node.key);
|
|
else if (node instanceof AST_New)
|
|
base54.consider("new");
|
|
else if (node instanceof AST_This)
|
|
base54.consider("this");
|
|
else if (node instanceof AST_Try)
|
|
base54.consider("try");
|
|
else if (node instanceof AST_Catch)
|
|
base54.consider("catch");
|
|
else if (node instanceof AST_Finally)
|
|
base54.consider("finally");
|
|
else if (node instanceof AST_Symbol && node.unmangleable(options))
|
|
base54.consider(node.name);
|
|
else if (node instanceof AST_Unary || node instanceof AST_Binary)
|
|
base54.consider(node.operator);
|
|
else if (node instanceof AST_Dot)
|
|
base54.consider(node.property);
|
|
});
|
|
this.walk(tw);
|
|
base54.sort();
|
|
});

var base54 = (function() {
    var string = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_0123456789";
    var chars, frequency;
    function reset() {
        frequency = Object.create(null);
        chars = string.split("").map(function(ch){ return ch.charCodeAt(0) });
        chars.forEach(function(ch){ frequency[ch] = 0 });
    }
    base54.consider = function(str){
        for (var i = str.length; --i >= 0;) {
            var code = str.charCodeAt(i);
            if (code in frequency) ++frequency[code];
        }
    };
    base54.sort = function() {
        chars = mergeSort(chars, function(a, b){
            if (is_digit(a) && !is_digit(b)) return 1;
            if (is_digit(b) && !is_digit(a)) return -1;
            return frequency[b] - frequency[a];
        });
    };
    base54.reset = reset;
    reset();
    base54.get = function(){ return chars };
    base54.freq = function(){ return frequency };
    function base54(num) {
        var ret = "", base = 54;
        num++;
        do {
            num--;
            ret += String.fromCharCode(chars[num % base]);
            num = Math.floor(num / base);
            base = 64;
        } while (num > 0);
        return ret;
    };
    return base54;
})();
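
// Worked example (added for illustration): with the default, pre-sort alphabet above,
//   base54(0)  -> "a"      base54(1)  -> "b"      base54(53) -> "_"      base54(54) -> "aa"
// The first character is always drawn from the leading 54 symbols (letters, "$", "_"), so a
// generated name can never start with a digit; later characters may use the full 64-symbol set.
// After compute_char_frequency() has run, base54.sort() reorders `chars` so the characters
// most frequent in the output get the cheapest (earliest) slots.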
AST_Toplevel.DEFMETHOD("scope_warnings", function(options){
|
|
options = defaults(options, {
|
|
assign_to_global : true,
|
|
eval : true,
|
|
func_arguments : true,
|
|
nested_defuns : true,
|
|
undeclared : false, // this makes a lot of noise
|
|
unreferenced : true,
|
|
});
|
|
var tw = new TreeWalker(function(node){
|
|
if (options.undeclared
|
|
&& node instanceof AST_SymbolRef
|
|
&& node.undeclared())
|
|
{
|
|
// XXX: this also warns about JS standard names,
|
|
// i.e. Object, Array, parseInt etc. Should add a list of
|
|
// exceptions.
|
|
AST_Node.warn("Undeclared symbol: {name} [{file}:{line},{col}]", {
|
|
name: node.name,
|
|
file: node.start.file,
|
|
line: node.start.line,
|
|
col: node.start.col
|
|
});
|
|
}
|
|
if (options.assign_to_global)
|
|
{
|
|
var sym = null;
|
|
if (node instanceof AST_Assign && node.left instanceof AST_SymbolRef)
|
|
sym = node.left;
|
|
else if (node instanceof AST_ForIn && node.init instanceof AST_SymbolRef)
|
|
sym = node.init;
|
|
if (sym
|
|
&& (sym.undeclared()
|
|
|| (sym.global() && sym.scope !== sym.definition().scope))) {
|
|
AST_Node.warn("{msg}: {name} [{file}:{line},{col}]", {
|
|
msg: sym.undeclared() ? "Accidental global?" : "Assignment to global",
|
|
name: sym.name,
|
|
file: sym.start.file,
|
|
line: sym.start.line,
|
|
col: sym.start.col
|
|
});
|
|
}
|
|
}
|
|
if (options.eval
|
|
&& node instanceof AST_SymbolRef
|
|
&& node.undeclared()
|
|
&& node.name == "eval") {
|
|
AST_Node.warn("Eval is used [{file}:{line},{col}]", node.start);
|
|
}
|
|
if (options.unreferenced
|
|
&& (node instanceof AST_SymbolDeclaration || node instanceof AST_Label)
|
|
&& !(node instanceof AST_SymbolCatch)
|
|
&& node.unreferenced()) {
|
|
AST_Node.warn("{type} {name} is declared but not referenced [{file}:{line},{col}]", {
|
|
type: node instanceof AST_Label ? "Label" : "Symbol",
|
|
name: node.name,
|
|
file: node.start.file,
|
|
line: node.start.line,
|
|
col: node.start.col
|
|
});
|
|
}
|
|
if (options.func_arguments
|
|
&& node instanceof AST_Lambda
|
|
&& node.uses_arguments) {
|
|
AST_Node.warn("arguments used in function {name} [{file}:{line},{col}]", {
|
|
name: node.name ? node.name.name : "anonymous",
|
|
file: node.start.file,
|
|
line: node.start.line,
|
|
col: node.start.col
|
|
});
|
|
}
|
|
if (options.nested_defuns
|
|
&& node instanceof AST_Defun
|
|
&& !(tw.parent() instanceof AST_Scope)) {
|
|
AST_Node.warn("Function {name} declared in nested statement \"{type}\" [{file}:{line},{col}]", {
|
|
name: node.name.name,
|
|
type: tw.parent().TYPE,
|
|
file: node.start.file,
|
|
line: node.start.line,
|
|
col: node.start.col
|
|
});
|
|
}
|
|
});
|
|
this.walk(tw);
|
|
});
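
// Usage sketch (illustration only): `toplevel_ast` is a hypothetical AST_Toplevel on which
// figure_out_scope() has to run first, since the checks above rely on symbol definitions.
// `undeclared` is off by default because it also flags standard globals such as Object or parseInt.
function example_scope_warnings(toplevel_ast) {
    toplevel_ast.figure_out_scope({});
    toplevel_ast.scope_warnings({
        undeclared   : true, // opt in to the noisy check
        unreferenced : true
    });
}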
|
|
|
|
/***********************************************************************
|
|
|
|
A JavaScript tokenizer / parser / beautifier / compressor.
|
|
https://github.com/mishoo/UglifyJS2
|
|
|
|
-------------------------------- (C) ---------------------------------
|
|
|
|
Author: Mihai Bazon
|
|
<mihai.bazon@gmail.com>
|
|
http://mihai.bazon.net/blog
|
|
|
|
Distributed under the BSD license:
|
|
|
|
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
|
|
|
Redistribution and use in source and binary forms, with or without
|
|
modification, are permitted provided that the following conditions
|
|
are met:
|
|
|
|
* Redistributions of source code must retain the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer.
|
|
|
|
* Redistributions in binary form must reproduce the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer in the documentation and/or other materials
|
|
provided with the distribution.
|
|
|
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
|
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
|
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
|
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
|
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
|
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
|
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
|
SUCH DAMAGE.
|
|
|
|
***********************************************************************/
|
|
|
|
"use strict";
|
|
|
|
var EXPECT_DIRECTIVE = /^$|[;{][\s\n]*$/;
|
|
|
|
function is_some_comments(comment) {
|
|
// multiline comment
|
|
return comment.type == "comment2" && /@preserve|@license|@cc_on/i.test(comment.value);
|
|
}
|
|
|
|
function OutputStream(options) {
|
|
|
|
options = defaults(options, {
|
|
ascii_only : false,
|
|
beautify : false,
|
|
bracketize : false,
|
|
comments : false,
|
|
indent_level : 4,
|
|
indent_start : 0,
|
|
inline_script : true,
|
|
keep_quoted_props: false,
|
|
max_line_len : false,
|
|
preamble : null,
|
|
preserve_line : false,
|
|
quote_keys : false,
|
|
quote_style : 0,
|
|
screw_ie8 : true,
|
|
semicolons : true,
|
|
shebang : true,
|
|
source_map : null,
|
|
space_colon : true,
|
|
unescape_regexps : false,
|
|
width : 80,
|
|
wrap_iife : false,
|
|
}, true);
|
|
|
|
// Convert comment option to RegExp if necessary and set up comments filter
|
|
var comment_filter = return_false; // Default case, throw all comments away
|
|
if (options.comments) {
|
|
var comments = options.comments;
|
|
if (typeof options.comments === "string" && /^\/.*\/[a-zA-Z]*$/.test(options.comments)) {
|
|
var regex_pos = options.comments.lastIndexOf("/");
|
|
comments = new RegExp(
|
|
options.comments.substr(1, regex_pos - 1),
|
|
options.comments.substr(regex_pos + 1)
|
|
);
|
|
}
|
|
if (comments instanceof RegExp) {
|
|
comment_filter = function(comment) {
|
|
return comment.type != "comment5" && comments.test(comment.value);
|
|
};
|
|
}
|
|
else if (typeof comments === "function") {
|
|
comment_filter = function(comment) {
|
|
return comment.type != "comment5" && comments(this, comment);
|
|
};
|
|
}
|
|
else if (comments === "some") {
|
|
comment_filter = is_some_comments;
|
|
} else { // NOTE includes "all" option
|
|
comment_filter = return_true;
|
|
}
|
|
}
|
|
|
|
var indentation = 0;
|
|
var current_col = 0;
|
|
var current_line = 1;
|
|
var current_pos = 0;
|
|
var OUTPUT = "";
|
|
|
|
function to_ascii(str, identifier) {
|
|
return str.replace(/[\u0000-\u001f\u007f-\uffff]/g, function(ch) {
|
|
var code = ch.charCodeAt(0).toString(16);
|
|
if (code.length <= 2 && !identifier) {
|
|
while (code.length < 2) code = "0" + code;
|
|
return "\\x" + code;
|
|
} else {
|
|
while (code.length < 4) code = "0" + code;
|
|
return "\\u" + code;
|
|
}
|
|
});
|
|
};
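
    // Illustration only (not part of UglifyJS): sample escapes produced by to_ascii() --
    //   to_ascii("café")       -> "caf\xe9"     (short \xNN escape for non-identifier strings)
    //   to_ascii("café", true) -> "caf\u00e9"   (identifiers always get the 4-digit \uNNNN form)
    //   to_ascii("a\u2028b")   -> "a\u2028b"    (the U+2028 line separator stays escaped in the output)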
|
|
|
|
function make_string(str, quote) {
|
|
var dq = 0, sq = 0;
|
|
str = str.replace(/[\\\b\f\n\r\v\t\x22\x27\u2028\u2029\0\ufeff]/g,
|
|
function(s, i){
|
|
switch (s) {
|
|
case '"': ++dq; return '"';
|
|
case "'": ++sq; return "'";
|
|
case "\\": return "\\\\";
|
|
case "\n": return "\\n";
|
|
case "\r": return "\\r";
|
|
case "\t": return "\\t";
|
|
case "\b": return "\\b";
|
|
case "\f": return "\\f";
|
|
case "\x0B": return options.screw_ie8 ? "\\v" : "\\x0B";
|
|
case "\u2028": return "\\u2028";
|
|
case "\u2029": return "\\u2029";
|
|
case "\ufeff": return "\\ufeff";
|
|
case "\0":
|
|
return /[0-7]/.test(str.charAt(i+1)) ? "\\x00" : "\\0";
|
|
}
|
|
return s;
|
|
});
|
|
function quote_single() {
|
|
return "'" + str.replace(/\x27/g, "\\'") + "'";
|
|
}
|
|
function quote_double() {
|
|
return '"' + str.replace(/\x22/g, '\\"') + '"';
|
|
}
|
|
if (options.ascii_only) str = to_ascii(str);
|
|
switch (options.quote_style) {
|
|
case 1:
|
|
return quote_single();
|
|
case 2:
|
|
return quote_double();
|
|
case 3:
|
|
return quote == "'" ? quote_single() : quote_double();
|
|
default:
|
|
return dq > sq ? quote_single() : quote_double();
|
|
}
|
|
};
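
    // Illustration only (not part of UglifyJS): quote selection for the input  say "hi"
    // that was originally written with single quotes:
    //   quote_style 0 (default) -> 'say "hi"'    (whichever quote needs fewer escapes)
    //   quote_style 1           -> 'say "hi"'    (always single quotes)
    //   quote_style 2           -> "say \"hi\""  (always double quotes)
    //   quote_style 3           -> 'say "hi"'    (keep the quote character used in the source)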
|
|
|
|
function encode_string(str, quote) {
|
|
var ret = make_string(str, quote);
|
|
if (options.inline_script) {
|
|
ret = ret.replace(/<\x2fscript([>\/\t\n\f\r ])/gi, "<\\/script$1");
|
|
ret = ret.replace(/\x3c!--/g, "\\x3c!--");
|
|
ret = ret.replace(/--\x3e/g, "--\\x3e");
|
|
}
|
|
return ret;
|
|
};
|
|
|
|
function make_name(name) {
|
|
name = name.toString();
|
|
if (options.ascii_only)
|
|
name = to_ascii(name, true);
|
|
return name;
|
|
};
|
|
|
|
function make_indent(back) {
|
|
return repeat_string(" ", options.indent_start + indentation - back * options.indent_level);
|
|
};
|
|
|
|
/* -----[ beautification/minification ]----- */
|
|
|
|
var might_need_space = false;
|
|
var might_need_semicolon = false;
|
|
var might_add_newline = 0;
|
|
var last = "";
|
|
|
|
var ensure_line_len = options.max_line_len ? function() {
|
|
if (current_col > options.max_line_len) {
|
|
if (might_add_newline) {
|
|
var left = OUTPUT.slice(0, might_add_newline);
|
|
var right = OUTPUT.slice(might_add_newline);
|
|
OUTPUT = left + "\n" + right;
|
|
current_line++;
|
|
current_pos++;
|
|
current_col = right.length;
|
|
}
|
|
if (current_col > options.max_line_len) {
|
|
AST_Node.warn("Output exceeds {max_line_len} characters", options);
|
|
}
|
|
}
|
|
might_add_newline = 0;
|
|
} : noop;
|
|
|
|
var requireSemicolonChars = makePredicate("( [ + * / - , .");
|
|
|
|
function print(str) {
|
|
str = String(str);
|
|
var ch = str.charAt(0);
|
|
var prev = last.charAt(last.length - 1);
|
|
if (might_need_semicolon) {
|
|
might_need_semicolon = false;
|
|
|
|
if (prev == ":" && ch == "}" || (!ch || ";}".indexOf(ch) < 0) && prev != ";") {
|
|
if (options.semicolons || requireSemicolonChars(ch)) {
|
|
OUTPUT += ";";
|
|
current_col++;
|
|
current_pos++;
|
|
} else {
|
|
ensure_line_len();
|
|
OUTPUT += "\n";
|
|
current_pos++;
|
|
current_line++;
|
|
current_col = 0;
|
|
|
|
if (/^\s+$/.test(str)) {
|
|
// reset the semicolon flag, since we didn't print one
|
|
// now and might still have to later
|
|
might_need_semicolon = true;
|
|
}
|
|
}
|
|
|
|
if (!options.beautify)
|
|
might_need_space = false;
|
|
}
|
|
}
|
|
|
|
if (!options.beautify && options.preserve_line && stack[stack.length - 1]) {
|
|
var target_line = stack[stack.length - 1].start.line;
|
|
while (current_line < target_line) {
|
|
ensure_line_len();
|
|
OUTPUT += "\n";
|
|
current_pos++;
|
|
current_line++;
|
|
current_col = 0;
|
|
might_need_space = false;
|
|
}
|
|
}
|
|
|
|
if (might_need_space) {
|
|
if ((is_identifier_char(prev)
|
|
&& (is_identifier_char(ch) || ch == "\\"))
|
|
|| (ch == "/" && ch == prev)
|
|
|| ((ch == "+" || ch == "-") && ch == last))
|
|
{
|
|
OUTPUT += " ";
|
|
current_col++;
|
|
current_pos++;
|
|
}
|
|
might_need_space = false;
|
|
}
|
|
OUTPUT += str;
|
|
current_pos += str.length;
|
|
var a = str.split(/\r?\n/), n = a.length - 1;
|
|
current_line += n;
|
|
current_col += a[0].length;
|
|
if (n > 0) {
|
|
ensure_line_len();
|
|
current_col = a[n].length;
|
|
}
|
|
last = str;
|
|
};
|
|
|
|
var space = options.beautify ? function() {
|
|
print(" ");
|
|
} : function() {
|
|
might_need_space = true;
|
|
};
|
|
|
|
var indent = options.beautify ? function(half) {
|
|
if (options.beautify) {
|
|
print(make_indent(half ? 0.5 : 0));
|
|
}
|
|
} : noop;
|
|
|
|
var with_indent = options.beautify ? function(col, cont) {
|
|
if (col === true) col = next_indent();
|
|
var save_indentation = indentation;
|
|
indentation = col;
|
|
var ret = cont();
|
|
indentation = save_indentation;
|
|
return ret;
|
|
} : function(col, cont) { return cont() };
|
|
|
|
var newline = options.beautify ? function() {
|
|
print("\n");
|
|
} : options.max_line_len ? function() {
|
|
ensure_line_len();
|
|
might_add_newline = OUTPUT.length;
|
|
} : noop;
|
|
|
|
var semicolon = options.beautify ? function() {
|
|
print(";");
|
|
} : function() {
|
|
might_need_semicolon = true;
|
|
};
|
|
|
|
function force_semicolon() {
|
|
might_need_semicolon = false;
|
|
print(";");
|
|
};
|
|
|
|
function next_indent() {
|
|
return indentation + options.indent_level;
|
|
};
|
|
|
|
function with_block(cont) {
|
|
var ret;
|
|
print("{");
|
|
newline();
|
|
with_indent(next_indent(), function(){
|
|
ret = cont();
|
|
});
|
|
indent();
|
|
print("}");
|
|
return ret;
|
|
};
|
|
|
|
function with_parens(cont) {
|
|
print("(");
|
|
//XXX: still nice to have that for argument lists
|
|
//var ret = with_indent(current_col, cont);
|
|
var ret = cont();
|
|
print(")");
|
|
return ret;
|
|
};
|
|
|
|
function with_square(cont) {
|
|
print("[");
|
|
//var ret = with_indent(current_col, cont);
|
|
var ret = cont();
|
|
print("]");
|
|
return ret;
|
|
};
|
|
|
|
function comma() {
|
|
print(",");
|
|
space();
|
|
};
|
|
|
|
function colon() {
|
|
print(":");
|
|
if (options.space_colon) space();
|
|
};
|
|
|
|
var add_mapping = options.source_map ? function(token, name) {
|
|
try {
|
|
if (token) options.source_map.add(
|
|
token.file || "?",
|
|
current_line, current_col,
|
|
token.line, token.col,
|
|
(!name && token.type == "name") ? token.value : name
|
|
);
|
|
} catch(ex) {
|
|
AST_Node.warn("Couldn't figure out mapping for {file}:{line},{col} → {cline},{ccol} [{name}]", {
|
|
file: token.file,
|
|
line: token.line,
|
|
col: token.col,
|
|
cline: current_line,
|
|
ccol: current_col,
|
|
name: name || ""
|
|
})
|
|
}
|
|
} : noop;
|
|
|
|
function get() {
|
|
if (might_add_newline) {
|
|
ensure_line_len();
|
|
}
|
|
return OUTPUT;
|
|
};
|
|
|
|
var stack = [];
|
|
return {
|
|
get : get,
|
|
toString : get,
|
|
indent : indent,
|
|
indentation : function() { return indentation },
|
|
current_width : function() { return current_col - indentation },
|
|
should_break : function() { return options.width && this.current_width() >= options.width },
|
|
newline : newline,
|
|
print : print,
|
|
space : space,
|
|
comma : comma,
|
|
colon : colon,
|
|
last : function() { return last },
|
|
semicolon : semicolon,
|
|
force_semicolon : force_semicolon,
|
|
to_ascii : to_ascii,
|
|
print_name : function(name) { print(make_name(name)) },
|
|
print_string : function(str, quote, escape_directive) {
|
|
var encoded = encode_string(str, quote);
|
|
if (escape_directive === true && encoded.indexOf("\\") === -1) {
|
|
// Insert semicolons to break directive prologue
|
|
if (!EXPECT_DIRECTIVE.test(OUTPUT)) {
|
|
force_semicolon();
|
|
}
|
|
force_semicolon();
|
|
}
|
|
print(encoded);
|
|
},
|
|
encode_string : encode_string,
|
|
next_indent : next_indent,
|
|
with_indent : with_indent,
|
|
with_block : with_block,
|
|
with_parens : with_parens,
|
|
with_square : with_square,
|
|
add_mapping : add_mapping,
|
|
option : function(opt) { return options[opt] },
|
|
comment_filter : comment_filter,
|
|
line : function() { return current_line },
|
|
col : function() { return current_col },
|
|
pos : function() { return current_pos },
|
|
push_node : function(node) { stack.push(node) },
|
|
pop_node : function() { return stack.pop() },
|
|
parent : function(n) {
|
|
return stack[stack.length - 2 - (n || 0)];
|
|
}
|
|
};
|
|
|
|
};
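
// Usage sketch (illustration only): OutputStream is a factory, so it is called without `new`.
// `some_ast_node` is a hypothetical AST node, e.g. the result of a parse.
function example_output_stream(some_ast_node) {
    var out = OutputStream({ beautify: true, indent_level: 2, comments: "some" });
    some_ast_node.print(out); // each node's codegen writes into the stream
    return out.get();         // the accumulated output text
}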
|
|
|
|
/* -----[ code generators ]----- */
|
|
|
|
(function(){
|
|
|
|
/* -----[ utils ]----- */
|
|
|
|
function DEFPRINT(nodetype, generator) {
|
|
nodetype.DEFMETHOD("_codegen", generator);
|
|
};
|
|
|
|
var use_asm = false;
|
|
var in_directive = false;
|
|
|
|
AST_Node.DEFMETHOD("print", function(stream, force_parens){
|
|
var self = this, generator = self._codegen, prev_use_asm = use_asm;
|
|
if (self instanceof AST_Directive && self.value == "use asm" && stream.parent() instanceof AST_Scope) {
|
|
use_asm = true;
|
|
}
|
|
function doit() {
|
|
self.add_comments(stream);
|
|
self.add_source_map(stream);
|
|
generator(self, stream);
|
|
}
|
|
stream.push_node(self);
|
|
if (force_parens || self.needs_parens(stream)) {
|
|
stream.with_parens(doit);
|
|
} else {
|
|
doit();
|
|
}
|
|
stream.pop_node();
|
|
if (self instanceof AST_Scope) {
|
|
use_asm = prev_use_asm;
|
|
}
|
|
});
|
|
|
|
AST_Node.DEFMETHOD("print_to_string", function(options){
|
|
var s = OutputStream(options);
|
|
if (!options) s._readonly = true;
|
|
this.print(s);
|
|
return s.get();
|
|
});
|
|
|
|
/* -----[ comments ]----- */
|
|
|
|
AST_Node.DEFMETHOD("add_comments", function(output){
|
|
if (output._readonly) return;
|
|
var self = this;
|
|
var start = self.start;
|
|
if (start && !start._comments_dumped) {
|
|
start._comments_dumped = true;
|
|
var comments = start.comments_before || [];
|
|
|
|
// XXX: ugly fix for https://github.com/mishoo/UglifyJS2/issues/112
|
|
// and https://github.com/mishoo/UglifyJS2/issues/372
|
|
if (self instanceof AST_Exit && self.value) {
|
|
self.value.walk(new TreeWalker(function(node){
|
|
if (node.start && node.start.comments_before) {
|
|
comments = comments.concat(node.start.comments_before);
|
|
node.start.comments_before = [];
|
|
}
|
|
if (node instanceof AST_Function ||
|
|
node instanceof AST_Array ||
|
|
node instanceof AST_Object)
|
|
{
|
|
return true; // don't go inside.
|
|
}
|
|
}));
|
|
}
|
|
|
|
if (output.pos() == 0) {
|
|
if (comments.length > 0 && output.option("shebang") && comments[0].type == "comment5") {
|
|
output.print("#!" + comments.shift().value + "\n");
|
|
output.indent();
|
|
}
|
|
var preamble = output.option("preamble");
|
|
if (preamble) {
|
|
output.print(preamble.replace(/\r\n?|[\n\u2028\u2029]|\s*$/g, "\n"));
|
|
}
|
|
}
|
|
|
|
comments = comments.filter(output.comment_filter, self);
|
|
|
|
// Keep single line comments after nlb, after nlb
|
|
if (!output.option("beautify") && comments.length > 0 &&
|
|
/comment[134]/.test(comments[0].type) &&
|
|
output.col() !== 0 && comments[0].nlb)
|
|
{
|
|
output.print("\n");
|
|
}
|
|
|
|
comments.forEach(function(c){
|
|
if (/comment[134]/.test(c.type)) {
|
|
output.print("//" + c.value + "\n");
|
|
output.indent();
|
|
}
|
|
else if (c.type == "comment2") {
|
|
output.print("/*" + c.value + "*/");
|
|
if (start.nlb) {
|
|
output.print("\n");
|
|
output.indent();
|
|
} else {
|
|
output.space();
|
|
}
|
|
}
|
|
});
|
|
}
|
|
});
|
|
|
|
/* -----[ PARENTHESES ]----- */
|
|
|
|
function PARENS(nodetype, func) {
|
|
if (Array.isArray(nodetype)) {
|
|
nodetype.forEach(function(nodetype){
|
|
PARENS(nodetype, func);
|
|
});
|
|
} else {
|
|
nodetype.DEFMETHOD("needs_parens", func);
|
|
}
|
|
};
|
|
|
|
PARENS(AST_Node, function(){
|
|
return false;
|
|
});
|
|
|
|
// a function expression needs parens around it when it's provably
|
|
// the first token to appear in a statement.
|
|
PARENS(AST_Function, function(output){
|
|
if (first_in_statement(output)) {
|
|
return true;
|
|
}
|
|
|
|
if (output.option('wrap_iife')) {
|
|
var p = output.parent();
|
|
return p instanceof AST_Call && p.expression === this;
|
|
}
|
|
|
|
return false;
|
|
});
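
    // Illustration only: without these parens an IIFE at statement position would start with
    // the `function` keyword and be parsed as a (nameless, invalid) function declaration:
    //     function(){ ... }()     // SyntaxError as a statement
    //     (function(){ ... })()   // what gets emitted when first_in_statement() is true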
|
|
|
|
// same goes for an object literal, because otherwise it would be
|
|
// interpreted as a block of code.
|
|
PARENS(AST_Object, function(output){
|
|
return first_in_statement(output);
|
|
});
|
|
|
|
PARENS(AST_Unary, function(output){
|
|
var p = output.parent();
|
|
return p instanceof AST_PropAccess && p.expression === this
|
|
|| p instanceof AST_Call && p.expression === this;
|
|
});
|
|
|
|
PARENS(AST_Seq, function(output){
|
|
var p = output.parent();
|
|
return p instanceof AST_Call // (foo, bar)() or foo(1, (2, 3), 4)
|
|
|| p instanceof AST_Unary // !(foo, bar, baz)
|
|
|| p instanceof AST_Binary // 1 + (2, 3) + 4 ==> 8
|
|
|| p instanceof AST_VarDef // var a = (1, 2), b = a + a; ==> b == 4
|
|
|| p instanceof AST_PropAccess // (1, {foo:2}).foo or (1, {foo:2})["foo"] ==> 2
|
|
|| p instanceof AST_Array // [ 1, (2, 3), 4 ] ==> [ 1, 3, 4 ]
|
|
|| p instanceof AST_ObjectProperty // { foo: (1, 2) }.foo ==> 2
|
|
|| p instanceof AST_Conditional /* (false, true) ? (a = 10, b = 20) : (c = 30)
|
|
* ==> 20 (side effect, set a := 10 and b := 20) */
|
|
;
|
|
});
|
|
|
|
PARENS(AST_Binary, function(output){
|
|
var p = output.parent();
|
|
// (foo && bar)()
|
|
if (p instanceof AST_Call && p.expression === this)
|
|
return true;
|
|
// typeof (foo && bar)
|
|
if (p instanceof AST_Unary)
|
|
return true;
|
|
// (foo && bar)["prop"], (foo && bar).prop
|
|
if (p instanceof AST_PropAccess && p.expression === this)
|
|
return true;
|
|
// this deals with precedence: 3 * (2 + 1)
|
|
if (p instanceof AST_Binary) {
|
|
var po = p.operator, pp = PRECEDENCE[po];
|
|
var so = this.operator, sp = PRECEDENCE[so];
|
|
if (pp > sp
|
|
|| (pp == sp
|
|
&& this === p.right)) {
|
|
return true;
|
|
}
|
|
}
|
|
});
|
|
|
|
PARENS(AST_PropAccess, function(output){
|
|
var p = output.parent();
|
|
if (p instanceof AST_New && p.expression === this) {
|
|
// i.e. new (foo.bar().baz)
|
|
//
|
|
// if there's one call into this subtree, then we need
|
|
// parens around it too, otherwise the call will be
|
|
// interpreted as passing the arguments to the upper New
|
|
// expression.
|
|
try {
|
|
this.walk(new TreeWalker(function(node){
|
|
if (node instanceof AST_Call) throw p;
|
|
}));
|
|
} catch(ex) {
|
|
if (ex !== p) throw ex;
|
|
return true;
|
|
}
|
|
}
|
|
});
|
|
|
|
PARENS(AST_Call, function(output){
|
|
var p = output.parent(), p1;
|
|
if (p instanceof AST_New && p.expression === this)
|
|
return true;
|
|
|
|
// workaround for Safari bug.
|
|
// https://bugs.webkit.org/show_bug.cgi?id=123506
|
|
return this.expression instanceof AST_Function
|
|
&& p instanceof AST_PropAccess
|
|
&& p.expression === this
|
|
&& (p1 = output.parent(1)) instanceof AST_Assign
|
|
&& p1.left === p;
|
|
});
|
|
|
|
PARENS(AST_New, function(output){
|
|
var p = output.parent();
|
|
if (!need_constructor_parens(this, output)
|
|
&& (p instanceof AST_PropAccess // (new Date).getTime(), (new Date)["getTime"]()
|
|
|| p instanceof AST_Call && p.expression === this)) // (new foo)(bar)
|
|
return true;
|
|
});
|
|
|
|
PARENS(AST_Number, function(output){
|
|
var p = output.parent();
|
|
if (p instanceof AST_PropAccess && p.expression === this) {
|
|
var value = this.getValue();
|
|
if (value < 0 || /^0/.test(make_num(value))) {
|
|
return true;
|
|
}
|
|
}
|
|
});
|
|
|
|
PARENS([ AST_Assign, AST_Conditional ], function (output){
|
|
var p = output.parent();
|
|
// !(a = false) → true
|
|
if (p instanceof AST_Unary)
|
|
return true;
|
|
// 1 + (a = 2) + 3 → 6, side effect setting a = 2
|
|
if (p instanceof AST_Binary && !(p instanceof AST_Assign))
|
|
return true;
|
|
// (a = func)() —or— new (a = Object)()
|
|
if (p instanceof AST_Call && p.expression === this)
|
|
return true;
|
|
// (a = foo) ? bar : baz
|
|
if (p instanceof AST_Conditional && p.condition === this)
|
|
return true;
|
|
// (a = foo)["prop"] —or— (a = foo).prop
|
|
if (p instanceof AST_PropAccess && p.expression === this)
|
|
return true;
|
|
});
|
|
|
|
/* -----[ PRINTERS ]----- */
|
|
|
|
DEFPRINT(AST_Directive, function(self, output){
|
|
output.print_string(self.value, self.quote);
|
|
output.semicolon();
|
|
});
|
|
DEFPRINT(AST_Debugger, function(self, output){
|
|
output.print("debugger");
|
|
output.semicolon();
|
|
});
|
|
|
|
/* -----[ statements ]----- */
|
|
|
|
function display_body(body, is_toplevel, output, allow_directives) {
|
|
var last = body.length - 1;
|
|
in_directive = allow_directives;
|
|
body.forEach(function(stmt, i){
|
|
if (in_directive === true && !(stmt instanceof AST_Directive ||
|
|
stmt instanceof AST_EmptyStatement ||
|
|
(stmt instanceof AST_SimpleStatement && stmt.body instanceof AST_String)
|
|
)) {
|
|
in_directive = false;
|
|
}
|
|
if (!(stmt instanceof AST_EmptyStatement)) {
|
|
output.indent();
|
|
stmt.print(output);
|
|
if (!(i == last && is_toplevel)) {
|
|
output.newline();
|
|
if (is_toplevel) output.newline();
|
|
}
|
|
}
|
|
if (in_directive === true &&
|
|
stmt instanceof AST_SimpleStatement &&
|
|
stmt.body instanceof AST_String
|
|
) {
|
|
in_directive = false;
|
|
}
|
|
});
|
|
in_directive = false;
|
|
};
|
|
|
|
AST_StatementWithBody.DEFMETHOD("_do_print_body", function(output){
|
|
force_statement(this.body, output);
|
|
});
|
|
|
|
DEFPRINT(AST_Statement, function(self, output){
|
|
self.body.print(output);
|
|
output.semicolon();
|
|
});
|
|
DEFPRINT(AST_Toplevel, function(self, output){
|
|
display_body(self.body, true, output, true);
|
|
output.print("");
|
|
});
|
|
DEFPRINT(AST_LabeledStatement, function(self, output){
|
|
self.label.print(output);
|
|
output.colon();
|
|
self.body.print(output);
|
|
});
|
|
DEFPRINT(AST_SimpleStatement, function(self, output){
|
|
self.body.print(output);
|
|
output.semicolon();
|
|
});
|
|
function print_bracketed(body, output, allow_directives) {
|
|
if (body.length > 0) output.with_block(function(){
|
|
display_body(body, false, output, allow_directives);
|
|
});
|
|
else output.print("{}");
|
|
};
|
|
DEFPRINT(AST_BlockStatement, function(self, output){
|
|
print_bracketed(self.body, output);
|
|
});
|
|
DEFPRINT(AST_EmptyStatement, function(self, output){
|
|
output.semicolon();
|
|
});
|
|
DEFPRINT(AST_Do, function(self, output){
|
|
output.print("do");
|
|
output.space();
|
|
make_block(self.body, output);
|
|
output.space();
|
|
output.print("while");
|
|
output.space();
|
|
output.with_parens(function(){
|
|
self.condition.print(output);
|
|
});
|
|
output.semicolon();
|
|
});
|
|
DEFPRINT(AST_While, function(self, output){
|
|
output.print("while");
|
|
output.space();
|
|
output.with_parens(function(){
|
|
self.condition.print(output);
|
|
});
|
|
output.space();
|
|
self._do_print_body(output);
|
|
});
|
|
DEFPRINT(AST_For, function(self, output){
|
|
output.print("for");
|
|
output.space();
|
|
output.with_parens(function(){
|
|
if (self.init) {
|
|
if (self.init instanceof AST_Definitions) {
|
|
self.init.print(output);
|
|
} else {
|
|
parenthesize_for_noin(self.init, output, true);
|
|
}
|
|
output.print(";");
|
|
output.space();
|
|
} else {
|
|
output.print(";");
|
|
}
|
|
if (self.condition) {
|
|
self.condition.print(output);
|
|
output.print(";");
|
|
output.space();
|
|
} else {
|
|
output.print(";");
|
|
}
|
|
if (self.step) {
|
|
self.step.print(output);
|
|
}
|
|
});
|
|
output.space();
|
|
self._do_print_body(output);
|
|
});
|
|
DEFPRINT(AST_ForIn, function(self, output){
|
|
output.print("for");
|
|
output.space();
|
|
output.with_parens(function(){
|
|
self.init.print(output);
|
|
output.space();
|
|
output.print("in");
|
|
output.space();
|
|
self.object.print(output);
|
|
});
|
|
output.space();
|
|
self._do_print_body(output);
|
|
});
|
|
DEFPRINT(AST_With, function(self, output){
|
|
output.print("with");
|
|
output.space();
|
|
output.with_parens(function(){
|
|
self.expression.print(output);
|
|
});
|
|
output.space();
|
|
self._do_print_body(output);
|
|
});
|
|
|
|
/* -----[ functions ]----- */
|
|
AST_Lambda.DEFMETHOD("_do_print", function(output, nokeyword){
|
|
var self = this;
|
|
if (!nokeyword) {
|
|
output.print("function");
|
|
}
|
|
if (self.name) {
|
|
output.space();
|
|
self.name.print(output);
|
|
}
|
|
output.with_parens(function(){
|
|
self.argnames.forEach(function(arg, i){
|
|
if (i) output.comma();
|
|
arg.print(output);
|
|
});
|
|
});
|
|
output.space();
|
|
print_bracketed(self.body, output, true);
|
|
});
|
|
DEFPRINT(AST_Lambda, function(self, output){
|
|
self._do_print(output);
|
|
});
|
|
|
|
/* -----[ exits ]----- */
|
|
AST_Exit.DEFMETHOD("_do_print", function(output, kind){
|
|
output.print(kind);
|
|
if (this.value) {
|
|
output.space();
|
|
this.value.print(output);
|
|
}
|
|
output.semicolon();
|
|
});
|
|
DEFPRINT(AST_Return, function(self, output){
|
|
self._do_print(output, "return");
|
|
});
|
|
DEFPRINT(AST_Throw, function(self, output){
|
|
self._do_print(output, "throw");
|
|
});
|
|
|
|
/* -----[ loop control ]----- */
|
|
AST_LoopControl.DEFMETHOD("_do_print", function(output, kind){
|
|
output.print(kind);
|
|
if (this.label) {
|
|
output.space();
|
|
this.label.print(output);
|
|
}
|
|
output.semicolon();
|
|
});
|
|
DEFPRINT(AST_Break, function(self, output){
|
|
self._do_print(output, "break");
|
|
});
|
|
DEFPRINT(AST_Continue, function(self, output){
|
|
self._do_print(output, "continue");
|
|
});
|
|
|
|
/* -----[ if ]----- */
|
|
function make_then(self, output) {
|
|
var b = self.body;
|
|
if (output.option("bracketize")
|
|
|| !output.option("screw_ie8") && b instanceof AST_Do)
|
|
return make_block(b, output);
|
|
// The squeezer replaces "block"-s that contain only a single
|
|
// statement with the statement itself; technically, the AST
|
|
// is correct, but this can create problems when we output an
|
|
// IF having an ELSE clause where the THEN clause ends in an
|
|
// IF *without* an ELSE block (then the outer ELSE would refer
|
|
// to the inner IF). This function checks for this case and
|
|
// adds the block brackets if needed.
|
|
if (!b) return output.force_semicolon();
|
|
while (true) {
|
|
if (b instanceof AST_If) {
|
|
if (!b.alternative) {
|
|
make_block(self.body, output);
|
|
return;
|
|
}
|
|
b = b.alternative;
|
|
}
|
|
else if (b instanceof AST_StatementWithBody) {
|
|
b = b.body;
|
|
}
|
|
else break;
|
|
}
|
|
force_statement(self.body, output);
|
|
};
|
|
DEFPRINT(AST_If, function(self, output){
|
|
output.print("if");
|
|
output.space();
|
|
output.with_parens(function(){
|
|
self.condition.print(output);
|
|
});
|
|
output.space();
|
|
if (self.alternative) {
|
|
make_then(self, output);
|
|
output.space();
|
|
output.print("else");
|
|
output.space();
|
|
if (self.alternative instanceof AST_If)
|
|
self.alternative.print(output);
|
|
else
|
|
force_statement(self.alternative, output);
|
|
} else {
|
|
self._do_print_body(output);
|
|
}
|
|
});
|
|
|
|
/* -----[ switch ]----- */
|
|
DEFPRINT(AST_Switch, function(self, output){
|
|
output.print("switch");
|
|
output.space();
|
|
output.with_parens(function(){
|
|
self.expression.print(output);
|
|
});
|
|
output.space();
|
|
var last = self.body.length - 1;
|
|
if (last < 0) output.print("{}");
|
|
else output.with_block(function(){
|
|
self.body.forEach(function(branch, i){
|
|
output.indent(true);
|
|
branch.print(output);
|
|
if (i < last && branch.body.length > 0)
|
|
output.newline();
|
|
});
|
|
});
|
|
});
|
|
AST_SwitchBranch.DEFMETHOD("_do_print_body", function(output){
|
|
output.newline();
|
|
this.body.forEach(function(stmt){
|
|
output.indent();
|
|
stmt.print(output);
|
|
output.newline();
|
|
});
|
|
});
|
|
DEFPRINT(AST_Default, function(self, output){
|
|
output.print("default:");
|
|
self._do_print_body(output);
|
|
});
|
|
DEFPRINT(AST_Case, function(self, output){
|
|
output.print("case");
|
|
output.space();
|
|
self.expression.print(output);
|
|
output.print(":");
|
|
self._do_print_body(output);
|
|
});
|
|
|
|
/* -----[ exceptions ]----- */
|
|
DEFPRINT(AST_Try, function(self, output){
|
|
output.print("try");
|
|
output.space();
|
|
print_bracketed(self.body, output);
|
|
if (self.bcatch) {
|
|
output.space();
|
|
self.bcatch.print(output);
|
|
}
|
|
if (self.bfinally) {
|
|
output.space();
|
|
self.bfinally.print(output);
|
|
}
|
|
});
|
|
DEFPRINT(AST_Catch, function(self, output){
|
|
output.print("catch");
|
|
output.space();
|
|
output.with_parens(function(){
|
|
self.argname.print(output);
|
|
});
|
|
output.space();
|
|
print_bracketed(self.body, output);
|
|
});
|
|
DEFPRINT(AST_Finally, function(self, output){
|
|
output.print("finally");
|
|
output.space();
|
|
print_bracketed(self.body, output);
|
|
});
|
|
|
|
/* -----[ var/const ]----- */
|
|
AST_Definitions.DEFMETHOD("_do_print", function(output, kind){
|
|
output.print(kind);
|
|
output.space();
|
|
this.definitions.forEach(function(def, i){
|
|
if (i) output.comma();
|
|
def.print(output);
|
|
});
|
|
var p = output.parent();
|
|
var in_for = p instanceof AST_For || p instanceof AST_ForIn;
|
|
var avoid_semicolon = in_for && p.init === this;
|
|
if (!avoid_semicolon)
|
|
output.semicolon();
|
|
});
|
|
DEFPRINT(AST_Var, function(self, output){
|
|
self._do_print(output, "var");
|
|
});
|
|
DEFPRINT(AST_Const, function(self, output){
|
|
self._do_print(output, "const");
|
|
});
|
|
|
|
function parenthesize_for_noin(node, output, noin) {
|
|
if (!noin) node.print(output);
|
|
else try {
|
|
// need to take some precautions here:
|
|
// https://github.com/mishoo/UglifyJS2/issues/60
|
|
node.walk(new TreeWalker(function(node){
|
|
if (node instanceof AST_Binary && node.operator == "in")
|
|
throw output;
|
|
}));
|
|
node.print(output);
|
|
} catch(ex) {
|
|
if (ex !== output) throw ex;
|
|
node.print(output, true);
|
|
}
|
|
};
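
    // Illustration only: in a for(;;) head the init expression may not contain a bare `in`
    // operator, or the whole statement would parse as for-in. Printing with parens avoids that:
    //     for (var i = ("x" in o); i < 3; i++) {}   // the parens keep this a plain for(;;)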
|
|
|
|
DEFPRINT(AST_VarDef, function(self, output){
|
|
self.name.print(output);
|
|
if (self.value) {
|
|
output.space();
|
|
output.print("=");
|
|
output.space();
|
|
var p = output.parent(1);
|
|
var noin = p instanceof AST_For || p instanceof AST_ForIn;
|
|
parenthesize_for_noin(self.value, output, noin);
|
|
}
|
|
});
|
|
|
|
/* -----[ other expressions ]----- */
|
|
DEFPRINT(AST_Call, function(self, output){
|
|
self.expression.print(output);
|
|
if (self instanceof AST_New && !need_constructor_parens(self, output))
|
|
return;
|
|
output.with_parens(function(){
|
|
self.args.forEach(function(expr, i){
|
|
if (i) output.comma();
|
|
expr.print(output);
|
|
});
|
|
});
|
|
});
|
|
DEFPRINT(AST_New, function(self, output){
|
|
output.print("new");
|
|
output.space();
|
|
AST_Call.prototype._codegen(self, output);
|
|
});
|
|
|
|
AST_Seq.DEFMETHOD("_do_print", function(output){
|
|
this.car.print(output);
|
|
if (this.cdr) {
|
|
output.comma();
|
|
if (output.should_break()) {
|
|
output.newline();
|
|
output.indent();
|
|
}
|
|
this.cdr.print(output);
|
|
}
|
|
});
|
|
DEFPRINT(AST_Seq, function(self, output){
|
|
self._do_print(output);
|
|
// var p = output.parent();
|
|
// if (p instanceof AST_Statement) {
|
|
// output.with_indent(output.next_indent(), function(){
|
|
// self._do_print(output);
|
|
// });
|
|
// } else {
|
|
// self._do_print(output);
|
|
// }
|
|
});
|
|
DEFPRINT(AST_Dot, function(self, output){
|
|
var expr = self.expression;
|
|
expr.print(output);
|
|
if (expr instanceof AST_Number && expr.getValue() >= 0) {
|
|
if (!/[xa-f.)]/i.test(output.last())) {
|
|
output.print(".");
|
|
}
|
|
}
|
|
output.print(".");
|
|
// the name after dot would be mapped about here.
|
|
output.add_mapping(self.end);
|
|
output.print_name(self.property);
|
|
});
|
|
DEFPRINT(AST_Sub, function(self, output){
|
|
self.expression.print(output);
|
|
output.print("[");
|
|
self.property.print(output);
|
|
output.print("]");
|
|
});
|
|
DEFPRINT(AST_UnaryPrefix, function(self, output){
|
|
var op = self.operator;
|
|
output.print(op);
|
|
if (/^[a-z]/i.test(op)
|
|
|| (/[+-]$/.test(op)
|
|
&& self.expression instanceof AST_UnaryPrefix
|
|
&& /^[+-]/.test(self.expression.operator))) {
|
|
output.space();
|
|
}
|
|
self.expression.print(output);
|
|
});
|
|
DEFPRINT(AST_UnaryPostfix, function(self, output){
|
|
self.expression.print(output);
|
|
output.print(self.operator);
|
|
});
|
|
DEFPRINT(AST_Binary, function(self, output){
|
|
var op = self.operator;
|
|
self.left.print(output);
|
|
if (op[0] == ">" /* ">>" ">>>" ">" ">=" */
|
|
&& self.left instanceof AST_UnaryPostfix
|
|
&& self.left.operator == "--") {
|
|
// space is mandatory to avoid outputting -->
|
|
output.print(" ");
|
|
} else {
|
|
// the space is optional depending on "beautify"
|
|
output.space();
|
|
}
|
|
output.print(op);
|
|
if ((op == "<" || op == "<<")
|
|
&& self.right instanceof AST_UnaryPrefix
|
|
&& self.right.operator == "!"
|
|
&& self.right.expression instanceof AST_UnaryPrefix
|
|
&& self.right.expression.operator == "--") {
|
|
// space is mandatory to avoid outputting <!--
|
|
output.print(" ");
|
|
} else {
|
|
// the space is optional depending on "beautify"
|
|
output.space();
|
|
}
|
|
self.right.print(output);
|
|
});
|
|
DEFPRINT(AST_Conditional, function(self, output){
|
|
self.condition.print(output);
|
|
output.space();
|
|
output.print("?");
|
|
output.space();
|
|
self.consequent.print(output);
|
|
output.space();
|
|
output.colon();
|
|
self.alternative.print(output);
|
|
});
|
|
|
|
/* -----[ literals ]----- */
|
|
DEFPRINT(AST_Array, function(self, output){
|
|
output.with_square(function(){
|
|
var a = self.elements, len = a.length;
|
|
if (len > 0) output.space();
|
|
a.forEach(function(exp, i){
|
|
if (i) output.comma();
|
|
exp.print(output);
|
|
// If the final element is a hole, we need to make sure it
|
|
// doesn't look like a trailing comma, by inserting an actual
|
|
// trailing comma.
|
|
if (i === len - 1 && exp instanceof AST_Hole)
|
|
output.comma();
|
|
});
|
|
if (len > 0) output.space();
|
|
});
|
|
});
|
|
DEFPRINT(AST_Object, function(self, output){
|
|
if (self.properties.length > 0) output.with_block(function(){
|
|
self.properties.forEach(function(prop, i){
|
|
if (i) {
|
|
output.print(",");
|
|
output.newline();
|
|
}
|
|
output.indent();
|
|
prop.print(output);
|
|
});
|
|
output.newline();
|
|
});
|
|
else output.print("{}");
|
|
});
|
|
|
|
function print_property_name(key, quote, output) {
|
|
if (output.option("quote_keys")) {
|
|
output.print_string(key + "");
|
|
} else if ((typeof key == "number"
|
|
|| !output.option("beautify")
|
|
&& +key + "" == key)
|
|
&& parseFloat(key) >= 0) {
|
|
output.print(make_num(key));
|
|
} else if (RESERVED_WORDS(key) ? output.option("screw_ie8") : is_identifier_string(key)) {
|
|
if (quote && output.option("keep_quoted_props")) {
|
|
output.print_string(key, quote);
|
|
} else {
|
|
output.print_name(key);
|
|
}
|
|
} else {
|
|
output.print_string(key, quote);
|
|
}
|
|
}
|
|
|
|
DEFPRINT(AST_ObjectKeyVal, function(self, output){
|
|
print_property_name(self.key, self.quote, output);
|
|
output.colon();
|
|
self.value.print(output);
|
|
});
|
|
AST_ObjectProperty.DEFMETHOD("_print_getter_setter", function(type, output) {
|
|
output.print(type);
|
|
output.space();
|
|
print_property_name(this.key.name, this.quote, output);
|
|
this.value._do_print(output, true);
|
|
});
|
|
DEFPRINT(AST_ObjectSetter, function(self, output){
|
|
self._print_getter_setter("set", output);
|
|
});
|
|
DEFPRINT(AST_ObjectGetter, function(self, output){
|
|
self._print_getter_setter("get", output);
|
|
});
|
|
DEFPRINT(AST_Symbol, function(self, output){
|
|
var def = self.definition();
|
|
output.print_name(def ? def.mangled_name || def.name : self.name);
|
|
});
|
|
DEFPRINT(AST_Hole, noop);
|
|
DEFPRINT(AST_This, function(self, output){
|
|
output.print("this");
|
|
});
|
|
DEFPRINT(AST_Constant, function(self, output){
|
|
output.print(self.getValue());
|
|
});
|
|
DEFPRINT(AST_String, function(self, output){
|
|
output.print_string(self.getValue(), self.quote, in_directive);
|
|
});
|
|
DEFPRINT(AST_Number, function(self, output){
|
|
if (use_asm && self.start && self.start.raw != null) {
|
|
output.print(self.start.raw);
|
|
} else {
|
|
output.print(make_num(self.getValue()));
|
|
}
|
|
});
|
|
|
|
function regexp_safe_literal(code) {
|
|
return [
|
|
0x5c , // \
|
|
0x2f , // /
|
|
0x2e , // .
|
|
0x2b , // +
|
|
0x2a , // *
|
|
0x3f , // ?
|
|
0x28 , // (
|
|
0x29 , // )
|
|
0x5b , // [
|
|
0x5d , // ]
|
|
0x7b , // {
|
|
0x7d , // }
|
|
0x24 , // $
|
|
0x5e , // ^
|
|
0x3a , // :
|
|
0x7c , // |
|
|
0x21 , // !
|
|
0x0a , // \n
|
|
0x0d , // \r
|
|
0x00 , // \0
|
|
0xfeff , // Unicode BOM
|
|
0x2028 , // unicode "line separator"
|
|
0x2029 , // unicode "paragraph separator"
|
|
].indexOf(code) < 0;
|
|
};
|
|
|
|
DEFPRINT(AST_RegExp, function(self, output){
|
|
var str = self.getValue().toString();
|
|
if (output.option("ascii_only")) {
|
|
str = output.to_ascii(str);
|
|
} else if (output.option("unescape_regexps")) {
|
|
str = str.split("\\\\").map(function(str){
|
|
return str.replace(/\\u[0-9a-fA-F]{4}|\\x[0-9a-fA-F]{2}/g, function(s){
|
|
var code = parseInt(s.substr(2), 16);
|
|
return regexp_safe_literal(code) ? String.fromCharCode(code) : s;
|
|
});
|
|
}).join("\\\\");
|
|
}
|
|
output.print(str);
|
|
var p = output.parent();
|
|
if (p instanceof AST_Binary && /^in/.test(p.operator) && p.left === self)
|
|
output.print(" ");
|
|
});
|
|
|
|
function force_statement(stat, output) {
|
|
if (output.option("bracketize")) {
|
|
make_block(stat, output);
|
|
} else {
|
|
if (!stat || stat instanceof AST_EmptyStatement)
|
|
output.force_semicolon();
|
|
else
|
|
stat.print(output);
|
|
}
|
|
};
|
|
|
|
// self should be AST_New. decide if we want to show parens or not.
|
|
function need_constructor_parens(self, output) {
|
|
// Always print parentheses with arguments
|
|
if (self.args.length > 0) return true;
|
|
|
|
return output.option("beautify");
|
|
};
|
|
|
|
function best_of(a) {
|
|
var best = a[0], len = best.length;
|
|
for (var i = 1; i < a.length; ++i) {
|
|
if (a[i].length < len) {
|
|
best = a[i];
|
|
len = best.length;
|
|
}
|
|
}
|
|
return best;
|
|
};
|
|
|
|
function make_num(num) {
|
|
var str = num.toString(10), a = [ str.replace(/^0\./, ".").replace('e+', 'e') ], m;
|
|
if (Math.floor(num) === num) {
|
|
if (num >= 0) {
|
|
a.push("0x" + num.toString(16).toLowerCase(), // probably pointless
|
|
"0" + num.toString(8)); // same.
|
|
} else {
|
|
a.push("-0x" + (-num).toString(16).toLowerCase(), // probably pointless
|
|
"-0" + (-num).toString(8)); // same.
|
|
}
|
|
if ((m = /^(.*?)(0+)$/.exec(num))) {
|
|
a.push(m[1] + "e" + m[2].length);
|
|
}
|
|
} else if ((m = /^0?\.(0+)(.*)$/.exec(num))) {
|
|
a.push(m[2] + "e-" + (m[1].length + m[2].length),
|
|
str.substr(str.indexOf(".")));
|
|
}
|
|
return best_of(a);
|
|
};
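
    // Worked example (illustration only): make_num(1000000) collects the candidates
    // "1000000", "0xf4240", "03641100" and "1e6", and best_of() keeps the shortest, "1e6".
    // Similarly make_num(0.5) drops the leading zero and prints ".5".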
|
|
|
|
function make_block(stmt, output) {
|
|
if (!stmt || stmt instanceof AST_EmptyStatement)
|
|
output.print("{}");
|
|
else if (stmt instanceof AST_BlockStatement)
|
|
stmt.print(output);
|
|
else output.with_block(function(){
|
|
output.indent();
|
|
stmt.print(output);
|
|
output.newline();
|
|
});
|
|
};
|
|
|
|
/* -----[ source map generators ]----- */
|
|
|
|
function DEFMAP(nodetype, generator) {
|
|
nodetype.DEFMETHOD("add_source_map", function(stream){
|
|
generator(this, stream);
|
|
});
|
|
};
|
|
|
|
// We could easily add info for ALL nodes, but it seems to me that
|
|
// would be quite wasteful, hence this noop in the base class.
|
|
DEFMAP(AST_Node, noop);
|
|
|
|
function basic_sourcemap_gen(self, output) {
|
|
output.add_mapping(self.start);
|
|
};
|
|
|
|
// XXX: I'm not exactly sure if we need it for all of these nodes,
|
|
// or if we should add even more.
|
|
|
|
DEFMAP(AST_Directive, basic_sourcemap_gen);
|
|
DEFMAP(AST_Debugger, basic_sourcemap_gen);
|
|
DEFMAP(AST_Symbol, basic_sourcemap_gen);
|
|
DEFMAP(AST_Jump, basic_sourcemap_gen);
|
|
DEFMAP(AST_StatementWithBody, basic_sourcemap_gen);
|
|
DEFMAP(AST_LabeledStatement, noop); // since the label symbol will mark it
|
|
DEFMAP(AST_Lambda, basic_sourcemap_gen);
|
|
DEFMAP(AST_Switch, basic_sourcemap_gen);
|
|
DEFMAP(AST_SwitchBranch, basic_sourcemap_gen);
|
|
DEFMAP(AST_BlockStatement, basic_sourcemap_gen);
|
|
DEFMAP(AST_Toplevel, noop);
|
|
DEFMAP(AST_New, basic_sourcemap_gen);
|
|
DEFMAP(AST_Try, basic_sourcemap_gen);
|
|
DEFMAP(AST_Catch, basic_sourcemap_gen);
|
|
DEFMAP(AST_Finally, basic_sourcemap_gen);
|
|
DEFMAP(AST_Definitions, basic_sourcemap_gen);
|
|
DEFMAP(AST_Constant, basic_sourcemap_gen);
|
|
DEFMAP(AST_ObjectSetter, function(self, output){
|
|
output.add_mapping(self.start, self.key.name);
|
|
});
|
|
DEFMAP(AST_ObjectGetter, function(self, output){
|
|
output.add_mapping(self.start, self.key.name);
|
|
});
|
|
DEFMAP(AST_ObjectProperty, function(self, output){
|
|
output.add_mapping(self.start, self.key);
|
|
});
|
|
|
|
})();
|
|
|
|
/***********************************************************************
|
|
|
|
A JavaScript tokenizer / parser / beautifier / compressor.
|
|
https://github.com/mishoo/UglifyJS2
|
|
|
|
-------------------------------- (C) ---------------------------------
|
|
|
|
Author: Mihai Bazon
|
|
<mihai.bazon@gmail.com>
|
|
http://mihai.bazon.net/blog
|
|
|
|
Distributed under the BSD license:
|
|
|
|
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
|
|
|
Redistribution and use in source and binary forms, with or without
|
|
modification, are permitted provided that the following conditions
|
|
are met:
|
|
|
|
* Redistributions of source code must retain the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer.
|
|
|
|
* Redistributions in binary form must reproduce the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer in the documentation and/or other materials
|
|
provided with the distribution.
|
|
|
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
|
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
|
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
|
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
|
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
|
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
|
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
|
SUCH DAMAGE.
|
|
|
|
***********************************************************************/
|
|
|
|
"use strict";
|
|
|
|
function Compressor(options, false_by_default) {
|
|
if (!(this instanceof Compressor))
|
|
return new Compressor(options, false_by_default);
|
|
TreeTransformer.call(this, this.before, this.after);
|
|
this.options = defaults(options, {
|
|
angular : false,
|
|
booleans : !false_by_default,
|
|
cascade : !false_by_default,
|
|
collapse_vars : !false_by_default,
|
|
comparisons : !false_by_default,
|
|
conditionals : !false_by_default,
|
|
dead_code : !false_by_default,
|
|
drop_console : false,
|
|
drop_debugger : !false_by_default,
|
|
evaluate : !false_by_default,
|
|
expression : false,
|
|
global_defs : {},
|
|
hoist_funs : !false_by_default,
|
|
hoist_vars : false,
|
|
if_return : !false_by_default,
|
|
join_vars : !false_by_default,
|
|
keep_fargs : true,
|
|
keep_fnames : false,
|
|
keep_infinity : false,
|
|
loops : !false_by_default,
|
|
negate_iife : !false_by_default,
|
|
passes : 1,
|
|
properties : !false_by_default,
|
|
pure_getters : !false_by_default && "strict",
|
|
pure_funcs : null,
|
|
reduce_vars : !false_by_default,
|
|
screw_ie8 : true,
|
|
sequences : !false_by_default,
|
|
side_effects : !false_by_default,
|
|
switches : !false_by_default,
|
|
top_retain : null,
|
|
toplevel : !!(options && options["top_retain"]),
|
|
unsafe : false,
|
|
unsafe_comps : false,
|
|
unsafe_math : false,
|
|
unsafe_proto : false,
|
|
unsafe_regexp : false,
|
|
unused : !false_by_default,
|
|
warnings : true,
|
|
}, true);
|
|
var pure_funcs = this.options["pure_funcs"];
|
|
if (typeof pure_funcs == "function") {
|
|
this.pure_funcs = pure_funcs;
|
|
} else {
|
|
this.pure_funcs = pure_funcs ? function(node) {
|
|
return pure_funcs.indexOf(node.expression.print_to_string()) < 0;
|
|
} : return_true;
|
|
}
|
|
var top_retain = this.options["top_retain"];
|
|
if (top_retain instanceof RegExp) {
|
|
this.top_retain = function(def) {
|
|
return top_retain.test(def.name);
|
|
};
|
|
} else if (typeof top_retain == "function") {
|
|
this.top_retain = top_retain;
|
|
} else if (top_retain) {
|
|
if (typeof top_retain == "string") {
|
|
top_retain = top_retain.split(/,/);
|
|
}
|
|
this.top_retain = function(def) {
|
|
return top_retain.indexOf(def.name) >= 0;
|
|
};
|
|
}
|
|
var sequences = this.options["sequences"];
|
|
this.sequences_limit = sequences == 1 ? 200 : sequences | 0;
|
|
this.warnings_produced = {};
|
|
};
|
|
|
|
Compressor.prototype = new TreeTransformer;
|
|
merge(Compressor.prototype, {
|
|
option: function(key) { return this.options[key] },
|
|
compress: function(node) {
|
|
if (this.option("expression")) {
|
|
node = node.process_expression(true);
|
|
}
|
|
var passes = +this.options.passes || 1;
|
|
for (var pass = 0; pass < passes && pass < 3; ++pass) {
|
|
if (pass > 0 || this.option("reduce_vars"))
|
|
node.reset_opt_flags(this, true);
|
|
node = node.transform(this);
|
|
}
|
|
if (this.option("expression")) {
|
|
node = node.process_expression(false);
|
|
}
|
|
return node;
|
|
},
|
|
info: function() {
|
|
if (this.options.warnings == "verbose") {
|
|
AST_Node.warn.apply(AST_Node, arguments);
|
|
}
|
|
},
|
|
warn: function(text, props) {
|
|
if (this.options.warnings) {
|
|
// only emit unique warnings
|
|
var message = string_template(text, props);
|
|
if (!(message in this.warnings_produced)) {
|
|
this.warnings_produced[message] = true;
|
|
AST_Node.warn.apply(AST_Node, arguments);
|
|
}
|
|
}
|
|
},
|
|
clear_warnings: function() {
|
|
this.warnings_produced = {};
|
|
},
|
|
before: function(node, descend, in_list) {
|
|
if (node._squeezed) return node;
|
|
var was_scope = false;
|
|
if (node instanceof AST_Scope) {
|
|
node = node.hoist_declarations(this);
|
|
was_scope = true;
|
|
}
|
|
// Before https://github.com/mishoo/UglifyJS2/pull/1602 AST_Node.optimize()
|
|
// would call AST_Node.transform() if a different instance of AST_Node is
|
|
// produced after OPT().
|
|
// This corrupts TreeWalker.stack, which causes AST look-ups to malfunction.
|
|
// Migrate and defer all children's AST_Node.transform() to below, which
|
|
// will now happen after this parent AST_Node has been properly substituted
|
|
// thus gives a consistent AST snapshot.
|
|
descend(node, this);
|
|
// Existing code relies on how AST_Node.optimize() worked, and omitting the
|
|
// following replacement call would result in degraded efficiency of both
|
|
// output and performance.
|
|
descend(node, this);
|
|
var opt = node.optimize(this);
|
|
if (was_scope && opt instanceof AST_Scope) {
|
|
opt.drop_unused(this);
|
|
descend(opt, this);
|
|
}
|
|
if (opt === node) opt._squeezed = true;
|
|
return opt;
|
|
}
|
|
});
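// Typical driver code (illustrative sketch, not part of the compressor itself):
//     toplevel.figure_out_scope();
//     toplevel = new Compressor({ passes: 2, warnings: false }).compress(toplevel);
// compress() runs up to min(passes, 3) transform passes, calling reset_opt_flags()
// before each pass (before the very first one only when reduce_vars is enabled).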
|
|
|
|
(function(){
|
|
|
|
function OPT(node, optimizer) {
|
|
node.DEFMETHOD("optimize", function(compressor){
|
|
var self = this;
|
|
if (self._optimized) return self;
|
|
if (compressor.has_directive("use asm")) return self;
|
|
var opt = optimizer(self, compressor);
|
|
opt._optimized = true;
|
|
return opt;
|
|
});
|
|
};
|
|
|
|
OPT(AST_Node, function(self, compressor){
|
|
return self;
|
|
});
|
|
|
|
AST_Node.DEFMETHOD("equivalent_to", function(node){
|
|
return this.TYPE == node.TYPE && this.print_to_string() == node.print_to_string();
|
|
});
|
|
|
|
AST_Node.DEFMETHOD("process_expression", function(insert, compressor) {
|
|
var self = this;
|
|
var tt = new TreeTransformer(function(node) {
|
|
if (insert && node instanceof AST_SimpleStatement) {
|
|
return make_node(AST_Return, node, {
|
|
value: node.body
|
|
});
|
|
}
|
|
if (!insert && node instanceof AST_Return) {
|
|
if (compressor) {
|
|
var value = node.value && node.value.drop_side_effect_free(compressor, true);
|
|
return value ? make_node(AST_SimpleStatement, node, {
|
|
body: value
|
|
}) : make_node(AST_EmptyStatement, node);
|
|
}
|
|
return make_node(AST_SimpleStatement, node, {
|
|
body: node.value || make_node(AST_UnaryPrefix, node, {
|
|
operator: "void",
|
|
expression: make_node(AST_Number, node, {
|
|
value: 0
|
|
})
|
|
})
|
|
});
|
|
}
|
|
if (node instanceof AST_Lambda && node !== self) {
|
|
return node;
|
|
}
|
|
if (node instanceof AST_Block) {
|
|
var index = node.body.length - 1;
|
|
if (index >= 0) {
|
|
node.body[index] = node.body[index].transform(tt);
|
|
}
|
|
}
|
|
if (node instanceof AST_If) {
|
|
node.body = node.body.transform(tt);
|
|
if (node.alternative) {
|
|
node.alternative = node.alternative.transform(tt);
|
|
}
|
|
}
|
|
if (node instanceof AST_With) {
|
|
node.body = node.body.transform(tt);
|
|
}
|
|
return node;
|
|
});
|
|
return self.transform(tt);
|
|
});
|
|
|
|
AST_Node.DEFMETHOD("reset_opt_flags", function(compressor, rescan){
|
|
var reduce_vars = rescan && compressor.option("reduce_vars");
|
|
var toplevel = compressor.option("toplevel");
|
|
var safe_ids = Object.create(null);
|
|
var suppressor = new TreeWalker(function(node) {
|
|
if (node instanceof AST_Symbol) {
|
|
var d = node.definition();
|
|
if (node instanceof AST_SymbolRef) d.references.push(node);
|
|
d.fixed = false;
|
|
}
|
|
});
|
|
var tw = new TreeWalker(function(node, descend){
|
|
node._squeezed = false;
|
|
node._optimized = false;
|
|
if (reduce_vars) {
|
|
if (node instanceof AST_Toplevel) node.globals.each(reset_def);
|
|
if (node instanceof AST_Scope) node.variables.each(reset_def);
|
|
if (node instanceof AST_SymbolRef) {
|
|
var d = node.definition();
|
|
d.references.push(node);
|
|
if (d.fixed === undefined || !is_safe(d)
|
|
|| is_modified(node, 0, node.fixed_value() instanceof AST_Lambda)) {
|
|
d.fixed = false;
|
|
} else {
|
|
var parent = tw.parent();
|
|
if (parent instanceof AST_Assign && parent.operator == "=" && node === parent.right
|
|
|| parent instanceof AST_Call && node !== parent.expression
|
|
|| parent instanceof AST_Return && node === parent.value && node.scope !== d.scope
|
|
|| parent instanceof AST_VarDef && node === parent.value) {
|
|
d.escaped = true;
|
|
}
|
|
}
|
|
}
|
|
if (node instanceof AST_SymbolCatch) {
|
|
node.definition().fixed = false;
|
|
}
|
|
if (node instanceof AST_VarDef) {
|
|
var d = node.name.definition();
|
|
if (d.fixed == null) {
|
|
if (node.value) {
|
|
d.fixed = function() {
|
|
return node.value;
|
|
};
|
|
mark(d, false);
|
|
descend();
|
|
} else {
|
|
d.fixed = null;
|
|
}
|
|
mark(d, true);
|
|
return true;
|
|
} else if (node.value) {
|
|
d.fixed = false;
|
|
}
|
|
}
|
|
if (node instanceof AST_Defun) {
|
|
var d = node.name.definition();
|
|
if (!toplevel && d.global || is_safe(d)) {
|
|
d.fixed = false;
|
|
} else {
|
|
d.fixed = node;
|
|
mark(d, true);
|
|
}
|
|
var save_ids = safe_ids;
|
|
safe_ids = Object.create(null);
|
|
descend();
|
|
safe_ids = save_ids;
|
|
return true;
|
|
}
|
|
if (node instanceof AST_Function) {
|
|
push();
|
|
var iife;
|
|
if (!node.name
|
|
&& (iife = tw.parent()) instanceof AST_Call
|
|
&& iife.expression === node) {
|
|
// Virtually turn IIFE parameters into variable definitions:
|
|
// (function(a,b) {...})(c,d) => (function() {var a=c,b=d; ...})()
|
|
// So existing transformation rules can work on them.
|
|
node.argnames.forEach(function(arg, i) {
|
|
var d = arg.definition();
|
|
if (!node.uses_arguments && d.fixed === undefined) {
|
|
d.fixed = function() {
|
|
return iife.args[i] || make_node(AST_Undefined, iife);
|
|
};
|
|
mark(d, true);
|
|
} else {
|
|
d.fixed = false;
|
|
}
|
|
});
|
|
}
|
|
descend();
|
|
pop();
|
|
return true;
|
|
}
|
|
if (node instanceof AST_Accessor) {
|
|
var save_ids = safe_ids;
|
|
safe_ids = Object.create(null);
|
|
descend();
|
|
safe_ids = save_ids;
|
|
return true;
|
|
}
|
|
if (node instanceof AST_Binary
|
|
&& (node.operator == "&&" || node.operator == "||")) {
|
|
node.left.walk(tw);
|
|
push();
|
|
node.right.walk(tw);
|
|
pop();
|
|
return true;
|
|
}
|
|
if (node instanceof AST_Conditional) {
|
|
node.condition.walk(tw);
|
|
push();
|
|
node.consequent.walk(tw);
|
|
pop();
|
|
push();
|
|
node.alternative.walk(tw);
|
|
pop();
|
|
return true;
|
|
}
|
|
if (node instanceof AST_If || node instanceof AST_DWLoop) {
|
|
node.condition.walk(tw);
|
|
push();
|
|
node.body.walk(tw);
|
|
pop();
|
|
if (node.alternative) {
|
|
push();
|
|
node.alternative.walk(tw);
|
|
pop();
|
|
}
|
|
return true;
|
|
}
|
|
if (node instanceof AST_LabeledStatement) {
|
|
push();
|
|
node.body.walk(tw);
|
|
pop();
|
|
return true;
|
|
}
|
|
if (node instanceof AST_For) {
|
|
if (node.init) node.init.walk(tw);
|
|
push();
|
|
if (node.condition) node.condition.walk(tw);
|
|
node.body.walk(tw);
|
|
if (node.step) node.step.walk(tw);
|
|
pop();
|
|
return true;
|
|
}
|
|
if (node instanceof AST_ForIn) {
|
|
node.init.walk(suppressor);
|
|
node.object.walk(tw);
|
|
push();
|
|
node.body.walk(tw);
|
|
pop();
|
|
return true;
|
|
}
|
|
if (node instanceof AST_Try) {
|
|
push();
|
|
walk_body(node, tw);
|
|
pop();
|
|
if (node.bcatch) {
|
|
push();
|
|
node.bcatch.walk(tw);
|
|
pop();
|
|
}
|
|
if (node.bfinally) node.bfinally.walk(tw);
|
|
return true;
|
|
}
|
|
if (node instanceof AST_SwitchBranch) {
|
|
push();
|
|
descend();
|
|
pop();
|
|
return true;
|
|
}
|
|
}
|
|
});
|
|
this.walk(tw);
|
|
|
|
function mark(def, safe) {
|
|
safe_ids[def.id] = safe;
|
|
}
|
|
|
|
function is_safe(def) {
|
|
if (safe_ids[def.id]) {
|
|
if (def.fixed == null) {
|
|
var orig = def.orig[0];
|
|
if (orig instanceof AST_SymbolFunarg || orig.name == "arguments") return false;
|
|
def.fixed = make_node(AST_Undefined, orig);
|
|
}
|
|
return true;
|
|
}
|
|
}
|
|
|
|
function push() {
|
|
safe_ids = Object.create(safe_ids);
|
|
}
|
|
|
|
function pop() {
|
|
safe_ids = Object.getPrototypeOf(safe_ids);
|
|
}
|
|
|
|
function reset_def(def) {
|
|
def.escaped = false;
|
|
if (def.scope.uses_eval) {
|
|
def.fixed = false;
|
|
} else if (toplevel || !def.global || def.orig[0] instanceof AST_SymbolConst) {
|
|
def.fixed = undefined;
|
|
} else {
|
|
def.fixed = false;
|
|
}
|
|
def.references = [];
|
|
def.should_replace = undefined;
|
|
}
|
|
|
|
function is_modified(node, level, func) {
|
|
var parent = tw.parent(level);
|
|
if (is_lhs(node, parent)
|
|
|| !func && parent instanceof AST_Call && parent.expression === node) {
|
|
return true;
|
|
} else if (parent instanceof AST_PropAccess && parent.expression === node) {
|
|
return !func && is_modified(parent, level + 1);
|
|
}
|
|
}
|
|
});
|
|
|
|
AST_SymbolRef.DEFMETHOD("fixed_value", function() {
|
|
var fixed = this.definition().fixed;
|
|
if (!fixed || fixed instanceof AST_Node) return fixed;
|
|
return fixed();
|
|
});
|
|
|
|
function is_reference_const(ref) {
|
|
if (!(ref instanceof AST_SymbolRef)) return false;
|
|
var orig = ref.definition().orig;
|
|
for (var i = orig.length; --i >= 0;) {
|
|
if (orig[i] instanceof AST_SymbolConst) return true;
|
|
}
|
|
}
|
|
|
|
function find_variable(compressor, name) {
|
|
var scope, i = 0;
|
|
while (scope = compressor.parent(i++)) {
|
|
if (scope instanceof AST_Scope) break;
|
|
if (scope instanceof AST_Catch) {
|
|
scope = scope.argname.definition().scope;
|
|
break;
|
|
}
|
|
}
|
|
return scope.find_variable(name);
|
|
}
|
|
|
|
function make_node(ctor, orig, props) {
|
|
if (!props) props = {};
|
|
if (orig) {
|
|
if (!props.start) props.start = orig.start;
|
|
if (!props.end) props.end = orig.end;
|
|
}
|
|
return new ctor(props);
|
|
};
|
|
|
|
function make_node_from_constant(val, orig) {
|
|
switch (typeof val) {
|
|
case "string":
|
|
return make_node(AST_String, orig, {
|
|
value: val
|
|
});
|
|
case "number":
|
|
if (isNaN(val)) return make_node(AST_NaN, orig);
|
|
if (isFinite(val)) {
|
|
return 1 / val < 0 ? make_node(AST_UnaryPrefix, orig, {
|
|
operator: "-",
|
|
expression: make_node(AST_Number, orig, { value: -val })
|
|
}) : make_node(AST_Number, orig, { value: val });
|
|
}
|
|
return val < 0 ? make_node(AST_UnaryPrefix, orig, {
|
|
operator: "-",
|
|
expression: make_node(AST_Infinity, orig)
|
|
}) : make_node(AST_Infinity, orig);
|
|
case "boolean":
|
|
return make_node(val ? AST_True : AST_False, orig);
|
|
case "undefined":
|
|
return make_node(AST_Undefined, orig);
|
|
default:
|
|
if (val === null) {
|
|
return make_node(AST_Null, orig, { value: null });
|
|
}
|
|
if (val instanceof RegExp) {
|
|
return make_node(AST_RegExp, orig, { value: val });
|
|
}
|
|
throw new Error(string_template("Can't handle constant of type: {type}", {
|
|
type: typeof val
|
|
}));
|
|
}
|
|
};
|
|
|
|
// we shouldn't compress (1,func)(something) to
// func(something) because that changes the meaning of
// the func (becomes lexical instead of global).
function maintain_this_binding(parent, orig, val) {
if (parent instanceof AST_UnaryPrefix && parent.operator == "delete"
|| parent instanceof AST_Call && parent.expression === orig
&& (val instanceof AST_PropAccess || val instanceof AST_SymbolRef && val.name == "eval")) {
return make_node(AST_Seq, orig, {
car: make_node(AST_Number, orig, {
value: 0
}),
cdr: val
});
}
return val;
}
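// e.g. when a single-use var `f = obj.method` is collapsed into the call `f(x)`, the
// replacement is printed as `(0, obj.method)(x)` rather than `obj.method(x)`, so the
// callee's `this` is not rebound to `obj`; a symbol collapsing to `eval` likewise
// stays an indirect eval. (`obj`, `f` and `x` are illustrative names.)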
|
|
|
|
function as_statement_array(thing) {
|
|
if (thing === null) return [];
|
|
if (thing instanceof AST_BlockStatement) return thing.body;
|
|
if (thing instanceof AST_EmptyStatement) return [];
|
|
if (thing instanceof AST_Statement) return [ thing ];
|
|
throw new Error("Can't convert thing to statement array");
|
|
};
|
|
|
|
function is_empty(thing) {
|
|
if (thing === null) return true;
|
|
if (thing instanceof AST_EmptyStatement) return true;
|
|
if (thing instanceof AST_BlockStatement) return thing.body.length == 0;
|
|
return false;
|
|
};
|
|
|
|
function loop_body(x) {
|
|
if (x instanceof AST_Switch) return x;
|
|
if (x instanceof AST_For || x instanceof AST_ForIn || x instanceof AST_DWLoop) {
|
|
return (x.body instanceof AST_BlockStatement ? x.body : x);
|
|
}
|
|
return x;
|
|
};
|
|
|
|
function is_iife_call(node) {
|
|
if (node instanceof AST_Call && !(node instanceof AST_New)) {
|
|
return node.expression instanceof AST_Function || is_iife_call(node.expression);
|
|
}
|
|
return false;
|
|
}
|
|
|
|
function tighten_body(statements, compressor) {
|
|
var CHANGED, max_iter = 10;
|
|
do {
|
|
CHANGED = false;
|
|
if (compressor.option("angular")) {
|
|
statements = process_for_angular(statements);
|
|
}
|
|
statements = eliminate_spurious_blocks(statements);
|
|
if (compressor.option("dead_code")) {
|
|
statements = eliminate_dead_code(statements, compressor);
|
|
}
|
|
if (compressor.option("if_return")) {
|
|
statements = handle_if_return(statements, compressor);
|
|
}
|
|
if (compressor.sequences_limit > 0) {
|
|
statements = sequencesize(statements, compressor);
|
|
}
|
|
if (compressor.option("join_vars")) {
|
|
statements = join_consecutive_vars(statements, compressor);
|
|
}
|
|
if (compressor.option("collapse_vars")) {
|
|
statements = collapse_single_use_vars(statements, compressor);
|
|
}
|
|
} while (CHANGED && max_iter-- > 0);
|
|
|
|
return statements;
|
|
|
|
function collapse_single_use_vars(statements, compressor) {
|
|
// Iterate statements backwards looking for a statement with a var/const
|
|
// declaration immediately preceding it. Grab the rightmost var definition
|
|
// and if it has exactly one reference then attempt to replace its reference
|
|
// in the statement with the var value and then erase the var definition.
|
|
|
|
var self = compressor.self();
|
|
var var_defs_removed = false;
|
|
var toplevel = compressor.option("toplevel");
|
|
for (var stat_index = statements.length; --stat_index >= 0;) {
|
|
var stat = statements[stat_index];
|
|
if (stat instanceof AST_Definitions) continue;
|
|
|
|
// Process child blocks of statement if present.
|
|
[stat, stat.body, stat.alternative, stat.bcatch, stat.bfinally].forEach(function(node) {
|
|
node && node.body && collapse_single_use_vars(node.body, compressor);
|
|
});
|
|
|
|
// The variable definition must precede a statement.
|
|
if (stat_index <= 0) break;
|
|
var prev_stat_index = stat_index - 1;
|
|
var prev_stat = statements[prev_stat_index];
|
|
if (!(prev_stat instanceof AST_Definitions)) continue;
|
|
var var_defs = prev_stat.definitions;
|
|
if (var_defs == null) continue;
|
|
|
|
var var_names_seen = {};
|
|
var side_effects_encountered = false;
|
|
var lvalues_encountered = false;
|
|
var lvalues = {};
|
|
|
|
// Scan variable definitions from right to left.
|
|
for (var var_defs_index = var_defs.length; --var_defs_index >= 0;) {
|
|
|
|
// Obtain var declaration and var name with basic sanity check.
|
|
var var_decl = var_defs[var_defs_index];
|
|
if (var_decl.value == null) break;
|
|
var var_name = var_decl.name.name;
|
|
if (!var_name || !var_name.length) break;
|
|
|
|
// Bail if we've seen a var definition of same name before.
|
|
if (var_name in var_names_seen) break;
|
|
var_names_seen[var_name] = true;
|
|
|
|
// Only interested in cases with just one reference to the variable.
|
|
var def = self.find_variable && self.find_variable(var_name);
|
|
if (!def || !def.references || def.references.length !== 1
|
|
|| var_name == "arguments" || (!toplevel && def.global)) {
|
|
side_effects_encountered = true;
|
|
continue;
|
|
}
|
|
var ref = def.references[0];
|
|
|
|
// Don't replace ref if eval() or with statement in scope.
|
|
if (ref.scope.uses_eval || ref.scope.uses_with) break;
|
|
|
|
// Constant single use vars can be replaced in any scope.
|
|
if (var_decl.value.is_constant()) {
|
|
var ctt = new TreeTransformer(function(node) {
|
|
var parent = ctt.parent();
|
|
if (parent instanceof AST_IterationStatement
|
|
&& (parent.condition === node || parent.init === node)) {
|
|
return node;
|
|
}
|
|
if (node === ref)
|
|
return replace_var(node, parent, true);
|
|
});
|
|
stat.transform(ctt);
|
|
continue;
|
|
}
|
|
|
|
// Restrict var replacement to constants if side effects encountered.
|
|
if (side_effects_encountered |= lvalues_encountered) continue;
|
|
|
|
var value_has_side_effects = var_decl.value.has_side_effects(compressor);
|
|
// Non-constant single use vars can only be replaced in same scope.
|
|
if (ref.scope !== self) {
|
|
side_effects_encountered |= value_has_side_effects;
|
|
continue;
|
|
}
|
|
|
|
// Detect lvalues in var value.
|
|
var tw = new TreeWalker(function(node){
|
|
if (node instanceof AST_SymbolRef && is_lvalue(node, tw.parent())) {
|
|
lvalues[node.name] = lvalues_encountered = true;
|
|
}
|
|
});
|
|
var_decl.value.walk(tw);
|
|
|
|
// Replace the non-constant single use var in statement if side effect free.
|
|
var unwind = false;
|
|
var tt = new TreeTransformer(
|
|
function preorder(node) {
|
|
if (unwind) return node;
|
|
var parent = tt.parent();
|
|
if (node instanceof AST_Lambda
|
|
|| node instanceof AST_Try
|
|
|| node instanceof AST_With
|
|
|| node instanceof AST_Case
|
|
|| node instanceof AST_IterationStatement
|
|
|| (parent instanceof AST_If && node !== parent.condition)
|
|
|| (parent instanceof AST_Conditional && node !== parent.condition)
|
|
|| (node instanceof AST_SymbolRef
|
|
&& value_has_side_effects
|
|
&& !are_references_in_scope(node.definition(), self))
|
|
|| (parent instanceof AST_Binary
|
|
&& (parent.operator == "&&" || parent.operator == "||")
|
|
&& node === parent.right)
|
|
|| (parent instanceof AST_Switch && node !== parent.expression)) {
|
|
return side_effects_encountered = unwind = true, node;
|
|
}
|
|
function are_references_in_scope(def, scope) {
|
|
if (def.orig.length === 1
|
|
&& def.orig[0] instanceof AST_SymbolDefun) return true;
|
|
if (def.scope !== scope) return false;
|
|
var refs = def.references;
|
|
for (var i = 0, len = refs.length; i < len; i++) {
|
|
if (refs[i].scope !== scope) return false;
|
|
}
|
|
return true;
|
|
}
|
|
},
|
|
function postorder(node) {
|
|
if (unwind) return node;
|
|
if (node === ref)
|
|
return unwind = true, replace_var(node, tt.parent(), false);
|
|
if (side_effects_encountered |= node.has_side_effects(compressor))
|
|
return unwind = true, node;
|
|
if (lvalues_encountered && node instanceof AST_SymbolRef && node.name in lvalues) {
|
|
side_effects_encountered = true;
|
|
return unwind = true, node;
|
|
}
|
|
}
|
|
);
|
|
stat.transform(tt);
|
|
}
|
|
}
|
|
|
|
// Remove extraneous empty statements in block after removing var definitions.
|
|
// Leave at least one statement in `statements`.
|
|
if (var_defs_removed) for (var i = statements.length; --i >= 0;) {
|
|
if (statements.length > 1 && statements[i] instanceof AST_EmptyStatement)
|
|
statements.splice(i, 1);
|
|
}
|
|
|
|
return statements;
|
|
|
|
function is_lvalue(node, parent) {
|
|
return node instanceof AST_SymbolRef && is_lhs(node, parent);
|
|
}
|
|
function replace_var(node, parent, is_constant) {
|
|
if (is_lvalue(node, parent)) return node;
|
|
|
|
// Remove var definition and return its value to the TreeTransformer to replace.
|
|
var value = maintain_this_binding(parent, node, var_decl.value);
|
|
var_decl.value = null;
|
|
|
|
var_defs.splice(var_defs_index, 1);
|
|
if (var_defs.length === 0) {
|
|
statements[prev_stat_index] = make_node(AST_EmptyStatement, self);
|
|
var_defs_removed = true;
|
|
}
|
|
// Further optimize statement after substitution.
|
|
stat.reset_opt_flags(compressor);
|
|
|
|
compressor.info("Collapsing " + (is_constant ? "constant" : "variable") +
|
|
" " + var_name + " [{file}:{line},{col}]", node.start);
|
|
CHANGED = true;
|
|
return value;
|
|
}
|
|
}
|
|
|
|
function process_for_angular(statements) {
|
|
function has_inject(comment) {
|
|
return /@ngInject/.test(comment.value);
|
|
}
|
|
function make_arguments_names_list(func) {
|
|
return func.argnames.map(function(sym){
|
|
return make_node(AST_String, sym, { value: sym.name });
|
|
});
|
|
}
|
|
function make_array(orig, elements) {
|
|
return make_node(AST_Array, orig, { elements: elements });
|
|
}
|
|
function make_injector(func, name) {
|
|
return make_node(AST_SimpleStatement, func, {
|
|
body: make_node(AST_Assign, func, {
|
|
operator: "=",
|
|
left: make_node(AST_Dot, name, {
|
|
expression: make_node(AST_SymbolRef, name, name),
|
|
property: "$inject"
|
|
}),
|
|
right: make_array(func, make_arguments_names_list(func))
|
|
})
|
|
});
|
|
}
|
|
function check_expression(body) {
|
|
if (body && body.args) {
// if this is a function call, check all of the arguments passed
body.args.forEach(function(argument, index, array) {
var comments = argument.start.comments_before;
// if the argument is a function preceded by @ngInject
if (argument instanceof AST_Lambda && comments.length && has_inject(comments[0])) {
// replace the function with an array of its parameter names, with the function itself at the end
array[index] = make_array(argument, make_arguments_names_list(argument).concat(argument));
}
});
// if this is a chained call, check the previous one recursively
if (body.expression && body.expression.expression) {
check_expression(body.expression.expression);
}
}
|
|
}
|
|
return statements.reduce(function(a, stat){
|
|
a.push(stat);
|
|
|
|
if (stat.body && stat.body.args) {
|
|
check_expression(stat.body);
|
|
} else {
|
|
var token = stat.start;
|
|
var comments = token.comments_before;
|
|
if (comments && comments.length > 0) {
|
|
var last = comments.pop();
|
|
if (has_inject(last)) {
|
|
// case 1: defun
|
|
if (stat instanceof AST_Defun) {
|
|
a.push(make_injector(stat, stat.name));
|
|
}
|
|
else if (stat instanceof AST_Definitions) {
|
|
stat.definitions.forEach(function(def) {
|
|
if (def.value && def.value instanceof AST_Lambda) {
|
|
a.push(make_injector(def.value, def.name));
|
|
}
|
|
});
|
|
}
|
|
else {
|
|
compressor.warn("Unknown statement marked with @ngInject [{file}:{line},{col}]", token);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
return a;
|
|
}, []);
|
|
}
|
|
|
|
function eliminate_spurious_blocks(statements) {
|
|
var seen_dirs = [];
|
|
return statements.reduce(function(a, stat){
|
|
if (stat instanceof AST_BlockStatement) {
|
|
CHANGED = true;
|
|
a.push.apply(a, eliminate_spurious_blocks(stat.body));
|
|
} else if (stat instanceof AST_EmptyStatement) {
|
|
CHANGED = true;
|
|
} else if (stat instanceof AST_Directive) {
|
|
if (seen_dirs.indexOf(stat.value) < 0) {
|
|
a.push(stat);
|
|
seen_dirs.push(stat.value);
|
|
} else {
|
|
CHANGED = true;
|
|
}
|
|
} else {
|
|
a.push(stat);
|
|
}
|
|
return a;
|
|
}, []);
|
|
};
|
|
|
|
function handle_if_return(statements, compressor) {
|
|
var self = compressor.self();
|
|
var multiple_if_returns = has_multiple_if_returns(statements);
|
|
var in_lambda = self instanceof AST_Lambda;
|
|
var ret = []; // Optimized statements, build from tail to front
|
|
loop: for (var i = statements.length; --i >= 0;) {
|
|
var stat = statements[i];
|
|
switch (true) {
|
|
case (in_lambda && stat instanceof AST_Return && !stat.value && ret.length == 0):
|
|
CHANGED = true;
|
|
// note, ret.length is probably always zero
|
|
// because we drop unreachable code before this
|
|
// step. nevertheless, it's good to check.
|
|
continue loop;
|
|
case stat instanceof AST_If:
|
|
if (stat.body instanceof AST_Return) {
|
|
//---
|
|
// pretty silly case, but:
|
|
// if (foo()) return; return; ==> foo(); return;
|
|
if (((in_lambda && ret.length == 0)
|
|
|| (ret[0] instanceof AST_Return && !ret[0].value))
|
|
&& !stat.body.value && !stat.alternative) {
|
|
CHANGED = true;
|
|
var cond = make_node(AST_SimpleStatement, stat.condition, {
|
|
body: stat.condition
|
|
});
|
|
ret.unshift(cond);
|
|
continue loop;
|
|
}
|
|
//---
|
|
// if (foo()) return x; return y; ==> return foo() ? x : y;
|
|
if (ret[0] instanceof AST_Return && stat.body.value && ret[0].value && !stat.alternative) {
|
|
CHANGED = true;
|
|
stat = stat.clone();
|
|
stat.alternative = ret[0];
|
|
ret[0] = stat.transform(compressor);
|
|
continue loop;
|
|
}
|
|
//---
|
|
// if (foo()) return x; [ return ; ] ==> return foo() ? x : undefined;
|
|
if (multiple_if_returns && (ret.length == 0 || ret[0] instanceof AST_Return)
|
|
&& stat.body.value && !stat.alternative && in_lambda) {
|
|
CHANGED = true;
|
|
stat = stat.clone();
|
|
stat.alternative = ret[0] || make_node(AST_Return, stat, {
|
|
value: null
|
|
});
|
|
ret[0] = stat.transform(compressor);
|
|
continue loop;
|
|
}
|
|
//---
|
|
// if (foo()) return; [ else x... ]; y... ==> if (!foo()) { x...; y... }
|
|
if (!stat.body.value && in_lambda) {
|
|
CHANGED = true;
|
|
stat = stat.clone();
|
|
stat.condition = stat.condition.negate(compressor);
|
|
var body = as_statement_array(stat.alternative).concat(ret);
|
|
var funs = extract_functions_from_statement_array(body);
|
|
stat.body = make_node(AST_BlockStatement, stat, {
|
|
body: body
|
|
});
|
|
stat.alternative = null;
|
|
ret = funs.concat([ stat.transform(compressor) ]);
|
|
continue loop;
|
|
}
|
|
|
|
//---
|
|
// if (a) return b; if (c) return d; e; ==> return a ? b : c ? d : void e;
|
|
//
|
|
// if sequences is not enabled, this can lead to an endless loop (issue #866).
|
|
// however, with sequences on this helps producing slightly better output for
|
|
// the example code.
|
|
if (compressor.option("sequences")
|
|
&& i > 0 && statements[i - 1] instanceof AST_If && statements[i - 1].body instanceof AST_Return
|
|
&& ret.length == 1 && in_lambda && ret[0] instanceof AST_SimpleStatement
|
|
&& !stat.alternative) {
|
|
CHANGED = true;
|
|
ret.push(make_node(AST_Return, ret[0], {
|
|
value: null
|
|
}).transform(compressor));
|
|
ret.unshift(stat);
|
|
continue loop;
|
|
}
|
|
}
|
|
|
|
var ab = aborts(stat.body);
|
|
var lct = ab instanceof AST_LoopControl ? compressor.loopcontrol_target(ab) : null;
|
|
if (ab && ((ab instanceof AST_Return && !ab.value && in_lambda)
|
|
|| (ab instanceof AST_Continue && self === loop_body(lct))
|
|
|| (ab instanceof AST_Break && lct instanceof AST_BlockStatement && self === lct))) {
|
|
if (ab.label) {
|
|
remove(ab.label.thedef.references, ab);
|
|
}
|
|
CHANGED = true;
|
|
var body = as_statement_array(stat.body).slice(0, -1);
|
|
stat = stat.clone();
|
|
stat.condition = stat.condition.negate(compressor);
|
|
stat.body = make_node(AST_BlockStatement, stat, {
|
|
body: as_statement_array(stat.alternative).concat(ret)
|
|
});
|
|
stat.alternative = make_node(AST_BlockStatement, stat, {
|
|
body: body
|
|
});
|
|
ret = [ stat.transform(compressor) ];
|
|
continue loop;
|
|
}
|
|
|
|
var ab = aborts(stat.alternative);
|
|
var lct = ab instanceof AST_LoopControl ? compressor.loopcontrol_target(ab) : null;
|
|
if (ab && ((ab instanceof AST_Return && !ab.value && in_lambda)
|
|
|| (ab instanceof AST_Continue && self === loop_body(lct))
|
|
|| (ab instanceof AST_Break && lct instanceof AST_BlockStatement && self === lct))) {
|
|
if (ab.label) {
|
|
remove(ab.label.thedef.references, ab);
|
|
}
|
|
CHANGED = true;
|
|
stat = stat.clone();
|
|
stat.body = make_node(AST_BlockStatement, stat.body, {
|
|
body: as_statement_array(stat.body).concat(ret)
|
|
});
|
|
stat.alternative = make_node(AST_BlockStatement, stat.alternative, {
|
|
body: as_statement_array(stat.alternative).slice(0, -1)
|
|
});
|
|
ret = [ stat.transform(compressor) ];
|
|
continue loop;
|
|
}
|
|
|
|
ret.unshift(stat);
|
|
break;
|
|
default:
|
|
ret.unshift(stat);
|
|
break;
|
|
}
|
|
}
|
|
return ret;
|
|
|
|
function has_multiple_if_returns(statements) {
|
|
var n = 0;
|
|
for (var i = statements.length; --i >= 0;) {
|
|
var stat = statements[i];
|
|
if (stat instanceof AST_If && stat.body instanceof AST_Return) {
|
|
if (++n > 1) return true;
|
|
}
|
|
}
|
|
return false;
|
|
}
|
|
};
|
|
|
|
function eliminate_dead_code(statements, compressor) {
|
|
var has_quit = false;
|
|
var orig = statements.length;
|
|
var self = compressor.self();
|
|
statements = statements.reduce(function(a, stat){
|
|
if (has_quit) {
|
|
extract_declarations_from_unreachable_code(compressor, stat, a);
|
|
} else {
|
|
if (stat instanceof AST_LoopControl) {
|
|
var lct = compressor.loopcontrol_target(stat);
|
|
if ((stat instanceof AST_Break
|
|
&& !(lct instanceof AST_IterationStatement)
|
|
&& loop_body(lct) === self) || (stat instanceof AST_Continue
|
|
&& loop_body(lct) === self)) {
|
|
if (stat.label) {
|
|
remove(stat.label.thedef.references, stat);
|
|
}
|
|
} else {
|
|
a.push(stat);
|
|
}
|
|
} else {
|
|
a.push(stat);
|
|
}
|
|
if (aborts(stat)) has_quit = true;
|
|
}
|
|
return a;
|
|
}, []);
|
|
CHANGED = statements.length != orig;
|
|
return statements;
|
|
};
|
|
|
|
function sequencesize(statements, compressor) {
|
|
if (statements.length < 2) return statements;
|
|
var seq = [], ret = [];
|
|
function push_seq() {
|
|
seq = AST_Seq.from_array(seq);
|
|
if (seq) ret.push(make_node(AST_SimpleStatement, seq, {
|
|
body: seq
|
|
}));
|
|
seq = [];
|
|
};
|
|
statements.forEach(function(stat){
|
|
if (stat instanceof AST_SimpleStatement) {
|
|
if (seqLength(seq) >= compressor.sequences_limit) push_seq();
|
|
var body = stat.body;
|
|
if (seq.length > 0) body = body.drop_side_effect_free(compressor);
|
|
if (body) seq.push(body);
|
|
} else {
|
|
push_seq();
|
|
ret.push(stat);
|
|
}
|
|
});
|
|
push_seq();
|
|
ret = sequencesize_2(ret, compressor);
|
|
CHANGED = ret.length != statements.length;
|
|
return ret;
|
|
};
|
|
|
|
function seqLength(a) {
|
|
for (var len = 0, i = 0; i < a.length; ++i) {
|
|
var stat = a[i];
|
|
if (stat instanceof AST_Seq) {
|
|
len += stat.len();
|
|
} else {
|
|
len++;
|
|
}
|
|
}
|
|
return len;
|
|
};
|
|
|
|
function sequencesize_2(statements, compressor) {
|
|
function cons_seq(right) {
|
|
ret.pop();
|
|
var left = prev.body;
|
|
if (left instanceof AST_Seq) {
|
|
left.add(right);
|
|
} else {
|
|
left = AST_Seq.cons(left, right);
|
|
}
|
|
return left.transform(compressor);
|
|
};
|
|
var ret = [], prev = null;
|
|
statements.forEach(function(stat){
|
|
if (prev) {
|
|
if (stat instanceof AST_For) {
|
|
var opera = {};
|
|
try {
|
|
prev.body.walk(new TreeWalker(function(node){
|
|
if (node instanceof AST_Binary && node.operator == "in")
|
|
throw opera;
|
|
}));
|
|
if (stat.init && !(stat.init instanceof AST_Definitions)) {
|
|
stat.init = cons_seq(stat.init);
|
|
}
|
|
else if (!stat.init) {
|
|
stat.init = prev.body.drop_side_effect_free(compressor);
|
|
ret.pop();
|
|
}
|
|
} catch(ex) {
|
|
if (ex !== opera) throw ex;
|
|
}
|
|
}
|
|
else if (stat instanceof AST_If) {
|
|
stat.condition = cons_seq(stat.condition);
|
|
}
|
|
else if (stat instanceof AST_With) {
|
|
stat.expression = cons_seq(stat.expression);
|
|
}
|
|
else if (stat instanceof AST_Exit && stat.value) {
|
|
stat.value = cons_seq(stat.value);
|
|
}
|
|
else if (stat instanceof AST_Exit) {
|
|
stat.value = cons_seq(make_node(AST_Undefined, stat).transform(compressor));
|
|
}
|
|
else if (stat instanceof AST_Switch) {
|
|
stat.expression = cons_seq(stat.expression);
|
|
}
|
|
}
|
|
ret.push(stat);
|
|
prev = stat instanceof AST_SimpleStatement ? stat : null;
|
|
});
|
|
return ret;
|
|
};
|
|
|
|
function join_consecutive_vars(statements, compressor) {
|
|
var prev = null;
|
|
return statements.reduce(function(a, stat){
|
|
if (stat instanceof AST_Definitions && prev && prev.TYPE == stat.TYPE) {
|
|
prev.definitions = prev.definitions.concat(stat.definitions);
|
|
CHANGED = true;
|
|
}
|
|
else if (stat instanceof AST_For
|
|
&& prev instanceof AST_Var
|
|
&& (!stat.init || stat.init.TYPE == prev.TYPE)) {
|
|
CHANGED = true;
|
|
a.pop();
|
|
if (stat.init) {
|
|
stat.init.definitions = prev.definitions.concat(stat.init.definitions);
|
|
} else {
|
|
stat.init = prev;
|
|
}
|
|
a.push(stat);
|
|
prev = stat;
|
|
}
|
|
else {
|
|
prev = stat;
|
|
a.push(stat);
|
|
}
|
|
return a;
|
|
}, []);
|
|
};
|
|
|
|
};
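// tighten_body() above iterates its statement-level rewrites to a bounded fixed point:
// each helper sets CHANGED when it rewrites something, and the do/while stops once a
// full round makes no change, with max_iter as a hard cap against pathological input.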
|
|
|
|
function extract_functions_from_statement_array(statements) {
|
|
var funs = [];
|
|
for (var i = statements.length - 1; i >= 0; --i) {
|
|
var stat = statements[i];
|
|
if (stat instanceof AST_Defun) {
|
|
statements.splice(i, 1);
|
|
funs.unshift(stat);
|
|
}
|
|
}
|
|
return funs;
|
|
}
|
|
|
|
function extract_declarations_from_unreachable_code(compressor, stat, target) {
|
|
if (!(stat instanceof AST_Defun)) {
|
|
compressor.warn("Dropping unreachable code [{file}:{line},{col}]", stat.start);
|
|
}
|
|
stat.walk(new TreeWalker(function(node){
|
|
if (node instanceof AST_Definitions) {
|
|
compressor.warn("Declarations in unreachable code! [{file}:{line},{col}]", node.start);
|
|
node.remove_initializers();
|
|
target.push(node);
|
|
return true;
|
|
}
|
|
if (node instanceof AST_Defun) {
|
|
target.push(node);
|
|
return true;
|
|
}
|
|
if (node instanceof AST_Scope) {
|
|
return true;
|
|
}
|
|
}));
|
|
};
|
|
|
|
function is_undefined(node, compressor) {
|
|
return node.is_undefined
|
|
|| node instanceof AST_Undefined
|
|
|| node instanceof AST_UnaryPrefix
|
|
&& node.operator == "void"
|
|
&& !node.expression.has_side_effects(compressor);
|
|
}
|
|
|
|
// may_throw_on_access()
|
|
// returns true if this node may be null, undefined or contain `AST_Accessor`
|
|
(function(def) {
|
|
AST_Node.DEFMETHOD("may_throw_on_access", function(compressor) {
|
|
var pure_getters = compressor.option("pure_getters");
|
|
return !pure_getters || this._throw_on_access(pure_getters);
|
|
});
|
|
|
|
function is_strict(pure_getters) {
|
|
return /strict/.test(pure_getters);
|
|
}
|
|
|
|
def(AST_Node, is_strict);
|
|
def(AST_Null, return_true);
|
|
def(AST_Undefined, return_true);
|
|
def(AST_Constant, return_false);
|
|
def(AST_Array, return_false);
|
|
def(AST_Object, function(pure_getters) {
|
|
if (!is_strict(pure_getters)) return false;
|
|
for (var i = this.properties.length; --i >=0;)
|
|
if (this.properties[i].value instanceof AST_Accessor) return true;
|
|
return false;
|
|
});
|
|
def(AST_Function, return_false);
|
|
def(AST_UnaryPostfix, return_false);
|
|
def(AST_UnaryPrefix, function() {
|
|
return this.operator == "void";
|
|
});
|
|
def(AST_Binary, function(pure_getters) {
|
|
switch (this.operator) {
|
|
case "&&":
|
|
return this.left._throw_on_access(pure_getters);
|
|
case "||":
|
|
return this.left._throw_on_access(pure_getters)
|
|
&& this.right._throw_on_access(pure_getters);
|
|
default:
|
|
return false;
|
|
}
|
|
});
|
|
def(AST_Assign, function(pure_getters) {
|
|
return this.operator == "="
|
|
&& this.right._throw_on_access(pure_getters);
|
|
});
|
|
def(AST_Conditional, function(pure_getters) {
|
|
return this.consequent._throw_on_access(pure_getters)
|
|
|| this.alternative._throw_on_access(pure_getters);
|
|
});
|
|
def(AST_Seq, function(pure_getters) {
|
|
return this.cdr._throw_on_access(pure_getters);
|
|
});
|
|
def(AST_SymbolRef, function(pure_getters) {
|
|
if (this.is_undefined) return true;
|
|
if (!is_strict(pure_getters)) return false;
|
|
var fixed = this.fixed_value();
|
|
return !fixed || fixed._throw_on_access(pure_getters);
|
|
});
|
|
})(function(node, func) {
|
|
node.DEFMETHOD("_throw_on_access", func);
|
|
});
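// e.g. under the default pure_getters:"strict", reads like `[].length` or `{a:1}.a`
// count as throw-free while `x.a` for an unknown `x` (or `null.a`) is still treated
// as possibly throwing; with pure_getters:true most reads are assumed safe (obvious
// null/undefined bases are still flagged), and with the option off this method
// always reports a possible throw.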
|
|
|
|
/* -----[ boolean/negation helpers ]----- */
|
|
|
|
// methods to determine whether an expression has a boolean result type
|
|
(function (def){
|
|
var unary_bool = [ "!", "delete" ];
|
|
var binary_bool = [ "in", "instanceof", "==", "!=", "===", "!==", "<", "<=", ">=", ">" ];
|
|
def(AST_Node, return_false);
|
|
def(AST_UnaryPrefix, function(){
|
|
return member(this.operator, unary_bool);
|
|
});
|
|
def(AST_Binary, function(){
|
|
return member(this.operator, binary_bool) ||
|
|
( (this.operator == "&&" || this.operator == "||") &&
|
|
this.left.is_boolean() && this.right.is_boolean() );
|
|
});
|
|
def(AST_Conditional, function(){
|
|
return this.consequent.is_boolean() && this.alternative.is_boolean();
|
|
});
|
|
def(AST_Assign, function(){
|
|
return this.operator == "=" && this.right.is_boolean();
|
|
});
|
|
def(AST_Seq, function(){
|
|
return this.cdr.is_boolean();
|
|
});
|
|
def(AST_True, return_true);
|
|
def(AST_False, return_true);
|
|
})(function(node, func){
|
|
node.DEFMETHOD("is_boolean", func);
|
|
});
|
|
|
|
// methods to determine if an expression has a numeric result type
|
|
(function (def){
|
|
def(AST_Node, return_false);
|
|
def(AST_Number, return_true);
|
|
var unary = makePredicate("+ - ~ ++ --");
|
|
def(AST_Unary, function(){
|
|
return unary(this.operator);
|
|
});
|
|
var binary = makePredicate("- * / % & | ^ << >> >>>");
|
|
def(AST_Binary, function(compressor){
|
|
return binary(this.operator) || this.operator == "+"
|
|
&& this.left.is_number(compressor)
|
|
&& this.right.is_number(compressor);
|
|
});
|
|
def(AST_Assign, function(compressor){
|
|
return binary(this.operator.slice(0, -1))
|
|
|| this.operator == "=" && this.right.is_number(compressor);
|
|
});
|
|
def(AST_Seq, function(compressor){
|
|
return this.cdr.is_number(compressor);
|
|
});
|
|
def(AST_Conditional, function(compressor){
|
|
return this.consequent.is_number(compressor) && this.alternative.is_number(compressor);
|
|
});
|
|
})(function(node, func){
|
|
node.DEFMETHOD("is_number", func);
|
|
});
|
|
|
|
// methods to determine if an expression has a string result type
|
|
(function (def){
|
|
def(AST_Node, return_false);
|
|
def(AST_String, return_true);
|
|
def(AST_UnaryPrefix, function(){
|
|
return this.operator == "typeof";
|
|
});
|
|
def(AST_Binary, function(compressor){
|
|
return this.operator == "+" &&
|
|
(this.left.is_string(compressor) || this.right.is_string(compressor));
|
|
});
|
|
def(AST_Assign, function(compressor){
|
|
return (this.operator == "=" || this.operator == "+=") && this.right.is_string(compressor);
|
|
});
|
|
def(AST_Seq, function(compressor){
|
|
return this.cdr.is_string(compressor);
|
|
});
|
|
def(AST_Conditional, function(compressor){
|
|
return this.consequent.is_string(compressor) && this.alternative.is_string(compressor);
|
|
});
|
|
})(function(node, func){
|
|
node.DEFMETHOD("is_string", func);
|
|
});
|
|
|
|
var unary_side_effects = makePredicate("delete ++ --");
|
|
|
|
function is_lhs(node, parent) {
|
|
if (parent instanceof AST_Unary && unary_side_effects(parent.operator)) return parent.expression;
|
|
if (parent instanceof AST_Assign && parent.left === node) return node;
|
|
}
|
|
|
|
(function (def){
|
|
AST_Node.DEFMETHOD("resolve_defines", function(compressor) {
|
|
if (!compressor.option("global_defs")) return;
|
|
var def = this._find_defs(compressor, "");
|
|
if (def) {
|
|
var node, parent = this, level = 0;
|
|
do {
|
|
node = parent;
|
|
parent = compressor.parent(level++);
|
|
} while (parent instanceof AST_PropAccess && parent.expression === node);
|
|
if (is_lhs(node, parent)) {
|
|
compressor.warn('global_defs ' + this.print_to_string() + ' redefined [{file}:{line},{col}]', this.start);
|
|
} else {
|
|
return def;
|
|
}
|
|
}
|
|
});
|
|
function to_node(value, orig) {
|
|
if (value instanceof AST_Node) return make_node(value.CTOR, orig, value);
|
|
if (Array.isArray(value)) return make_node(AST_Array, orig, {
|
|
elements: value.map(function(value) {
|
|
return to_node(value, orig);
|
|
})
|
|
});
|
|
if (value && typeof value == "object") {
|
|
var props = [];
|
|
for (var key in value) {
|
|
props.push(make_node(AST_ObjectKeyVal, orig, {
|
|
key: key,
|
|
value: to_node(value[key], orig)
|
|
}));
|
|
}
|
|
return make_node(AST_Object, orig, {
|
|
properties: props
|
|
});
|
|
}
|
|
return make_node_from_constant(value, orig);
|
|
}
|
|
def(AST_Node, noop);
|
|
def(AST_Dot, function(compressor, suffix){
|
|
return this.expression._find_defs(compressor, "." + this.property + suffix);
|
|
});
|
|
def(AST_SymbolRef, function(compressor, suffix){
|
|
if (!this.global()) return;
|
|
var name;
|
|
var defines = compressor.option("global_defs");
|
|
if (defines && HOP(defines, (name = this.name + suffix))) {
|
|
var node = to_node(defines[name], this);
|
|
var top = compressor.find_parent(AST_Toplevel);
|
|
node.walk(new TreeWalker(function(node) {
|
|
if (node instanceof AST_SymbolRef) {
|
|
node.scope = top;
|
|
node.thedef = top.def_global(node);
|
|
}
|
|
}));
|
|
return node;
|
|
}
|
|
});
|
|
})(function(node, func){
|
|
node.DEFMETHOD("_find_defs", func);
|
|
});
|
|
|
|
function best_of_expression(ast1, ast2) {
|
|
return ast1.print_to_string().length >
|
|
ast2.print_to_string().length
|
|
? ast2 : ast1;
|
|
}
|
|
|
|
function best_of_statement(ast1, ast2) {
|
|
return best_of_expression(make_node(AST_SimpleStatement, ast1, {
|
|
body: ast1
|
|
}), make_node(AST_SimpleStatement, ast2, {
|
|
body: ast2
|
|
})).body;
|
|
}
|
|
|
|
function best_of(compressor, ast1, ast2) {
|
|
return (first_in_statement(compressor) ? best_of_statement : best_of_expression)(ast1, ast2);
|
|
}
|
|
|
|
// methods to evaluate a constant expression
|
|
(function (def){
|
|
// If the node has been successfully reduced to a constant,
|
|
// then its value is returned; otherwise the element itself
|
|
// is returned.
|
|
// They can be distinguished as constant value is never a
|
|
// descendant of AST_Node.
|
|
AST_Node.DEFMETHOD("evaluate", function(compressor){
|
|
if (!compressor.option("evaluate")) return this;
|
|
try {
|
|
var val = this._eval(compressor);
|
|
return !val || val instanceof RegExp || typeof val != "object" ? val : this;
|
|
} catch(ex) {
|
|
if (ex !== def) throw ex;
|
|
return this;
|
|
}
|
|
});
|
|
var unaryPrefix = makePredicate("! ~ - + void");
|
|
AST_Node.DEFMETHOD("is_constant", function(){
|
|
// Accommodate when compress option evaluate=false
|
|
// as well as the common constant expressions !0 and -1
|
|
if (this instanceof AST_Constant) {
|
|
return !(this instanceof AST_RegExp);
|
|
} else {
|
|
return this instanceof AST_UnaryPrefix
|
|
&& this.expression instanceof AST_Constant
|
|
&& unaryPrefix(this.operator);
|
|
}
|
|
});
|
|
// Obtain the constant value of an expression already known to be constant.
|
|
// Result only valid iff this.is_constant() is true.
|
|
AST_Node.DEFMETHOD("constant_value", function(compressor){
|
|
// Accommodate when option evaluate=false.
|
|
if (this instanceof AST_Constant && !(this instanceof AST_RegExp)) {
|
|
return this.value;
|
|
}
|
|
// Accommodate the common constant expressions !0 and -1 when option evaluate=false.
|
|
if (this instanceof AST_UnaryPrefix
|
|
&& this.expression instanceof AST_Constant) switch (this.operator) {
|
|
case "!":
|
|
return !this.expression.value;
|
|
case "~":
|
|
return ~this.expression.value;
|
|
case "-":
|
|
return -this.expression.value;
|
|
case "+":
|
|
return +this.expression.value;
|
|
default:
|
|
throw new Error(string_template("Cannot evaluate unary expression {value}", {
|
|
value: this.print_to_string()
|
|
}));
|
|
}
|
|
var result = this.evaluate(compressor);
|
|
if (result !== this) {
|
|
return result;
|
|
}
|
|
throw new Error(string_template("Cannot evaluate constant [{file}:{line},{col}]", this.start));
|
|
});
|
|
def(AST_Statement, function(){
|
|
throw new Error(string_template("Cannot evaluate a statement [{file}:{line},{col}]", this.start));
|
|
});
|
|
def(AST_Lambda, function(){
|
|
throw def;
|
|
});
|
|
function ev(node, compressor) {
|
|
if (!compressor) throw new Error("Compressor must be passed");
|
|
|
|
return node._eval(compressor);
|
|
};
|
|
def(AST_Node, function(){
|
|
throw def; // not constant
|
|
});
|
|
def(AST_Constant, function(){
|
|
return this.getValue();
|
|
});
|
|
def(AST_Array, function(compressor){
|
|
if (compressor.option("unsafe")) {
|
|
return this.elements.map(function(element) {
|
|
return ev(element, compressor);
|
|
});
|
|
}
|
|
throw def;
|
|
});
|
|
def(AST_Object, function(compressor){
|
|
if (compressor.option("unsafe")) {
|
|
var val = {};
|
|
for (var i = 0, len = this.properties.length; i < len; i++) {
|
|
var prop = this.properties[i];
|
|
var key = prop.key;
|
|
if (key instanceof AST_Symbol) {
|
|
key = key.name;
|
|
} else if (key instanceof AST_Node) {
|
|
key = ev(key, compressor);
|
|
}
|
|
if (typeof Object.prototype[key] === 'function') {
|
|
throw def;
|
|
}
|
|
val[key] = ev(prop.value, compressor);
|
|
}
|
|
return val;
|
|
}
|
|
throw def;
|
|
});
|
|
def(AST_UnaryPrefix, function(compressor){
|
|
var e = this.expression;
|
|
switch (this.operator) {
|
|
case "!": return !ev(e, compressor);
|
|
case "typeof":
|
|
// Function would be evaluated to an array and so typeof would
|
|
// incorrectly return 'object'. Hence making it a special case.
|
|
if (e instanceof AST_Function) return typeof function(){};
|
|
|
|
e = ev(e, compressor);
|
|
|
|
// typeof <RegExp> returns "object" or "function" on different platforms
|
|
// so cannot evaluate reliably
|
|
if (e instanceof RegExp) throw def;
|
|
|
|
return typeof e;
|
|
case "void": return void ev(e, compressor);
|
|
case "~": return ~ev(e, compressor);
|
|
case "-": return -ev(e, compressor);
|
|
case "+": return +ev(e, compressor);
|
|
}
|
|
throw def;
|
|
});
|
|
def(AST_Binary, function(c){
|
|
var left = this.left, right = this.right, result;
|
|
switch (this.operator) {
|
|
case "&&" : result = ev(left, c) && ev(right, c); break;
|
|
case "||" : result = ev(left, c) || ev(right, c); break;
|
|
case "|" : result = ev(left, c) | ev(right, c); break;
|
|
case "&" : result = ev(left, c) & ev(right, c); break;
|
|
case "^" : result = ev(left, c) ^ ev(right, c); break;
|
|
case "+" : result = ev(left, c) + ev(right, c); break;
|
|
case "*" : result = ev(left, c) * ev(right, c); break;
|
|
case "/" : result = ev(left, c) / ev(right, c); break;
|
|
case "%" : result = ev(left, c) % ev(right, c); break;
|
|
case "-" : result = ev(left, c) - ev(right, c); break;
|
|
case "<<" : result = ev(left, c) << ev(right, c); break;
|
|
case ">>" : result = ev(left, c) >> ev(right, c); break;
|
|
case ">>>" : result = ev(left, c) >>> ev(right, c); break;
|
|
case "==" : result = ev(left, c) == ev(right, c); break;
|
|
case "===" : result = ev(left, c) === ev(right, c); break;
|
|
case "!=" : result = ev(left, c) != ev(right, c); break;
|
|
case "!==" : result = ev(left, c) !== ev(right, c); break;
|
|
case "<" : result = ev(left, c) < ev(right, c); break;
|
|
case "<=" : result = ev(left, c) <= ev(right, c); break;
|
|
case ">" : result = ev(left, c) > ev(right, c); break;
|
|
case ">=" : result = ev(left, c) >= ev(right, c); break;
|
|
default:
|
|
throw def;
|
|
}
|
|
if (isNaN(result) && c.find_parent(AST_With)) {
|
|
// leave original expression as is
|
|
throw def;
|
|
}
|
|
return result;
|
|
});
|
|
def(AST_Conditional, function(compressor){
|
|
return ev(this.condition, compressor)
|
|
? ev(this.consequent, compressor)
|
|
: ev(this.alternative, compressor);
|
|
});
|
|
def(AST_SymbolRef, function(compressor){
|
|
if (!compressor.option("reduce_vars") || this._evaluating) throw def;
|
|
this._evaluating = true;
|
|
try {
|
|
var fixed = this.fixed_value();
|
|
if (!fixed) throw def;
|
|
var value = ev(fixed, compressor);
|
|
if (!HOP(fixed, "_eval")) fixed._eval = function() {
|
|
return value;
|
|
};
|
|
if (value && typeof value == "object" && this.definition().escaped) throw def;
|
|
return value;
|
|
} finally {
|
|
this._evaluating = false;
|
|
}
|
|
});
|
|
def(AST_PropAccess, function(compressor){
|
|
if (compressor.option("unsafe")) {
|
|
var key = this.property;
|
|
if (key instanceof AST_Node) {
|
|
key = ev(key, compressor);
|
|
}
|
|
var val = ev(this.expression, compressor);
|
|
if (val && HOP(val, key)) {
|
|
return val[key];
|
|
}
|
|
}
|
|
throw def;
|
|
});
|
|
})(function(node, func){
|
|
node.DEFMETHOD("_eval", func);
|
|
});
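// e.g. with the "evaluate" option on, `1 + 2 * 3` folds to 7 and `"ab" + "cd"` to
// "abcd"; as soon as evaluation reaches a node with no constant _eval rule (a call,
// an unknown variable, ...) the shared `def` marker is thrown and evaluate() returns
// the original expression unchanged.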
|
|
|
|
// method to negate an expression
|
|
(function(def){
|
|
function basic_negation(exp) {
|
|
return make_node(AST_UnaryPrefix, exp, {
|
|
operator: "!",
|
|
expression: exp
|
|
});
|
|
}
|
|
function best(orig, alt, first_in_statement) {
|
|
var negated = basic_negation(orig);
|
|
if (first_in_statement) {
|
|
var stat = make_node(AST_SimpleStatement, alt, {
|
|
body: alt
|
|
});
|
|
return best_of_expression(negated, stat) === stat ? alt : negated;
|
|
}
|
|
return best_of_expression(negated, alt);
|
|
}
|
|
def(AST_Node, function(){
|
|
return basic_negation(this);
|
|
});
|
|
def(AST_Statement, function(){
|
|
throw new Error("Cannot negate a statement");
|
|
});
|
|
def(AST_Function, function(){
|
|
return basic_negation(this);
|
|
});
|
|
def(AST_UnaryPrefix, function(){
|
|
if (this.operator == "!")
|
|
return this.expression;
|
|
return basic_negation(this);
|
|
});
|
|
def(AST_Seq, function(compressor){
|
|
var self = this.clone();
|
|
self.cdr = self.cdr.negate(compressor);
|
|
return self;
|
|
});
|
|
def(AST_Conditional, function(compressor, first_in_statement){
|
|
var self = this.clone();
|
|
self.consequent = self.consequent.negate(compressor);
|
|
self.alternative = self.alternative.negate(compressor);
|
|
return best(this, self, first_in_statement);
|
|
});
|
|
def(AST_Binary, function(compressor, first_in_statement){
|
|
var self = this.clone(), op = this.operator;
|
|
if (compressor.option("unsafe_comps")) {
|
|
switch (op) {
|
|
case "<=" : self.operator = ">" ; return self;
|
|
case "<" : self.operator = ">=" ; return self;
|
|
case ">=" : self.operator = "<" ; return self;
|
|
case ">" : self.operator = "<=" ; return self;
|
|
}
|
|
}
|
|
switch (op) {
|
|
case "==" : self.operator = "!="; return self;
|
|
case "!=" : self.operator = "=="; return self;
|
|
case "===": self.operator = "!=="; return self;
|
|
case "!==": self.operator = "==="; return self;
|
|
case "&&":
|
|
self.operator = "||";
|
|
self.left = self.left.negate(compressor, first_in_statement);
|
|
self.right = self.right.negate(compressor);
|
|
return best(this, self, first_in_statement);
|
|
case "||":
|
|
self.operator = "&&";
|
|
self.left = self.left.negate(compressor, first_in_statement);
|
|
self.right = self.right.negate(compressor);
|
|
return best(this, self, first_in_statement);
|
|
}
|
|
return basic_negation(this);
|
|
});
|
|
})(function(node, func){
|
|
node.DEFMETHOD("negate", function(compressor, first_in_statement){
|
|
return func.call(this, compressor, first_in_statement);
|
|
});
|
|
});
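// e.g. negate() rewrites `!(a == b)` as `a != b` unconditionally, rewrites `!(a && b)`
// as `!a || !b` only when that prints shorter, and flips relational operators such as
// `<` into `>=` only under unsafe_comps, since NaN operands would change the result.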
|
|
|
|
AST_Call.DEFMETHOD("has_pure_annotation", function(compressor) {
|
|
if (!compressor.option("side_effects")) return false;
|
|
if (this.pure !== undefined) return this.pure;
|
|
var pure = false;
|
|
var comments, last_comment;
|
|
if (this.start
|
|
&& (comments = this.start.comments_before)
|
|
&& comments.length
|
|
&& /[@#]__PURE__/.test((last_comment = comments[comments.length - 1]).value)) {
|
|
pure = last_comment;
|
|
}
|
|
return this.pure = pure;
|
|
});
|
|
|
|
// determine if expression has side effects
|
|
(function(def){
|
|
def(AST_Node, return_true);
|
|
|
|
def(AST_EmptyStatement, return_false);
|
|
def(AST_Constant, return_false);
|
|
def(AST_This, return_false);
|
|
|
|
def(AST_Call, function(compressor){
|
|
if (!this.has_pure_annotation(compressor) && compressor.pure_funcs(this)) return true;
|
|
for (var i = this.args.length; --i >= 0;) {
|
|
if (this.args[i].has_side_effects(compressor))
|
|
return true;
|
|
}
|
|
return false;
|
|
});
|
|
|
|
function any(list, compressor) {
|
|
for (var i = list.length; --i >= 0;)
|
|
if (list[i].has_side_effects(compressor))
|
|
return true;
|
|
return false;
|
|
}
|
|
|
|
def(AST_Block, function(compressor){
|
|
return any(this.body, compressor);
|
|
});
|
|
def(AST_Switch, function(compressor){
|
|
return this.expression.has_side_effects(compressor)
|
|
|| any(this.body, compressor);
|
|
});
|
|
def(AST_Case, function(compressor){
|
|
return this.expression.has_side_effects(compressor)
|
|
|| any(this.body, compressor);
|
|
});
|
|
def(AST_Try, function(compressor){
|
|
return any(this.body, compressor)
|
|
|| this.bcatch && this.bcatch.has_side_effects(compressor)
|
|
|| this.bfinally && this.bfinally.has_side_effects(compressor);
|
|
});
|
|
def(AST_If, function(compressor){
|
|
return this.condition.has_side_effects(compressor)
|
|
|| this.body && this.body.has_side_effects(compressor)
|
|
|| this.alternative && this.alternative.has_side_effects(compressor);
|
|
});
|
|
def(AST_LabeledStatement, function(compressor){
|
|
return this.body.has_side_effects(compressor);
|
|
});
|
|
def(AST_SimpleStatement, function(compressor){
|
|
return this.body.has_side_effects(compressor);
|
|
});
|
|
def(AST_Defun, return_true);
|
|
def(AST_Function, return_false);
|
|
def(AST_Binary, function(compressor){
|
|
return this.left.has_side_effects(compressor)
|
|
|| this.right.has_side_effects(compressor);
|
|
});
|
|
def(AST_Assign, return_true);
|
|
def(AST_Conditional, function(compressor){
|
|
return this.condition.has_side_effects(compressor)
|
|
|| this.consequent.has_side_effects(compressor)
|
|
|| this.alternative.has_side_effects(compressor);
|
|
});
|
|
def(AST_Unary, function(compressor){
|
|
return unary_side_effects(this.operator)
|
|
|| this.expression.has_side_effects(compressor);
|
|
});
|
|
def(AST_SymbolRef, function(compressor){
|
|
return this.undeclared();
|
|
});
|
|
def(AST_Object, function(compressor){
|
|
return any(this.properties, compressor);
|
|
});
|
|
def(AST_ObjectProperty, function(compressor){
|
|
return this.value.has_side_effects(compressor);
|
|
});
|
|
def(AST_Array, function(compressor){
|
|
return any(this.elements, compressor);
|
|
});
|
|
def(AST_Dot, function(compressor){
|
|
return this.expression.may_throw_on_access(compressor)
|
|
|| this.expression.has_side_effects(compressor);
|
|
});
|
|
def(AST_Sub, function(compressor){
|
|
return this.expression.may_throw_on_access(compressor)
|
|
|| this.expression.has_side_effects(compressor)
|
|
|| this.property.has_side_effects(compressor);
|
|
});
|
|
def(AST_Seq, function(compressor){
|
|
return this.car.has_side_effects(compressor)
|
|
|| this.cdr.has_side_effects(compressor);
|
|
});
|
|
})(function(node, func){
|
|
node.DEFMETHOD("has_side_effects", func);
|
|
});
|
|
|
|
// tell me if a statement aborts
|
|
function aborts(thing) {
|
|
return thing && thing.aborts();
|
|
};
|
|
(function(def){
|
|
def(AST_Statement, return_null);
|
|
def(AST_Jump, return_this);
|
|
function block_aborts(){
|
|
var n = this.body.length;
|
|
return n > 0 && aborts(this.body[n - 1]);
|
|
};
|
|
def(AST_BlockStatement, block_aborts);
|
|
def(AST_SwitchBranch, block_aborts);
|
|
def(AST_If, function(){
|
|
return this.alternative && aborts(this.body) && aborts(this.alternative) && this;
|
|
});
|
|
})(function(node, func){
|
|
node.DEFMETHOD("aborts", func);
|
|
});
|
|
|
|
/* -----[ optimizers ]----- */
|
|
|
|
OPT(AST_Directive, function(self, compressor){
|
|
if (compressor.has_directive(self.value) !== self) {
|
|
return make_node(AST_EmptyStatement, self);
|
|
}
|
|
return self;
|
|
});
|
|
|
|
OPT(AST_Debugger, function(self, compressor){
|
|
if (compressor.option("drop_debugger"))
|
|
return make_node(AST_EmptyStatement, self);
|
|
return self;
|
|
});
|
|
|
|
OPT(AST_LabeledStatement, function(self, compressor){
|
|
if (self.body instanceof AST_Break
|
|
&& compressor.loopcontrol_target(self.body) === self.body) {
|
|
return make_node(AST_EmptyStatement, self);
|
|
}
|
|
return self.label.references.length == 0 ? self.body : self;
|
|
});
|
|
|
|
OPT(AST_Block, function(self, compressor){
|
|
self.body = tighten_body(self.body, compressor);
|
|
return self;
|
|
});
|
|
|
|
OPT(AST_BlockStatement, function(self, compressor){
|
|
self.body = tighten_body(self.body, compressor);
|
|
switch (self.body.length) {
|
|
case 1: return self.body[0];
|
|
case 0: return make_node(AST_EmptyStatement, self);
|
|
}
|
|
return self;
|
|
});
|
|
|
|
AST_Scope.DEFMETHOD("drop_unused", function(compressor){
|
|
var self = this;
|
|
if (compressor.has_directive("use asm")) return self;
|
|
var toplevel = compressor.option("toplevel");
|
|
if (compressor.option("unused")
|
|
&& (!(self instanceof AST_Toplevel) || toplevel)
|
|
&& !self.uses_eval
|
|
&& !self.uses_with) {
|
|
var assign_as_unused = !/keep_assign/.test(compressor.option("unused"));
|
|
var drop_funcs = /funcs/.test(toplevel);
|
|
var drop_vars = /vars/.test(toplevel);
|
|
if (!(self instanceof AST_Toplevel) || toplevel == true) {
|
|
drop_funcs = drop_vars = true;
|
|
}
|
|
var in_use = [];
|
|
var in_use_ids = Object.create(null); // avoid expensive linear scans of in_use
|
|
if (self instanceof AST_Toplevel && compressor.top_retain) {
|
|
self.variables.each(function(def) {
|
|
if (compressor.top_retain(def) && !(def.id in in_use_ids)) {
|
|
in_use_ids[def.id] = true;
|
|
in_use.push(def);
|
|
}
|
|
});
|
|
}
|
|
var initializations = new Dictionary();
|
|
// pass 1: find out which symbols are directly used in
|
|
// this scope (not in nested scopes).
|
|
var scope = this;
|
|
var tw = new TreeWalker(function(node, descend){
|
|
if (node !== self) {
|
|
if (node instanceof AST_Defun) {
|
|
if (!drop_funcs && scope === self) {
|
|
var node_def = node.name.definition();
|
|
if (!(node_def.id in in_use_ids)) {
|
|
in_use_ids[node_def.id] = true;
|
|
in_use.push(node_def);
|
|
}
|
|
}
|
|
initializations.add(node.name.name, node);
|
|
return true; // don't go in nested scopes
|
|
}
|
|
if (node instanceof AST_Definitions && scope === self) {
|
|
node.definitions.forEach(function(def){
|
|
if (!drop_vars) {
|
|
var node_def = def.name.definition();
|
|
if (!(node_def.id in in_use_ids)) {
|
|
in_use_ids[node_def.id] = true;
|
|
in_use.push(node_def);
|
|
}
|
|
}
|
|
if (def.value) {
|
|
initializations.add(def.name.name, def.value);
|
|
if (def.value.has_side_effects(compressor)) {
|
|
def.value.walk(tw);
|
|
}
|
|
}
|
|
});
|
|
return true;
|
|
}
|
|
if (assign_as_unused
|
|
&& node instanceof AST_Assign
|
|
&& node.operator == "="
|
|
&& node.left instanceof AST_SymbolRef
|
|
&& !is_reference_const(node.left)
|
|
&& scope === self) {
|
|
node.right.walk(tw);
|
|
return true;
|
|
}
|
|
if (node instanceof AST_SymbolRef) {
|
|
var node_def = node.definition();
|
|
if (!(node_def.id in in_use_ids)) {
|
|
in_use_ids[node_def.id] = true;
|
|
in_use.push(node_def);
|
|
}
|
|
return true;
|
|
}
|
|
if (node instanceof AST_Scope) {
|
|
var save_scope = scope;
|
|
scope = node;
|
|
descend();
|
|
scope = save_scope;
|
|
return true;
|
|
}
|
|
}
|
|
});
|
|
self.walk(tw);
|
|
// pass 2: for every used symbol we need to walk its
|
|
// initialization code to figure out if it uses other
|
|
// symbols (that may not be in_use).
|
|
for (var i = 0; i < in_use.length; ++i) {
|
|
in_use[i].orig.forEach(function(decl){
|
|
// undeclared globals will be instanceof AST_SymbolRef
|
|
var init = initializations.get(decl.name);
|
|
if (init) init.forEach(function(init){
|
|
var tw = new TreeWalker(function(node){
|
|
if (node instanceof AST_SymbolRef) {
|
|
var node_def = node.definition();
|
|
if (!(node_def.id in in_use_ids)) {
|
|
in_use_ids[node_def.id] = true;
|
|
in_use.push(node_def);
|
|
}
|
|
}
|
|
});
|
|
init.walk(tw);
|
|
});
|
|
});
|
|
}
|
|
// pass 3: we should drop declarations not in_use
|
|
var tt = new TreeTransformer(
|
|
function before(node, descend, in_list) {
|
|
if (node instanceof AST_Function
|
|
&& node.name
|
|
&& !compressor.option("keep_fnames")) {
|
|
var def = node.name.definition();
|
|
// any declarations with same name will overshadow
|
|
// name of this anonymous function and can therefore
|
|
// never be used anywhere
|
|
if (!(def.id in in_use_ids) || def.orig.length > 1)
|
|
node.name = null;
|
|
}
|
|
if (node instanceof AST_Lambda && !(node instanceof AST_Accessor)) {
|
|
var trim = !compressor.option("keep_fargs");
|
|
for (var a = node.argnames, i = a.length; --i >= 0;) {
|
|
var sym = a[i];
|
|
if (!(sym.definition().id in in_use_ids)) {
|
|
sym.__unused = true;
|
|
if (trim) {
|
|
a.pop();
|
|
compressor[sym.unreferenced() ? "warn" : "info"]("Dropping unused function argument {name} [{file}:{line},{col}]", {
|
|
name : sym.name,
|
|
file : sym.start.file,
|
|
line : sym.start.line,
|
|
col : sym.start.col
|
|
});
|
|
}
|
|
}
|
|
else {
|
|
trim = false;
|
|
}
|
|
}
|
|
}
|
|
if (drop_funcs && node instanceof AST_Defun && node !== self) {
|
|
if (!(node.name.definition().id in in_use_ids)) {
|
|
compressor[node.name.unreferenced() ? "warn" : "info"]("Dropping unused function {name} [{file}:{line},{col}]", {
|
|
name : node.name.name,
|
|
file : node.name.start.file,
|
|
line : node.name.start.line,
|
|
col : node.name.start.col
|
|
});
|
|
return make_node(AST_EmptyStatement, node);
|
|
}
|
|
return node;
|
|
}
|
|
if (drop_vars && node instanceof AST_Definitions && !(tt.parent() instanceof AST_ForIn && tt.parent().init === node)) {
|
|
var def = node.definitions.filter(function(def){
|
|
if (def.value) def.value = def.value.transform(tt);
|
|
var sym = def.name.definition();
|
|
if (sym.id in in_use_ids) return true;
|
|
if (sym.orig[0] instanceof AST_SymbolCatch) {
|
|
def.value = def.value && def.value.drop_side_effect_free(compressor);
|
|
return true;
|
|
}
|
|
var w = {
|
|
name : def.name.name,
|
|
file : def.name.start.file,
|
|
line : def.name.start.line,
|
|
col : def.name.start.col
|
|
};
|
|
if (def.value && (def._unused_side_effects = def.value.drop_side_effect_free(compressor))) {
|
|
compressor.warn("Side effects in initialization of unused variable {name} [{file}:{line},{col}]", w);
|
|
return true;
|
|
}
|
|
compressor[def.name.unreferenced() ? "warn" : "info"]("Dropping unused variable {name} [{file}:{line},{col}]", w);
|
|
return false;
|
|
});
|
|
// place uninitialized names at the start
|
|
def = mergeSort(def, function(a, b){
|
|
if (!a.value && b.value) return -1;
|
|
if (!b.value && a.value) return 1;
|
|
return 0;
|
|
});
|
|
// for unused names whose initialization has
|
|
// side effects, we can cascade the init. code
|
|
// into the next one, or next statement.
|
|
var side_effects = [];
|
|
for (var i = 0; i < def.length;) {
|
|
var x = def[i];
|
|
if (x._unused_side_effects) {
|
|
side_effects.push(x._unused_side_effects);
|
|
def.splice(i, 1);
|
|
} else {
|
|
if (side_effects.length > 0) {
|
|
side_effects.push(x.value);
|
|
x.value = AST_Seq.from_array(side_effects);
|
|
side_effects = [];
|
|
}
|
|
++i;
|
|
}
|
|
}
|
|
if (side_effects.length > 0) {
|
|
side_effects = make_node(AST_BlockStatement, node, {
|
|
body: [ make_node(AST_SimpleStatement, node, {
|
|
body: AST_Seq.from_array(side_effects)
|
|
}) ]
|
|
});
|
|
} else {
|
|
side_effects = null;
|
|
}
|
|
if (def.length == 0 && !side_effects) {
|
|
return make_node(AST_EmptyStatement, node);
|
|
}
|
|
if (def.length == 0) {
|
|
return in_list ? MAP.splice(side_effects.body) : side_effects;
|
|
}
|
|
node.definitions = def;
|
|
if (side_effects) {
|
|
side_effects.body.unshift(node);
|
|
return in_list ? MAP.splice(side_effects.body) : side_effects;
|
|
}
|
|
return node;
|
|
}
|
|
if (drop_vars && assign_as_unused
|
|
&& node instanceof AST_Assign
|
|
&& node.operator == "="
|
|
&& node.left instanceof AST_SymbolRef) {
|
|
var def = node.left.definition();
|
|
if (!(def.id in in_use_ids)
|
|
&& self.variables.get(def.name) === def) {
|
|
return maintain_this_binding(tt.parent(), node, node.right.transform(tt));
|
|
}
|
|
}
|
|
// certain combination of unused name + side effect leads to:
|
|
// https://github.com/mishoo/UglifyJS2/issues/44
|
|
// https://github.com/mishoo/UglifyJS2/issues/1830
|
|
// that's an invalid AST.
|
|
// We fix it at this stage by moving the `var` outside the `for`.
|
|
if (node instanceof AST_For) {
|
|
descend(node, this);
|
|
if (node.init instanceof AST_BlockStatement) {
|
|
var block = node.init;
|
|
node.init = block.body.pop();
|
|
block.body.push(node);
|
|
return in_list ? MAP.splice(block.body) : block;
|
|
} else if (is_empty(node.init)) {
|
|
node.init = null;
|
|
}
|
|
return node;
|
|
}
|
|
if (node instanceof AST_LabeledStatement && node.body instanceof AST_For) {
|
|
descend(node, this);
|
|
if (node.body instanceof AST_BlockStatement) {
|
|
var block = node.body;
|
|
node.body = block.body.pop();
|
|
block.body.push(node);
|
|
return in_list ? MAP.splice(block.body) : block;
|
|
}
|
|
return node;
|
|
}
|
|
if (node instanceof AST_Scope && node !== self)
|
|
return node;
|
|
}
|
|
);
|
|
self.transform(tt);
|
|
}
|
|
});
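
// Rough example of the effect of drop_unused() above: with the `unused`
// option, declarations that are never read are dropped, and initializers
// with side effects cascade into the next definition, e.g.
//     function f() { var a = g(), b = 1; return b; }
// becomes, roughly,
//     function f() { var b = (g(), 1); return b; }
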
AST_Scope.DEFMETHOD("hoist_declarations", function(compressor){
|
|
var self = this;
|
|
if (compressor.has_directive("use asm")) return self;
|
|
var hoist_funs = compressor.option("hoist_funs");
|
|
var hoist_vars = compressor.option("hoist_vars");
|
|
if (hoist_funs || hoist_vars) {
|
|
var dirs = [];
|
|
var hoisted = [];
|
|
var vars = new Dictionary(), vars_found = 0, var_decl = 0;
|
|
// let's count var_decl first, we seem to waste a lot of
|
|
// space if we hoist `var` when there's only one.
|
|
self.walk(new TreeWalker(function(node){
|
|
if (node instanceof AST_Scope && node !== self)
|
|
return true;
|
|
if (node instanceof AST_Var) {
|
|
++var_decl;
|
|
return true;
|
|
}
|
|
}));
|
|
hoist_vars = hoist_vars && var_decl > 1;
|
|
var tt = new TreeTransformer(
|
|
function before(node) {
|
|
if (node !== self) {
|
|
if (node instanceof AST_Directive) {
|
|
dirs.push(node);
|
|
return make_node(AST_EmptyStatement, node);
|
|
}
|
|
if (node instanceof AST_Defun && hoist_funs) {
|
|
hoisted.push(node);
|
|
return make_node(AST_EmptyStatement, node);
|
|
}
|
|
if (node instanceof AST_Var && hoist_vars) {
|
|
node.definitions.forEach(function(def){
|
|
vars.set(def.name.name, def);
|
|
++vars_found;
|
|
});
|
|
var seq = node.to_assignments(compressor);
|
|
var p = tt.parent();
|
|
if (p instanceof AST_ForIn && p.init === node) {
|
|
if (seq == null) {
|
|
var def = node.definitions[0].name;
|
|
return make_node(AST_SymbolRef, def, def);
|
|
}
|
|
return seq;
|
|
}
|
|
if (p instanceof AST_For && p.init === node) {
|
|
return seq;
|
|
}
|
|
if (!seq) return make_node(AST_EmptyStatement, node);
|
|
return make_node(AST_SimpleStatement, node, {
|
|
body: seq
|
|
});
|
|
}
|
|
if (node instanceof AST_Scope)
|
|
return node; // to avoid descending in nested scopes
|
|
}
|
|
}
|
|
);
|
|
self = self.transform(tt);
|
|
if (vars_found > 0) {
|
|
// collect only vars which don't show up in self's arguments list
|
|
var defs = [];
|
|
vars.each(function(def, name){
|
|
if (self instanceof AST_Lambda
|
|
&& find_if(function(x){ return x.name == def.name.name },
|
|
self.argnames)) {
|
|
vars.del(name);
|
|
} else {
|
|
def = def.clone();
|
|
def.value = null;
|
|
defs.push(def);
|
|
vars.set(name, def);
|
|
}
|
|
});
|
|
if (defs.length > 0) {
|
|
// try to merge in assignments
|
|
for (var i = 0; i < self.body.length;) {
|
|
if (self.body[i] instanceof AST_SimpleStatement) {
|
|
var expr = self.body[i].body, sym, assign;
|
|
if (expr instanceof AST_Assign
|
|
&& expr.operator == "="
|
|
&& (sym = expr.left) instanceof AST_Symbol
|
|
&& vars.has(sym.name))
|
|
{
|
|
var def = vars.get(sym.name);
|
|
if (def.value) break;
|
|
def.value = expr.right;
|
|
remove(defs, def);
|
|
defs.push(def);
|
|
self.body.splice(i, 1);
|
|
continue;
|
|
}
|
|
if (expr instanceof AST_Seq
|
|
&& (assign = expr.car) instanceof AST_Assign
|
|
&& assign.operator == "="
|
|
&& (sym = assign.left) instanceof AST_Symbol
|
|
&& vars.has(sym.name))
|
|
{
|
|
var def = vars.get(sym.name);
|
|
if (def.value) break;
|
|
def.value = assign.right;
|
|
remove(defs, def);
|
|
defs.push(def);
|
|
self.body[i].body = expr.cdr;
|
|
continue;
|
|
}
|
|
}
|
|
if (self.body[i] instanceof AST_EmptyStatement) {
|
|
self.body.splice(i, 1);
|
|
continue;
|
|
}
|
|
if (self.body[i] instanceof AST_BlockStatement) {
|
|
var tmp = [ i, 1 ].concat(self.body[i].body);
|
|
self.body.splice.apply(self.body, tmp);
|
|
continue;
|
|
}
|
|
break;
|
|
}
|
|
defs = make_node(AST_Var, self, {
|
|
definitions: defs
|
|
});
|
|
hoisted.push(defs);
|
|
};
|
|
}
|
|
self.body = dirs.concat(hoisted, self.body);
|
|
}
|
|
return self;
|
|
});
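
// Rough example of hoist_declarations() above: with `hoist_vars` (and more
// than one `var` statement in the scope) declarations move to the top of the
// scope, initializers become assignments, and leading assignments are merged
// back into the hoisted `var`, e.g.
//     var a = 1; f(); var b = 2;   =>   var a = 1, b; f(); b = 2;
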
// drop_side_effect_free()
|
|
// remove side-effect-free parts which only affect the return value
|
|
(function(def){
|
|
// Drop side-effect-free elements from an array of expressions.
|
|
// Returns an array of expressions with side-effects or null
|
|
// if all elements were dropped. Note: original array may be
|
|
// returned if nothing changed.
|
|
function trim(nodes, compressor, first_in_statement) {
|
|
var ret = [], changed = false;
|
|
for (var i = 0, len = nodes.length; i < len; i++) {
|
|
var node = nodes[i].drop_side_effect_free(compressor, first_in_statement);
|
|
changed |= node !== nodes[i];
|
|
if (node) {
|
|
ret.push(node);
|
|
first_in_statement = false;
|
|
}
|
|
}
|
|
return changed ? ret.length ? ret : null : nodes;
|
|
}
|
|
|
|
def(AST_Node, return_this);
|
|
def(AST_Constant, return_null);
|
|
def(AST_This, return_null);
|
|
def(AST_Call, function(compressor, first_in_statement){
|
|
if (!this.has_pure_annotation(compressor) && compressor.pure_funcs(this)) {
|
|
if (this.expression instanceof AST_Function
|
|
&& (!this.expression.name || !this.expression.name.definition().references.length)) {
|
|
var node = this.clone();
|
|
node.expression = node.expression.process_expression(false, compressor);
|
|
return node;
|
|
}
|
|
return this;
|
|
}
|
|
if (this.pure) {
|
|
compressor.warn("Dropping __PURE__ call [{file}:{line},{col}]", this.start);
|
|
this.pure.value = this.pure.value.replace(/[@#]__PURE__/g, ' ');
|
|
}
|
|
var args = trim(this.args, compressor, first_in_statement);
|
|
return args && AST_Seq.from_array(args);
|
|
});
|
|
def(AST_Accessor, return_null);
|
|
def(AST_Function, return_null);
|
|
def(AST_Binary, function(compressor, first_in_statement){
|
|
var right = this.right.drop_side_effect_free(compressor);
|
|
if (!right) return this.left.drop_side_effect_free(compressor, first_in_statement);
|
|
switch (this.operator) {
|
|
case "&&":
|
|
case "||":
|
|
if (right === this.right) return this;
|
|
var node = this.clone();
|
|
node.right = right;
|
|
return node;
|
|
default:
|
|
var left = this.left.drop_side_effect_free(compressor, first_in_statement);
|
|
if (!left) return this.right.drop_side_effect_free(compressor, first_in_statement);
|
|
return make_node(AST_Seq, this, {
|
|
car: left,
|
|
cdr: right
|
|
});
|
|
}
|
|
});
|
|
def(AST_Assign, return_this);
|
|
def(AST_Conditional, function(compressor){
|
|
var consequent = this.consequent.drop_side_effect_free(compressor);
|
|
var alternative = this.alternative.drop_side_effect_free(compressor);
|
|
if (consequent === this.consequent && alternative === this.alternative) return this;
|
|
if (!consequent) return alternative ? make_node(AST_Binary, this, {
|
|
operator: "||",
|
|
left: this.condition,
|
|
right: alternative
|
|
}) : this.condition.drop_side_effect_free(compressor);
|
|
if (!alternative) return make_node(AST_Binary, this, {
|
|
operator: "&&",
|
|
left: this.condition,
|
|
right: consequent
|
|
});
|
|
var node = this.clone();
|
|
node.consequent = consequent;
|
|
node.alternative = alternative;
|
|
return node;
|
|
});
|
|
def(AST_Unary, function(compressor, first_in_statement){
|
|
if (unary_side_effects(this.operator)) return this;
|
|
if (this.operator == "typeof" && this.expression instanceof AST_SymbolRef) return null;
|
|
var expression = this.expression.drop_side_effect_free(compressor, first_in_statement);
|
|
if (first_in_statement
|
|
&& this instanceof AST_UnaryPrefix
|
|
&& is_iife_call(expression)) {
|
|
if (expression === this.expression && this.operator.length === 1) return this;
|
|
return make_node(AST_UnaryPrefix, this, {
|
|
operator: this.operator.length === 1 ? this.operator : "!",
|
|
expression: expression
|
|
});
|
|
}
|
|
return expression;
|
|
});
|
|
def(AST_SymbolRef, function() {
|
|
return this.undeclared() ? this : null;
|
|
});
|
|
def(AST_Object, function(compressor, first_in_statement){
|
|
var values = trim(this.properties, compressor, first_in_statement);
|
|
return values && AST_Seq.from_array(values);
|
|
});
|
|
def(AST_ObjectProperty, function(compressor, first_in_statement){
|
|
return this.value.drop_side_effect_free(compressor, first_in_statement);
|
|
});
|
|
def(AST_Array, function(compressor, first_in_statement){
|
|
var values = trim(this.elements, compressor, first_in_statement);
|
|
return values && AST_Seq.from_array(values);
|
|
});
|
|
def(AST_Dot, function(compressor, first_in_statement){
|
|
if (this.expression.may_throw_on_access(compressor)) return this;
|
|
return this.expression.drop_side_effect_free(compressor, first_in_statement);
|
|
});
|
|
def(AST_Sub, function(compressor, first_in_statement){
|
|
if (this.expression.may_throw_on_access(compressor)) return this;
|
|
var expression = this.expression.drop_side_effect_free(compressor, first_in_statement);
|
|
if (!expression) return this.property.drop_side_effect_free(compressor, first_in_statement);
|
|
var property = this.property.drop_side_effect_free(compressor);
|
|
if (!property) return expression;
|
|
return make_node(AST_Seq, this, {
|
|
car: expression,
|
|
cdr: property
|
|
});
|
|
});
|
|
def(AST_Seq, function(compressor){
|
|
var cdr = this.cdr.drop_side_effect_free(compressor);
|
|
if (cdr === this.cdr) return this;
|
|
if (!cdr) return this.car;
|
|
return make_node(AST_Seq, this, {
|
|
car: this.car,
|
|
cdr: cdr
|
|
});
|
|
});
|
|
})(function(node, func){
|
|
node.DEFMETHOD("drop_side_effect_free", func);
|
|
});
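
// Illustrative example of drop_side_effect_free() above: only the parts of an
// expression whose evaluation can be observed are kept, so a statement like
//     [ 1, f(), x.y + 2 ];
// is reduced (via the AST_SimpleStatement rule below) to roughly
//     f(), x.y;
// (`x.y` survives because, without `pure_getters`, a property access may throw
// or trigger a getter).
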
OPT(AST_SimpleStatement, function(self, compressor){
    if (compressor.option("side_effects")) {
        var body = self.body;
        var node = body.drop_side_effect_free(compressor, true);
        if (!node) {
            compressor.warn("Dropping side-effect-free statement [{file}:{line},{col}]", self.start);
            return make_node(AST_EmptyStatement, self);
        }
        if (node !== body) {
            return make_node(AST_SimpleStatement, self, { body: node });
        }
    }
    return self;
});

OPT(AST_DWLoop, function(self, compressor){
    if (!compressor.option("loops")) return self;
    var cond = self.condition.evaluate(compressor);
    if (cond !== self.condition) {
        if (cond) {
            return make_node(AST_For, self, {
                body: self.body
            });
        }
        if (compressor.option("dead_code") && self instanceof AST_While) {
            var a = [];
            extract_declarations_from_unreachable_code(compressor, self.body, a);
            return make_node(AST_BlockStatement, self, { body: a }).optimize(compressor);
        }
        if (self instanceof AST_Do) {
            var has_loop_control = false;
            var tw = new TreeWalker(function(node) {
                if (node instanceof AST_Scope || has_loop_control) return true;
                if (node instanceof AST_LoopControl && tw.loopcontrol_target(node) === self)
                    return has_loop_control = true;
            });
            var parent = compressor.parent();
            (parent instanceof AST_LabeledStatement ? parent : self).walk(tw);
            if (!has_loop_control) return self.body;
        }
    }
    if (self instanceof AST_While) {
        return make_node(AST_For, self, self).optimize(compressor);
    }
    return self;
});
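
// Illustrative examples of the AST_DWLoop rule above, with `loops` enabled:
//     while (1) body();       =>   for (;;) body();
//     do body(); while (0);   =>   body();    (when nothing breaks out of it)
//     while (0) body();       =>   dropped entirely with `dead_code`
//                                  (declarations inside are preserved)
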
function if_break_in_loop(self, compressor) {
    function drop_it(rest) {
        rest = as_statement_array(rest);
        if (self.body instanceof AST_BlockStatement) {
            self.body = self.body.clone();
            self.body.body = rest.concat(self.body.body.slice(1));
            self.body = self.body.transform(compressor);
        } else {
            self.body = make_node(AST_BlockStatement, self.body, {
                body: rest
            }).transform(compressor);
        }
        if_break_in_loop(self, compressor);
    }
    var first = self.body instanceof AST_BlockStatement ? self.body.body[0] : self.body;
    if (first instanceof AST_If) {
        if (first.body instanceof AST_Break
            && compressor.loopcontrol_target(first.body) === compressor.self()) {
            if (self.condition) {
                self.condition = make_node(AST_Binary, self.condition, {
                    left: self.condition,
                    operator: "&&",
                    right: first.condition.negate(compressor),
                });
            } else {
                self.condition = first.condition.negate(compressor);
            }
            drop_it(first.alternative);
        }
        else if (first.alternative instanceof AST_Break
            && compressor.loopcontrol_target(first.alternative) === compressor.self()) {
            if (self.condition) {
                self.condition = make_node(AST_Binary, self.condition, {
                    left: self.condition,
                    operator: "&&",
                    right: first.condition,
                });
            } else {
                self.condition = first.condition;
            }
            drop_it(first.body);
        }
    }
};
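
// Rough example of if_break_in_loop() above: a leading `if (...) break;`
// whose break targets the current loop is folded into the loop condition:
//     for (;;) { if (x) break; body(); }   =>   for (; !x;) body();
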
OPT(AST_For, function(self, compressor){
    if (!compressor.option("loops")) return self;
    if (self.condition) {
        var cond = self.condition.evaluate(compressor);
        if (compressor.option("dead_code") && !cond) {
            var a = [];
            if (self.init instanceof AST_Statement) {
                a.push(self.init);
            }
            else if (self.init) {
                a.push(make_node(AST_SimpleStatement, self.init, {
                    body: self.init
                }));
            }
            extract_declarations_from_unreachable_code(compressor, self.body, a);
            return make_node(AST_BlockStatement, self, { body: a }).optimize(compressor);
        }
        if (cond !== self.condition) {
            cond = make_node_from_constant(cond, self.condition).transform(compressor);
            self.condition = best_of_expression(cond, self.condition);
        }
    }
    if_break_in_loop(self, compressor);
    return self;
});
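
// Illustrative example of the AST_For rule above: with `dead_code`, a `for`
// whose condition is provably false keeps its init (plus any declarations
// extracted from the body) and drops the rest, e.g.
//     for (var i = 0; false; i++) body();   =>   var i = 0;
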
OPT(AST_If, function(self, compressor){
|
|
if (is_empty(self.alternative)) self.alternative = null;
|
|
|
|
if (!compressor.option("conditionals")) return self;
|
|
// if condition can be statically determined, warn and drop
|
|
// one of the blocks. note, statically determined implies
|
|
// “has no side effects”; also it doesn't work for cases like
|
|
// `x && true`, though it probably should.
|
|
var cond = self.condition.evaluate(compressor);
|
|
if (cond !== self.condition) {
|
|
if (cond) {
|
|
compressor.warn("Condition always true [{file}:{line},{col}]", self.condition.start);
|
|
if (compressor.option("dead_code")) {
|
|
var a = [];
|
|
if (self.alternative) {
|
|
extract_declarations_from_unreachable_code(compressor, self.alternative, a);
|
|
}
|
|
a.push(self.body);
|
|
return make_node(AST_BlockStatement, self, { body: a }).optimize(compressor);
|
|
}
|
|
} else {
|
|
compressor.warn("Condition always false [{file}:{line},{col}]", self.condition.start);
|
|
if (compressor.option("dead_code")) {
|
|
var a = [];
|
|
extract_declarations_from_unreachable_code(compressor, self.body, a);
|
|
if (self.alternative) a.push(self.alternative);
|
|
return make_node(AST_BlockStatement, self, { body: a }).optimize(compressor);
|
|
}
|
|
}
|
|
cond = make_node_from_constant(cond, self.condition).transform(compressor);
|
|
self.condition = best_of_expression(cond, self.condition);
|
|
}
|
|
var negated = self.condition.negate(compressor);
|
|
var self_condition_length = self.condition.print_to_string().length;
|
|
var negated_length = negated.print_to_string().length;
|
|
var negated_is_best = negated_length < self_condition_length;
|
|
if (self.alternative && negated_is_best) {
|
|
negated_is_best = false; // because we already do the switch here.
|
|
// no need to swap values of self_condition_length and negated_length
|
|
// here because they are only used in an equality comparison later on.
|
|
self.condition = negated;
|
|
var tmp = self.body;
|
|
self.body = self.alternative || make_node(AST_EmptyStatement, self);
|
|
self.alternative = tmp;
|
|
}
|
|
if (is_empty(self.body) && is_empty(self.alternative)) {
|
|
return make_node(AST_SimpleStatement, self.condition, {
|
|
body: self.condition.clone()
|
|
}).optimize(compressor);
|
|
}
|
|
if (self.body instanceof AST_SimpleStatement
|
|
&& self.alternative instanceof AST_SimpleStatement) {
|
|
return make_node(AST_SimpleStatement, self, {
|
|
body: make_node(AST_Conditional, self, {
|
|
condition : self.condition,
|
|
consequent : self.body.body,
|
|
alternative : self.alternative.body
|
|
})
|
|
}).optimize(compressor);
|
|
}
|
|
if (is_empty(self.alternative) && self.body instanceof AST_SimpleStatement) {
|
|
if (self_condition_length === negated_length && !negated_is_best
|
|
&& self.condition instanceof AST_Binary && self.condition.operator == "||") {
|
|
// although the code length of self.condition and negated are the same,
|
|
// negated does not require additional surrounding parentheses.
|
|
// see https://github.com/mishoo/UglifyJS2/issues/979
|
|
negated_is_best = true;
|
|
}
|
|
if (negated_is_best) return make_node(AST_SimpleStatement, self, {
|
|
body: make_node(AST_Binary, self, {
|
|
operator : "||",
|
|
left : negated,
|
|
right : self.body.body
|
|
})
|
|
}).optimize(compressor);
|
|
return make_node(AST_SimpleStatement, self, {
|
|
body: make_node(AST_Binary, self, {
|
|
operator : "&&",
|
|
left : self.condition,
|
|
right : self.body.body
|
|
})
|
|
}).optimize(compressor);
|
|
}
|
|
if (self.body instanceof AST_EmptyStatement
|
|
&& self.alternative instanceof AST_SimpleStatement) {
|
|
return make_node(AST_SimpleStatement, self, {
|
|
body: make_node(AST_Binary, self, {
|
|
operator : "||",
|
|
left : self.condition,
|
|
right : self.alternative.body
|
|
})
|
|
}).optimize(compressor);
|
|
}
|
|
if (self.body instanceof AST_Exit
|
|
&& self.alternative instanceof AST_Exit
|
|
&& self.body.TYPE == self.alternative.TYPE) {
|
|
return make_node(self.body.CTOR, self, {
|
|
value: make_node(AST_Conditional, self, {
|
|
condition : self.condition,
|
|
consequent : self.body.value || make_node(AST_Undefined, self.body),
|
|
alternative : self.alternative.value || make_node(AST_Undefined, self.alternative)
|
|
}).transform(compressor)
|
|
}).optimize(compressor);
|
|
}
|
|
if (self.body instanceof AST_If
|
|
&& !self.body.alternative
|
|
&& !self.alternative) {
|
|
self = make_node(AST_If, self, {
|
|
condition: make_node(AST_Binary, self.condition, {
|
|
operator: "&&",
|
|
left: self.condition,
|
|
right: self.body.condition
|
|
}),
|
|
body: self.body.body,
|
|
alternative: null
|
|
});
|
|
}
|
|
if (aborts(self.body)) {
|
|
if (self.alternative) {
|
|
var alt = self.alternative;
|
|
self.alternative = null;
|
|
return make_node(AST_BlockStatement, self, {
|
|
body: [ self, alt ]
|
|
}).optimize(compressor);
|
|
}
|
|
}
|
|
if (aborts(self.alternative)) {
|
|
var body = self.body;
|
|
self.body = self.alternative;
|
|
self.condition = negated_is_best ? negated : self.condition.negate(compressor);
|
|
self.alternative = null;
|
|
return make_node(AST_BlockStatement, self, {
|
|
body: [ self, body ]
|
|
}).optimize(compressor);
|
|
}
|
|
return self;
|
|
});
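
// Illustrative examples of the AST_If rule above, with `conditionals`:
//     if (x) a(); else b();             =>   x ? a() : b();
//     if (!x) y();                      =>   x || y();
//     if (x) return a; else return b;   =>   return x ? a : b;
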
OPT(AST_Switch, function(self, compressor){
|
|
if (!compressor.option("switches")) return self;
|
|
var branch;
|
|
var value = self.expression.evaluate(compressor);
|
|
if (value !== self.expression) {
|
|
var expression = make_node_from_constant(value, self.expression).transform(compressor);
|
|
self.expression = best_of_expression(expression, self.expression);
|
|
}
|
|
if (!compressor.option("dead_code")) return self;
|
|
var decl = [];
|
|
var body = [];
|
|
var default_branch;
|
|
var exact_match;
|
|
for (var i = 0, len = self.body.length; i < len && !exact_match; i++) {
|
|
branch = self.body[i];
|
|
if (branch instanceof AST_Default) {
|
|
if (!default_branch) {
|
|
default_branch = branch;
|
|
} else {
|
|
eliminate_branch(branch, body[body.length - 1]);
|
|
}
|
|
} else if (value !== self.expression) {
|
|
var exp = branch.expression.evaluate(compressor);
|
|
if (exp === value) {
|
|
exact_match = branch;
|
|
if (default_branch) {
|
|
var default_index = body.indexOf(default_branch);
|
|
body.splice(default_index, 1);
|
|
eliminate_branch(default_branch, body[default_index - 1]);
|
|
default_branch = null;
|
|
}
|
|
} else if (exp !== branch.expression) {
|
|
eliminate_branch(branch, body[body.length - 1]);
|
|
continue;
|
|
}
|
|
}
|
|
if (aborts(branch)) {
|
|
var prev = body[body.length - 1];
|
|
if (aborts(prev) && prev.body.length == branch.body.length
|
|
&& make_node(AST_BlockStatement, prev, prev).equivalent_to(make_node(AST_BlockStatement, branch, branch))) {
|
|
prev.body = [];
|
|
}
|
|
}
|
|
body.push(branch);
|
|
}
|
|
while (i < len) eliminate_branch(self.body[i++], body[body.length - 1]);
|
|
if (body.length > 0) {
|
|
body[0].body = decl.concat(body[0].body);
|
|
}
|
|
self.body = body;
|
|
while (branch = body[body.length - 1]) {
|
|
var stat = branch.body[branch.body.length - 1];
|
|
if (stat instanceof AST_Break && compressor.loopcontrol_target(stat) === self)
|
|
branch.body.pop();
|
|
if (branch.body.length || branch instanceof AST_Case
|
|
&& (default_branch || branch.expression.has_side_effects(compressor))) break;
|
|
if (body.pop() === default_branch) default_branch = null;
|
|
}
|
|
if (body.length == 0) {
|
|
return make_node(AST_BlockStatement, self, {
|
|
body: decl.concat(make_node(AST_SimpleStatement, self.expression, {
|
|
body: self.expression
|
|
}))
|
|
}).optimize(compressor);
|
|
}
|
|
if (body.length == 1 && (body[0] === exact_match || body[0] === default_branch)) {
|
|
var has_break = false;
|
|
var tw = new TreeWalker(function(node) {
|
|
if (has_break
|
|
|| node instanceof AST_Lambda
|
|
|| node instanceof AST_SimpleStatement) return true;
|
|
if (node instanceof AST_Break && tw.loopcontrol_target(node) === self)
|
|
has_break = true;
|
|
});
|
|
self.walk(tw);
|
|
if (!has_break) {
|
|
body = body[0].body.slice();
|
|
body.unshift(make_node(AST_SimpleStatement, self.expression, {
|
|
body: self.expression
|
|
}));
|
|
return make_node(AST_BlockStatement, self, {
|
|
body: body
|
|
}).optimize(compressor);
|
|
}
|
|
}
|
|
return self;
|
|
|
|
function eliminate_branch(branch, prev) {
|
|
if (prev && !aborts(prev)) {
|
|
prev.body = prev.body.concat(branch.body);
|
|
} else {
|
|
extract_declarations_from_unreachable_code(compressor, branch, decl);
|
|
}
|
|
}
|
|
});
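
// Rough example of the AST_Switch rule above: with `dead_code`, a switch over
// a constant collapses to the matching branch, e.g.
//     switch (2) { case 1: a(); break; case 2: b(); break; }
// becomes (once the side-effect-free `2` is also dropped) just
//     b();
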
OPT(AST_Try, function(self, compressor){
    self.body = tighten_body(self.body, compressor);
    if (self.bcatch && self.bfinally && all(self.bfinally.body, is_empty)) self.bfinally = null;
    if (all(self.body, is_empty)) {
        var body = [];
        if (self.bcatch) extract_declarations_from_unreachable_code(compressor, self.bcatch, body);
        if (self.bfinally) body = body.concat(self.bfinally.body);
        return make_node(AST_BlockStatement, self, {
            body: body
        }).optimize(compressor);
    }
    return self;
});

AST_Definitions.DEFMETHOD("remove_initializers", function(){
    this.definitions.forEach(function(def){ def.value = null });
});

AST_Definitions.DEFMETHOD("to_assignments", function(compressor){
    var reduce_vars = compressor.option("reduce_vars");
    var assignments = this.definitions.reduce(function(a, def){
        if (def.value) {
            var name = make_node(AST_SymbolRef, def.name, def.name);
            a.push(make_node(AST_Assign, def, {
                operator : "=",
                left     : name,
                right    : def.value
            }));
            if (reduce_vars) name.definition().fixed = false;
        }
        return a;
    }, []);
    if (assignments.length == 0) return null;
    return AST_Seq.from_array(assignments);
});

OPT(AST_Definitions, function(self, compressor){
    if (self.definitions.length == 0)
        return make_node(AST_EmptyStatement, self);
    return self;
});
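
// Illustrative example of the AST_Definitions helpers above:
// remove_initializers() turns `var a = 1, b = f()` into `var a, b`, while
// to_assignments() produces the assignment sequence `a = 1, b = f()`
// (used by hoist_declarations() and the `for`/`for-in` init handling).
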
OPT(AST_Call, function(self, compressor){
|
|
var exp = self.expression;
|
|
if (compressor.option("reduce_vars")
|
|
&& exp instanceof AST_SymbolRef) {
|
|
var def = exp.definition();
|
|
var fixed = exp.fixed_value();
|
|
if (fixed instanceof AST_Defun) {
|
|
def.fixed = fixed = make_node(AST_Function, fixed, fixed).clone(true);
|
|
}
|
|
if (fixed instanceof AST_Function) {
|
|
exp = fixed;
|
|
if (compressor.option("unused")
|
|
&& def.references.length == 1
|
|
&& !(def.scope.uses_arguments
|
|
&& def.orig[0] instanceof AST_SymbolFunarg)
|
|
&& !def.scope.uses_eval
|
|
&& compressor.find_parent(AST_Scope) === def.scope) {
|
|
self.expression = exp;
|
|
}
|
|
}
|
|
}
|
|
if (compressor.option("unused")
|
|
&& exp instanceof AST_Function
|
|
&& !exp.uses_arguments
|
|
&& !exp.uses_eval) {
|
|
var pos = 0, last = 0;
|
|
for (var i = 0, len = self.args.length; i < len; i++) {
|
|
var trim = i >= exp.argnames.length;
|
|
if (trim || exp.argnames[i].__unused) {
|
|
var node = self.args[i].drop_side_effect_free(compressor);
|
|
if (node) {
|
|
self.args[pos++] = node;
|
|
} else if (!trim) {
|
|
self.args[pos++] = make_node(AST_Number, self.args[i], {
|
|
value: 0
|
|
});
|
|
continue;
|
|
}
|
|
} else {
|
|
self.args[pos++] = self.args[i];
|
|
}
|
|
last = pos;
|
|
}
|
|
self.args.length = last;
|
|
}
|
|
if (compressor.option("unsafe")) {
|
|
if (exp instanceof AST_SymbolRef && exp.undeclared()) {
|
|
switch (exp.name) {
|
|
case "Array":
|
|
if (self.args.length != 1) {
|
|
return make_node(AST_Array, self, {
|
|
elements: self.args
|
|
}).optimize(compressor);
|
|
}
|
|
break;
|
|
case "Object":
|
|
if (self.args.length == 0) {
|
|
return make_node(AST_Object, self, {
|
|
properties: []
|
|
});
|
|
}
|
|
break;
|
|
case "String":
|
|
if (self.args.length == 0) return make_node(AST_String, self, {
|
|
value: ""
|
|
});
|
|
if (self.args.length <= 1) return make_node(AST_Binary, self, {
|
|
left: self.args[0],
|
|
operator: "+",
|
|
right: make_node(AST_String, self, { value: "" })
|
|
}).optimize(compressor);
|
|
break;
|
|
case "Number":
|
|
if (self.args.length == 0) return make_node(AST_Number, self, {
|
|
value: 0
|
|
});
|
|
if (self.args.length == 1) return make_node(AST_UnaryPrefix, self, {
|
|
expression: self.args[0],
|
|
operator: "+"
|
|
}).optimize(compressor);
|
|
case "Boolean":
|
|
if (self.args.length == 0) return make_node(AST_False, self);
|
|
if (self.args.length == 1) return make_node(AST_UnaryPrefix, self, {
|
|
expression: make_node(AST_UnaryPrefix, self, {
|
|
expression: self.args[0],
|
|
operator: "!"
|
|
}),
|
|
operator: "!"
|
|
}).optimize(compressor);
|
|
break;
|
|
case "Function":
|
|
// new Function() => function(){}
|
|
if (self.args.length == 0) return make_node(AST_Function, self, {
|
|
argnames: [],
|
|
body: []
|
|
});
|
|
if (all(self.args, function(x){ return x instanceof AST_String })) {
|
|
// quite a corner-case, but we can handle it:
|
|
// https://github.com/mishoo/UglifyJS2/issues/203
|
|
// if the code argument is a constant, then we can minify it.
|
|
try {
|
|
var code = "(function(" + self.args.slice(0, -1).map(function(arg){
|
|
return arg.value;
|
|
}).join(",") + "){" + self.args[self.args.length - 1].value + "})()";
|
|
var ast = parse(code);
|
|
ast.figure_out_scope({ screw_ie8: compressor.option("screw_ie8") });
|
|
var comp = new Compressor(compressor.options);
|
|
ast = ast.transform(comp);
|
|
ast.figure_out_scope({ screw_ie8: compressor.option("screw_ie8") });
|
|
ast.mangle_names();
|
|
var fun;
|
|
try {
|
|
ast.walk(new TreeWalker(function(node){
|
|
if (node instanceof AST_Lambda) {
|
|
fun = node;
|
|
throw ast;
|
|
}
|
|
}));
|
|
} catch(ex) {
|
|
if (ex !== ast) throw ex;
|
|
};
|
|
if (!fun) return self;
|
|
var args = fun.argnames.map(function(arg, i){
|
|
return make_node(AST_String, self.args[i], {
|
|
value: arg.print_to_string()
|
|
});
|
|
});
|
|
var code = OutputStream();
|
|
AST_BlockStatement.prototype._codegen.call(fun, fun, code);
|
|
code = code.toString().replace(/^\{|\}$/g, "");
|
|
args.push(make_node(AST_String, self.args[self.args.length - 1], {
|
|
value: code
|
|
}));
|
|
self.args = args;
|
|
return self;
|
|
} catch(ex) {
|
|
if (ex instanceof JS_Parse_Error) {
|
|
compressor.warn("Error parsing code passed to new Function [{file}:{line},{col}]", self.args[self.args.length - 1].start);
|
|
compressor.warn(ex.toString());
|
|
} else {
|
|
console.log(ex);
|
|
throw ex;
|
|
}
|
|
}
|
|
}
|
|
break;
|
|
}
|
|
}
|
|
else if (exp instanceof AST_Dot && exp.property == "toString" && self.args.length == 0) {
|
|
return make_node(AST_Binary, self, {
|
|
left: make_node(AST_String, self, { value: "" }),
|
|
operator: "+",
|
|
right: exp.expression
|
|
}).optimize(compressor);
|
|
}
|
|
else if (exp instanceof AST_Dot && exp.expression instanceof AST_Array && exp.property == "join") EXIT: {
|
|
var separator;
|
|
if (self.args.length > 0) {
|
|
separator = self.args[0].evaluate(compressor);
|
|
if (separator === self.args[0]) break EXIT; // not a constant
|
|
}
|
|
var elements = [];
|
|
var consts = [];
|
|
exp.expression.elements.forEach(function(el) {
|
|
var value = el.evaluate(compressor);
|
|
if (value !== el) {
|
|
consts.push(value);
|
|
} else {
|
|
if (consts.length > 0) {
|
|
elements.push(make_node(AST_String, self, {
|
|
value: consts.join(separator)
|
|
}));
|
|
consts.length = 0;
|
|
}
|
|
elements.push(el);
|
|
}
|
|
});
|
|
if (consts.length > 0) {
|
|
elements.push(make_node(AST_String, self, {
|
|
value: consts.join(separator)
|
|
}));
|
|
}
|
|
if (elements.length == 0) return make_node(AST_String, self, { value: "" });
|
|
if (elements.length == 1) {
|
|
if (elements[0].is_string(compressor)) {
|
|
return elements[0];
|
|
}
|
|
return make_node(AST_Binary, elements[0], {
|
|
operator : "+",
|
|
left : make_node(AST_String, self, { value: "" }),
|
|
right : elements[0]
|
|
});
|
|
}
|
|
if (separator == "") {
|
|
var first;
|
|
if (elements[0].is_string(compressor)
|
|
|| elements[1].is_string(compressor)) {
|
|
first = elements.shift();
|
|
} else {
|
|
first = make_node(AST_String, self, { value: "" });
|
|
}
|
|
return elements.reduce(function(prev, el){
|
|
return make_node(AST_Binary, el, {
|
|
operator : "+",
|
|
left : prev,
|
|
right : el
|
|
});
|
|
}, first).optimize(compressor);
|
|
}
|
|
// need this awkward cloning to not affect original element
|
|
// best_of will decide which one to get through.
|
|
var node = self.clone();
|
|
node.expression = node.expression.clone();
|
|
node.expression.expression = node.expression.expression.clone();
|
|
node.expression.expression.elements = elements;
|
|
return best_of(compressor, self, node);
|
|
}
|
|
else if (exp instanceof AST_Dot && exp.expression.is_string(compressor) && exp.property == "charAt") {
|
|
var arg = self.args[0];
|
|
var index = arg ? arg.evaluate(compressor) : 0;
|
|
if (index !== arg) {
|
|
return make_node(AST_Sub, exp, {
|
|
expression: exp.expression,
|
|
property: make_node_from_constant(index | 0, arg || exp)
|
|
}).optimize(compressor);
|
|
}
|
|
}
|
|
}
|
|
if (exp instanceof AST_Function) {
|
|
if (exp.body[0] instanceof AST_Return) {
|
|
var value = exp.body[0].value;
|
|
if (!value || value.is_constant()) {
|
|
var args = self.args.concat(value || make_node(AST_Undefined, self));
|
|
return AST_Seq.from_array(args).transform(compressor);
|
|
}
|
|
}
|
|
if (compressor.option("side_effects") && all(exp.body, is_empty)) {
|
|
var args = self.args.concat(make_node(AST_Undefined, self));
|
|
return AST_Seq.from_array(args).transform(compressor);
|
|
}
|
|
}
|
|
if (compressor.option("drop_console")) {
|
|
if (exp instanceof AST_PropAccess) {
|
|
var name = exp.expression;
|
|
while (name.expression) {
|
|
name = name.expression;
|
|
}
|
|
if (name instanceof AST_SymbolRef
|
|
&& name.name == "console"
|
|
&& name.undeclared()) {
|
|
return make_node(AST_Undefined, self).optimize(compressor);
|
|
}
|
|
}
|
|
}
|
|
if (compressor.option("negate_iife")
|
|
&& compressor.parent() instanceof AST_SimpleStatement
|
|
&& is_iife_call(self)) {
|
|
return self.negate(compressor, true);
|
|
}
|
|
return self;
|
|
});
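
// Illustrative examples of the AST_Call rule above: with `unsafe`, calls to a
// few undeclared globals are folded, e.g.
//     Array(1, 2, 3)   =>   [ 1, 2, 3 ]
//     String(x)        =>   x + ""
//     Boolean(x)       =>   !!x
// and with `drop_console`, `console.*(...)` calls become `void 0`.
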
OPT(AST_New, function(self, compressor){
    if (compressor.option("unsafe")) {
        var exp = self.expression;
        if (exp instanceof AST_SymbolRef && exp.undeclared()) {
            switch (exp.name) {
              case "Object":
              case "RegExp":
              case "Function":
              case "Error":
              case "Array":
                return make_node(AST_Call, self, self).transform(compressor);
            }
        }
    }
    return self;
});

OPT(AST_Seq, function(self, compressor){
|
|
if (!compressor.option("side_effects"))
|
|
return self;
|
|
self.car = self.car.drop_side_effect_free(compressor, first_in_statement(compressor));
|
|
if (!self.car) return maintain_this_binding(compressor.parent(), self, self.cdr);
|
|
if (compressor.option("cascade")) {
|
|
var left;
|
|
if (self.car instanceof AST_Assign
|
|
&& !self.car.left.has_side_effects(compressor)) {
|
|
left = self.car.left;
|
|
} else if (self.car instanceof AST_Unary
|
|
&& (self.car.operator == "++" || self.car.operator == "--")) {
|
|
left = self.car.expression;
|
|
}
|
|
if (left
|
|
&& !(left instanceof AST_SymbolRef
|
|
&& (left.definition().orig[0] instanceof AST_SymbolLambda
|
|
|| is_reference_const(left)))) {
|
|
var parent, field;
|
|
var cdr = self.cdr;
|
|
while (true) {
|
|
if (cdr.equivalent_to(left)) {
|
|
var car = self.car instanceof AST_UnaryPostfix ? make_node(AST_UnaryPrefix, self.car, {
|
|
operator: self.car.operator,
|
|
expression: left
|
|
}) : self.car;
|
|
if (parent) {
|
|
parent[field] = car;
|
|
return self.cdr;
|
|
}
|
|
return car;
|
|
}
|
|
if (cdr instanceof AST_Binary && !(cdr instanceof AST_Assign)) {
|
|
if (cdr.left.is_constant()) {
|
|
if (cdr.operator == "||" || cdr.operator == "&&") break;
|
|
field = "right";
|
|
} else {
|
|
field = "left";
|
|
}
|
|
} else if (cdr instanceof AST_Call
|
|
|| cdr instanceof AST_Unary && !unary_side_effects(cdr.operator)) {
|
|
field = "expression";
|
|
} else break;
|
|
parent = cdr;
|
|
cdr = cdr[field];
|
|
}
|
|
}
|
|
}
|
|
if (is_undefined(self.cdr, compressor)) {
|
|
return make_node(AST_UnaryPrefix, self, {
|
|
operator : "void",
|
|
expression : self.car
|
|
});
|
|
}
|
|
return self;
|
|
});
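
// Illustrative example of the AST_Seq rule above: with `cascade`, a trailing
// read of the value that was just written is folded away, e.g.
//     a = f(), a   =>   a = f()
//     x++, x       =>   ++x
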
AST_Unary.DEFMETHOD("lift_sequences", function(compressor){
    if (compressor.option("sequences")) {
        if (this.expression instanceof AST_Seq) {
            var seq = this.expression;
            var x = seq.to_array();
            var e = this.clone();
            e.expression = x.pop();
            x.push(e);
            seq = AST_Seq.from_array(x).transform(compressor);
            return seq;
        }
    }
    return this;
});

OPT(AST_UnaryPostfix, function(self, compressor){
    return self.lift_sequences(compressor);
});
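
// Illustrative example of lift_sequences() above, with `sequences` enabled:
// a sequence inside a unary operand is hoisted out of the operator, e.g.
//     !(f(), x)   =>   f(), !x
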
OPT(AST_UnaryPrefix, function(self, compressor){
|
|
var e = self.expression;
|
|
if (self.operator == "delete"
|
|
&& !(e instanceof AST_SymbolRef
|
|
|| e instanceof AST_PropAccess
|
|
|| e instanceof AST_NaN
|
|
|| e instanceof AST_Infinity
|
|
|| e instanceof AST_Undefined)) {
|
|
if (e instanceof AST_Seq) {
|
|
e = e.to_array();
|
|
e.push(make_node(AST_True, self));
|
|
return AST_Seq.from_array(e).optimize(compressor);
|
|
}
|
|
return make_node(AST_Seq, self, {
|
|
car: e,
|
|
cdr: make_node(AST_True, self)
|
|
}).optimize(compressor);
|
|
}
|
|
var seq = self.lift_sequences(compressor);
|
|
if (seq !== self) {
|
|
return seq;
|
|
}
|
|
if (compressor.option("side_effects") && self.operator == "void") {
|
|
e = e.drop_side_effect_free(compressor);
|
|
if (e) {
|
|
self.expression = e;
|
|
return self;
|
|
} else {
|
|
return make_node(AST_Undefined, self).optimize(compressor);
|
|
}
|
|
}
|
|
if (compressor.option("booleans") && compressor.in_boolean_context()) {
|
|
switch (self.operator) {
|
|
case "!":
|
|
if (e instanceof AST_UnaryPrefix && e.operator == "!") {
|
|
// !!foo ==> foo, if we're in boolean context
|
|
return e.expression;
|
|
}
|
|
if (e instanceof AST_Binary) {
|
|
self = best_of(compressor, self, e.negate(compressor, first_in_statement(compressor)));
|
|
}
|
|
break;
|
|
case "typeof":
|
|
// typeof always returns a non-empty string, thus it's
|
|
// always true in booleans
|
|
compressor.warn("Boolean expression always true [{file}:{line},{col}]", self.start);
|
|
return (e instanceof AST_SymbolRef ? make_node(AST_True, self) : make_node(AST_Seq, self, {
|
|
car: e,
|
|
cdr: make_node(AST_True, self)
|
|
})).optimize(compressor);
|
|
}
|
|
}
|
|
if (self.operator == "-" && e instanceof AST_Infinity) {
|
|
e = e.transform(compressor);
|
|
}
|
|
if (e instanceof AST_Binary
|
|
&& (self.operator == "+" || self.operator == "-")
|
|
&& (e.operator == "*" || e.operator == "/" || e.operator == "%")) {
|
|
return make_node(AST_Binary, self, {
|
|
operator: e.operator,
|
|
left: make_node(AST_UnaryPrefix, e.left, {
|
|
operator: self.operator,
|
|
expression: e.left
|
|
}),
|
|
right: e.right
|
|
});
|
|
}
|
|
// avoids infinite recursion of numerals
|
|
if (self.operator != "-"
|
|
|| !(e instanceof AST_Number || e instanceof AST_Infinity)) {
|
|
var ev = self.evaluate(compressor);
|
|
if (ev !== self) {
|
|
ev = make_node_from_constant(ev, self).optimize(compressor);
|
|
return best_of(compressor, ev, self);
|
|
}
|
|
}
|
|
return self;
|
|
});
|
|
|
|
AST_Binary.DEFMETHOD("lift_sequences", function(compressor){
|
|
if (compressor.option("sequences")) {
|
|
if (this.left instanceof AST_Seq) {
|
|
var seq = this.left;
|
|
var x = seq.to_array();
|
|
var e = this.clone();
|
|
e.left = x.pop();
|
|
x.push(e);
|
|
return AST_Seq.from_array(x).optimize(compressor);
|
|
}
|
|
if (this.right instanceof AST_Seq && !this.left.has_side_effects(compressor)) {
|
|
var assign = this.operator == "=" && this.left instanceof AST_SymbolRef;
|
|
var root = this.right.clone();
|
|
var cursor, seq = root;
|
|
while (assign || !seq.car.has_side_effects(compressor)) {
|
|
cursor = seq;
|
|
if (seq.cdr instanceof AST_Seq) {
|
|
seq = seq.cdr = seq.cdr.clone();
|
|
} else break;
|
|
}
|
|
if (cursor) {
|
|
var e = this.clone();
|
|
e.right = cursor.cdr;
|
|
cursor.cdr = e;
|
|
return root.optimize(compressor);
|
|
}
|
|
}
|
|
}
|
|
return this;
|
|
});
|
|
|
|
var commutativeOperators = makePredicate("== === != !== * & | ^");
|
|
|
|
OPT(AST_Binary, function(self, compressor){
|
|
function reversible() {
|
|
return self.left.is_constant()
|
|
|| self.right.is_constant()
|
|
|| !self.left.has_side_effects(compressor)
|
|
&& !self.right.has_side_effects(compressor);
|
|
}
|
|
function reverse(op) {
|
|
if (reversible()) {
|
|
if (op) self.operator = op;
|
|
var tmp = self.left;
|
|
self.left = self.right;
|
|
self.right = tmp;
|
|
}
|
|
}
|
|
if (commutativeOperators(self.operator)) {
|
|
if (self.right.is_constant()
|
|
&& !self.left.is_constant()) {
|
|
// if right is a constant, whatever side effects the
|
|
// left side might have could not influence the
|
|
// result. hence, force switch.
|
|
|
|
if (!(self.left instanceof AST_Binary
|
|
&& PRECEDENCE[self.left.operator] >= PRECEDENCE[self.operator])) {
|
|
reverse();
|
|
}
|
|
}
|
|
}
|
|
self = self.lift_sequences(compressor);
|
|
if (compressor.option("comparisons")) switch (self.operator) {
|
|
case "===":
|
|
case "!==":
|
|
if ((self.left.is_string(compressor) && self.right.is_string(compressor)) ||
|
|
(self.left.is_number(compressor) && self.right.is_number(compressor)) ||
|
|
(self.left.is_boolean() && self.right.is_boolean())) {
|
|
self.operator = self.operator.substr(0, 2);
|
|
}
|
|
// XXX: intentionally falling through to the next case
|
|
case "==":
|
|
case "!=":
|
|
// "undefined" == typeof x => undefined === x
|
|
if (self.left instanceof AST_String
|
|
&& self.left.value == "undefined"
|
|
&& self.right instanceof AST_UnaryPrefix
|
|
&& self.right.operator == "typeof") {
|
|
var expr = self.right.expression;
|
|
if (expr instanceof AST_SymbolRef ? !expr.undeclared()
|
|
: !(expr instanceof AST_PropAccess) || compressor.option("screw_ie8")) {
|
|
self.right = expr;
|
|
self.left = make_node(AST_Undefined, self.left).optimize(compressor);
|
|
if (self.operator.length == 2) self.operator += "=";
|
|
}
|
|
}
|
|
break;
|
|
}
|
|
if (compressor.option("booleans") && self.operator == "+" && compressor.in_boolean_context()) {
|
|
var ll = self.left.evaluate(compressor);
|
|
var rr = self.right.evaluate(compressor);
|
|
if (ll && typeof ll == "string") {
|
|
compressor.warn("+ in boolean context always true [{file}:{line},{col}]", self.start);
|
|
return make_node(AST_Seq, self, {
|
|
car: self.right,
|
|
cdr: make_node(AST_True, self)
|
|
}).optimize(compressor);
|
|
}
|
|
if (rr && typeof rr == "string") {
|
|
compressor.warn("+ in boolean context always true [{file}:{line},{col}]", self.start);
|
|
return make_node(AST_Seq, self, {
|
|
car: self.left,
|
|
cdr: make_node(AST_True, self)
|
|
}).optimize(compressor);
|
|
}
|
|
}
|
|
if (compressor.option("comparisons") && self.is_boolean()) {
|
|
if (!(compressor.parent() instanceof AST_Binary)
|
|
|| compressor.parent() instanceof AST_Assign) {
|
|
var negated = make_node(AST_UnaryPrefix, self, {
|
|
operator: "!",
|
|
expression: self.negate(compressor, first_in_statement(compressor))
|
|
});
|
|
self = best_of(compressor, self, negated);
|
|
}
|
|
if (compressor.option("unsafe_comps")) {
|
|
switch (self.operator) {
|
|
case "<": reverse(">"); break;
|
|
case "<=": reverse(">="); break;
|
|
}
|
|
}
|
|
}
|
|
if (self.operator == "+") {
|
|
if (self.right instanceof AST_String
|
|
&& self.right.getValue() == ""
|
|
&& self.left.is_string(compressor)) {
|
|
return self.left;
|
|
}
|
|
if (self.left instanceof AST_String
|
|
&& self.left.getValue() == ""
|
|
&& self.right.is_string(compressor)) {
|
|
return self.right;
|
|
}
|
|
if (self.left instanceof AST_Binary
|
|
&& self.left.operator == "+"
|
|
&& self.left.left instanceof AST_String
|
|
&& self.left.left.getValue() == ""
|
|
&& self.right.is_string(compressor)) {
|
|
self.left = self.left.right;
|
|
return self.transform(compressor);
|
|
}
|
|
}
|
|
if (compressor.option("evaluate")) {
|
|
switch (self.operator) {
|
|
case "&&":
|
|
var ll = self.left.evaluate(compressor);
|
|
if (!ll) {
|
|
compressor.warn("Condition left of && always false [{file}:{line},{col}]", self.start);
|
|
return maintain_this_binding(compressor.parent(), self, self.left).optimize(compressor);
|
|
} else if (ll !== self.left) {
|
|
compressor.warn("Condition left of && always true [{file}:{line},{col}]", self.start);
|
|
return maintain_this_binding(compressor.parent(), self, self.right).optimize(compressor);
|
|
}
|
|
if (compressor.option("booleans") && compressor.in_boolean_context()) {
|
|
var rr = self.right.evaluate(compressor);
|
|
if (!rr) {
|
|
compressor.warn("Boolean && always false [{file}:{line},{col}]", self.start);
|
|
return make_node(AST_Seq, self, {
|
|
car: self.left,
|
|
cdr: make_node(AST_False, self)
|
|
}).optimize(compressor);
|
|
} else if (rr !== self.right) {
|
|
compressor.warn("Dropping side-effect-free && in boolean context [{file}:{line},{col}]", self.start);
|
|
return self.left.optimize(compressor);
|
|
}
|
|
}
|
|
break;
|
|
case "||":
|
|
var ll = self.left.evaluate(compressor);
|
|
if (!ll) {
|
|
compressor.warn("Condition left of || always false [{file}:{line},{col}]", self.start);
|
|
return maintain_this_binding(compressor.parent(), self, self.right).optimize(compressor);
|
|
} else if (ll !== self.left) {
|
|
compressor.warn("Condition left of || always true [{file}:{line},{col}]", self.start);
|
|
return maintain_this_binding(compressor.parent(), self, self.left).optimize(compressor);
|
|
}
|
|
if (compressor.option("booleans") && compressor.in_boolean_context()) {
|
|
var rr = self.right.evaluate(compressor);
|
|
if (!rr) {
|
|
compressor.warn("Dropping side-effect-free || in boolean context [{file}:{line},{col}]", self.start);
|
|
return self.left.optimize(compressor);
|
|
} else if (rr !== self.right) {
|
|
compressor.warn("Boolean || always true [{file}:{line},{col}]", self.start);
|
|
return make_node(AST_Seq, self, {
|
|
car: self.left,
|
|
cdr: make_node(AST_True, self)
|
|
}).optimize(compressor);
|
|
}
|
|
}
|
|
break;
|
|
}
|
|
var associative = true;
|
|
switch (self.operator) {
|
|
case "+":
|
|
// "foo" + ("bar" + x) => "foobar" + x
|
|
if (self.left instanceof AST_Constant
|
|
&& self.right instanceof AST_Binary
|
|
&& self.right.operator == "+"
|
|
&& self.right.left instanceof AST_Constant
|
|
&& self.right.is_string(compressor)) {
|
|
self = make_node(AST_Binary, self, {
|
|
operator: "+",
|
|
left: make_node(AST_String, self.left, {
|
|
value: "" + self.left.getValue() + self.right.left.getValue(),
|
|
start: self.left.start,
|
|
end: self.right.left.end
|
|
}),
|
|
right: self.right.right
|
|
});
|
|
}
|
|
// (x + "foo") + "bar" => x + "foobar"
|
|
if (self.right instanceof AST_Constant
|
|
&& self.left instanceof AST_Binary
|
|
&& self.left.operator == "+"
|
|
&& self.left.right instanceof AST_Constant
|
|
&& self.left.is_string(compressor)) {
|
|
self = make_node(AST_Binary, self, {
|
|
operator: "+",
|
|
left: self.left.left,
|
|
right: make_node(AST_String, self.right, {
|
|
value: "" + self.left.right.getValue() + self.right.getValue(),
|
|
start: self.left.right.start,
|
|
end: self.right.end
|
|
})
|
|
});
|
|
}
|
|
// (x + "foo") + ("bar" + y) => (x + "foobar") + y
|
|
if (self.left instanceof AST_Binary
|
|
&& self.left.operator == "+"
|
|
&& self.left.is_string(compressor)
|
|
&& self.left.right instanceof AST_Constant
|
|
&& self.right instanceof AST_Binary
|
|
&& self.right.operator == "+"
|
|
&& self.right.left instanceof AST_Constant
|
|
&& self.right.is_string(compressor)) {
|
|
self = make_node(AST_Binary, self, {
|
|
operator: "+",
|
|
left: make_node(AST_Binary, self.left, {
|
|
operator: "+",
|
|
left: self.left.left,
|
|
right: make_node(AST_String, self.left.right, {
|
|
value: "" + self.left.right.getValue() + self.right.left.getValue(),
|
|
start: self.left.right.start,
|
|
end: self.right.left.end
|
|
})
|
|
}),
|
|
right: self.right.right
|
|
});
|
|
}
|
|
// a + -b => a - b
|
|
if (self.right instanceof AST_UnaryPrefix
|
|
&& self.right.operator == "-"
|
|
&& self.left.is_number(compressor)) {
|
|
self = make_node(AST_Binary, self, {
|
|
operator: "-",
|
|
left: self.left,
|
|
right: self.right.expression
|
|
});
|
|
break;
|
|
}
|
|
// -a + b => b - a
|
|
if (self.left instanceof AST_UnaryPrefix
|
|
&& self.left.operator == "-"
|
|
&& reversible()
|
|
&& self.right.is_number(compressor)) {
|
|
self = make_node(AST_Binary, self, {
|
|
operator: "-",
|
|
left: self.right,
|
|
right: self.left.expression
|
|
});
|
|
break;
|
|
}
|
|
case "*":
|
|
associative = compressor.option("unsafe_math");
|
|
case "&":
|
|
case "|":
|
|
case "^":
|
|
// a + +b => +b + a
|
|
if (self.left.is_number(compressor)
|
|
&& self.right.is_number(compressor)
|
|
&& reversible()
|
|
&& !(self.left instanceof AST_Binary
|
|
&& self.left.operator != self.operator
|
|
&& PRECEDENCE[self.left.operator] >= PRECEDENCE[self.operator])) {
|
|
var reversed = make_node(AST_Binary, self, {
|
|
operator: self.operator,
|
|
left: self.right,
|
|
right: self.left
|
|
});
|
|
if (self.right instanceof AST_Constant
|
|
&& !(self.left instanceof AST_Constant)) {
|
|
self = best_of(compressor, reversed, self);
|
|
} else {
|
|
self = best_of(compressor, self, reversed);
|
|
}
|
|
}
|
|
if (associative && self.is_number(compressor)) {
|
|
// a + (b + c) => (a + b) + c
|
|
if (self.right instanceof AST_Binary
|
|
&& self.right.operator == self.operator) {
|
|
self = make_node(AST_Binary, self, {
|
|
operator: self.operator,
|
|
left: make_node(AST_Binary, self.left, {
|
|
operator: self.operator,
|
|
left: self.left,
|
|
right: self.right.left,
|
|
start: self.left.start,
|
|
end: self.right.left.end
|
|
}),
|
|
right: self.right.right
|
|
});
|
|
}
|
|
// (n + 2) + 3 => 5 + n
|
|
// (2 * n) * 3 => 6 * n
|
|
if (self.right instanceof AST_Constant
|
|
&& self.left instanceof AST_Binary
|
|
&& self.left.operator == self.operator) {
|
|
if (self.left.left instanceof AST_Constant) {
|
|
self = make_node(AST_Binary, self, {
|
|
operator: self.operator,
|
|
left: make_node(AST_Binary, self.left, {
|
|
operator: self.operator,
|
|
left: self.left.left,
|
|
right: self.right,
|
|
start: self.left.left.start,
|
|
end: self.right.end
|
|
}),
|
|
right: self.left.right
|
|
});
|
|
} else if (self.left.right instanceof AST_Constant) {
|
|
self = make_node(AST_Binary, self, {
|
|
operator: self.operator,
|
|
left: make_node(AST_Binary, self.left, {
|
|
operator: self.operator,
|
|
left: self.left.right,
|
|
right: self.right,
|
|
start: self.left.right.start,
|
|
end: self.right.end
|
|
}),
|
|
right: self.left.left
|
|
});
|
|
}
|
|
}
|
|
// (a | 1) | (2 | d) => (3 | a) | d
|
|
if (self.left instanceof AST_Binary
|
|
&& self.left.operator == self.operator
|
|
&& self.left.right instanceof AST_Constant
|
|
&& self.right instanceof AST_Binary
|
|
&& self.right.operator == self.operator
|
|
&& self.right.left instanceof AST_Constant) {
|
|
self = make_node(AST_Binary, self, {
|
|
operator: self.operator,
|
|
left: make_node(AST_Binary, self.left, {
|
|
operator: self.operator,
|
|
left: make_node(AST_Binary, self.left.left, {
|
|
operator: self.operator,
|
|
left: self.left.right,
|
|
right: self.right.left,
|
|
start: self.left.right.start,
|
|
end: self.right.left.end
|
|
}),
|
|
right: self.left.left
|
|
}),
|
|
right: self.right.right
|
|
});
|
|
}
|
|
}
|
|
}
|
|
}
|
|
// x && (y && z) ==> x && y && z
|
|
// x || (y || z) ==> x || y || z
|
|
// x + ("y" + z) ==> x + "y" + z
|
|
// "x" + (y + "z")==> "x" + y + "z"
|
|
if (self.right instanceof AST_Binary
|
|
&& self.right.operator == self.operator
|
|
&& (self.operator == "&&"
|
|
|| self.operator == "||"
|
|
|| (self.operator == "+"
|
|
&& (self.right.left.is_string(compressor)
|
|
|| (self.left.is_string(compressor)
|
|
&& self.right.right.is_string(compressor))))))
|
|
{
|
|
self.left = make_node(AST_Binary, self.left, {
|
|
operator : self.operator,
|
|
left : self.left,
|
|
right : self.right.left
|
|
});
|
|
self.right = self.right.right;
|
|
return self.transform(compressor);
|
|
}
|
|
var ev = self.evaluate(compressor);
|
|
if (ev !== self) {
|
|
ev = make_node_from_constant(ev, self).optimize(compressor);
|
|
return best_of(compressor, ev, self);
|
|
}
|
|
return self;
|
|
});
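
// Illustrative examples of the AST_Binary rule above:
//     "undefined" == typeof x   =>   undefined === x   (`comparisons`)
//     "foo" + ("bar" + x)       =>   "foobar" + x       (constant folding)
//     a + -b                    =>   a - b              (when `a` is numeric)
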
OPT(AST_SymbolRef, function(self, compressor){
|
|
var def = self.resolve_defines(compressor);
|
|
if (def) {
|
|
return def.optimize(compressor);
|
|
}
|
|
// testing against !self.scope.uses_with first is an optimization
|
|
if (compressor.option("screw_ie8")
|
|
&& self.undeclared()
|
|
&& (!self.scope.uses_with || !compressor.find_parent(AST_With))) {
|
|
switch (self.name) {
|
|
case "undefined":
|
|
return make_node(AST_Undefined, self).optimize(compressor);
|
|
case "NaN":
|
|
return make_node(AST_NaN, self).optimize(compressor);
|
|
case "Infinity":
|
|
return make_node(AST_Infinity, self).optimize(compressor);
|
|
}
|
|
}
|
|
if (compressor.option("evaluate")
|
|
&& compressor.option("reduce_vars")
|
|
&& is_lhs(self, compressor.parent()) !== self) {
|
|
var d = self.definition();
|
|
var fixed = self.fixed_value();
|
|
if (fixed) {
|
|
if (d.should_replace === undefined) {
|
|
var init = fixed.evaluate(compressor);
|
|
if (init !== fixed && (compressor.option("unsafe_regexp") || !(init instanceof RegExp))) {
|
|
init = make_node_from_constant(init, fixed);
|
|
var value = init.optimize(compressor).print_to_string().length;
|
|
var fn;
|
|
if (has_symbol_ref(fixed)) {
|
|
fn = function() {
|
|
var result = init.optimize(compressor);
|
|
return result === init ? result.clone(true) : result;
|
|
};
|
|
} else {
|
|
value = Math.min(value, fixed.print_to_string().length);
|
|
fn = function() {
|
|
var result = best_of_expression(init.optimize(compressor), fixed);
|
|
return result === init || result === fixed ? result.clone(true) : result;
|
|
};
|
|
}
|
|
var name = d.name.length;
|
|
var overhead = 0;
|
|
if (compressor.option("unused") && (!d.global || compressor.option("toplevel"))) {
|
|
overhead = (name + 2 + value) / d.references.length;
|
|
}
|
|
d.should_replace = value <= name + overhead ? fn : false;
|
|
} else {
|
|
d.should_replace = false;
|
|
}
|
|
}
|
|
if (d.should_replace) {
|
|
return d.should_replace();
|
|
}
|
|
}
|
|
}
|
|
return self;
|
|
|
|
function has_symbol_ref(value) {
|
|
var found;
|
|
value.walk(new TreeWalker(function(node) {
|
|
if (node instanceof AST_SymbolRef) found = true;
|
|
if (found) return true;
|
|
}));
|
|
return found;
|
|
}
|
|
});
|
|
|
|
    function is_atomic(lhs, self) {
        return lhs instanceof AST_SymbolRef || lhs.TYPE === self.TYPE;
    }
|
|
|
|
OPT(AST_Undefined, function(self, compressor){
|
|
if (compressor.option("unsafe")) {
|
|
var undef = find_variable(compressor, "undefined");
|
|
if (undef) {
|
|
var ref = make_node(AST_SymbolRef, self, {
|
|
name : "undefined",
|
|
scope : undef.scope,
|
|
thedef : undef
|
|
});
|
|
ref.is_undefined = true;
|
|
return ref;
|
|
}
|
|
}
|
|
var lhs = is_lhs(compressor.self(), compressor.parent());
|
|
if (lhs && is_atomic(lhs, self)) return self;
|
|
return make_node(AST_UnaryPrefix, self, {
|
|
operator: "void",
|
|
expression: make_node(AST_Number, self, {
|
|
value: 0
|
|
})
|
|
});
|
|
});
|
|
|
|
OPT(AST_Infinity, function(self, compressor){
|
|
var lhs = is_lhs(compressor.self(), compressor.parent());
|
|
if (lhs && is_atomic(lhs, self)) return self;
|
|
if (compressor.option("keep_infinity")
|
|
&& !(lhs && !is_atomic(lhs, self))
|
|
&& !find_variable(compressor, "Infinity"))
|
|
return self;
|
|
return make_node(AST_Binary, self, {
|
|
operator: "/",
|
|
left: make_node(AST_Number, self, {
|
|
value: 1
|
|
}),
|
|
right: make_node(AST_Number, self, {
|
|
value: 0
|
|
})
|
|
});
|
|
});
|
|
|
|
OPT(AST_NaN, function(self, compressor){
|
|
var lhs = is_lhs(compressor.self(), compressor.parent());
|
|
if (lhs && !is_atomic(lhs, self)
|
|
|| find_variable(compressor, "NaN")) {
|
|
return make_node(AST_Binary, self, {
|
|
operator: "/",
|
|
left: make_node(AST_Number, self, {
|
|
value: 0
|
|
}),
|
|
right: make_node(AST_Number, self, {
|
|
value: 0
|
|
})
|
|
});
|
|
}
|
|
return self;
|
|
});
|
|
|
|
    var ASSIGN_OPS = [ '+', '-', '/', '*', '%', '>>', '<<', '>>>', '|', '^', '&' ];
    var ASSIGN_OPS_COMMUTATIVE = [ '*', '|', '^', '&' ];
|
|
OPT(AST_Assign, function(self, compressor){
|
|
self = self.lift_sequences(compressor);
|
|
if (self.operator == "=" && self.left instanceof AST_SymbolRef && self.right instanceof AST_Binary) {
|
|
// x = expr1 OP expr2
|
|
if (self.right.left instanceof AST_SymbolRef
|
|
&& self.right.left.name == self.left.name
|
|
&& member(self.right.operator, ASSIGN_OPS)) {
|
|
// x = x - 2 ---> x -= 2
|
|
self.operator = self.right.operator + "=";
|
|
self.right = self.right.right;
|
|
}
|
|
else if (self.right.right instanceof AST_SymbolRef
|
|
&& self.right.right.name == self.left.name
|
|
&& member(self.right.operator, ASSIGN_OPS_COMMUTATIVE)
|
|
&& !self.right.left.has_side_effects(compressor)) {
|
|
// x = 2 & x ---> x &= 2
|
|
self.operator = self.right.operator + "=";
|
|
self.right = self.right.left;
|
|
}
|
|
}
|
|
return self;
|
|
});
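    // A few illustrative cases of the assignment rewrite above (a minimal
    // sketch; exact output depends on the enabled compressor options and on
    // later passes):
    //     x = x - 2;      ---> x -= 2
    //     x = 2 & x;      ---> x &= 2    ("&" is commutative)
    //     x = 2 - x;      ---> unchanged ("-" is not commutative)
    //     x = f() * x;    ---> unchanged (the left operand may have side effects)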
|
|
|
|
OPT(AST_Conditional, function(self, compressor){
|
|
if (!compressor.option("conditionals")) return self;
|
|
if (self.condition instanceof AST_Seq) {
|
|
var car = self.condition.car;
|
|
self.condition = self.condition.cdr;
|
|
return AST_Seq.cons(car, self);
|
|
}
|
|
var cond = self.condition.evaluate(compressor);
|
|
if (cond !== self.condition) {
|
|
if (cond) {
|
|
compressor.warn("Condition always true [{file}:{line},{col}]", self.start);
|
|
return maintain_this_binding(compressor.parent(), self, self.consequent);
|
|
} else {
|
|
compressor.warn("Condition always false [{file}:{line},{col}]", self.start);
|
|
return maintain_this_binding(compressor.parent(), self, self.alternative);
|
|
}
|
|
}
|
|
var negated = cond.negate(compressor, first_in_statement(compressor));
|
|
if (best_of(compressor, cond, negated) === negated) {
|
|
self = make_node(AST_Conditional, self, {
|
|
condition: negated,
|
|
consequent: self.alternative,
|
|
alternative: self.consequent
|
|
});
|
|
}
|
|
var condition = self.condition;
|
|
var consequent = self.consequent;
|
|
var alternative = self.alternative;
|
|
// x?x:y --> x||y
|
|
if (condition instanceof AST_SymbolRef
|
|
&& consequent instanceof AST_SymbolRef
|
|
&& condition.definition() === consequent.definition()) {
|
|
return make_node(AST_Binary, self, {
|
|
operator: "||",
|
|
left: condition,
|
|
right: alternative
|
|
});
|
|
}
|
|
// if (foo) exp = something; else exp = something_else;
|
|
// |
|
|
// v
|
|
// exp = foo ? something : something_else;
|
|
if (consequent instanceof AST_Assign
|
|
&& alternative instanceof AST_Assign
|
|
&& consequent.operator == alternative.operator
|
|
&& consequent.left.equivalent_to(alternative.left)
|
|
&& (!self.condition.has_side_effects(compressor)
|
|
|| consequent.operator == "="
|
|
&& !consequent.left.has_side_effects(compressor))) {
|
|
return make_node(AST_Assign, self, {
|
|
operator: consequent.operator,
|
|
left: consequent.left,
|
|
right: make_node(AST_Conditional, self, {
|
|
condition: self.condition,
|
|
consequent: consequent.right,
|
|
alternative: alternative.right
|
|
})
|
|
});
|
|
}
|
|
// x ? y(a) : y(b) --> y(x ? a : b)
|
|
if (consequent instanceof AST_Call
|
|
&& alternative.TYPE === consequent.TYPE
|
|
&& consequent.args.length == 1
|
|
&& alternative.args.length == 1
|
|
&& consequent.expression.equivalent_to(alternative.expression)
|
|
&& !consequent.expression.has_side_effects(compressor)) {
|
|
consequent.args[0] = make_node(AST_Conditional, self, {
|
|
condition: self.condition,
|
|
consequent: consequent.args[0],
|
|
alternative: alternative.args[0]
|
|
});
|
|
return consequent;
|
|
}
|
|
// x?y?z:a:a --> x&&y?z:a
|
|
if (consequent instanceof AST_Conditional
|
|
&& consequent.alternative.equivalent_to(alternative)) {
|
|
return make_node(AST_Conditional, self, {
|
|
condition: make_node(AST_Binary, self, {
|
|
left: self.condition,
|
|
operator: "&&",
|
|
right: consequent.condition
|
|
}),
|
|
consequent: consequent.consequent,
|
|
alternative: alternative
|
|
});
|
|
}
|
|
// x ? y : y --> x, y
|
|
if (consequent.equivalent_to(alternative)) {
|
|
return make_node(AST_Seq, self, {
|
|
car: self.condition,
|
|
cdr: consequent
|
|
}).optimize(compressor);
|
|
}
|
|
|
|
if (is_true(self.consequent)) {
|
|
if (is_false(self.alternative)) {
|
|
// c ? true : false ---> !!c
|
|
return booleanize(self.condition);
|
|
}
|
|
// c ? true : x ---> !!c || x
|
|
return make_node(AST_Binary, self, {
|
|
operator: "||",
|
|
left: booleanize(self.condition),
|
|
right: self.alternative
|
|
});
|
|
}
|
|
if (is_false(self.consequent)) {
|
|
if (is_true(self.alternative)) {
|
|
// c ? false : true ---> !c
|
|
return booleanize(self.condition.negate(compressor));
|
|
}
|
|
// c ? false : x ---> !c && x
|
|
return make_node(AST_Binary, self, {
|
|
operator: "&&",
|
|
left: booleanize(self.condition.negate(compressor)),
|
|
right: self.alternative
|
|
});
|
|
}
|
|
if (is_true(self.alternative)) {
|
|
// c ? x : true ---> !c || x
|
|
return make_node(AST_Binary, self, {
|
|
operator: "||",
|
|
left: booleanize(self.condition.negate(compressor)),
|
|
right: self.consequent
|
|
});
|
|
}
|
|
if (is_false(self.alternative)) {
|
|
// c ? x : false ---> !!c && x
|
|
return make_node(AST_Binary, self, {
|
|
operator: "&&",
|
|
left: booleanize(self.condition),
|
|
right: self.consequent
|
|
});
|
|
}
|
|
|
|
return self;
|
|
|
|
function booleanize(node) {
|
|
if (node.is_boolean()) return node;
|
|
// !!expression
|
|
return make_node(AST_UnaryPrefix, node, {
|
|
operator: "!",
|
|
expression: node.negate(compressor)
|
|
});
|
|
}
|
|
|
|
// AST_True or !0
|
|
function is_true(node) {
|
|
return node instanceof AST_True
|
|
|| (node instanceof AST_UnaryPrefix
|
|
&& node.operator == "!"
|
|
&& node.expression instanceof AST_Constant
|
|
&& !node.expression.value);
|
|
}
|
|
// AST_False or !1
|
|
function is_false(node) {
|
|
return node instanceof AST_False
|
|
|| (node instanceof AST_UnaryPrefix
|
|
&& node.operator == "!"
|
|
&& node.expression instanceof AST_Constant
|
|
&& !!node.expression.value);
|
|
}
|
|
});
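    // Illustrative effect of the boolean-branch rewrites above (assuming a
    // non-boolean condition c; later passes may simplify further):
    //     c ? true  : false ---> !!c
    //     c ? false : true  ---> !c
    //     c ? true  : x     ---> !!c || x
    //     c ? false : x     ---> !c && x
    //     c ? x : true      ---> !c || x
    //     c ? x : false     ---> !!c && x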
|
|
|
|
OPT(AST_Boolean, function(self, compressor){
|
|
if (compressor.option("booleans")) {
|
|
var p = compressor.parent();
|
|
if (p instanceof AST_Binary && (p.operator == "=="
|
|
|| p.operator == "!=")) {
|
|
compressor.warn("Non-strict equality against boolean: {operator} {value} [{file}:{line},{col}]", {
|
|
operator : p.operator,
|
|
value : self.value,
|
|
file : p.start.file,
|
|
line : p.start.line,
|
|
col : p.start.col,
|
|
});
|
|
return make_node(AST_Number, self, {
|
|
value: +self.value
|
|
});
|
|
}
|
|
return make_node(AST_UnaryPrefix, self, {
|
|
operator: "!",
|
|
expression: make_node(AST_Number, self, {
|
|
value: 1 - self.value
|
|
})
|
|
});
|
|
}
|
|
return self;
|
|
});
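    // Illustrative effect of the "booleans" handling above:
    //     x == true   ---> x == 1   (plus the non-strict-equality warning)
    //     true        ---> !0
    //     false       ---> !1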
|
|
|
|
OPT(AST_Sub, function(self, compressor){
|
|
var prop = self.property;
|
|
if (prop instanceof AST_String && compressor.option("properties")) {
|
|
prop = prop.getValue();
|
|
if (RESERVED_WORDS(prop) ? compressor.option("screw_ie8") : is_identifier_string(prop)) {
|
|
return make_node(AST_Dot, self, {
|
|
expression : self.expression,
|
|
property : prop
|
|
}).optimize(compressor);
|
|
}
|
|
var v = parseFloat(prop);
|
|
if (!isNaN(v) && v.toString() == prop) {
|
|
self.property = make_node(AST_Number, self.property, {
|
|
value: v
|
|
});
|
|
}
|
|
}
|
|
var ev = self.evaluate(compressor);
|
|
if (ev !== self) {
|
|
ev = make_node_from_constant(ev, self).optimize(compressor);
|
|
return best_of(compressor, ev, self);
|
|
}
|
|
return self;
|
|
});
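    // Illustrative effect of the "properties" handling above:
    //     a["foo"]    ---> a.foo
    //     a["1.5"]    ---> a[1.5]
    //     a["class"]  ---> a.class   (only when screw_ie8 is enabled)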
|
|
|
|
OPT(AST_Dot, function(self, compressor){
|
|
var def = self.resolve_defines(compressor);
|
|
if (def) {
|
|
return def.optimize(compressor);
|
|
}
|
|
var prop = self.property;
|
|
if (RESERVED_WORDS(prop) && !compressor.option("screw_ie8")) {
|
|
return make_node(AST_Sub, self, {
|
|
expression : self.expression,
|
|
property : make_node(AST_String, self, {
|
|
value: prop
|
|
})
|
|
}).optimize(compressor);
|
|
}
|
|
if (compressor.option("unsafe_proto")
|
|
&& self.expression instanceof AST_Dot
|
|
&& self.expression.property == "prototype") {
|
|
var exp = self.expression.expression;
|
|
if (exp instanceof AST_SymbolRef && exp.undeclared()) switch (exp.name) {
|
|
case "Array":
|
|
self.expression = make_node(AST_Array, self.expression, {
|
|
elements: []
|
|
});
|
|
break;
|
|
case "Object":
|
|
self.expression = make_node(AST_Object, self.expression, {
|
|
properties: []
|
|
});
|
|
break;
|
|
case "String":
|
|
self.expression = make_node(AST_String, self.expression, {
|
|
value: ""
|
|
});
|
|
break;
|
|
}
|
|
}
|
|
var ev = self.evaluate(compressor);
|
|
if (ev !== self) {
|
|
ev = make_node_from_constant(ev, self).optimize(compressor);
|
|
return best_of(compressor, ev, self);
|
|
}
|
|
return self;
|
|
});
|
|
|
|
    function literals_in_boolean_context(self, compressor) {
        if (compressor.option("booleans") && compressor.in_boolean_context()) {
            return best_of(compressor, self, make_node(AST_Seq, self, {
                car: self,
                cdr: make_node(AST_True, self)
            }).optimize(compressor));
        }
        return self;
    };
    OPT(AST_Array, literals_in_boolean_context);
    OPT(AST_Object, literals_in_boolean_context);
    OPT(AST_RegExp, literals_in_boolean_context);
|
|
|
|
OPT(AST_Return, function(self, compressor){
|
|
if (self.value && is_undefined(self.value, compressor)) {
|
|
self.value = null;
|
|
}
|
|
return self;
|
|
});
|
|
|
|
OPT(AST_VarDef, function(self, compressor){
|
|
var defines = compressor.option("global_defs");
|
|
if (defines && HOP(defines, self.name.name)) {
|
|
compressor.warn('global_defs ' + self.name.name + ' redefined [{file}:{line},{col}]', self.start);
|
|
}
|
|
return self;
|
|
});
|
|
|
|
})();
|
|
|
|
/***********************************************************************
|
|
|
|
A JavaScript tokenizer / parser / beautifier / compressor.
|
|
https://github.com/mishoo/UglifyJS2
|
|
|
|
-------------------------------- (C) ---------------------------------
|
|
|
|
Author: Mihai Bazon
|
|
<mihai.bazon@gmail.com>
|
|
http://mihai.bazon.net/blog
|
|
|
|
Distributed under the BSD license:
|
|
|
|
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
|
|
|
Redistribution and use in source and binary forms, with or without
|
|
modification, are permitted provided that the following conditions
|
|
are met:
|
|
|
|
* Redistributions of source code must retain the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer.
|
|
|
|
* Redistributions in binary form must reproduce the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer in the documentation and/or other materials
|
|
provided with the distribution.
|
|
|
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
|
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
|
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
|
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
|
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
|
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
|
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
|
SUCH DAMAGE.
|
|
|
|
***********************************************************************/
|
|
|
|
"use strict";
|
|
|
|
// a small wrapper around fitzgen's source-map library
|
|
function SourceMap(options) {
|
|
options = defaults(options, {
|
|
file : null,
|
|
root : null,
|
|
orig : null,
|
|
|
|
orig_line_diff : 0,
|
|
dest_line_diff : 0,
|
|
});
|
|
var generator = new MOZ_SourceMap.SourceMapGenerator({
|
|
file : options.file,
|
|
sourceRoot : options.root
|
|
});
|
|
var orig_map = options.orig && new MOZ_SourceMap.SourceMapConsumer(options.orig);
|
|
|
|
if (orig_map && Array.isArray(options.orig.sources)) {
|
|
orig_map._sources.toArray().forEach(function(source) {
|
|
var sourceContent = orig_map.sourceContentFor(source, true);
|
|
if (sourceContent) {
|
|
generator.setSourceContent(source, sourceContent);
|
|
}
|
|
});
|
|
}
|
|
|
|
function add(source, gen_line, gen_col, orig_line, orig_col, name) {
|
|
if (orig_map) {
|
|
var info = orig_map.originalPositionFor({
|
|
line: orig_line,
|
|
column: orig_col
|
|
});
|
|
if (info.source === null) {
|
|
return;
|
|
}
|
|
source = info.source;
|
|
orig_line = info.line;
|
|
orig_col = info.column;
|
|
name = info.name || name;
|
|
}
|
|
generator.addMapping({
|
|
generated : { line: gen_line + options.dest_line_diff, column: gen_col },
|
|
original : { line: orig_line + options.orig_line_diff, column: orig_col },
|
|
source : source,
|
|
name : name
|
|
});
|
|
};
|
|
return {
|
|
add : add,
|
|
get : function() { return generator },
|
|
toString : function() { return JSON.stringify(generator.toJSON()); }
|
|
};
|
|
};
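// Minimal usage sketch for the SourceMap wrapper above (names and positions
// are illustrative only):
//     var map = SourceMap({ file: "out.js" });
//     map.add("in.js", /*gen line/col*/ 1, 0, /*orig line/col*/ 10, 4, "someName");
//     var json = map.toString();   // serialized source map JSON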
|
|
|
|
/***********************************************************************
|
|
|
|
A JavaScript tokenizer / parser / beautifier / compressor.
|
|
https://github.com/mishoo/UglifyJS2
|
|
|
|
-------------------------------- (C) ---------------------------------
|
|
|
|
Author: Mihai Bazon
|
|
<mihai.bazon@gmail.com>
|
|
http://mihai.bazon.net/blog
|
|
|
|
Distributed under the BSD license:
|
|
|
|
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
|
|
|
Redistribution and use in source and binary forms, with or without
|
|
modification, are permitted provided that the following conditions
|
|
are met:
|
|
|
|
* Redistributions of source code must retain the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer.
|
|
|
|
* Redistributions in binary form must reproduce the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer in the documentation and/or other materials
|
|
provided with the distribution.
|
|
|
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
|
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
|
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
|
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
|
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
|
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
|
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
|
SUCH DAMAGE.
|
|
|
|
***********************************************************************/
|
|
|
|
"use strict";
|
|
|
|
(function(){
|
|
|
|
var normalize_directives = function(body) {
|
|
var in_directive = true;
|
|
|
|
for (var i = 0; i < body.length; i++) {
|
|
if (in_directive && body[i] instanceof AST_Statement && body[i].body instanceof AST_String) {
|
|
body[i] = new AST_Directive({
|
|
start: body[i].start,
|
|
end: body[i].end,
|
|
value: body[i].body.value
|
|
});
|
|
} else if (in_directive && !(body[i] instanceof AST_Statement && body[i].body instanceof AST_String)) {
|
|
in_directive = false;
|
|
}
|
|
}
|
|
|
|
return body;
|
|
};
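    // For example, only the leading run of string statements is re-tagged as
    // AST_Directive nodes; the first non-string statement ends the prologue:
    //     "use strict";   // becomes AST_Directive
    //     "use asm";      // becomes AST_Directive
    //     var x = 1;      // ends the prologue
    //     "too late";     // stays a plain string statement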
|
|
|
|
var MOZ_TO_ME = {
|
|
Program: function(M) {
|
|
return new AST_Toplevel({
|
|
start: my_start_token(M),
|
|
end: my_end_token(M),
|
|
body: normalize_directives(M.body.map(from_moz))
|
|
});
|
|
},
|
|
FunctionDeclaration: function(M) {
|
|
return new AST_Defun({
|
|
start: my_start_token(M),
|
|
end: my_end_token(M),
|
|
name: from_moz(M.id),
|
|
argnames: M.params.map(from_moz),
|
|
body: normalize_directives(from_moz(M.body).body)
|
|
});
|
|
},
|
|
FunctionExpression: function(M) {
|
|
return new AST_Function({
|
|
start: my_start_token(M),
|
|
end: my_end_token(M),
|
|
name: from_moz(M.id),
|
|
argnames: M.params.map(from_moz),
|
|
body: normalize_directives(from_moz(M.body).body)
|
|
});
|
|
},
|
|
ExpressionStatement: function(M) {
|
|
return new AST_SimpleStatement({
|
|
start: my_start_token(M),
|
|
end: my_end_token(M),
|
|
body: from_moz(M.expression)
|
|
});
|
|
},
|
|
TryStatement: function(M) {
|
|
var handlers = M.handlers || [M.handler];
|
|
if (handlers.length > 1 || M.guardedHandlers && M.guardedHandlers.length) {
|
|
throw new Error("Multiple catch clauses are not supported.");
|
|
}
|
|
return new AST_Try({
|
|
start : my_start_token(M),
|
|
end : my_end_token(M),
|
|
body : from_moz(M.block).body,
|
|
bcatch : from_moz(handlers[0]),
|
|
bfinally : M.finalizer ? new AST_Finally(from_moz(M.finalizer)) : null
|
|
});
|
|
},
|
|
Property: function(M) {
|
|
var key = M.key;
|
|
var args = {
|
|
start : my_start_token(key),
|
|
end : my_end_token(M.value),
|
|
key : key.type == "Identifier" ? key.name : key.value,
|
|
value : from_moz(M.value)
|
|
};
|
|
if (M.kind == "init") return new AST_ObjectKeyVal(args);
|
|
args.key = new AST_SymbolAccessor({
|
|
name: args.key
|
|
});
|
|
args.value = new AST_Accessor(args.value);
|
|
if (M.kind == "get") return new AST_ObjectGetter(args);
|
|
if (M.kind == "set") return new AST_ObjectSetter(args);
|
|
},
|
|
ArrayExpression: function(M) {
|
|
return new AST_Array({
|
|
start : my_start_token(M),
|
|
end : my_end_token(M),
|
|
elements : M.elements.map(function(elem){
|
|
return elem === null ? new AST_Hole() : from_moz(elem);
|
|
})
|
|
});
|
|
},
|
|
ObjectExpression: function(M) {
|
|
return new AST_Object({
|
|
start : my_start_token(M),
|
|
end : my_end_token(M),
|
|
properties : M.properties.map(function(prop){
|
|
prop.type = "Property";
|
|
return from_moz(prop)
|
|
})
|
|
});
|
|
},
|
|
SequenceExpression: function(M) {
|
|
return AST_Seq.from_array(M.expressions.map(from_moz));
|
|
},
|
|
MemberExpression: function(M) {
|
|
return new (M.computed ? AST_Sub : AST_Dot)({
|
|
start : my_start_token(M),
|
|
end : my_end_token(M),
|
|
property : M.computed ? from_moz(M.property) : M.property.name,
|
|
expression : from_moz(M.object)
|
|
});
|
|
},
|
|
SwitchCase: function(M) {
|
|
return new (M.test ? AST_Case : AST_Default)({
|
|
start : my_start_token(M),
|
|
end : my_end_token(M),
|
|
expression : from_moz(M.test),
|
|
body : M.consequent.map(from_moz)
|
|
});
|
|
},
|
|
VariableDeclaration: function(M) {
|
|
return new (M.kind === "const" ? AST_Const : AST_Var)({
|
|
start : my_start_token(M),
|
|
end : my_end_token(M),
|
|
definitions : M.declarations.map(from_moz)
|
|
});
|
|
},
|
|
Literal: function(M) {
|
|
var val = M.value, args = {
|
|
start : my_start_token(M),
|
|
end : my_end_token(M)
|
|
};
|
|
if (val === null) return new AST_Null(args);
|
|
switch (typeof val) {
|
|
case "string":
|
|
args.value = val;
|
|
return new AST_String(args);
|
|
case "number":
|
|
args.value = val;
|
|
return new AST_Number(args);
|
|
case "boolean":
|
|
return new (val ? AST_True : AST_False)(args);
|
|
default:
|
|
var rx = M.regex;
|
|
if (rx && rx.pattern) {
|
|
// RegExpLiteral as per ESTree AST spec
|
|
args.value = new RegExp(rx.pattern, rx.flags).toString();
|
|
} else {
|
|
// support legacy RegExp
|
|
args.value = M.regex && M.raw ? M.raw : val;
|
|
}
|
|
return new AST_RegExp(args);
|
|
}
|
|
},
|
|
Identifier: function(M) {
|
|
var p = FROM_MOZ_STACK[FROM_MOZ_STACK.length - 2];
|
|
return new ( p.type == "LabeledStatement" ? AST_Label
|
|
: p.type == "VariableDeclarator" && p.id === M ? (p.kind == "const" ? AST_SymbolConst : AST_SymbolVar)
|
|
: p.type == "FunctionExpression" ? (p.id === M ? AST_SymbolLambda : AST_SymbolFunarg)
|
|
: p.type == "FunctionDeclaration" ? (p.id === M ? AST_SymbolDefun : AST_SymbolFunarg)
|
|
: p.type == "CatchClause" ? AST_SymbolCatch
|
|
: p.type == "BreakStatement" || p.type == "ContinueStatement" ? AST_LabelRef
|
|
: AST_SymbolRef)({
|
|
start : my_start_token(M),
|
|
end : my_end_token(M),
|
|
name : M.name
|
|
});
|
|
}
|
|
};
|
|
|
|
MOZ_TO_ME.UpdateExpression =
|
|
MOZ_TO_ME.UnaryExpression = function To_Moz_Unary(M) {
|
|
var prefix = "prefix" in M ? M.prefix
|
|
: M.type == "UnaryExpression" ? true : false;
|
|
return new (prefix ? AST_UnaryPrefix : AST_UnaryPostfix)({
|
|
start : my_start_token(M),
|
|
end : my_end_token(M),
|
|
operator : M.operator,
|
|
expression : from_moz(M.argument)
|
|
});
|
|
};
|
|
|
|
map("EmptyStatement", AST_EmptyStatement);
|
|
map("BlockStatement", AST_BlockStatement, "body@body");
|
|
map("IfStatement", AST_If, "test>condition, consequent>body, alternate>alternative");
|
|
map("LabeledStatement", AST_LabeledStatement, "label>label, body>body");
|
|
map("BreakStatement", AST_Break, "label>label");
|
|
map("ContinueStatement", AST_Continue, "label>label");
|
|
map("WithStatement", AST_With, "object>expression, body>body");
|
|
map("SwitchStatement", AST_Switch, "discriminant>expression, cases@body");
|
|
map("ReturnStatement", AST_Return, "argument>value");
|
|
map("ThrowStatement", AST_Throw, "argument>value");
|
|
map("WhileStatement", AST_While, "test>condition, body>body");
|
|
map("DoWhileStatement", AST_Do, "test>condition, body>body");
|
|
map("ForStatement", AST_For, "init>init, test>condition, update>step, body>body");
|
|
map("ForInStatement", AST_ForIn, "left>init, right>object, body>body");
|
|
map("DebuggerStatement", AST_Debugger);
|
|
map("VariableDeclarator", AST_VarDef, "id>name, init>value");
|
|
map("CatchClause", AST_Catch, "param>argname, body%body");
|
|
|
|
map("ThisExpression", AST_This);
|
|
map("BinaryExpression", AST_Binary, "operator=operator, left>left, right>right");
|
|
map("LogicalExpression", AST_Binary, "operator=operator, left>left, right>right");
|
|
map("AssignmentExpression", AST_Assign, "operator=operator, left>left, right>right");
|
|
map("ConditionalExpression", AST_Conditional, "test>condition, consequent>consequent, alternate>alternative");
|
|
map("NewExpression", AST_New, "callee>expression, arguments@args");
|
|
map("CallExpression", AST_Call, "callee>expression, arguments@args");
|
|
|
|
def_to_moz(AST_Toplevel, function To_Moz_Program(M) {
|
|
return to_moz_scope("Program", M);
|
|
});
|
|
|
|
def_to_moz(AST_Defun, function To_Moz_FunctionDeclaration(M) {
|
|
return {
|
|
type: "FunctionDeclaration",
|
|
id: to_moz(M.name),
|
|
params: M.argnames.map(to_moz),
|
|
body: to_moz_scope("BlockStatement", M)
|
|
}
|
|
});
|
|
|
|
def_to_moz(AST_Function, function To_Moz_FunctionExpression(M) {
|
|
return {
|
|
type: "FunctionExpression",
|
|
id: to_moz(M.name),
|
|
params: M.argnames.map(to_moz),
|
|
body: to_moz_scope("BlockStatement", M)
|
|
}
|
|
});
|
|
|
|
def_to_moz(AST_Directive, function To_Moz_Directive(M) {
|
|
return {
|
|
type: "ExpressionStatement",
|
|
expression: {
|
|
type: "Literal",
|
|
value: M.value
|
|
}
|
|
};
|
|
});
|
|
|
|
def_to_moz(AST_SimpleStatement, function To_Moz_ExpressionStatement(M) {
|
|
return {
|
|
type: "ExpressionStatement",
|
|
expression: to_moz(M.body)
|
|
};
|
|
});
|
|
|
|
def_to_moz(AST_SwitchBranch, function To_Moz_SwitchCase(M) {
|
|
return {
|
|
type: "SwitchCase",
|
|
test: to_moz(M.expression),
|
|
consequent: M.body.map(to_moz)
|
|
};
|
|
});
|
|
|
|
def_to_moz(AST_Try, function To_Moz_TryStatement(M) {
|
|
return {
|
|
type: "TryStatement",
|
|
block: to_moz_block(M),
|
|
handler: to_moz(M.bcatch),
|
|
guardedHandlers: [],
|
|
finalizer: to_moz(M.bfinally)
|
|
};
|
|
});
|
|
|
|
def_to_moz(AST_Catch, function To_Moz_CatchClause(M) {
|
|
return {
|
|
type: "CatchClause",
|
|
param: to_moz(M.argname),
|
|
guard: null,
|
|
body: to_moz_block(M)
|
|
};
|
|
});
|
|
|
|
def_to_moz(AST_Definitions, function To_Moz_VariableDeclaration(M) {
|
|
return {
|
|
type: "VariableDeclaration",
|
|
kind: M instanceof AST_Const ? "const" : "var",
|
|
declarations: M.definitions.map(to_moz)
|
|
};
|
|
});
|
|
|
|
def_to_moz(AST_Seq, function To_Moz_SequenceExpression(M) {
|
|
return {
|
|
type: "SequenceExpression",
|
|
expressions: M.to_array().map(to_moz)
|
|
};
|
|
});
|
|
|
|
def_to_moz(AST_PropAccess, function To_Moz_MemberExpression(M) {
|
|
var isComputed = M instanceof AST_Sub;
|
|
return {
|
|
type: "MemberExpression",
|
|
object: to_moz(M.expression),
|
|
computed: isComputed,
|
|
property: isComputed ? to_moz(M.property) : {type: "Identifier", name: M.property}
|
|
};
|
|
});
|
|
|
|
def_to_moz(AST_Unary, function To_Moz_Unary(M) {
|
|
return {
|
|
type: M.operator == "++" || M.operator == "--" ? "UpdateExpression" : "UnaryExpression",
|
|
operator: M.operator,
|
|
prefix: M instanceof AST_UnaryPrefix,
|
|
argument: to_moz(M.expression)
|
|
};
|
|
});
|
|
|
|
def_to_moz(AST_Binary, function To_Moz_BinaryExpression(M) {
|
|
return {
|
|
type: M.operator == "&&" || M.operator == "||" ? "LogicalExpression" : "BinaryExpression",
|
|
left: to_moz(M.left),
|
|
operator: M.operator,
|
|
right: to_moz(M.right)
|
|
};
|
|
});
|
|
|
|
def_to_moz(AST_Array, function To_Moz_ArrayExpression(M) {
|
|
return {
|
|
type: "ArrayExpression",
|
|
elements: M.elements.map(to_moz)
|
|
};
|
|
});
|
|
|
|
def_to_moz(AST_Object, function To_Moz_ObjectExpression(M) {
|
|
return {
|
|
type: "ObjectExpression",
|
|
properties: M.properties.map(to_moz)
|
|
};
|
|
});
|
|
|
|
def_to_moz(AST_ObjectProperty, function To_Moz_Property(M) {
|
|
var key = {
|
|
type: "Literal",
|
|
value: M.key instanceof AST_SymbolAccessor ? M.key.name : M.key
|
|
};
|
|
var kind;
|
|
if (M instanceof AST_ObjectKeyVal) {
|
|
kind = "init";
|
|
} else
|
|
if (M instanceof AST_ObjectGetter) {
|
|
kind = "get";
|
|
} else
|
|
if (M instanceof AST_ObjectSetter) {
|
|
kind = "set";
|
|
}
|
|
return {
|
|
type: "Property",
|
|
kind: kind,
|
|
key: key,
|
|
value: to_moz(M.value)
|
|
};
|
|
});
|
|
|
|
def_to_moz(AST_Symbol, function To_Moz_Identifier(M) {
|
|
var def = M.definition();
|
|
return {
|
|
type: "Identifier",
|
|
name: def ? def.mangled_name || def.name : M.name
|
|
};
|
|
});
|
|
|
|
def_to_moz(AST_RegExp, function To_Moz_RegExpLiteral(M) {
|
|
var value = M.value;
|
|
return {
|
|
type: "Literal",
|
|
value: value,
|
|
raw: value.toString(),
|
|
regex: {
|
|
pattern: value.source,
|
|
flags: value.toString().match(/[gimuy]*$/)[0]
|
|
}
|
|
};
|
|
});
|
|
|
|
def_to_moz(AST_Constant, function To_Moz_Literal(M) {
|
|
var value = M.value;
|
|
if (typeof value === 'number' && (value < 0 || (value === 0 && 1 / value < 0))) {
|
|
return {
|
|
type: "UnaryExpression",
|
|
operator: "-",
|
|
prefix: true,
|
|
argument: {
|
|
type: "Literal",
|
|
value: -value,
|
|
raw: M.start.raw
|
|
}
|
|
};
|
|
}
|
|
return {
|
|
type: "Literal",
|
|
value: value,
|
|
raw: M.start.raw
|
|
};
|
|
});
|
|
|
|
def_to_moz(AST_Atom, function To_Moz_Atom(M) {
|
|
return {
|
|
type: "Identifier",
|
|
name: String(M.value)
|
|
};
|
|
});
|
|
|
|
AST_Boolean.DEFMETHOD("to_mozilla_ast", AST_Constant.prototype.to_mozilla_ast);
|
|
AST_Null.DEFMETHOD("to_mozilla_ast", AST_Constant.prototype.to_mozilla_ast);
|
|
AST_Hole.DEFMETHOD("to_mozilla_ast", function To_Moz_ArrayHole() { return null });
|
|
|
|
AST_Block.DEFMETHOD("to_mozilla_ast", AST_BlockStatement.prototype.to_mozilla_ast);
|
|
AST_Lambda.DEFMETHOD("to_mozilla_ast", AST_Function.prototype.to_mozilla_ast);
|
|
|
|
/* -----[ tools ]----- */
|
|
|
|
function raw_token(moznode) {
|
|
if (moznode.type == "Literal") {
|
|
return moznode.raw != null ? moznode.raw : moznode.value + "";
|
|
}
|
|
}
|
|
|
|
function my_start_token(moznode) {
|
|
var loc = moznode.loc, start = loc && loc.start;
|
|
var range = moznode.range;
|
|
return new AST_Token({
|
|
file : loc && loc.source,
|
|
line : start && start.line,
|
|
col : start && start.column,
|
|
pos : range ? range[0] : moznode.start,
|
|
endline : start && start.line,
|
|
endcol : start && start.column,
|
|
endpos : range ? range[0] : moznode.start,
|
|
raw : raw_token(moznode),
|
|
});
|
|
};
|
|
|
|
function my_end_token(moznode) {
|
|
var loc = moznode.loc, end = loc && loc.end;
|
|
var range = moznode.range;
|
|
return new AST_Token({
|
|
file : loc && loc.source,
|
|
line : end && end.line,
|
|
col : end && end.column,
|
|
pos : range ? range[1] : moznode.end,
|
|
endline : end && end.line,
|
|
endcol : end && end.column,
|
|
endpos : range ? range[1] : moznode.end,
|
|
raw : raw_token(moznode),
|
|
});
|
|
};
|
|
|
|
function map(moztype, mytype, propmap) {
|
|
var moz_to_me = "function From_Moz_" + moztype + "(M){\n";
|
|
moz_to_me += "return new U2." + mytype.name + "({\n" +
|
|
"start: my_start_token(M),\n" +
|
|
"end: my_end_token(M)";
|
|
|
|
var me_to_moz = "function To_Moz_" + moztype + "(M){\n";
|
|
me_to_moz += "return {\n" +
|
|
"type: " + JSON.stringify(moztype);
|
|
|
|
if (propmap) propmap.split(/\s*,\s*/).forEach(function(prop){
|
|
var m = /([a-z0-9$_]+)(=|@|>|%)([a-z0-9$_]+)/i.exec(prop);
|
|
if (!m) throw new Error("Can't understand property map: " + prop);
|
|
var moz = m[1], how = m[2], my = m[3];
|
|
moz_to_me += ",\n" + my + ": ";
|
|
me_to_moz += ",\n" + moz + ": ";
|
|
switch (how) {
|
|
case "@":
|
|
moz_to_me += "M." + moz + ".map(from_moz)";
|
|
me_to_moz += "M." + my + ".map(to_moz)";
|
|
break;
|
|
case ">":
|
|
moz_to_me += "from_moz(M." + moz + ")";
|
|
me_to_moz += "to_moz(M." + my + ")";
|
|
break;
|
|
case "=":
|
|
moz_to_me += "M." + moz;
|
|
me_to_moz += "M." + my;
|
|
break;
|
|
case "%":
|
|
moz_to_me += "from_moz(M." + moz + ").body";
|
|
me_to_moz += "to_moz_block(M)";
|
|
break;
|
|
default:
|
|
throw new Error("Can't understand operator in propmap: " + prop);
|
|
}
|
|
});
|
|
|
|
moz_to_me += "\n})\n}";
|
|
me_to_moz += "\n}\n}";
|
|
|
|
//moz_to_me = parse(moz_to_me).print_to_string({ beautify: true });
|
|
//me_to_moz = parse(me_to_moz).print_to_string({ beautify: true });
|
|
//console.log(moz_to_me);
|
|
|
|
moz_to_me = new Function("U2", "my_start_token", "my_end_token", "from_moz", "return(" + moz_to_me + ")")(
|
|
exports, my_start_token, my_end_token, from_moz
|
|
);
|
|
me_to_moz = new Function("to_moz", "to_moz_block", "to_moz_scope", "return(" + me_to_moz + ")")(
|
|
to_moz, to_moz_block, to_moz_scope
|
|
);
|
|
MOZ_TO_ME[moztype] = moz_to_me;
|
|
def_to_moz(mytype, me_to_moz);
|
|
};
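    // The "propmap" strings passed to map() above use a small operator DSL,
    // interpreted by the switch in map():
    //     moz>my   convert a single child node (from_moz / to_moz)
    //     moz@my   convert an array of child nodes
    //     moz=my   copy the value verbatim (e.g. an operator string)
    //     moz%my   unwrap/wrap a statement block (from_moz(...).body / to_moz_block)
    // e.g. map("IfStatement", AST_If, "test>condition, consequent>body, alternate>alternative");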
|
|
|
|
    var FROM_MOZ_STACK = null;

    function from_moz(node) {
        FROM_MOZ_STACK.push(node);
        var ret = node != null ? MOZ_TO_ME[node.type](node) : null;
        FROM_MOZ_STACK.pop();
        return ret;
    };
|
|
|
|
AST_Node.from_mozilla_ast = function(node){
|
|
var save_stack = FROM_MOZ_STACK;
|
|
FROM_MOZ_STACK = [];
|
|
var ast = from_moz(node);
|
|
FROM_MOZ_STACK = save_stack;
|
|
return ast;
|
|
};
|
|
|
|
function set_moz_loc(mynode, moznode, myparent) {
|
|
var start = mynode.start;
|
|
var end = mynode.end;
|
|
if (start.pos != null && end.endpos != null) {
|
|
moznode.range = [start.pos, end.endpos];
|
|
}
|
|
if (start.line) {
|
|
moznode.loc = {
|
|
start: {line: start.line, column: start.col},
|
|
end: end.endline ? {line: end.endline, column: end.endcol} : null
|
|
};
|
|
if (start.file) {
|
|
moznode.loc.source = start.file;
|
|
}
|
|
}
|
|
return moznode;
|
|
};
|
|
|
|
function def_to_moz(mytype, handler) {
|
|
mytype.DEFMETHOD("to_mozilla_ast", function() {
|
|
return set_moz_loc(this, handler(this));
|
|
});
|
|
};
|
|
|
|
function to_moz(node) {
|
|
return node != null ? node.to_mozilla_ast() : null;
|
|
};
|
|
|
|
function to_moz_block(node) {
|
|
return {
|
|
type: "BlockStatement",
|
|
body: node.body.map(to_moz)
|
|
};
|
|
};
|
|
|
|
function to_moz_scope(type, node) {
|
|
var body = node.body.map(to_moz);
|
|
if (node.body[0] instanceof AST_SimpleStatement && node.body[0].body instanceof AST_String) {
|
|
body.unshift(to_moz(new AST_EmptyStatement(node.body[0])));
|
|
}
|
|
return {
|
|
type: type,
|
|
body: body
|
|
};
|
|
};
|
|
})();
|
|
|
|
/***********************************************************************
|
|
|
|
A JavaScript tokenizer / parser / beautifier / compressor.
|
|
https://github.com/mishoo/UglifyJS2
|
|
|
|
-------------------------------- (C) ---------------------------------
|
|
|
|
Author: Mihai Bazon
|
|
<mihai.bazon@gmail.com>
|
|
http://mihai.bazon.net/blog
|
|
|
|
Distributed under the BSD license:
|
|
|
|
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
|
|
|
Redistribution and use in source and binary forms, with or without
|
|
modification, are permitted provided that the following conditions
|
|
are met:
|
|
|
|
* Redistributions of source code must retain the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer.
|
|
|
|
* Redistributions in binary form must reproduce the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer in the documentation and/or other materials
|
|
provided with the distribution.
|
|
|
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
|
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
|
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
|
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
|
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
|
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
|
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
|
SUCH DAMAGE.
|
|
|
|
***********************************************************************/
|
|
|
|
"use strict";
|
|
|
|
function find_builtins() {
|
|
// NaN will be included due to Number.NaN
|
|
var a = [
|
|
"null",
|
|
"true",
|
|
"false",
|
|
"Infinity",
|
|
"-Infinity",
|
|
"undefined",
|
|
];
|
|
[ Object, Array, Function, Number,
|
|
String, Boolean, Error, Math,
|
|
Date, RegExp
|
|
].forEach(function(ctor){
|
|
Object.getOwnPropertyNames(ctor).map(add);
|
|
if (ctor.prototype) {
|
|
Object.getOwnPropertyNames(ctor.prototype).map(add);
|
|
}
|
|
});
|
|
function add(name) {
|
|
push_uniq(a, name);
|
|
}
|
|
return a;
|
|
}
|
|
|
|
function mangle_properties(ast, options) {
|
|
options = defaults(options, {
|
|
cache: null,
|
|
debug: false,
|
|
ignore_quoted: false,
|
|
only_cache: false,
|
|
regex: null,
|
|
reserved: null,
|
|
});
|
|
|
|
var reserved = options.reserved;
|
|
if (reserved == null)
|
|
reserved = find_builtins();
|
|
|
|
var cache = options.cache;
|
|
if (cache == null) {
|
|
cache = {
|
|
cname: -1,
|
|
props: new Dictionary()
|
|
};
|
|
}
|
|
|
|
var regex = options.regex;
|
|
var ignore_quoted = options.ignore_quoted;
|
|
|
|
// note debug is either false (disabled), or a string of the debug suffix to use (enabled).
|
|
// note debug may be enabled as an empty string, which is falsey. Also treat passing 'true'
|
|
// the same as passing an empty string.
|
|
var debug = (options.debug !== false);
|
|
var debug_name_suffix;
|
|
if (debug) {
|
|
debug_name_suffix = (options.debug === true ? "" : options.debug);
|
|
}
|
|
|
|
var names_to_mangle = [];
|
|
var unmangleable = [];
|
|
var ignored = {};
|
|
|
|
// step 1: find candidates to mangle
|
|
ast.walk(new TreeWalker(function(node){
|
|
if (node instanceof AST_ObjectKeyVal) {
|
|
add(node.key, ignore_quoted && node.quote);
|
|
}
|
|
else if (node instanceof AST_ObjectProperty) {
|
|
// setter or getter, since KeyVal is handled above
|
|
add(node.key.name);
|
|
}
|
|
else if (node instanceof AST_Dot) {
|
|
add(node.property);
|
|
}
|
|
else if (node instanceof AST_Sub) {
|
|
addStrings(node.property, ignore_quoted);
|
|
}
|
|
}));
|
|
|
|
// step 2: transform the tree, renaming properties
|
|
return ast.transform(new TreeTransformer(function(node){
|
|
if (node instanceof AST_ObjectKeyVal) {
|
|
if (!(ignore_quoted && node.quote))
|
|
node.key = mangle(node.key);
|
|
}
|
|
else if (node instanceof AST_ObjectProperty) {
|
|
// setter or getter
|
|
node.key.name = mangle(node.key.name);
|
|
}
|
|
else if (node instanceof AST_Dot) {
|
|
node.property = mangle(node.property);
|
|
}
|
|
else if (node instanceof AST_Sub) {
|
|
if (!ignore_quoted)
|
|
node.property = mangleStrings(node.property);
|
|
}
|
|
// else if (node instanceof AST_String) {
|
|
// if (should_mangle(node.value)) {
|
|
// AST_Node.warn(
|
|
// "Found \"{prop}\" property candidate for mangling in an arbitrary string [{file}:{line},{col}]", {
|
|
// file : node.start.file,
|
|
// line : node.start.line,
|
|
// col : node.start.col,
|
|
// prop : node.value
|
|
// }
|
|
// );
|
|
// }
|
|
// }
|
|
}));
|
|
|
|
// only function declarations after this line
|
|
|
|
function can_mangle(name) {
|
|
if (unmangleable.indexOf(name) >= 0) return false;
|
|
if (reserved.indexOf(name) >= 0) return false;
|
|
if (options.only_cache) {
|
|
return cache.props.has(name);
|
|
}
|
|
if (/^-?[0-9]+(\.[0-9]+)?(e[+-][0-9]+)?$/.test(name)) return false;
|
|
return true;
|
|
}
|
|
|
|
function should_mangle(name) {
|
|
if (ignore_quoted && name in ignored) return false;
|
|
if (regex && !regex.test(name)) return false;
|
|
if (reserved.indexOf(name) >= 0) return false;
|
|
return cache.props.has(name)
|
|
|| names_to_mangle.indexOf(name) >= 0;
|
|
}
|
|
|
|
function add(name, ignore) {
|
|
if (ignore) {
|
|
ignored[name] = true;
|
|
return;
|
|
}
|
|
|
|
if (can_mangle(name))
|
|
push_uniq(names_to_mangle, name);
|
|
|
|
if (!should_mangle(name)) {
|
|
push_uniq(unmangleable, name);
|
|
}
|
|
}
|
|
|
|
function mangle(name) {
|
|
if (!should_mangle(name)) {
|
|
return name;
|
|
}
|
|
|
|
var mangled = cache.props.get(name);
|
|
if (!mangled) {
|
|
if (debug) {
|
|
// debug mode: use a prefix and suffix to preserve readability, e.g. o.foo -> o._$foo$NNN_.
|
|
var debug_mangled = "_$" + name + "$" + debug_name_suffix + "_";
|
|
|
|
if (can_mangle(debug_mangled) && !(ignore_quoted && debug_mangled in ignored)) {
|
|
mangled = debug_mangled;
|
|
}
|
|
}
|
|
|
|
// either debug mode is off, or it is on and we could not use the mangled name
|
|
if (!mangled) {
|
|
// note can_mangle() does not check if the name collides with the 'ignored' set
|
|
// (filled with quoted properties when ignore_quoted set). Make sure we add this
|
|
// check so we don't collide with a quoted name.
|
|
do {
|
|
mangled = base54(++cache.cname);
|
|
} while (!can_mangle(mangled) || (ignore_quoted && mangled in ignored));
|
|
}
|
|
|
|
cache.props.set(name, mangled);
|
|
}
|
|
return mangled;
|
|
}
|
|
|
|
function addStrings(node, ignore) {
|
|
var out = {};
|
|
try {
|
|
(function walk(node){
|
|
node.walk(new TreeWalker(function(node){
|
|
if (node instanceof AST_Seq) {
|
|
walk(node.cdr);
|
|
return true;
|
|
}
|
|
if (node instanceof AST_String) {
|
|
add(node.value, ignore);
|
|
return true;
|
|
}
|
|
if (node instanceof AST_Conditional) {
|
|
walk(node.consequent);
|
|
walk(node.alternative);
|
|
return true;
|
|
}
|
|
throw out;
|
|
}));
|
|
})(node);
|
|
} catch(ex) {
|
|
if (ex !== out) throw ex;
|
|
}
|
|
}
|
|
|
|
function mangleStrings(node) {
|
|
return node.transform(new TreeTransformer(function(node){
|
|
if (node instanceof AST_Seq) {
|
|
node.cdr = mangleStrings(node.cdr);
|
|
}
|
|
else if (node instanceof AST_String) {
|
|
node.value = mangle(node.value);
|
|
}
|
|
else if (node instanceof AST_Conditional) {
|
|
node.consequent = mangleStrings(node.consequent);
|
|
node.alternative = mangleStrings(node.alternative);
|
|
}
|
|
return node;
|
|
}));
|
|
}
|
|
|
|
}
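// Minimal usage sketch for mangle_properties() (option values are
// illustrative; the defaults are listed at the top of the function):
//     ast = mangle_properties(ast, {
//         regex: /^_/,           // only mangle properties starting with "_"
//         ignore_quoted: true,   // leave obj["quoted"] accesses alone
//         debug: "XYZ"           // readable names of the form _$foo$XYZ_
//     });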
|
|
|
|
exports["Compressor"] = Compressor;
|
|
exports["DefaultsError"] = DefaultsError;
|
|
exports["Dictionary"] = Dictionary;
|
|
exports["JS_Parse_Error"] = JS_Parse_Error;
|
|
exports["MAP"] = MAP;
|
|
exports["OutputStream"] = OutputStream;
|
|
exports["SourceMap"] = SourceMap;
|
|
exports["TreeTransformer"] = TreeTransformer;
|
|
exports["TreeWalker"] = TreeWalker;
|
|
exports["base54"] = base54;
|
|
exports["defaults"] = defaults;
|
|
exports["mangle_properties"] = mangle_properties;
|
|
exports["merge"] = merge;
|
|
exports["parse"] = parse;
|
|
exports["push_uniq"] = push_uniq;
|
|
exports["string_template"] = string_template;
|
|
exports["tokenizer"] = tokenizer;
|
|
exports["is_identifier"] = is_identifier;
|
|
exports["SymbolDef"] = SymbolDef;
|
|
|
|
AST_Node.warn_function = function(txt) { logger.error("uglifyjs WARN: " + txt); };
|
|
|
|
exports.AST_Node.warn_function = function(txt) {
|
|
console.error("WARN: %s", txt);
|
|
};
|
|
|
|
    function read_source_map(code) {
        var match = /\n\/\/# sourceMappingURL=data:application\/json(;.*?)?;base64,(.*)/.exec(code);
        if (!match) {
            exports.AST_Node.warn("inline source map not found");
            return null;
        }
        return JSON.parse(new Buffer(match[2], "base64"));
    }
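    // The comment matched above looks roughly like:
    //     //# sourceMappingURL=data:application/json;charset=utf-8;base64,<base64-encoded JSON map>
    // (the base64 payload is the JSON source map itself).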
|
|
|
|
exports.minify = function(files, options, name) {
|
|
options = exports.defaults(options, {
|
|
compress : {},
|
|
fromString : false,
|
|
inSourceMap : null,
|
|
mangle : {},
|
|
mangleProperties : false,
|
|
nameCache : null,
|
|
outFileName : null,
|
|
output : null,
|
|
outSourceMap : null,
|
|
parse : {},
|
|
sourceMapInline : false,
|
|
sourceMapUrl : null,
|
|
sourceRoot : null,
|
|
spidermonkey : false,
|
|
warnings : false,
|
|
});
|
|
exports.base54.reset();
|
|
|
|
var inMap = options.inSourceMap;
|
|
if (typeof inMap == "string" && inMap != "inline") {
|
|
inMap = JSON.parse(rjsFile.readFile(inMap, "utf8"));
|
|
}
|
|
|
|
// 1. parse
|
|
var toplevel = null,
|
|
sourcesContent = {};
|
|
|
|
if (options.spidermonkey) {
|
|
if (inMap == "inline") {
|
|
throw new Error("inline source map only works with built-in parser");
|
|
}
|
|
toplevel = exports.AST_Node.from_mozilla_ast(files);
|
|
} else {
|
|
var addFile = function(file, fileUrl) {
|
|
var code = options.fromString
|
|
? file
|
|
: rjsFile.readFile(file, "utf8");
|
|
if (inMap == "inline") {
|
|
inMap = read_source_map(code);
|
|
}
|
|
sourcesContent[fileUrl] = code;
|
|
toplevel = exports.parse(code, {
|
|
filename: fileUrl,
|
|
toplevel: toplevel,
|
|
bare_returns: options.parse ? options.parse.bare_returns : undefined
|
|
});
|
|
}
|
|
if (!options.fromString) {
|
|
files = exports.simple_glob(files);
|
|
if (inMap == "inline" && files.length > 1) {
|
|
throw new Error("inline source map only works with singular input");
|
|
}
|
|
}
|
|
[].concat(files).forEach(function (files, i) {
|
|
if (typeof files === 'string') {
|
|
addFile(files, options.fromString ? i : files);
|
|
} else {
|
|
for (var fileUrl in files) {
|
|
addFile(files[fileUrl], fileUrl);
|
|
}
|
|
}
|
|
});
|
|
}
|
|
if (options.wrap) {
|
|
toplevel = toplevel.wrap_commonjs(options.wrap, options.exportAll);
|
|
}
|
|
|
|
// 2. compress
|
|
if (options.compress) {
|
|
var compress = { warnings: options.warnings };
|
|
exports.merge(compress, options.compress);
|
|
toplevel.figure_out_scope(options.mangle);
|
|
var sq = exports.Compressor(compress);
|
|
toplevel = sq.compress(toplevel);
|
|
}
|
|
|
|
// 3. mangle properties
|
|
if (options.mangleProperties || options.nameCache) {
|
|
options.mangleProperties.cache = exports.readNameCache(options.nameCache, "props");
|
|
toplevel = exports.mangle_properties(toplevel, options.mangleProperties);
|
|
exports.writeNameCache(options.nameCache, "props", options.mangleProperties.cache);
|
|
}
|
|
|
|
// 4. mangle
|
|
if (options.mangle) {
|
|
toplevel.figure_out_scope(options.mangle);
|
|
toplevel.compute_char_frequency(options.mangle);
|
|
toplevel.mangle_names(options.mangle);
|
|
}
|
|
|
|
// 5. output
|
|
var output = { max_line_len: 32000 };
|
|
if (options.outSourceMap || options.sourceMapInline) {
|
|
output.source_map = exports.SourceMap({
|
|
// prefer outFileName, otherwise use outSourceMap without .map suffix
|
|
file: options.outFileName || (typeof options.outSourceMap === 'string' ? options.outSourceMap.replace(/\.map$/i, '') : null),
|
|
orig: inMap,
|
|
root: options.sourceRoot
|
|
});
|
|
if (options.sourceMapIncludeSources) {
|
|
for (var file in sourcesContent) {
|
|
if (sourcesContent.hasOwnProperty(file)) {
|
|
output.source_map.get().setSourceContent(file, sourcesContent[file]);
|
|
}
|
|
}
|
|
}
|
|
|
|
}
|
|
if (options.output) {
|
|
exports.merge(output, options.output);
|
|
}
|
|
var stream = exports.OutputStream(output);
|
|
toplevel.print(stream);
|
|
|
|
|
|
var source_map = output.source_map;
|
|
if (source_map) {
|
|
source_map = source_map + "";
|
|
}
|
|
|
|
var mappingUrlPrefix = "\n//# sourceMappingURL=";
|
|
if (options.sourceMapInline) {
|
|
stream += mappingUrlPrefix + "data:application/json;charset=utf-8;base64," + new Buffer(source_map).toString("base64");
|
|
} else if (options.outSourceMap && typeof options.outSourceMap === "string" && options.sourceMapUrl !== false) {
|
|
stream += mappingUrlPrefix + (typeof options.sourceMapUrl === "string" ? options.sourceMapUrl : options.outSourceMap);
|
|
}
|
|
|
|
return {
|
|
code : stream + "",
|
|
map : source_map
|
|
};
|
|
};
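// Minimal usage sketch for exports.minify() (file names and contents are
// illustrative only):
//     var result = exports.minify({ "main.js": "function add(a, b) { return a + b; }" }, {
//         fromString: true
//     });
//     // result.code -> minified source, result.map -> source map string (if requested)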
|
|
|
|
// exports.describe_ast = function() {
|
|
// function doitem(ctor) {
|
|
// var sub = {};
|
|
// ctor.SUBCLASSES.forEach(function(ctor){
|
|
// sub[ctor.TYPE] = doitem(ctor);
|
|
// });
|
|
// var ret = {};
|
|
// if (ctor.SELF_PROPS.length > 0) ret.props = ctor.SELF_PROPS;
|
|
// if (ctor.SUBCLASSES.length > 0) ret.sub = sub;
|
|
// return ret;
|
|
// }
|
|
// return doitem(exports.AST_Node).sub;
|
|
// }
|
|
|
|
exports.describe_ast = function() {
|
|
var out = exports.OutputStream({ beautify: true });
|
|
function doitem(ctor) {
|
|
out.print("AST_" + ctor.TYPE);
|
|
var props = ctor.SELF_PROPS.filter(function(prop){
|
|
return !/^\$/.test(prop);
|
|
});
|
|
if (props.length > 0) {
|
|
out.space();
|
|
out.with_parens(function(){
|
|
props.forEach(function(prop, i){
|
|
if (i) out.space();
|
|
out.print(prop);
|
|
});
|
|
});
|
|
}
|
|
if (ctor.documentation) {
|
|
out.space();
|
|
out.print_string(ctor.documentation);
|
|
}
|
|
if (ctor.SUBCLASSES.length > 0) {
|
|
out.space();
|
|
out.with_block(function(){
|
|
ctor.SUBCLASSES.forEach(function(ctor, i){
|
|
out.indent();
|
|
doitem(ctor);
|
|
out.newline();
|
|
});
|
|
});
|
|
}
|
|
};
|
|
doitem(exports.AST_Node);
|
|
return out + "";
|
|
};
|
|
|
|
function readReservedFile(filename, reserved) {
|
|
if (!reserved) {
|
|
reserved = { vars: [], props: [] };
|
|
}
|
|
var data = rjsFile.readFile(filename, "utf8");
|
|
data = JSON.parse(data);
|
|
if (data.vars) {
|
|
data.vars.forEach(function(name){
|
|
exports.push_uniq(reserved.vars, name);
|
|
});
|
|
}
|
|
if (data.props) {
|
|
data.props.forEach(function(name){
|
|
exports.push_uniq(reserved.props, name);
|
|
});
|
|
}
|
|
return reserved;
|
|
}
|
|
|
|
exports.readReservedFile = readReservedFile;
|
|
|
|
exports.readDefaultReservedFile = function(reserved) {
|
|
return readReservedFile(require.resolve("./domprops.json"), reserved);
|
|
};
|
|
|
|
exports.readNameCache = function(filename, key) {
|
|
var cache = null;
|
|
if (filename) {
|
|
try {
|
|
var cache = rjsFile.readFile(filename, "utf8");
|
|
cache = JSON.parse(cache)[key];
|
|
if (!cache) throw "init";
|
|
cache.props = exports.Dictionary.fromObject(cache.props);
|
|
} catch(ex) {
|
|
cache = {
|
|
cname: -1,
|
|
props: new exports.Dictionary()
|
|
};
|
|
}
|
|
}
|
|
return cache;
|
|
};
|
|
|
|
exports.writeNameCache = function(filename, key, cache) {
|
|
if (filename) {
|
|
var data;
|
|
try {
|
|
data = rjsFile.readFile(filename, "utf8");
|
|
data = JSON.parse(data);
|
|
} catch(ex) {
|
|
data = {};
|
|
}
|
|
data[key] = {
|
|
cname: cache.cname,
|
|
props: cache.props.toObject()
|
|
};
|
|
rjsFile.writeFile(filename, JSON.stringify(data, null, 2), "utf8");
|
|
}
|
|
};
|
|
|
|
// A file glob function that only supports "*" and "?" wildcards in the basename.
// Example: "foo/bar/*baz??.*.js"
// Argument `glob` may be a string or an array of strings.
// Returns an array of strings. Garbage in, garbage out.
exports.simple_glob = function simple_glob(glob) {
    if (Array.isArray(glob)) {
        return [].concat.apply([], glob.map(simple_glob));
    }
    if (glob.match(/\*|\?/)) {
        var dir = path.dirname(glob);
        try {
            var entries = fs.readdirSync(dir);
        } catch (ex) {}
        if (entries) {
            var pattern = "^" + path.basename(glob)
                .replace(/[.+^$[\]\\(){}]/g, "\\$&")
                .replace(/\*/g, "[^/\\\\]*")
                .replace(/\?/g, "[^/\\\\]") + "$";
            var mod = process.platform === "win32" ? "i" : "";
            var rx = new RegExp(pattern, mod);
            var results = entries.filter(function(name) {
                return rx.test(name);
            }).map(function(name) {
                return path.join(dir, name);
            });
            if (results.length) return results;
        }
    }
    return [ glob ];
};
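// Usage sketch (paths are illustrative):
//     exports.simple_glob("build/*.js");             // -> matching paths, or ["build/*.js"]
//     exports.simple_glob(["lib/a.js", "lib/?.js"]); // arrays are flattened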
|
|
|
|
|
|
});
|
|
/*jslint plusplus: true */
|
|
/*global define: false */
|
|
|
|
define('parse', ['./esprimaAdapter', 'lang'], function (esprima, lang) {
|
|
'use strict';
|
|
|
|
function arrayToString(ary) {
|
|
var output = '[';
|
|
if (ary) {
|
|
ary.forEach(function (item, i) {
|
|
output += (i > 0 ? ',' : '') + '"' + lang.jsEscape(item) + '"';
|
|
});
|
|
}
|
|
output += ']';
|
|
|
|
return output;
|
|
}
|
|
|
|
//This string is saved off because JSLint complains
|
|
//about obj.arguments use, as 'reserved word'
|
|
var argPropName = 'arguments',
|
|
//Default object to use for "scope" checking for UMD identifiers.
|
|
emptyScope = {},
|
|
mixin = lang.mixin,
|
|
hasProp = lang.hasProp;
|
|
|
|
    //From an esprima example for traversing its ast.
    function traverse(object, visitor) {
        var child;

        if (!object) {
            return;
        }

        if (visitor.call(null, object) === false) {
            return false;
        }
        for (var i = 0, keys = Object.keys(object); i < keys.length; i++) {
            child = object[keys[i]];
            if (typeof child === 'object' && child !== null) {
                if (traverse(child, visitor) === false) {
                    return false;
                }
            }
        }
    }
|
|
|
|
    //Like traverse, but visitor returning false just
    //stops that subtree analysis, not the rest of tree
    //visiting.
    function traverseBroad(object, visitor) {
        var child;

        if (!object) {
            return;
        }

        if (visitor.call(null, object) === false) {
            return false;
        }
        for (var i = 0, keys = Object.keys(object); i < keys.length; i++) {
            child = object[keys[i]];
            if (typeof child === 'object' && child !== null) {
                traverseBroad(child, visitor);
            }
        }
    }
|
|
|
|
    /**
     * Pulls out dependencies from an array literal with just string members.
     * If string literals, will just return those string values in an array,
     * skipping other items in the array.
     *
     * @param {Node} node an AST node.
     *
     * @returns {Array} an array of strings.
     * If null is returned, then it means the input node was not a valid
     * dependency.
     */
    function getValidDeps(node) {
        if (!node || node.type !== 'ArrayExpression' || !node.elements) {
            return;
        }

        var deps = [];

        node.elements.some(function (elem) {
            if (elem.type === 'Literal') {
                deps.push(elem.value);
            }
        });

        return deps.length ? deps : undefined;
    }

    // Detects regular or arrow function expressions as the desired expression
    // type.
    function isFnExpression(node) {
        return (node && (node.type === 'FunctionExpression' ||
                node.type === 'ArrowFunctionExpression'));
    }
|
|
|
|
    /**
     * Main parse function. Returns a string of any valid require or
     * define/require.def calls as part of one JavaScript source string.
     * @param {String} moduleName the module name that represents this file.
     * It is used to create a default define if there is not one already for the
     * file. This allows properly tracing dependencies for builds. Otherwise, if
     * the file just has a require() call, the file dependencies will not be
     * properly reflected: the file will come before its dependencies.
     * @param {String} fileName
     * @param {String} fileContents
     * @param {Object} options optional options. insertNeedsDefine: true will
     * add calls to require.needsDefine() if appropriate.
     * @returns {String} JS source string or null, if no require or
     * define/require.def calls are found.
     */
|
|
function parse(moduleName, fileName, fileContents, options) {
|
|
options = options || {};
|
|
|
|
//Set up source input
|
|
var i, moduleCall, depString,
|
|
moduleDeps = [],
|
|
result = '',
|
|
moduleList = [],
|
|
needsDefine = true,
|
|
astRoot = esprima.parse(fileContents);
|
|
|
|
parse.recurse(astRoot, function (callName, config, name, deps, node, factoryIdentifier, fnExpScope) {
|
|
if (!deps) {
|
|
deps = [];
|
|
}
|
|
|
|
if (callName === 'define' && (!name || name === moduleName)) {
|
|
needsDefine = false;
|
|
}
|
|
|
|
if (!name) {
|
|
//If there is no module name, the dependencies are for
|
|
//this file/default module name.
|
|
moduleDeps = moduleDeps.concat(deps);
|
|
} else {
|
|
moduleList.push({
|
|
name: name,
|
|
deps: deps
|
|
});
|
|
}
|
|
|
|
if (callName === 'define' && factoryIdentifier && hasProp(fnExpScope, factoryIdentifier)) {
|
|
return factoryIdentifier;
|
|
}
|
|
|
|
//If define was found, no need to dive deeper, unless
|
|
//the config explicitly wants to dig deeper.
|
|
return !!options.findNestedDependencies;
|
|
}, options);
|
|
|
|
if (options.insertNeedsDefine && needsDefine) {
|
|
result += 'require.needsDefine("' + moduleName + '");';
|
|
}
|
|
|
|
if (moduleDeps.length || moduleList.length) {
|
|
for (i = 0; i < moduleList.length; i++) {
|
|
moduleCall = moduleList[i];
|
|
if (result) {
|
|
result += '\n';
|
|
}
|
|
|
|
//If this is the main module for this file, combine any
|
|
//"anonymous" dependencies (could come from a nested require
|
|
//call) with this module.
|
|
if (moduleCall.name === moduleName) {
|
|
moduleCall.deps = moduleCall.deps.concat(moduleDeps);
|
|
moduleDeps = [];
|
|
}
|
|
|
|
depString = arrayToString(moduleCall.deps);
|
|
result += 'define("' + moduleCall.name + '",' +
|
|
depString + ');';
|
|
}
|
|
if (moduleDeps.length) {
|
|
if (result) {
|
|
result += '\n';
|
|
}
|
|
depString = arrayToString(moduleDeps);
|
|
result += 'define("' + moduleName + '",' + depString + ');';
|
|
}
|
|
}
|
|
|
|
return result || null;
|
|
}
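    /* Illustrative sketch, not part of r.js; the module name and file
     * contents below are made up. For a file that only calls require(),
     * parse() synthesizes a define() so the file's dependencies are traced:
     *
     *     parse('app/main', 'app/main.js',
     *           "require(['a', 'b'], function (a, b) {});");
     *     // => 'define("app/main",["a","b"]);'
     *
     * When no require or define calls are found, it returns null.
     */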
|
|
|
|
parse.traverse = traverse;
|
|
parse.traverseBroad = traverseBroad;
|
|
parse.isFnExpression = isFnExpression;
|
|
|
|
/**
|
|
* Handles parsing a file recursively for require calls.
|
|
* @param {Array} parentNode the AST node to start with.
|
|
* @param {Function} onMatch function to call on a parse match.
|
|
* @param {Object} [options] This is normally the build config options if
|
|
* it is passed.
|
|
     * @param {Object} [fnExpScope] holds a list of function expression
|
|
* argument identifiers, set up internally, not passed in
|
|
*/
|
|
parse.recurse = function (object, onMatch, options, fnExpScope) {
|
|
        //Like traverse, but when a has() replacement has reduced an if test
        //to a literal true or false, only the branch that will actually run
        //is visited.
|
|
var keys, child, result, i, params, param, tempObject,
|
|
hasHas = options && options.has;
|
|
|
|
fnExpScope = fnExpScope || emptyScope;
|
|
|
|
if (!object) {
|
|
return;
|
|
}
|
|
|
|
//If has replacement has resulted in if(true){} or if(false){}, take
|
|
//the appropriate branch and skip the other one.
|
|
if (hasHas && object.type === 'IfStatement' && object.test.type &&
|
|
object.test.type === 'Literal') {
|
|
if (object.test.value) {
|
|
//Take the if branch
|
|
this.recurse(object.consequent, onMatch, options, fnExpScope);
|
|
} else {
|
|
//Take the else branch
|
|
this.recurse(object.alternate, onMatch, options, fnExpScope);
|
|
}
|
|
} else {
|
|
result = this.parseNode(object, onMatch, fnExpScope);
|
|
if (result === false) {
|
|
return;
|
|
} else if (typeof result === 'string') {
|
|
return result;
|
|
}
|
|
|
|
//Build up a "scope" object that informs nested recurse calls if
|
|
//the define call references an identifier that is likely a UMD
|
|
//wrapped function expression argument.
|
|
//Catch (function(a) {... wrappers
|
|
if (object.type === 'ExpressionStatement' && object.expression &&
|
|
object.expression.type === 'CallExpression' && object.expression.callee &&
|
|
isFnExpression(object.expression.callee)) {
|
|
tempObject = object.expression.callee;
|
|
}
|
|
// Catch !function(a) {... wrappers
|
|
if (object.type === 'UnaryExpression' && object.argument &&
|
|
object.argument.type === 'CallExpression' && object.argument.callee &&
|
|
isFnExpression(object.argument.callee)) {
|
|
tempObject = object.argument.callee;
|
|
}
|
|
if (tempObject && tempObject.params && tempObject.params.length) {
|
|
params = tempObject.params;
|
|
fnExpScope = mixin({}, fnExpScope, true);
|
|
for (i = 0; i < params.length; i++) {
|
|
param = params[i];
|
|
if (param.type === 'Identifier') {
|
|
fnExpScope[param.name] = true;
|
|
}
|
|
}
|
|
}
|
|
|
|
for (i = 0, keys = Object.keys(object); i < keys.length; i++) {
|
|
child = object[keys[i]];
|
|
if (typeof child === 'object' && child !== null) {
|
|
result = this.recurse(child, onMatch, options, fnExpScope);
|
|
if (typeof result === 'string' && hasProp(fnExpScope, result)) {
|
|
//The result was still in fnExpScope so break. Otherwise,
|
|
                        //was a return from a tree that had a UMD definition,
|
|
//but now out of that scope so keep siblings.
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
|
|
//Check for an identifier for a factory function identifier being
|
|
//passed in as a function expression, indicating a UMD-type of
|
|
//wrapping.
|
|
if (typeof result === 'string') {
|
|
if (hasProp(fnExpScope, result)) {
|
|
//result still in scope, keep jumping out indicating the
|
|
//identifier still in use.
|
|
return result;
|
|
}
|
|
|
|
return;
|
|
}
|
|
}
|
|
};
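    /* Illustrative sketch, not part of r.js: when options.has is set and a
     * has() replacement has already reduced an if test to a literal, as in
     *
     *     if (true) { require(['a']); } else { require(['b']); }
     *
     * recurse only walks the branch that will run, so onMatch sees the 'a'
     * dependency but never the 'b' one.
     */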
|
|
|
|
/**
|
|
* Determines if the file defines the require/define module API.
|
|
* Specifically, it looks for the `define.amd = ` expression.
|
|
* @param {String} fileName
|
|
* @param {String} fileContents
|
|
* @returns {Boolean}
|
|
*/
|
|
parse.definesRequire = function (fileName, fileContents) {
|
|
var foundDefine = false,
|
|
foundDefineAmd = false;
|
|
|
|
traverse(esprima.parse(fileContents), function (node) {
|
|
// Look for a top level declaration of a define, like
|
|
// var requirejs, require, define, off Program body.
|
|
if (node.type === 'Program' && node.body && node.body.length) {
|
|
foundDefine = node.body.some(function(bodyNode) {
|
|
// var define
|
|
if (bodyNode.type === 'VariableDeclaration') {
|
|
var decls = bodyNode.declarations;
|
|
if (decls) {
|
|
var hasVarDefine = decls.some(function(declNode) {
|
|
return (declNode.type === 'VariableDeclarator' &&
|
|
declNode.id &&
|
|
declNode.id.type === 'Identifier' &&
|
|
declNode.id.name === 'define');
|
|
});
|
|
if (hasVarDefine) {
|
|
return true;
|
|
}
|
|
}
|
|
}
|
|
|
|
// function define() {}
|
|
if (bodyNode.type === 'FunctionDeclaration' &&
|
|
bodyNode.id &&
|
|
bodyNode.id.type === 'Identifier' &&
|
|
bodyNode.id.name === 'define') {
|
|
return true;
|
|
}
|
|
});
|
|
}
|
|
|
|
// Need define variable found first, before detecting define.amd.
|
|
if (foundDefine && parse.hasDefineAmd(node)) {
|
|
foundDefineAmd = true;
|
|
|
|
//Stop traversal
|
|
return false;
|
|
}
|
|
});
|
|
|
|
return foundDefine && foundDefineAmd;
|
|
};
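    /* Illustrative sketch, not part of r.js: definesRequire only reports
     * true when a file both declares define at the top level and assigns
     * define.amd, the way a loader implementation does:
     *
     *     parse.definesRequire('loader.js',
     *         "var requirejs, require, define;\n" +
     *         "define = function () {};\n" +
     *         "define.amd = {};");
     *     // => true
     */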
|
|
|
|
/**
|
|
* Finds require("") calls inside a CommonJS anonymous module wrapped in a
|
|
* define(function(require, exports, module){}) wrapper. These dependencies
|
|
* will be added to a modified define() call that lists the dependencies
|
|
* on the outside of the function.
|
|
* @param {String} fileName
|
|
* @param {String|Object} fileContents: a string of contents, or an already
|
|
* parsed AST tree.
|
|
* @returns {Array} an array of module names that are dependencies. Always
|
|
* returns an array, but could be of length zero.
|
|
*/
|
|
parse.getAnonDeps = function (fileName, fileContents) {
|
|
var astRoot = typeof fileContents === 'string' ?
|
|
esprima.parse(fileContents) : fileContents,
|
|
defFunc = this.findAnonDefineFactory(astRoot);
|
|
|
|
return parse.getAnonDepsFromNode(defFunc);
|
|
};
|
|
|
|
/**
|
|
* Finds require("") calls inside a CommonJS anonymous module wrapped
|
|
* in a define function, given an AST node for the definition function.
|
|
* @param {Node} node the AST node for the definition function.
|
|
     * @returns {Array} an array of dependency names. Can be of zero length.
|
|
*/
|
|
parse.getAnonDepsFromNode = function (node) {
|
|
var deps = [],
|
|
funcArgLength;
|
|
|
|
if (node) {
|
|
this.findRequireDepNames(node, deps);
|
|
|
|
//If no deps, still add the standard CommonJS require, exports,
|
|
//module, in that order, to the deps, but only if specified as
|
|
//function args. In particular, if exports is used, it is favored
|
|
//over the return value of the function, so only add it if asked.
|
|
funcArgLength = node.params && node.params.length;
|
|
if (funcArgLength) {
|
|
deps = (funcArgLength > 1 ? ["require", "exports", "module"] :
|
|
["require"]).concat(deps);
|
|
}
|
|
}
|
|
return deps;
|
|
};
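    /* Illustrative sketch, not part of r.js: for the factory node of
     *
     *     define(function (require) { var a = require('a'); });
     *
     * getAnonDepsFromNode returns ['require', 'a']; a factory declaring all
     * three CommonJS arguments would instead get
     * ['require', 'exports', 'module', 'a'].
     */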
|
|
|
|
parse.isDefineNodeWithArgs = function (node) {
|
|
return node && node.type === 'CallExpression' &&
|
|
node.callee && node.callee.type === 'Identifier' &&
|
|
node.callee.name === 'define' && node[argPropName];
|
|
};
|
|
|
|
/**
|
|
* Finds the function in define(function (require, exports, module){});
|
|
     * @param {Object} node the AST node to search.
     * @returns {Object} the factory function expression node if found,
     * otherwise undefined.
|
|
*/
|
|
parse.findAnonDefineFactory = function (node) {
|
|
var match;
|
|
|
|
traverse(node, function (node) {
|
|
var arg0, arg1;
|
|
|
|
if (parse.isDefineNodeWithArgs(node)) {
|
|
|
|
//Just the factory function passed to define
|
|
arg0 = node[argPropName][0];
|
|
if (isFnExpression(arg0)) {
|
|
match = arg0;
|
|
return false;
|
|
}
|
|
|
|
//A string literal module ID followed by the factory function.
|
|
arg1 = node[argPropName][1];
|
|
if (arg0.type === 'Literal' && isFnExpression(arg1)) {
|
|
match = arg1;
|
|
return false;
|
|
}
|
|
}
|
|
});
|
|
|
|
return match;
|
|
};
|
|
|
|
/**
|
|
* Finds any config that is passed to requirejs. That includes calls to
|
|
* require/requirejs.config(), as well as require({}, ...) and
|
|
* requirejs({}, ...)
|
|
* @param {String} fileContents
|
|
*
|
|
* @returns {Object} a config details object with the following properties:
|
|
* - config: {Object} the config object found. Can be undefined if no
|
|
* config found.
|
|
* - range: {Array} the start index and end index in the contents where
|
|
* the config was found. Can be undefined if no config found.
|
|
* Can throw an error if the config in the file cannot be evaluated in
|
|
* a build context to valid JavaScript.
|
|
*/
|
|
parse.findConfig = function (fileContents) {
|
|
/*jslint evil: true */
|
|
var jsConfig, foundConfig, stringData, foundRange, quote, quoteMatch,
|
|
quoteRegExp = /(:\s|\[\s*)(['"])/,
|
|
astRoot = esprima.parse(fileContents, {
|
|
loc: true
|
|
});
|
|
|
|
traverse(astRoot, function (node) {
|
|
var arg,
|
|
requireType = parse.hasRequire(node);
|
|
|
|
if (requireType && (requireType === 'require' ||
|
|
requireType === 'requirejs' ||
|
|
requireType === 'requireConfig' ||
|
|
requireType === 'requirejsConfig')) {
|
|
|
|
arg = node[argPropName] && node[argPropName][0];
|
|
|
|
if (arg && arg.type === 'ObjectExpression') {
|
|
stringData = parse.nodeToString(fileContents, arg);
|
|
jsConfig = stringData.value;
|
|
foundRange = stringData.range;
|
|
return false;
|
|
}
|
|
} else {
|
|
arg = parse.getRequireObjectLiteral(node);
|
|
if (arg) {
|
|
stringData = parse.nodeToString(fileContents, arg);
|
|
jsConfig = stringData.value;
|
|
foundRange = stringData.range;
|
|
return false;
|
|
}
|
|
}
|
|
});
|
|
|
|
if (jsConfig) {
|
|
// Eval the config
|
|
quoteMatch = quoteRegExp.exec(jsConfig);
|
|
quote = (quoteMatch && quoteMatch[2]) || '"';
|
|
foundConfig = eval('(' + jsConfig + ')');
|
|
}
|
|
|
|
return {
|
|
config: foundConfig,
|
|
range: foundRange,
|
|
quote: quote
|
|
};
|
|
};
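    /* Illustrative sketch, not part of r.js; the config values are made up.
     * findConfig evaluates the first config object it finds and reports the
     * character range it occupies, which transform.serializeConfig can
     * later overwrite:
     *
     *     var details = parse.findConfig(
     *         "requirejs.config({ baseUrl: 'js', waitSeconds: 7 });");
     *     // details.config.baseUrl === 'js'
     *     // details.quote === "'"
     *     // details.range brackets the "{ baseUrl: ... }" literal text
     */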
|
|
|
|
/** Returns the node for the object literal assigned to require/requirejs,
|
|
* for holding a declarative config.
|
|
*/
|
|
parse.getRequireObjectLiteral = function (node) {
|
|
if (node.id && node.id.type === 'Identifier' &&
|
|
(node.id.name === 'require' || node.id.name === 'requirejs') &&
|
|
node.init && node.init.type === 'ObjectExpression') {
|
|
return node.init;
|
|
}
|
|
};
|
|
|
|
/**
|
|
* Renames require/requirejs/define calls to be ns + '.' + require/requirejs/define
|
|
* Does *not* do .config calls though. See pragma.namespace for the complete
|
|
* set of namespace transforms. This function is used because require calls
|
|
* inside a define() call should not be renamed, so a simple regexp is not
|
|
* good enough.
|
|
* @param {String} fileContents the contents to transform.
|
|
* @param {String} ns the namespace, *not* including trailing dot.
|
|
* @return {String} the fileContents with the namespace applied
|
|
*/
|
|
parse.renameNamespace = function (fileContents, ns) {
|
|
var lines,
|
|
locs = [],
|
|
astRoot = esprima.parse(fileContents, {
|
|
loc: true
|
|
});
|
|
|
|
parse.recurse(astRoot, function (callName, config, name, deps, node) {
|
|
locs.push(node.loc);
|
|
//Do not recurse into define functions, they should be using
|
|
//local defines.
|
|
return callName !== 'define';
|
|
}, {});
|
|
|
|
if (locs.length) {
|
|
lines = fileContents.split('\n');
|
|
|
|
//Go backwards through the found locs, adding in the namespace name
|
|
//in front.
|
|
locs.reverse();
|
|
locs.forEach(function (loc) {
|
|
var startIndex = loc.start.column,
|
|
//start.line is 1-based, not 0 based.
|
|
lineIndex = loc.start.line - 1,
|
|
line = lines[lineIndex];
|
|
|
|
lines[lineIndex] = line.substring(0, startIndex) +
|
|
ns + '.' +
|
|
line.substring(startIndex,
|
|
line.length);
|
|
});
|
|
|
|
fileContents = lines.join('\n');
|
|
}
|
|
|
|
return fileContents;
|
|
};
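    /* Illustrative sketch, not part of r.js; the namespace is made up.
     * Top-level require/define calls gain the namespace prefix, while
     * require() calls nested inside a define() factory are left alone:
     *
     *     parse.renameNamespace(
     *         "require(['a']);\ndefine(['b'], function (b) {});", 'myNs');
     *     // => "myNs.require(['a']);\nmyNs.define(['b'], function (b) {});"
     */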
|
|
|
|
/**
|
|
* Finds all dependencies specified in dependency arrays and inside
|
|
* simplified commonjs wrappers.
|
|
* @param {String} fileName
|
|
* @param {String} fileContents
|
|
*
|
|
* @returns {Array} an array of dependency strings. The dependencies
|
|
* have not been normalized, they may be relative IDs.
|
|
*/
|
|
parse.findDependencies = function (fileName, fileContents, options) {
|
|
var dependencies = [],
|
|
astRoot = esprima.parse(fileContents);
|
|
|
|
parse.recurse(astRoot, function (callName, config, name, deps) {
|
|
if (deps) {
|
|
dependencies = dependencies.concat(deps);
|
|
}
|
|
}, options);
|
|
|
|
return dependencies;
|
|
};
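    /* Illustrative sketch, not part of r.js: the returned IDs are exactly
     * as written in the source, so relative and plugin-prefixed IDs come
     * back unnormalized:
     *
     *     parse.findDependencies('f.js',
     *         "define(['./a', 'text!tmpl.html'], function (a, t) {});");
     *     // => ['./a', 'text!tmpl.html']
     */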
|
|
|
|
/**
|
|
* Finds only CJS dependencies, ones that are the form
|
|
* require('stringLiteral')
|
|
*/
|
|
parse.findCjsDependencies = function (fileName, fileContents) {
|
|
var dependencies = [];
|
|
|
|
traverse(esprima.parse(fileContents), function (node) {
|
|
var arg;
|
|
|
|
if (node && node.type === 'CallExpression' && node.callee &&
|
|
node.callee.type === 'Identifier' &&
|
|
node.callee.name === 'require' && node[argPropName] &&
|
|
node[argPropName].length === 1) {
|
|
arg = node[argPropName][0];
|
|
if (arg.type === 'Literal') {
|
|
dependencies.push(arg.value);
|
|
}
|
|
}
|
|
});
|
|
|
|
return dependencies;
|
|
};
|
|
|
|
//function define() {}
|
|
parse.hasDefDefine = function (node) {
|
|
return node.type === 'FunctionDeclaration' && node.id &&
|
|
node.id.type === 'Identifier' && node.id.name === 'define';
|
|
};
|
|
|
|
//define.amd = ...
|
|
parse.hasDefineAmd = function (node) {
|
|
return node && node.type === 'AssignmentExpression' &&
|
|
node.left && node.left.type === 'MemberExpression' &&
|
|
node.left.object && node.left.object.name === 'define' &&
|
|
node.left.property && node.left.property.name === 'amd';
|
|
};
|
|
|
|
//define.amd reference, as in: if (define.amd)
|
|
parse.refsDefineAmd = function (node) {
|
|
return node && node.type === 'MemberExpression' &&
|
|
node.object && node.object.name === 'define' &&
|
|
node.object.type === 'Identifier' &&
|
|
node.property && node.property.name === 'amd' &&
|
|
node.property.type === 'Identifier';
|
|
};
|
|
|
|
//require(), requirejs(), require.config() and requirejs.config()
|
|
parse.hasRequire = function (node) {
|
|
var callName,
|
|
c = node && node.callee;
|
|
|
|
if (node && node.type === 'CallExpression' && c) {
|
|
if (c.type === 'Identifier' &&
|
|
(c.name === 'require' ||
|
|
c.name === 'requirejs')) {
|
|
//A require/requirejs({}, ...) call
|
|
callName = c.name;
|
|
} else if (c.type === 'MemberExpression' &&
|
|
c.object &&
|
|
c.object.type === 'Identifier' &&
|
|
(c.object.name === 'require' ||
|
|
c.object.name === 'requirejs') &&
|
|
c.property && c.property.name === 'config') {
|
|
// require/requirejs.config({}) call
|
|
callName = c.object.name + 'Config';
|
|
}
|
|
}
|
|
|
|
return callName;
|
|
};
|
|
|
|
//define()
|
|
parse.hasDefine = function (node) {
|
|
return node && node.type === 'CallExpression' && node.callee &&
|
|
node.callee.type === 'Identifier' &&
|
|
node.callee.name === 'define';
|
|
};
|
|
|
|
/**
|
|
* If there is a named define in the file, returns the name. Does not
|
|
     * scan for multiple names, just the first one.
|
|
*/
|
|
parse.getNamedDefine = function (fileContents) {
|
|
var name;
|
|
traverse(esprima.parse(fileContents), function (node) {
|
|
if (node && node.type === 'CallExpression' && node.callee &&
|
|
node.callee.type === 'Identifier' &&
|
|
node.callee.name === 'define' &&
|
|
node[argPropName] && node[argPropName][0] &&
|
|
node[argPropName][0].type === 'Literal') {
|
|
name = node[argPropName][0].value;
|
|
return false;
|
|
}
|
|
});
|
|
|
|
return name;
|
|
};
|
|
|
|
/**
|
|
* Finds all the named define module IDs in a file.
|
|
*/
|
|
parse.getAllNamedDefines = function (fileContents, excludeMap) {
|
|
var names = [];
|
|
parse.recurse(esprima.parse(fileContents),
|
|
function (callName, config, name, deps, node, factoryIdentifier, fnExpScope) {
|
|
if (callName === 'define' && name) {
|
|
if (!excludeMap.hasOwnProperty(name)) {
|
|
names.push(name);
|
|
}
|
|
}
|
|
|
|
//If a UMD definition that points to a factory that is an Identifier,
|
|
//indicate processing should not traverse inside the UMD definition.
|
|
if (callName === 'define' && factoryIdentifier && hasProp(fnExpScope, factoryIdentifier)) {
|
|
return factoryIdentifier;
|
|
}
|
|
|
|
//If define was found, no need to dive deeper, unless
|
|
//the config explicitly wants to dig deeper.
|
|
return true;
|
|
}, {});
|
|
|
|
return names;
|
|
};
|
|
|
|
/**
|
|
* Determines if define(), require({}|[]) or requirejs was called in the
|
|
* file. Also finds out if define() is declared and if define.amd is called.
|
|
*/
|
|
parse.usesAmdOrRequireJs = function (fileName, fileContents) {
|
|
var uses;
|
|
|
|
traverse(esprima.parse(fileContents), function (node) {
|
|
var type, callName, arg;
|
|
|
|
if (parse.hasDefDefine(node)) {
|
|
//function define() {}
|
|
type = 'declaresDefine';
|
|
} else if (parse.hasDefineAmd(node)) {
|
|
type = 'defineAmd';
|
|
} else {
|
|
callName = parse.hasRequire(node);
|
|
if (callName) {
|
|
arg = node[argPropName] && node[argPropName][0];
|
|
if (arg && (arg.type === 'ObjectExpression' ||
|
|
arg.type === 'ArrayExpression')) {
|
|
type = callName;
|
|
}
|
|
} else if (parse.hasDefine(node)) {
|
|
type = 'define';
|
|
}
|
|
}
|
|
|
|
if (type) {
|
|
if (!uses) {
|
|
uses = {};
|
|
}
|
|
uses[type] = true;
|
|
}
|
|
});
|
|
|
|
return uses;
|
|
};
|
|
|
|
/**
|
|
* Determines if require(''), exports.x =, module.exports =,
|
|
* __dirname, __filename are used. So, not strictly traditional CommonJS,
|
|
* also checks for Node variants.
|
|
*/
|
|
parse.usesCommonJs = function (fileName, fileContents) {
|
|
var uses = null,
|
|
assignsExports = false;
|
|
traverse(esprima.parse(fileContents), function (node) {
|
|
var type,
|
|
exp = node.expression || node.init;
|
|
|
|
if (node.type === 'Identifier' &&
|
|
(node.name === '__dirname' || node.name === '__filename')) {
|
|
type = node.name.substring(2);
|
|
} else if (node.type === 'VariableDeclarator' && node.id &&
|
|
node.id.type === 'Identifier' &&
|
|
node.id.name === 'exports') {
|
|
//Hmm, a variable assignment for exports, so does not use cjs
|
|
//exports.
|
|
type = 'varExports';
|
|
} else if (exp && exp.type === 'AssignmentExpression' && exp.left &&
|
|
exp.left.type === 'MemberExpression' && exp.left.object) {
|
|
if (exp.left.object.name === 'module' && exp.left.property &&
|
|
exp.left.property.name === 'exports') {
|
|
type = 'moduleExports';
|
|
} else if (exp.left.object.name === 'exports' &&
|
|
exp.left.property) {
|
|
type = 'exports';
|
|
} else if (exp.left.object.type === 'MemberExpression' &&
|
|
exp.left.object.object.name === 'module' &&
|
|
exp.left.object.property.name === 'exports' &&
|
|
exp.left.object.property.type === 'Identifier') {
|
|
type = 'moduleExports';
|
|
}
|
|
|
|
} else if (node && node.type === 'CallExpression' && node.callee &&
|
|
node.callee.type === 'Identifier' &&
|
|
node.callee.name === 'require' && node[argPropName] &&
|
|
node[argPropName].length === 1 &&
|
|
node[argPropName][0].type === 'Literal') {
|
|
type = 'require';
|
|
}
|
|
|
|
if (type) {
|
|
if (type === 'varExports') {
|
|
assignsExports = true;
|
|
} else if (type !== 'exports' || !assignsExports) {
|
|
if (!uses) {
|
|
uses = {};
|
|
}
|
|
uses[type] = true;
|
|
}
|
|
}
|
|
});
|
|
|
|
return uses;
|
|
};
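    /* Illustrative sketch, not part of r.js: the returned object only has
     * keys for the flavors actually seen,
     *
     *     parse.usesCommonJs('f.js',
     *         "var fs = require('fs');\nexports.read = function () {};");
     *     // => { require: true, exports: true }
     *
     * and it stays null for a file that uses none of them.
     */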
|
|
|
|
|
|
parse.findRequireDepNames = function (node, deps) {
|
|
traverse(node, function (node) {
|
|
var arg;
|
|
|
|
if (node && node.type === 'CallExpression' && node.callee &&
|
|
node.callee.type === 'Identifier' &&
|
|
node.callee.name === 'require' &&
|
|
node[argPropName] && node[argPropName].length === 1) {
|
|
|
|
arg = node[argPropName][0];
|
|
if (arg.type === 'Literal') {
|
|
deps.push(arg.value);
|
|
}
|
|
}
|
|
});
|
|
};
|
|
|
|
/**
|
|
* Determines if a specific node is a valid require or define/require.def
|
|
* call.
|
|
     * @param {Object} node the AST node to check.
|
|
* @param {Function} onMatch a function to call when a match is found.
|
|
* It is passed the match name, and the config, name, deps possible args.
|
|
* The config, name and deps args are not normalized.
|
|
* @param {Object} fnExpScope an object whose keys are all function
|
|
* expression identifiers that should be in scope. Useful for UMD wrapper
|
|
* detection to avoid parsing more into the wrapped UMD code.
|
|
*
|
|
     * @returns the value returned by onMatch when the node is a valid
     * require/define call, otherwise undefined.
|
|
*/
|
|
parse.parseNode = function (node, onMatch, fnExpScope) {
|
|
var name, deps, cjsDeps, arg, factory, exp, refsDefine, bodyNode,
|
|
args = node && node[argPropName],
|
|
callName = parse.hasRequire(node),
|
|
isUmd = false;
|
|
|
|
if (callName === 'require' || callName === 'requirejs') {
|
|
//A plain require/requirejs call
|
|
arg = node[argPropName] && node[argPropName][0];
|
|
if (arg && arg.type !== 'ArrayExpression') {
|
|
if (arg.type === 'ObjectExpression') {
|
|
//A config call, try the second arg.
|
|
arg = node[argPropName][1];
|
|
}
|
|
}
|
|
|
|
deps = getValidDeps(arg);
|
|
if (!deps) {
|
|
return;
|
|
}
|
|
|
|
return onMatch("require", null, null, deps, node);
|
|
} else if (parse.hasDefine(node) && args && args.length) {
|
|
name = args[0];
|
|
deps = args[1];
|
|
factory = args[2];
|
|
|
|
if (name.type === 'ArrayExpression') {
|
|
//No name, adjust args
|
|
factory = deps;
|
|
deps = name;
|
|
name = null;
|
|
} else if (isFnExpression(name)) {
|
|
//Just the factory, no name or deps
|
|
factory = name;
|
|
name = deps = null;
|
|
} else if (name.type === 'Identifier' && args.length === 1 &&
|
|
hasProp(fnExpScope, name.name)) {
|
|
//define(e) where e is a UMD identifier for the factory
|
|
//function.
|
|
isUmd = true;
|
|
factory = name;
|
|
name = null;
|
|
} else if (name.type !== 'Literal') {
|
|
//An object literal, just null out
|
|
name = deps = factory = null;
|
|
}
|
|
|
|
if (name && name.type === 'Literal' && deps) {
|
|
if (isFnExpression(deps)) {
|
|
//deps is the factory
|
|
factory = deps;
|
|
deps = null;
|
|
} else if (deps.type === 'ObjectExpression') {
|
|
//deps is object literal, null out
|
|
deps = factory = null;
|
|
} else if (deps.type === 'Identifier') {
|
|
if (args.length === 2) {
|
|
//define('id', factory)
|
|
deps = factory = null;
|
|
} else if (args.length === 3 && isFnExpression(factory)) {
|
|
//define('id', depsIdentifier, factory)
|
|
//Since identifier, cannot know the deps, but do not
|
|
//error out, assume they are taken care of outside of
|
|
//static parsing.
|
|
deps = null;
|
|
}
|
|
}
|
|
}
|
|
|
|
if (deps && deps.type === 'ArrayExpression') {
|
|
deps = getValidDeps(deps);
|
|
} else if (isFnExpression(factory)) {
|
|
//If no deps and a factory function, could be a commonjs sugar
|
|
//wrapper, scan the function for dependencies.
|
|
cjsDeps = parse.getAnonDepsFromNode(factory);
|
|
if (cjsDeps.length) {
|
|
deps = cjsDeps;
|
|
}
|
|
} else if (deps || (factory && !isUmd)) {
|
|
//Does not match the shape of an AMD call.
|
|
return;
|
|
}
|
|
|
|
//Just save off the name as a string instead of an AST object.
|
|
if (name && name.type === 'Literal') {
|
|
name = name.value;
|
|
}
|
|
|
|
return onMatch("define", null, name, deps, node,
|
|
(factory && factory.type === 'Identifier' ? factory.name : undefined),
|
|
fnExpScope);
|
|
} else if (node.type === 'CallExpression' && node.callee &&
|
|
isFnExpression(node.callee) &&
|
|
node.callee.body && node.callee.body.body &&
|
|
node.callee.body.body.length === 1 &&
|
|
node.callee.body.body[0].type === 'IfStatement') {
|
|
bodyNode = node.callee.body.body[0];
|
|
//Look for a define(Identifier) case, but only if inside an
|
|
//if that has a define.amd test
|
|
if (bodyNode.consequent && bodyNode.consequent.body) {
|
|
exp = bodyNode.consequent.body[0];
|
|
if (exp.type === 'ExpressionStatement' && exp.expression &&
|
|
parse.hasDefine(exp.expression) &&
|
|
exp.expression.arguments &&
|
|
exp.expression.arguments.length === 1 &&
|
|
exp.expression.arguments[0].type === 'Identifier') {
|
|
|
|
//Calls define(Identifier) as first statement in body.
|
|
//Confirm the if test references define.amd
|
|
traverse(bodyNode.test, function (node) {
|
|
if (parse.refsDefineAmd(node)) {
|
|
refsDefine = true;
|
|
return false;
|
|
}
|
|
});
|
|
|
|
if (refsDefine) {
|
|
return onMatch("define", null, null, null, exp.expression,
|
|
exp.expression.arguments[0].name, fnExpScope);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
};
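    /* Illustrative sketch, not part of r.js: for the AST node of
     *
     *     define('mod', ['a'], function (a) {});
     *
     * parseNode calls onMatch('define', null, 'mod', ['a'], node,
     * undefined, fnExpScope); a node that is neither a require nor a
     * define call falls through and yields undefined.
     */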
|
|
|
|
/**
|
|
* Converts an AST node into a JS source string by extracting
|
|
* the node's location from the given contents string. Assumes
|
|
* esprima.parse() with loc was done.
|
|
* @param {String} contents
|
|
* @param {Object} node
|
|
* @returns {String} a JS source string.
|
|
*/
|
|
parse.nodeToString = function (contents, node) {
|
|
var extracted,
|
|
loc = node.loc,
|
|
lines = contents.split('\n'),
|
|
firstLine = loc.start.line > 1 ?
|
|
lines.slice(0, loc.start.line - 1).join('\n') + '\n' :
|
|
'',
|
|
preamble = firstLine +
|
|
lines[loc.start.line - 1].substring(0, loc.start.column);
|
|
|
|
if (loc.start.line === loc.end.line) {
|
|
extracted = lines[loc.start.line - 1].substring(loc.start.column,
|
|
loc.end.column);
|
|
} else {
|
|
extracted = lines[loc.start.line - 1].substring(loc.start.column) +
|
|
'\n' +
|
|
lines.slice(loc.start.line, loc.end.line - 1).join('\n') +
|
|
'\n' +
|
|
lines[loc.end.line - 1].substring(0, loc.end.column);
|
|
}
|
|
|
|
return {
|
|
value: extracted,
|
|
range: [
|
|
preamble.length,
|
|
preamble.length + extracted.length
|
|
]
|
|
};
|
|
};
|
|
|
|
/**
|
|
* Extracts license comments from JS text.
|
|
* @param {String} fileName
|
|
* @param {String} contents
|
|
* @returns {String} a string of license comments.
|
|
*/
|
|
parse.getLicenseComments = function (fileName, contents) {
|
|
var commentNode, refNode, subNode, value, i, j,
|
|
//xpconnect's Reflect does not support comment or range, but
|
|
//prefer continued operation vs strict parity of operation,
|
|
//as license comments can be expressed in other ways, like
|
|
//via wrap args, or linked via sourcemaps.
|
|
ast = esprima.parse(contents, {
|
|
comment: true,
|
|
range: true
|
|
}),
|
|
result = '',
|
|
existsMap = {},
|
|
lineEnd = contents.indexOf('\r') === -1 ? '\n' : '\r\n';
|
|
|
|
if (ast.comments) {
|
|
for (i = 0; i < ast.comments.length; i++) {
|
|
commentNode = ast.comments[i];
|
|
|
|
if (commentNode.type === 'Line') {
|
|
value = '//' + commentNode.value + lineEnd;
|
|
refNode = commentNode;
|
|
|
|
if (i + 1 >= ast.comments.length) {
|
|
value += lineEnd;
|
|
} else {
|
|
//Look for immediately adjacent single line comments
|
|
                        //since it could be from a multiple line comment made out
|
|
//of single line comments. Like this comment.
|
|
for (j = i + 1; j < ast.comments.length; j++) {
|
|
subNode = ast.comments[j];
|
|
if (subNode.type === 'Line' &&
|
|
subNode.range[0] === refNode.range[1] + 1) {
|
|
//Adjacent single line comment. Collect it.
|
|
value += '//' + subNode.value + lineEnd;
|
|
refNode = subNode;
|
|
} else {
|
|
//No more single line comment blocks. Break out
|
|
//and continue outer looping.
|
|
break;
|
|
}
|
|
}
|
|
value += lineEnd;
|
|
i = j - 1;
|
|
}
|
|
} else {
|
|
value = '/*' + commentNode.value + '*/' + lineEnd + lineEnd;
|
|
}
|
|
|
|
if (!existsMap[value] && (value.indexOf('license') !== -1 ||
|
|
(commentNode.type === 'Block' &&
|
|
value.indexOf('/*!') === 0) ||
|
|
value.indexOf('opyright') !== -1 ||
|
|
value.indexOf('(c)') !== -1)) {
|
|
|
|
result += value;
|
|
existsMap[value] = true;
|
|
}
|
|
|
|
}
|
|
}
|
|
|
|
return result;
|
|
};
|
|
|
|
return parse;
|
|
});
|
|
/*global define */
|
|
|
|
define('transform', [ './esprimaAdapter', './parse', 'logger', 'lang'],
|
|
function (esprima, parse, logger, lang) {
|
|
'use strict';
|
|
var transform,
|
|
baseIndentRegExp = /^([ \t]+)/,
|
|
indentRegExp = /\{[\r\n]+([ \t]+)/,
|
|
keyRegExp = /^[_A-Za-z]([A-Za-z\d_]*)$/,
|
|
bulkIndentRegExps = {
|
|
'\n': /\n/g,
|
|
'\r\n': /\r\n/g
|
|
};
|
|
|
|
function applyIndent(str, indent, lineReturn) {
|
|
var regExp = bulkIndentRegExps[lineReturn];
|
|
return str.replace(regExp, '$&' + indent);
|
|
}
|
|
|
|
transform = {
|
|
toTransport: function (namespace, moduleName, path, contents, onFound, options) {
|
|
options = options || {};
|
|
|
|
var astRoot, contentLines, modLine,
|
|
foundAnon,
|
|
scanCount = 0,
|
|
scanReset = false,
|
|
defineInfos = [],
|
|
applySourceUrl = function (contents) {
|
|
if (options.useSourceUrl) {
|
|
contents = 'eval("' + lang.jsEscape(contents) +
|
|
'\\n//# sourceURL=' + (path.indexOf('/') === 0 ? '' : '/') +
|
|
path +
|
|
'");\n';
|
|
}
|
|
return contents;
|
|
};
|
|
|
|
try {
|
|
astRoot = esprima.parse(contents, {
|
|
loc: true
|
|
});
|
|
} catch (e) {
|
|
logger.trace('toTransport skipping ' + path + ': ' +
|
|
e.toString());
|
|
return contents;
|
|
}
|
|
|
|
//Find the define calls and their position in the files.
|
|
parse.traverse(astRoot, function (node) {
|
|
var args, firstArg, firstArgLoc, factoryNode,
|
|
needsId, depAction, foundId, init,
|
|
sourceUrlData, range,
|
|
namespaceExists = false;
|
|
|
|
// If a bundle script with a define declaration, do not
|
|
// parse any further at this level. Likely a built layer
|
|
// by some other tool.
|
|
if (node.type === 'VariableDeclarator' &&
|
|
node.id && node.id.name === 'define' &&
|
|
node.id.type === 'Identifier') {
|
|
init = node.init;
|
|
if (init && init.callee &&
|
|
init.callee.type === 'CallExpression' &&
|
|
init.callee.callee &&
|
|
init.callee.callee.type === 'Identifier' &&
|
|
init.callee.callee.name === 'require' &&
|
|
init.callee.arguments && init.callee.arguments.length === 1 &&
|
|
init.callee.arguments[0].type === 'Literal' &&
|
|
init.callee.arguments[0].value &&
|
|
init.callee.arguments[0].value.indexOf('amdefine') !== -1) {
|
|
// the var define = require('amdefine')(module) case,
|
|
// keep going in that case.
|
|
} else {
|
|
return false;
|
|
}
|
|
}
|
|
|
|
namespaceExists = namespace &&
|
|
node.type === 'CallExpression' &&
|
|
node.callee && node.callee.object &&
|
|
node.callee.object.type === 'Identifier' &&
|
|
node.callee.object.name === namespace &&
|
|
node.callee.property.type === 'Identifier' &&
|
|
node.callee.property.name === 'define';
|
|
|
|
if (namespaceExists || parse.isDefineNodeWithArgs(node)) {
|
|
//The arguments are where its at.
|
|
args = node.arguments;
|
|
if (!args || !args.length) {
|
|
return;
|
|
}
|
|
|
|
firstArg = args[0];
|
|
firstArgLoc = firstArg.loc;
|
|
|
|
if (args.length === 1) {
|
|
if (firstArg.type === 'Identifier') {
|
|
//The define(factory) case, but
|
|
//only allow it if one Identifier arg,
|
|
//to limit impact of false positives.
|
|
needsId = true;
|
|
depAction = 'empty';
|
|
} else if (parse.isFnExpression(firstArg)) {
|
|
//define(function(){})
|
|
factoryNode = firstArg;
|
|
needsId = true;
|
|
depAction = 'scan';
|
|
} else if (firstArg.type === 'ObjectExpression') {
|
|
//define({});
|
|
needsId = true;
|
|
depAction = 'skip';
|
|
} else if (firstArg.type === 'Literal' &&
|
|
typeof firstArg.value === 'number') {
|
|
//define('12345');
|
|
needsId = true;
|
|
depAction = 'skip';
|
|
} else if (firstArg.type === 'UnaryExpression' &&
|
|
firstArg.operator === '-' &&
|
|
firstArg.argument &&
|
|
firstArg.argument.type === 'Literal' &&
|
|
typeof firstArg.argument.value === 'number') {
|
|
//define('-12345');
|
|
needsId = true;
|
|
depAction = 'skip';
|
|
} else if (firstArg.type === 'MemberExpression' &&
|
|
firstArg.object &&
|
|
firstArg.property &&
|
|
firstArg.property.type === 'Identifier') {
|
|
//define(this.key);
|
|
needsId = true;
|
|
depAction = 'empty';
|
|
}
|
|
} else if (firstArg.type === 'ArrayExpression') {
|
|
//define([], ...);
|
|
needsId = true;
|
|
depAction = 'skip';
|
|
} else if (firstArg.type === 'Literal' &&
|
|
typeof firstArg.value === 'string') {
|
|
//define('string', ....)
|
|
//Already has an ID.
|
|
needsId = false;
|
|
if (args.length === 2 &&
|
|
parse.isFnExpression(args[1])) {
|
|
//Needs dependency scanning.
|
|
factoryNode = args[1];
|
|
depAction = 'scan';
|
|
} else {
|
|
depAction = 'skip';
|
|
}
|
|
} else {
|
|
//Unknown define entity, keep looking, even
|
|
//in the subtree for this node.
|
|
return;
|
|
}
|
|
|
|
range = {
|
|
foundId: foundId,
|
|
needsId: needsId,
|
|
depAction: depAction,
|
|
namespaceExists: namespaceExists,
|
|
node: node,
|
|
defineLoc: node.loc,
|
|
firstArgLoc: firstArgLoc,
|
|
factoryNode: factoryNode,
|
|
sourceUrlData: sourceUrlData
|
|
};
|
|
|
|
//Only transform ones that do not have IDs. If it has an
|
|
//ID but no dependency array, assume it is something like
|
|
//a phonegap implementation, that has its own internal
|
|
//define that cannot handle dependency array constructs,
|
|
//and if it is a named module, then it means it has been
|
|
//set for transport form.
|
|
if (range.needsId) {
|
|
if (foundAnon) {
|
|
logger.trace(path + ' has more than one anonymous ' +
|
|
'define. May be a built file from another ' +
|
|
                                'build system, like Ender. Skipping normalization.');
|
|
defineInfos = [];
|
|
return false;
|
|
} else {
|
|
foundAnon = range;
|
|
defineInfos.push(range);
|
|
}
|
|
} else if (depAction === 'scan') {
|
|
scanCount += 1;
|
|
if (scanCount > 1) {
|
|
//Just go back to an array that just has the
|
|
//anon one, since this is an already optimized
|
|
//file like the phonegap one.
|
|
if (!scanReset) {
|
|
defineInfos = foundAnon ? [foundAnon] : [];
|
|
scanReset = true;
|
|
}
|
|
} else {
|
|
defineInfos.push(range);
|
|
}
|
|
}
|
|
}
|
|
});
|
|
|
|
|
|
if (!defineInfos.length) {
|
|
return applySourceUrl(contents);
|
|
}
|
|
|
|
//Reverse the matches, need to start from the bottom of
|
|
//the file to modify it, so that the ranges are still true
|
|
//further up.
|
|
defineInfos.reverse();
|
|
|
|
contentLines = contents.split('\n');
|
|
|
|
modLine = function (loc, contentInsertion) {
|
|
var startIndex = loc.start.column,
|
|
//start.line is 1-based, not 0 based.
|
|
lineIndex = loc.start.line - 1,
|
|
line = contentLines[lineIndex];
|
|
contentLines[lineIndex] = line.substring(0, startIndex) +
|
|
contentInsertion +
|
|
line.substring(startIndex,
|
|
line.length);
|
|
};
|
|
|
|
defineInfos.forEach(function (info) {
|
|
var deps,
|
|
contentInsertion = '',
|
|
depString = '';
|
|
|
|
//Do the modifications "backwards", in other words, start with the
|
|
//one that is farthest down and work up, so that the ranges in the
|
|
//defineInfos still apply. So that means deps, id, then namespace.
|
|
if (info.needsId && moduleName) {
|
|
contentInsertion += "'" + moduleName + "',";
|
|
}
|
|
|
|
if (info.depAction === 'scan') {
|
|
deps = parse.getAnonDepsFromNode(info.factoryNode);
|
|
|
|
if (deps.length) {
|
|
depString = '[' + deps.map(function (dep) {
|
|
return "'" + dep + "'";
|
|
}) + ']';
|
|
} else {
|
|
depString = '[]';
|
|
}
|
|
depString += ',';
|
|
|
|
if (info.factoryNode) {
|
|
//Already have a named module, need to insert the
|
|
//dependencies after the name.
|
|
modLine(info.factoryNode.loc, depString);
|
|
} else {
|
|
contentInsertion += depString;
|
|
}
|
|
}
|
|
|
|
if (contentInsertion) {
|
|
modLine(info.firstArgLoc, contentInsertion);
|
|
}
|
|
|
|
                //Do namespace last so that it does not mess up the ranges
|
|
//used above.
|
|
if (namespace && !info.namespaceExists) {
|
|
modLine(info.defineLoc, namespace + '.');
|
|
}
|
|
|
|
//Notify any listener for the found info
|
|
if (onFound) {
|
|
onFound(info);
|
|
}
|
|
});
|
|
|
|
contents = contentLines.join('\n');
|
|
|
|
return applySourceUrl(contents);
|
|
},
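        /* Illustrative sketch, not part of r.js; the module name and path
         * are made up. toTransport names an anonymous define and hoists the
         * scanned CommonJS dependencies into a dependency array:
         *
         *     transform.toTransport(null, 'foo', 'www/js/foo.js',
         *         "define(function (require) { var a = require('a'); });");
         *     // => define('foo',['require','a'],function (require) { ... });
         */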
|
|
|
|
/**
|
|
* Modify the contents of a require.config/requirejs.config call. This
|
|
* call will LOSE any existing comments that are in the config string.
|
|
*
|
|
* @param {String} fileContents String that may contain a config call
|
|
* @param {Function} onConfig Function called when the first config
|
|
* call is found. It will be passed an Object which is the current
|
|
* config, and the onConfig function should return an Object to use
|
|
* as the config.
|
|
* @return {String} the fileContents with the config changes applied.
|
|
*/
|
|
modifyConfig: function (fileContents, onConfig) {
|
|
var details = parse.findConfig(fileContents),
|
|
config = details.config;
|
|
|
|
if (config) {
|
|
config = onConfig(config);
|
|
if (config) {
|
|
return transform.serializeConfig(config,
|
|
fileContents,
|
|
details.range[0],
|
|
details.range[1],
|
|
{
|
|
quote: details.quote
|
|
});
|
|
}
|
|
}
|
|
|
|
return fileContents;
|
|
},
|
|
|
|
serializeConfig: function (config, fileContents, start, end, options) {
|
|
//Calculate base level of indent
|
|
var indent, match, configString, outDentRegExp,
|
|
baseIndent = '',
|
|
startString = fileContents.substring(0, start),
|
|
existingConfigString = fileContents.substring(start, end),
|
|
lineReturn = existingConfigString.indexOf('\r') === -1 ? '\n' : '\r\n',
|
|
lastReturnIndex = startString.lastIndexOf('\n');
|
|
|
|
//Get the basic amount of indent for the require config call.
|
|
if (lastReturnIndex === -1) {
|
|
lastReturnIndex = 0;
|
|
}
|
|
|
|
match = baseIndentRegExp.exec(startString.substring(lastReturnIndex + 1, start));
|
|
if (match && match[1]) {
|
|
baseIndent = match[1];
|
|
}
|
|
|
|
//Calculate internal indentation for config
|
|
match = indentRegExp.exec(existingConfigString);
|
|
if (match && match[1]) {
|
|
indent = match[1];
|
|
}
|
|
|
|
            if (!indent || indent.length < baseIndent.length) {
|
|
indent = ' ';
|
|
} else {
|
|
indent = indent.substring(baseIndent.length);
|
|
}
|
|
|
|
outDentRegExp = new RegExp('(' + lineReturn + ')' + indent, 'g');
|
|
|
|
configString = transform.objectToString(config, {
|
|
indent: indent,
|
|
lineReturn: lineReturn,
|
|
outDentRegExp: outDentRegExp,
|
|
quote: options && options.quote
|
|
});
|
|
|
|
//Add in the base indenting level.
|
|
configString = applyIndent(configString, baseIndent, lineReturn);
|
|
|
|
return startString + configString + fileContents.substring(end);
|
|
},
|
|
|
|
/**
|
|
* Tries converting a JS object to a string. This will likely suck, and
|
|
* is tailored to the type of config expected in a loader config call.
|
|
* So, hasOwnProperty fields, strings, numbers, arrays and functions,
|
|
* no weird recursively referenced stuff.
|
|
* @param {Object} obj the object to convert
|
|
* @param {Object} options options object with the following values:
|
|
* {String} indent the indentation to use for each level
|
|
* {String} lineReturn the type of line return to use
|
|
* {outDentRegExp} outDentRegExp the regexp to use to outdent functions
|
|
* {String} quote the quote type to use, ' or ". Optional. Default is "
|
|
* @param {String} totalIndent the total indent to print for this level
|
|
* @return {String} a string representation of the object.
|
|
*/
|
|
objectToString: function (obj, options, totalIndent) {
|
|
var startBrace, endBrace, nextIndent,
|
|
first = true,
|
|
value = '',
|
|
lineReturn = options.lineReturn,
|
|
indent = options.indent,
|
|
outDentRegExp = options.outDentRegExp,
|
|
quote = options.quote || '"';
|
|
|
|
totalIndent = totalIndent || '';
|
|
nextIndent = totalIndent + indent;
|
|
|
|
if (obj === null) {
|
|
value = 'null';
|
|
} else if (obj === undefined) {
|
|
value = 'undefined';
|
|
} else if (typeof obj === 'number' || typeof obj === 'boolean') {
|
|
value = obj;
|
|
} else if (typeof obj === 'string') {
|
|
//Use double quotes in case the config may also work as JSON.
|
|
value = quote + lang.jsEscape(obj) + quote;
|
|
} else if (lang.isArray(obj)) {
|
|
lang.each(obj, function (item, i) {
|
|
value += (i !== 0 ? ',' + lineReturn : '' ) +
|
|
nextIndent +
|
|
transform.objectToString(item,
|
|
options,
|
|
nextIndent);
|
|
});
|
|
|
|
startBrace = '[';
|
|
endBrace = ']';
|
|
} else if (lang.isFunction(obj) || lang.isRegExp(obj)) {
|
|
//The outdent regexp just helps pretty up the conversion
|
|
//just in node. Rhino strips comments and does a different
|
|
//indent scheme for Function toString, so not really helpful
|
|
//there.
|
|
value = obj.toString().replace(outDentRegExp, '$1');
|
|
} else {
|
|
//An object
|
|
lang.eachProp(obj, function (v, prop) {
|
|
value += (first ? '': ',' + lineReturn) +
|
|
nextIndent +
|
|
(keyRegExp.test(prop) ? prop : quote + lang.jsEscape(prop) + quote )+
|
|
': ' +
|
|
transform.objectToString(v,
|
|
options,
|
|
nextIndent);
|
|
first = false;
|
|
});
|
|
startBrace = '{';
|
|
endBrace = '}';
|
|
}
|
|
|
|
if (startBrace) {
|
|
value = startBrace +
|
|
lineReturn +
|
|
value +
|
|
lineReturn + totalIndent +
|
|
endBrace;
|
|
}
|
|
|
|
return value;
|
|
}
|
|
};
|
|
|
|
return transform;
|
|
});
|
|
/*jslint regexp: true, plusplus: true */
|
|
/*global define: false */
|
|
|
|
define('pragma', ['parse', 'logger'], function (parse, logger) {
|
|
'use strict';
|
|
function Temp() {}
|
|
|
|
function create(obj, mixin) {
|
|
Temp.prototype = obj;
|
|
var temp = new Temp(), prop;
|
|
|
|
//Avoid any extra memory hanging around
|
|
Temp.prototype = null;
|
|
|
|
if (mixin) {
|
|
for (prop in mixin) {
|
|
if (mixin.hasOwnProperty(prop) && !temp.hasOwnProperty(prop)) {
|
|
temp[prop] = mixin[prop];
|
|
}
|
|
}
|
|
}
|
|
|
|
return temp; // Object
|
|
}
|
|
|
|
var pragma = {
|
|
conditionalRegExp: /(exclude|include)Start\s*\(\s*["'](\w+)["']\s*,(.*)\)/,
|
|
useStrictRegExp: /(^|[^{]\r?\n)['"]use strict['"];/g,
|
|
hasRegExp: /has\s*\(\s*['"]([^'"]+)['"]\s*\)/g,
|
|
configRegExp: /(^|[^\.])(requirejs|require)(\.config)\s*\(/g,
|
|
nsWrapRegExp: /\/\*requirejs namespace: true \*\//,
|
|
apiDefRegExp: /var requirejs,\s*require,\s*define;/,
|
|
defineCheckRegExp: /typeof(\s+|\s*\(\s*)define(\s*\))?\s*===?\s*["']function["']\s*&&\s*define\s*\.\s*amd/g,
|
|
defineStringCheckRegExp: /typeof\s+define\s*===?\s*["']function["']\s*&&\s*define\s*\[\s*["']amd["']\s*\]/g,
|
|
defineTypeFirstCheckRegExp: /\s*["']function["']\s*==(=?)\s*typeof\s+define\s*&&\s*define\s*\.\s*amd/g,
|
|
defineJQueryRegExp: /typeof\s+define\s*===?\s*["']function["']\s*&&\s*define\s*\.\s*amd\s*&&\s*define\s*\.\s*amd\s*\.\s*jQuery/g,
|
|
defineHasRegExp: /typeof\s+define\s*==(=)?\s*['"]function['"]\s*&&\s*typeof\s+define\.amd\s*==(=)?\s*['"]object['"]\s*&&\s*define\.amd/g,
|
|
defineTernaryRegExp: /typeof\s+define\s*===?\s*['"]function["']\s*&&\s*define\s*\.\s*amd\s*\?\s*define/,
|
|
defineExistsRegExp: /\s+typeof\s+define\s*!==?\s*['"]undefined["']\s*/,
|
|
defineExistsAndAmdRegExp: /typeof\s+define\s*!==?\s*['"]undefined["']\s*&&\s*define\s*\.\s*amd\s*/,
|
|
amdefineRegExp: /if\s*\(\s*typeof define\s*\!==\s*['"]function['"]\s*\)\s*\{\s*[^\{\}]+amdefine[^\{\}]+\}/g,
|
|
|
|
removeStrict: function (contents, config) {
|
|
return config.useStrict ? contents : contents.replace(pragma.useStrictRegExp, '$1');
|
|
},
|
|
|
|
namespace: function (fileContents, ns, onLifecycleName) {
|
|
if (ns) {
|
|
//Namespace require/define calls
|
|
fileContents = fileContents.replace(pragma.configRegExp, '$1' + ns + '.$2$3(');
|
|
|
|
|
|
fileContents = parse.renameNamespace(fileContents, ns);
|
|
|
|
//Namespace define ternary use:
|
|
fileContents = fileContents.replace(pragma.defineTernaryRegExp,
|
|
"typeof " + ns + ".define === 'function' && " + ns + ".define.amd ? " + ns + ".define");
|
|
|
|
//Namespace define jquery use:
|
|
fileContents = fileContents.replace(pragma.defineJQueryRegExp,
|
|
"typeof " + ns + ".define === 'function' && " + ns + ".define.amd && " + ns + ".define.amd.jQuery");
|
|
|
|
//Namespace has.js define use:
|
|
fileContents = fileContents.replace(pragma.defineHasRegExp,
|
|
"typeof " + ns + ".define === 'function' && typeof " + ns + ".define.amd === 'object' && " + ns + ".define.amd");
|
|
|
|
//Namespace async.js define use:
|
|
fileContents = fileContents.replace(pragma.defineExistsAndAmdRegExp,
|
|
"typeof " + ns + ".define !== 'undefined' && " + ns + ".define.amd");
|
|
|
|
//Namespace define checks.
|
|
//Do these ones last, since they are a subset of the more specific
|
|
//checks above.
|
|
fileContents = fileContents.replace(pragma.defineCheckRegExp,
|
|
"typeof " + ns + ".define === 'function' && " + ns + ".define.amd");
|
|
fileContents = fileContents.replace(pragma.defineStringCheckRegExp,
|
|
"typeof " + ns + ".define === 'function' && " + ns + ".define['amd']");
|
|
fileContents = fileContents.replace(pragma.defineTypeFirstCheckRegExp,
|
|
"'function' === typeof " + ns + ".define && " + ns + ".define.amd");
|
|
fileContents = fileContents.replace(pragma.defineExistsRegExp,
|
|
"typeof " + ns + ".define !== 'undefined'");
|
|
|
|
//Check for require.js with the require/define definitions
|
|
if (pragma.apiDefRegExp.test(fileContents) &&
|
|
fileContents.indexOf("if (!" + ns + " || !" + ns + ".requirejs)") === -1) {
|
|
//Wrap the file contents in a typeof check, and a function
|
|
//to contain the API globals.
|
|
fileContents = "var " + ns + ";(function () { if (!" + ns + " || !" + ns + ".requirejs) {\n" +
|
|
"if (!" + ns + ") { " + ns + ' = {}; } else { require = ' + ns + '; }\n' +
|
|
fileContents +
|
|
"\n" +
|
|
ns + ".requirejs = requirejs;" +
|
|
ns + ".require = require;" +
|
|
ns + ".define = define;\n" +
|
|
"}\n}());";
|
|
}
|
|
|
|
//Finally, if the file wants a special wrapper because it ties
|
|
//in to the requirejs internals in a way that would not fit
|
|
//the above matches, do that. Look for /*requirejs namespace: true*/
|
|
if (pragma.nsWrapRegExp.test(fileContents)) {
|
|
//Remove the pragma.
|
|
fileContents = fileContents.replace(pragma.nsWrapRegExp, '');
|
|
|
|
//Alter the contents.
|
|
fileContents = '(function () {\n' +
|
|
'var require = ' + ns + '.require,' +
|
|
'requirejs = ' + ns + '.requirejs,' +
|
|
'define = ' + ns + '.define;\n' +
|
|
fileContents +
|
|
'\n}());';
|
|
}
|
|
}
|
|
|
|
return fileContents;
|
|
},
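        /* Illustrative sketch, not part of r.js; the namespace is made up.
         * A typical UMD sniff and its define() call are both rewritten to
         * target the namespaced loader (output shown wrapped, it stays on
         * one line):
         *
         *     pragma.namespace(
         *         "if (typeof define === 'function' && define.amd) {" +
         *         " define([], f); }", 'myNs');
         *     // => if (typeof myNs.define === 'function' &&
         *     //    myNs.define.amd) { myNs.define([], f); }
         */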
|
|
|
|
/**
|
|
* processes the fileContents for some //>> conditional statements
|
|
*/
|
|
process: function (fileName, fileContents, config, onLifecycleName, pluginCollector) {
|
|
/*jslint evil: true */
|
|
var foundIndex = -1, startIndex = 0, lineEndIndex, conditionLine,
|
|
matches, type, marker, condition, isTrue, endRegExp, endMatches,
|
|
endMarkerIndex, shouldInclude, startLength, lifecycleHas, deps,
|
|
i, dep, moduleName, collectorMod,
|
|
lifecyclePragmas, pragmas = config.pragmas, hasConfig = config.has,
|
|
//Legacy arg defined to help in dojo conversion script. Remove later
|
|
//when dojo no longer needs conversion:
|
|
kwArgs = pragmas;
|
|
|
|
//Mix in a specific lifecycle scoped object, to allow targeting
|
|
            //some pragmas/has tests to apply only when files are saved, or at different
|
|
//lifecycle events. Do not bother with kwArgs in this section, since
|
|
//the old dojo kwArgs were for all points in the build lifecycle.
|
|
if (onLifecycleName) {
|
|
lifecyclePragmas = config['pragmas' + onLifecycleName];
|
|
lifecycleHas = config['has' + onLifecycleName];
|
|
|
|
if (lifecyclePragmas) {
|
|
pragmas = create(pragmas || {}, lifecyclePragmas);
|
|
}
|
|
|
|
if (lifecycleHas) {
|
|
hasConfig = create(hasConfig || {}, lifecycleHas);
|
|
}
|
|
}
|
|
|
|
//Replace has references if desired
|
|
if (hasConfig) {
|
|
fileContents = fileContents.replace(pragma.hasRegExp, function (match, test) {
|
|
if (hasConfig.hasOwnProperty(test)) {
|
|
return !!hasConfig[test];
|
|
}
|
|
return match;
|
|
});
|
|
}
|
|
|
|
if (!config.skipPragmas) {
|
|
|
|
while ((foundIndex = fileContents.indexOf("//>>", startIndex)) !== -1) {
|
|
//Found a conditional. Get the conditional line.
|
|
lineEndIndex = fileContents.indexOf("\n", foundIndex);
|
|
if (lineEndIndex === -1) {
|
|
lineEndIndex = fileContents.length - 1;
|
|
}
|
|
|
|
//Increment startIndex past the line so the next conditional search can be done.
|
|
startIndex = lineEndIndex + 1;
|
|
|
|
//Break apart the conditional.
|
|
conditionLine = fileContents.substring(foundIndex, lineEndIndex + 1);
|
|
matches = conditionLine.match(pragma.conditionalRegExp);
|
|
if (matches) {
|
|
type = matches[1];
|
|
marker = matches[2];
|
|
condition = matches[3];
|
|
isTrue = false;
|
|
//See if the condition is true.
|
|
try {
|
|
isTrue = !!eval("(" + condition + ")");
|
|
} catch (e) {
|
|
throw "Error in file: " +
|
|
fileName +
|
|
". Conditional comment: " +
|
|
conditionLine +
|
|
" failed with this error: " + e;
|
|
}
|
|
|
|
//Find the endpoint marker.
|
|
endRegExp = new RegExp('\\/\\/\\>\\>\\s*' + type + 'End\\(\\s*[\'"]' + marker + '[\'"]\\s*\\)', "g");
|
|
endMatches = endRegExp.exec(fileContents.substring(startIndex, fileContents.length));
|
|
if (endMatches) {
|
|
endMarkerIndex = startIndex + endRegExp.lastIndex - endMatches[0].length;
|
|
|
|
//Find the next line return based on the match position.
|
|
lineEndIndex = fileContents.indexOf("\n", endMarkerIndex);
|
|
if (lineEndIndex === -1) {
|
|
lineEndIndex = fileContents.length - 1;
|
|
}
|
|
|
|
//Should we include the segment?
|
|
shouldInclude = ((type === "exclude" && !isTrue) || (type === "include" && isTrue));
|
|
|
|
//Remove the conditional comments, and optionally remove the content inside
|
|
//the conditional comments.
|
|
startLength = startIndex - foundIndex;
|
|
fileContents = fileContents.substring(0, foundIndex) +
|
|
(shouldInclude ? fileContents.substring(startIndex, endMarkerIndex) : "") +
|
|
fileContents.substring(lineEndIndex + 1, fileContents.length);
|
|
|
|
//Move startIndex to foundIndex, since that is the new position in the file
|
|
//where we need to look for more conditionals in the next while loop pass.
|
|
startIndex = foundIndex;
|
|
} else {
|
|
throw "Error in file: " +
|
|
fileName +
|
|
". Cannot find end marker for conditional comment: " +
|
|
conditionLine;
|
|
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
//If need to find all plugin resources to optimize, do that now,
|
|
//before namespacing, since the namespacing will change the API
|
|
//names.
|
|
//If there is a plugin collector, scan the file for plugin resources.
|
|
if (config.optimizeAllPluginResources && pluginCollector) {
|
|
try {
|
|
deps = parse.findDependencies(fileName, fileContents);
|
|
if (deps.length) {
|
|
for (i = 0; i < deps.length; i++) {
|
|
dep = deps[i];
|
|
if (dep.indexOf('!') !== -1) {
|
|
moduleName = dep.split('!')[0];
|
|
collectorMod = pluginCollector[moduleName];
|
|
if (!collectorMod) {
|
|
collectorMod = pluginCollector[moduleName] = [];
|
|
}
|
|
collectorMod.push(dep);
|
|
}
|
|
}
|
|
}
|
|
} catch (eDep) {
|
|
logger.error('Parse error looking for plugin resources in ' +
|
|
fileName + ', skipping.');
|
|
}
|
|
}
|
|
|
|
//Strip amdefine use for node-shared modules.
|
|
if (!config.keepAmdefine) {
|
|
fileContents = fileContents.replace(pragma.amdefineRegExp, '');
|
|
}
|
|
|
|
//Do namespacing
|
|
if (onLifecycleName === 'OnSave' && config.namespace) {
|
|
fileContents = pragma.namespace(fileContents, config.namespace, onLifecycleName);
|
|
}
|
|
|
|
|
|
return pragma.removeStrict(fileContents, config);
|
|
}
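        /* Illustrative sketch, not part of r.js; the pragma name is made up.
         * With config.pragmas = { excludeDebug: true }, everything between
         * the markers below is stripped from the built output:
         *
         *     //>>excludeStart("excludeDebug", pragmas.excludeDebug);
         *     console.log('debug only');
         *     //>>excludeEnd("excludeDebug");
         *
         * has('someFeature') calls are likewise replaced with true or false
         * whenever config.has defines that feature.
         */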
|
|
};
|
|
|
|
return pragma;
|
|
});
|
|
|
|
if(env === 'browser') {
|
|
/*jslint strict: false */
|
|
/*global define: false */
|
|
|
|
define('browser/optimize', {});
|
|
|
|
}
|
|
|
|
if(env === 'node') {
|
|
/*jslint strict: false */
|
|
/*global define: false */
|
|
|
|
define('node/optimize', {});
|
|
|
|
}
|
|
|
|
if(env === 'rhino') {
|
|
/*jslint sloppy: true, plusplus: true */
|
|
/*global define, java, Packages, com */
|
|
|
|
define('rhino/optimize', ['logger', 'env!env/file'], function (logger, file) {
|
|
|
|
//Add .reduce to Rhino so UglifyJS can run in Rhino,
|
|
//inspired by https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/Array/reduce
|
|
//but rewritten for brevity, and to be good enough for use by UglifyJS.
|
|
if (!Array.prototype.reduce) {
|
|
Array.prototype.reduce = function (fn /*, initialValue */) {
|
|
var i = 0,
|
|
length = this.length,
|
|
accumulator;
|
|
|
|
if (arguments.length >= 2) {
|
|
accumulator = arguments[1];
|
|
} else {
|
|
if (length) {
|
|
while (!(i in this)) {
|
|
i++;
|
|
}
|
|
accumulator = this[i++];
|
|
}
|
|
}
|
|
|
|
for (; i < length; i++) {
|
|
if (i in this) {
|
|
accumulator = fn.call(undefined, accumulator, this[i], i, this);
|
|
}
|
|
}
|
|
|
|
return accumulator;
|
|
};
|
|
}
|
|
|
|
var JSSourceFilefromCode, optimize,
|
|
mapRegExp = /"file":"[^"]+"/;
|
|
|
|
//Bind to Closure compiler, but if it is not available, do not sweat it.
|
|
try {
|
|
// Try older closure compiler that worked on Java 6
|
|
JSSourceFilefromCode = java.lang.Class.forName('com.google.javascript.jscomp.JSSourceFile').getMethod('fromCode', [java.lang.String, java.lang.String]);
|
|
} catch (e) {
|
|
try {
|
|
// Try for newer closure compiler that needs Java 7+
|
|
JSSourceFilefromCode = java.lang.Class.forName('com.google.javascript.jscomp.SourceFile').getMethod('fromCode', [java.lang.String, java.lang.String]);
|
|
} catch (e) {
|
|
try {
|
|
// Try Nashorn style
|
|
var stringClass = Java.type("java.lang.String").class;
|
|
JSSourceFilefromCode = Java.type("com.google.javascript.jscomp.SourceFile").class.getMethod("fromCode", [stringClass, stringClass]);
|
|
} catch (e) {}
|
|
}
|
|
}
|
|
|
|
//Helper for closure compiler, because of weird Java-JavaScript interactions.
|
|
function closurefromCode(filename, content) {
|
|
return JSSourceFilefromCode.invoke(null, [filename, content]);
|
|
}
|
|
|
|
|
|
function getFileWriter(fileName, encoding) {
|
|
var outFile = new java.io.File(fileName), outWriter, parentDir;
|
|
|
|
parentDir = outFile.getAbsoluteFile().getParentFile();
|
|
if (!parentDir.exists()) {
|
|
if (!parentDir.mkdirs()) {
|
|
throw "Could not create directory: " + parentDir.getAbsolutePath();
|
|
}
|
|
}
|
|
|
|
if (encoding) {
|
|
outWriter = new java.io.OutputStreamWriter(new java.io.FileOutputStream(outFile), encoding);
|
|
} else {
|
|
outWriter = new java.io.OutputStreamWriter(new java.io.FileOutputStream(outFile));
|
|
}
|
|
|
|
return new java.io.BufferedWriter(outWriter);
|
|
}
|
|
|
|
optimize = {
|
|
closure: function (fileName, fileContents, outFileName, keepLines, config) {
|
|
config = config || {};
|
|
var result, mappings, optimized, compressed, baseName, writer,
|
|
outBaseName, outFileNameMap, outFileNameMapContent,
|
|
srcOutFileName, concatNameMap,
|
|
jscomp = Packages.com.google.javascript.jscomp,
|
|
flags = Packages.com.google.common.flags,
|
|
//Set up source input
|
|
jsSourceFile = closurefromCode(String(fileName), String(fileContents)),
|
|
sourceListArray = new java.util.ArrayList(),
|
|
externList = new java.util.ArrayList(),
|
|
options, option, FLAG_compilation_level, compiler, externExportsPath,
|
|
Compiler = Packages.com.google.javascript.jscomp.Compiler,
|
|
CommandLineRunner = Packages.com.google.javascript.jscomp.CommandLineRunner;
|
|
|
|
logger.trace("Minifying file: " + fileName);
|
|
|
|
baseName = (new java.io.File(fileName)).getName();
|
|
|
|
//Set up options
|
|
options = new jscomp.CompilerOptions();
|
|
for (option in config.CompilerOptions) {
|
|
// options are false by default and jslint wanted an if statement in this for loop
|
|
if (config.CompilerOptions[option]) {
|
|
options[option] = config.CompilerOptions[option];
|
|
}
|
|
|
|
}
|
|
options.prettyPrint = keepLines || options.prettyPrint;
|
|
|
|
FLAG_compilation_level = jscomp.CompilationLevel[config.CompilationLevel || 'SIMPLE_OPTIMIZATIONS'];
|
|
FLAG_compilation_level.setOptionsForCompilationLevel(options);
|
|
|
|
if (config.generateSourceMaps) {
|
|
mappings = new java.util.ArrayList();
|
|
|
|
mappings.add(new com.google.javascript.jscomp.SourceMap.LocationMapping(fileName, baseName + ".src.js"));
|
|
options.setSourceMapLocationMappings(mappings);
|
|
options.setSourceMapOutputPath(fileName + ".map");
|
|
}
|
|
|
|
//If we need to pass an externs file to Closure so that it does not create aliases
|
|
//for certain symbols, do so here.
|
|
externList.addAll(CommandLineRunner.getDefaultExterns());
|
|
if (config.externExportsPath) {
|
|
externExportsPath = config.externExportsPath;
|
|
externList.add(jscomp.SourceFile.fromFile(externExportsPath));
|
|
}
|
|
|
|
//Trigger the compiler
|
|
Compiler.setLoggingLevel(Packages.java.util.logging.Level[config.loggingLevel || 'WARNING']);
|
|
compiler = new Compiler();
|
|
|
|
                //fill the sourceListArray; we need the ArrayList because the only overload of compile
|
|
//accepting the getDefaultExterns return value (a List) also wants the sources as a List
|
|
sourceListArray.add(jsSourceFile);
|
|
|
|
result = compiler.compile(externList, sourceListArray, options);
|
|
if (result.success) {
|
|
optimized = String(compiler.toSource());
|
|
|
|
if (config.generateSourceMaps && result.sourceMap && outFileName) {
|
|
outBaseName = (new java.io.File(outFileName)).getName();
|
|
|
|
srcOutFileName = outFileName + ".src.js";
|
|
outFileNameMap = outFileName + ".map";
|
|
|
|
//If previous .map file exists, move it to the ".src.js"
|
|
//location. Need to update the sourceMappingURL part in the
|
|
//src.js file too.
|
|
if (file.exists(outFileNameMap)) {
|
|
concatNameMap = outFileNameMap.replace(/\.map$/, '.src.js.map');
|
|
file.saveFile(concatNameMap, file.readFile(outFileNameMap));
|
|
file.saveFile(srcOutFileName,
|
|
fileContents.replace(/\/\# sourceMappingURL=(.+).map/,
|
|
'/# sourceMappingURL=$1.src.js.map'));
|
|
} else {
|
|
file.saveUtf8File(srcOutFileName, fileContents);
|
|
}
|
|
|
|
writer = getFileWriter(outFileNameMap, "utf-8");
|
|
result.sourceMap.appendTo(writer, outFileName);
|
|
writer.close();
|
|
|
|
//Not sure how better to do this, but right now the .map file
|
|
//leaks the full OS path in the "file" property. Manually
|
|
//modify it to not do that.
|
|
file.saveFile(outFileNameMap,
|
|
file.readFile(outFileNameMap).replace(mapRegExp, '"file":"' + baseName + '"'));
|
|
|
|
fileContents = optimized + "\n//# sourceMappingURL=" + outBaseName + ".map";
|
|
} else {
|
|
fileContents = optimized;
|
|
}
|
|
return fileContents;
|
|
} else {
|
|
throw new Error('Cannot closure compile file: ' + fileName + '. Skipping it.');
|
|
}
|
|
|
|
return fileContents;
|
|
}
|
|
};
|
|
|
|
return optimize;
|
|
});
|
|
}
|
|
|
|
if (env === 'xpconnect') {
|
|
define('xpconnect/optimize', {});
|
|
}
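//Illustrative note (comment only, not part of the optimizer logic): the Closure
//Compiler path above is selected by a build profile that sets optimize: 'closure'
//when running in a Java (Rhino/Nashorn) environment. A minimal sketch of such a
//profile, using hypothetical paths, might look like:
//
//  ({
//      baseUrl: 'src',
//      name: 'main',
//      out: 'dist/main.js',
//      optimize: 'closure',
//      closure: {
//          CompilationLevel: 'SIMPLE_OPTIMIZATIONS',
//          loggingLevel: 'WARNING'
//      },
//      generateSourceMaps: true
//  })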
|
|
/*jslint plusplus: true, nomen: true, regexp: true */
|
|
/*global define: false */
|
|
|
|
define('optimize', [ 'lang', 'logger', 'env!env/optimize', 'env!env/file', 'parse',
|
|
'pragma', 'uglifyjs',
|
|
'source-map'],
|
|
function (lang, logger, envOptimize, file, parse,
|
|
pragma, uglify,
|
|
sourceMap) {
|
|
'use strict';
|
|
|
|
var optimize,
|
|
cssImportRegExp = /\@import\s+(url\()?\s*([^);]+)\s*(\))?([\w, ]*)(;)?/ig,
|
|
cssCommentImportRegExp = /\/\*[^\*]*@import[^\*]*\*\//g,
|
|
cssUrlRegExp = /\url\(\s*([^\)]+)\s*\)?/g,
|
|
protocolRegExp = /^\w+:/,
|
|
SourceMapGenerator = sourceMap.SourceMapGenerator,
|
|
SourceMapConsumer = sourceMap.SourceMapConsumer,
|
|
es5PlusGuidance = 'If the source uses ES2015 or later syntax, please pass "optimize: \'none\'" to r.js and use an ES2015+ compatible minifier after running r.js. The included UglifyJS only understands ES5 or earlier syntax.';
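//Illustrative (comment only, hypothetical file names): cssImportRegExp above is meant
//to match forms such as `@import url("sub/extra.css");` and `@import "extra.css" screen;`
//(the media types are captured so non-"all" imports can be skipped), while cssUrlRegExp
//picks up `url(images/bg.png)`-style references so their paths can be rewritten.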
|
|
|
|
/**
|
|
* If a URL from a CSS url value contains start/end quotes, remove them.
|
|
* This is not done in the regexp, since my regexp fu is not that strong,
|
|
* and the CSS spec allows for ' and " in the URL if they are backslash escaped.
|
|
* @param {String} url
|
|
*/
|
|
function cleanCssUrlQuotes(url) {
|
|
//Make sure we are not ending in whitespace.
|
|
//Not fully confident that the css regexps above prevent trailing
|
|
//whitespace.
|
|
url = url.replace(/\s+$/, "");
|
|
|
|
if (url.charAt(0) === "'" || url.charAt(0) === "\"") {
|
|
url = url.substring(1, url.length - 1);
|
|
}
|
|
|
|
return url;
|
|
}
|
|
|
|
function fixCssUrlPaths(fileName, path, contents, cssPrefix) {
|
|
return contents.replace(cssUrlRegExp, function (fullMatch, urlMatch) {
|
|
var firstChar, hasProtocol, parts, i,
|
|
fixedUrlMatch = cleanCssUrlQuotes(urlMatch);
|
|
|
|
fixedUrlMatch = fixedUrlMatch.replace(lang.backSlashRegExp, "/");
|
|
|
|
//Only do the work for relative URLs. Skip things that start with / or #, or have
|
|
//a protocol.
|
|
firstChar = fixedUrlMatch.charAt(0);
|
|
hasProtocol = protocolRegExp.test(fixedUrlMatch);
|
|
if (firstChar !== "/" && firstChar !== "#" && !hasProtocol) {
|
|
//It is a relative URL, tack on the cssPrefix and path prefix
|
|
urlMatch = cssPrefix + path + fixedUrlMatch;
|
|
} else if (!hasProtocol) {
|
|
logger.trace(fileName + "\n URL not a relative URL, skipping: " + urlMatch);
|
|
}
|
|
|
|
//Collapse .. and .
|
|
parts = urlMatch.split("/");
|
|
for (i = parts.length - 1; i > 0; i--) {
|
|
if (parts[i] === ".") {
|
|
parts.splice(i, 1);
|
|
} else if (parts[i] === "..") {
|
|
if (i !== 0 && parts[i - 1] !== "..") {
|
|
parts.splice(i - 1, 2);
|
|
i -= 1;
|
|
}
|
|
}
|
|
}
|
|
|
|
return "url(" + parts.join("/") + ")";
|
|
});
|
|
}
|
|
|
|
/**
|
|
* Inlines nested stylesheets that have @import calls in them.
|
|
* @param {String} fileName the file name
|
|
* @param {String} fileContents the file contents
|
|
* @param {String} cssImportIgnore comma delimited string of files to ignore
|
|
* @param {String} cssPrefix string to be prefixed before relative URLs
|
|
* @param {Object} included an object used to track the files already imported
|
|
*/
|
|
function flattenCss(fileName, fileContents, cssImportIgnore, cssPrefix, included, topLevel) {
|
|
//Find the last slash in the name.
|
|
fileName = fileName.replace(lang.backSlashRegExp, "/");
|
|
var endIndex = fileName.lastIndexOf("/"),
|
|
//Make a file path based on the last slash.
|
|
//If there is no slash, it must be just a file name; use the empty string then.
|
|
filePath = (endIndex !== -1) ? fileName.substring(0, endIndex + 1) : "",
|
|
//store a list of merged files
|
|
importList = [],
|
|
skippedList = [];
|
|
|
|
//First make a pass by removing any commented out @import calls.
|
|
fileContents = fileContents.replace(cssCommentImportRegExp, '');
|
|
|
|
//Make sure we have a delimited ignore list to make matching faster
|
|
if (cssImportIgnore && cssImportIgnore.charAt(cssImportIgnore.length - 1) !== ",") {
|
|
cssImportIgnore += ",";
|
|
}
|
|
|
|
fileContents = fileContents.replace(cssImportRegExp, function (fullMatch, urlStart, importFileName, urlEnd, mediaTypes) {
|
|
//Only process media type "all" or empty media type rules.
|
|
if (mediaTypes && ((mediaTypes.replace(/^\s\s*/, '').replace(/\s\s*$/, '')) !== "all")) {
|
|
skippedList.push(fileName);
|
|
return fullMatch;
|
|
}
|
|
|
|
importFileName = cleanCssUrlQuotes(importFileName);
|
|
|
|
//Ignore the file import if it is part of an ignore list.
|
|
if (cssImportIgnore && cssImportIgnore.indexOf(importFileName + ",") !== -1) {
|
|
return fullMatch;
|
|
}
|
|
|
|
//Make sure we have a unix path for the rest of the operation.
|
|
importFileName = importFileName.replace(lang.backSlashRegExp, "/");
|
|
|
|
try {
|
|
//if a relative path, then tack on the filePath.
|
|
//If it is not a relative path, then the readFile below will fail,
|
|
//and we will just skip that import.
|
|
var fullImportFileName = importFileName.charAt(0) === "/" ? importFileName : filePath + importFileName,
|
|
importContents = file.readFile(fullImportFileName),
|
|
importEndIndex, importPath, flat;
|
|
|
|
//Skip the file if it has already been included.
|
|
if (included[fullImportFileName]) {
|
|
return '';
|
|
}
|
|
included[fullImportFileName] = true;
|
|
|
|
//Make sure to flatten any nested imports.
|
|
flat = flattenCss(fullImportFileName, importContents, cssImportIgnore, cssPrefix, included);
|
|
importContents = flat.fileContents;
|
|
|
|
if (flat.importList.length) {
|
|
importList.push.apply(importList, flat.importList);
|
|
}
|
|
if (flat.skippedList.length) {
|
|
skippedList.push.apply(skippedList, flat.skippedList);
|
|
}
|
|
|
|
//Make the full import path
|
|
importEndIndex = importFileName.lastIndexOf("/");
|
|
|
|
//Make a file path based on the last slash.
|
|
//If there is no slash, it must be just a file name; use the empty string then.
|
|
importPath = (importEndIndex !== -1) ? importFileName.substring(0, importEndIndex + 1) : "";
|
|
|
|
//fix url() on relative import (#5)
|
|
importPath = importPath.replace(/^\.\//, '');
|
|
|
|
//Modify URL paths to match the path represented by this file.
|
|
importContents = fixCssUrlPaths(importFileName, importPath, importContents, cssPrefix);
|
|
|
|
importList.push(fullImportFileName);
|
|
return importContents;
|
|
} catch (e) {
|
|
logger.warn(fileName + "\n Cannot inline css import, skipping: " + importFileName);
|
|
return fullMatch;
|
|
}
|
|
});
|
|
|
|
if (cssPrefix && topLevel) {
|
|
//Modify URL paths to match the path represented by this file.
|
|
fileContents = fixCssUrlPaths(fileName, '', fileContents, cssPrefix);
|
|
}
|
|
|
|
return {
|
|
importList : importList,
|
|
skippedList: skippedList,
|
|
fileContents : fileContents
|
|
};
|
|
}
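//Illustrative sketch (comment only, hypothetical file names): for a top-level main.css
//containing `@import url(sub/extra.css);`, flattenCss returns roughly
//
//  {
//      importList: ['sub/extra.css'],
//      skippedList: [],
//      fileContents: '/* contents of sub/extra.css, url() paths rewritten */ ...'
//  }
//
//Imports with a media type other than "all" are left in place and recorded in
//skippedList instead of being inlined.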
|
|
|
|
optimize = {
|
|
/**
|
|
* Optimizes a file that contains JavaScript content. Optionally collects
|
|
* plugin resources mentioned in a file, and then passes the content
|
|
* through a minifier if one is specified via config.optimize.
|
|
*
|
|
* @param {String} fileName the name of the file to optimize
|
|
* @param {String} fileContents the contents to optimize. If this is
|
|
* a null value, then fileName will be used to read the fileContents.
|
|
* @param {String} outFileName the name of the file to use for the
|
|
* saved optimized content.
|
|
* @param {Object} config the build config object.
|
|
* @param {Array} [pluginCollector] storage for any plugin resources
|
|
* found.
|
|
*/
|
|
jsFile: function (fileName, fileContents, outFileName, config, pluginCollector) {
|
|
if (!fileContents) {
|
|
fileContents = file.readFile(fileName);
|
|
}
|
|
|
|
fileContents = optimize.js(fileName, fileContents, outFileName, config, pluginCollector);
|
|
|
|
file.saveUtf8File(outFileName, fileContents);
|
|
},
|
|
|
|
/**
|
|
* Optimizes a file that contains JavaScript content. Optionally collects
|
|
* plugin resources mentioned in a file, and then passes the content
|
|
* through a minifier if one is specified via config.optimize.
|
|
*
|
|
* @param {String} fileName the name of the file that matches the
|
|
* fileContents.
|
|
* @param {String} fileContents the string of JS to optimize.
|
|
* @param {Object} [config] the build config object.
|
|
* @param {Array} [pluginCollector] storage for any plugin resources
|
|
* found.
|
|
*/
|
|
js: function (fileName, fileContents, outFileName, config, pluginCollector) {
|
|
var optFunc, optConfig,
|
|
parts = (String(config.optimize)).split('.'),
|
|
optimizerName = parts[0],
|
|
keepLines = parts[1] === 'keepLines',
|
|
licenseContents = '';
|
|
|
|
config = config || {};
|
|
|
|
//Apply pragmas/namespace renaming
|
|
fileContents = pragma.process(fileName, fileContents, config, 'OnSave', pluginCollector);
|
|
|
|
//Optimize the JS files if asked.
|
|
if (optimizerName && optimizerName !== 'none') {
|
|
optFunc = envOptimize[optimizerName] || optimize.optimizers[optimizerName];
|
|
if (!optFunc) {
|
|
throw new Error('optimizer with name of "' +
|
|
optimizerName +
|
|
'" not found for this environment');
|
|
}
|
|
|
|
optConfig = config[optimizerName] || {};
|
|
if (config.generateSourceMaps) {
|
|
optConfig.generateSourceMaps = !!config.generateSourceMaps;
|
|
optConfig._buildSourceMap = config._buildSourceMap;
|
|
}
|
|
|
|
try {
|
|
if (config.preserveLicenseComments) {
|
|
//Pull out any license comments for prepending after optimization.
|
|
try {
|
|
licenseContents = parse.getLicenseComments(fileName, fileContents);
|
|
} catch (e) {
|
|
throw new Error('Cannot parse file: ' + fileName + ' for comments. Skipping it. Error is:\n' + e.toString());
|
|
}
|
|
}
|
|
|
|
if (config.generateSourceMaps && licenseContents) {
|
|
optConfig.preamble = licenseContents;
|
|
licenseContents = '';
|
|
}
|
|
|
|
fileContents = licenseContents + optFunc(fileName,
|
|
fileContents,
|
|
outFileName,
|
|
keepLines,
|
|
optConfig);
|
|
if (optConfig._buildSourceMap && optConfig._buildSourceMap !== config._buildSourceMap) {
|
|
config._buildSourceMap = optConfig._buildSourceMap;
|
|
}
|
|
} catch (e) {
|
|
if (config.throwWhen && config.throwWhen.optimize) {
|
|
throw e;
|
|
} else {
|
|
logger.error(e);
|
|
}
|
|
}
|
|
} else {
|
|
if (config._buildSourceMap) {
|
|
config._buildSourceMap = null;
|
|
}
|
|
}
|
|
|
|
return fileContents;
|
|
},
|
|
|
|
/**
|
|
* Optimizes one CSS file, inlining @import calls, stripping comments, and
|
|
* optionally removing line returns.
|
|
* @param {String} fileName the path to the CSS file to optimize
|
|
* @param {String} outFileName the path to save the optimized file.
|
|
* @param {Object} config the config object with the optimizeCss and
|
|
* cssImportIgnore options.
|
|
*/
|
|
cssFile: function (fileName, outFileName, config) {
|
|
|
|
//Read in the file. Make sure we have a JS string.
|
|
var originalFileContents = file.readFile(fileName),
|
|
flat = flattenCss(fileName, originalFileContents, config.cssImportIgnore, config.cssPrefix, {}, true),
|
|
//Do not use the flattened CSS if any @import was skipped.
|
|
fileContents = flat.skippedList.length ? originalFileContents : flat.fileContents,
|
|
startIndex, endIndex, buildText, comment;
|
|
|
|
if (flat.skippedList.length) {
|
|
logger.warn('Cannot inline @imports for ' + fileName +
|
|
',\nthe following files had media queries in them:\n' +
|
|
flat.skippedList.join('\n'));
|
|
}
|
|
|
|
//Do comment removal.
|
|
try {
|
|
if (config.optimizeCss.indexOf(".keepComments") === -1) {
|
|
startIndex = 0;
|
|
//Get rid of comments.
|
|
while ((startIndex = fileContents.indexOf("/*", startIndex)) !== -1) {
|
|
endIndex = fileContents.indexOf("*/", startIndex + 2);
|
|
if (endIndex === -1) {
|
|
throw "Improper comment in CSS file: " + fileName;
|
|
}
|
|
comment = fileContents.substring(startIndex, endIndex);
|
|
|
|
if (config.preserveLicenseComments &&
|
|
(comment.indexOf('license') !== -1 ||
|
|
comment.indexOf('opyright') !== -1 ||
|
|
comment.indexOf('(c)') !== -1)) {
|
|
//Keep the comment, just increment the startIndex
|
|
startIndex = endIndex;
|
|
} else {
|
|
fileContents = fileContents.substring(0, startIndex) + fileContents.substring(endIndex + 2, fileContents.length);
|
|
startIndex = 0;
|
|
}
|
|
}
|
|
}
|
|
//Get rid of newlines.
|
|
if (config.optimizeCss.indexOf(".keepLines") === -1) {
|
|
fileContents = fileContents.replace(/[\r\n]/g, " ");
|
|
fileContents = fileContents.replace(/\s+/g, " ");
|
|
fileContents = fileContents.replace(/\{\s/g, "{");
|
|
fileContents = fileContents.replace(/\s\}/g, "}");
|
|
} else {
|
|
//Remove multiple empty lines.
|
|
fileContents = fileContents.replace(/(\r\n)+/g, "\r\n");
|
|
fileContents = fileContents.replace(/(\n)+/g, "\n");
|
|
}
|
|
//Remove unnecessary whitespace
|
|
if (config.optimizeCss.indexOf(".keepWhitespace") === -1) {
|
|
//Remove leading and trailing whitespace from lines
|
|
fileContents = fileContents.replace(/^[ \t]+/gm, "");
|
|
fileContents = fileContents.replace(/[ \t]+$/gm, "");
|
|
//Remove whitespace after semicolon, colon, curly brackets and commas
|
|
fileContents = fileContents.replace(/(;|:|\{|}|,)[ \t]+/g, "$1");
|
|
//Remove whitespace before opening curly brackets
|
|
fileContents = fileContents.replace(/[ \t]+(\{)/g, "$1");
|
|
//Truncate double whitespace
|
|
fileContents = fileContents.replace(/([ \t])+/g, "$1");
|
|
//Remove empty lines
|
|
fileContents = fileContents.replace(/^[ \t]*[\r\n]/gm,'');
|
|
}
|
|
} catch (e) {
|
|
fileContents = originalFileContents;
|
|
logger.error("Could not optimized CSS file: " + fileName + ", error: " + e);
|
|
}
|
|
|
|
file.saveUtf8File(outFileName, fileContents);
|
|
|
|
//text output to stdout and/or written to build.txt file
|
|
buildText = "\n"+ outFileName.replace(config.dir, "") +"\n----------------\n";
|
|
flat.importList.push(fileName);
|
|
buildText += flat.importList.map(function(path){
|
|
return path.replace(config.dir, "");
|
|
}).join("\n");
|
|
|
|
return {
|
|
importList: flat.importList,
|
|
buildText: buildText +"\n"
|
|
};
|
|
},
|
|
|
|
/**
|
|
* Optimizes CSS files, inlining @import calls, stripping comments, and
|
|
* optionally removing line returns.
|
|
* @param {String} startDir the path to the top level directory
|
|
* @param {Object} config the config object with the optimizeCss and
|
|
* cssImportIgnore options.
|
|
*/
|
|
css: function (startDir, config) {
|
|
var buildText = "",
|
|
importList = [],
|
|
shouldRemove = config.dir && config.removeCombined,
|
|
i, fileName, result, fileList;
|
|
if (config.optimizeCss.indexOf("standard") !== -1) {
|
|
fileList = file.getFilteredFileList(startDir, /\.css$/, true);
|
|
if (fileList) {
|
|
for (i = 0; i < fileList.length; i++) {
|
|
fileName = fileList[i];
|
|
logger.trace("Optimizing (" + config.optimizeCss + ") CSS file: " + fileName);
|
|
result = optimize.cssFile(fileName, fileName, config);
|
|
buildText += result.buildText;
|
|
if (shouldRemove) {
|
|
result.importList.pop();
|
|
importList = importList.concat(result.importList);
|
|
}
|
|
}
|
|
}
|
|
|
|
if (shouldRemove) {
|
|
importList.forEach(function (path) {
|
|
if (file.exists(path)) {
|
|
file.deleteFile(path);
|
|
}
|
|
});
|
|
}
|
|
}
|
|
return buildText;
|
|
},
|
|
|
|
optimizers: {
|
|
uglify: function (fileName, fileContents, outFileName, keepLines, config) {
|
|
var result, existingMap, resultMap, finalMap, sourceIndex,
|
|
uconfig = {},
|
|
existingMapPath = outFileName + '.map',
|
|
baseName = fileName && fileName.split('/').pop();
|
|
|
|
config = config || {};
|
|
|
|
lang.mixin(uconfig, config, true);
|
|
|
|
uconfig.fromString = true;
|
|
|
|
if (config.preamble) {
|
|
uconfig.output = {preamble: config.preamble};
|
|
}
|
|
|
|
|
|
if (config.generateSourceMaps && (outFileName || config._buildSourceMap)) {
|
|
uconfig.outSourceMap = baseName + '.map';
|
|
|
|
if (config._buildSourceMap) {
|
|
existingMap = JSON.parse(config._buildSourceMap);
|
|
uconfig.inSourceMap = existingMap;
|
|
} else if (file.exists(existingMapPath)) {
|
|
uconfig.inSourceMap = existingMapPath;
|
|
existingMap = JSON.parse(file.readFile(existingMapPath));
|
|
}
|
|
}
|
|
|
|
logger.trace("Uglify file: " + fileName);
|
|
|
|
try {
|
|
//var tempContents = fileContents.replace(/\/\/\# sourceMappingURL=.*$/, '');
|
|
result = uglify.minify(fileContents, uconfig, baseName + '.src.js');
|
|
if (uconfig.outSourceMap && result.map) {
|
|
resultMap = result.map;
|
|
if (!existingMap && !config._buildSourceMap) {
|
|
file.saveFile(outFileName + '.src.js', fileContents);
|
|
}
|
|
|
|
fileContents = result.code;
|
|
|
|
if (config._buildSourceMap) {
|
|
config._buildSourceMap = resultMap;
|
|
} else {
|
|
file.saveFile(outFileName + '.map', resultMap);
|
|
}
|
|
} else {
|
|
fileContents = result.code;
|
|
}
|
|
} catch (e) {
|
|
var errorString = e.toString();
|
|
var isSyntaxError = /SyntaxError/.test(errorString);
|
|
throw new Error('Cannot uglify file: ' + fileName +
|
|
'. Skipping it. Error is:\n' + errorString +
|
|
(isSyntaxError ? '\n\n' + es5PlusGuidance : ''));
|
|
}
|
|
return fileContents;
|
|
}
|
|
}
|
|
};
|
|
|
|
return optimize;
|
|
});
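//Illustrative note (comment only): in the uglify optimizer above, the build profile's
//config.uglify object is mixed into uconfig and handed to uglify.minify, so a
//hypothetical profile fragment such as
//
//  ({
//      optimize: 'uglify',
//      generateSourceMaps: true,
//      uglify: {
//          output: { beautify: false }
//      }
//  })
//
//ends up (with fromString/outSourceMap added above) as the options object for UglifyJS.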
|
|
/*
|
|
* This file patches require.js to communicate with the build system.
|
|
*/
|
|
|
|
//Using sloppy since this uses eval for some code like plugins,
|
|
//which may not be strict mode compliant. So if use strict is used
|
|
//below, the evaled code would have strict rules applied and could cause an error.
|
|
/*jslint sloppy: true, nomen: true, plusplus: true, regexp: true */
|
|
/*global require, define: true */
|
|
|
|
//NOT asking for require as a dependency since the goal is to modify the
|
|
//global require below
|
|
define('requirePatch', [ 'env!env/file', 'pragma', 'parse', 'lang', 'logger', 'commonJs', 'prim'], function (
|
|
file,
|
|
pragma,
|
|
parse,
|
|
lang,
|
|
logger,
|
|
commonJs,
|
|
prim
|
|
) {
|
|
|
|
var allowRun = true,
|
|
hasProp = lang.hasProp,
|
|
falseProp = lang.falseProp,
|
|
getOwn = lang.getOwn,
|
|
// Used to strip out use strict from toString()'d functions for the
|
|
// shim config since they will explicitly want to not be bound by strict,
|
|
// but some envs, notably xpcshell, add a 'use strict'.
|
|
useStrictRegExp = /['"]use strict['"];/g,
|
|
//Absolute path if starts with /, \, or x:
|
|
absoluteUrlRegExp = /^[\/\\]|^\w:/;
|
|
|
|
//Turn off throwing on resolution conflict, that was just an older prim
|
|
//idea about finding errors early, but does not comply with how promises
|
|
//should operate.
|
|
prim.hideResolutionConflict = true;
|
|
|
|
//This method should be called when the patches to require should take hold.
|
|
return function () {
|
|
if (!allowRun) {
|
|
return;
|
|
}
|
|
allowRun = false;
|
|
|
|
var layer,
|
|
pluginBuilderRegExp = /(["']?)pluginBuilder(["']?)\s*[=\:]\s*["']([^'"\s]+)["']/,
|
|
oldNewContext = require.s.newContext,
|
|
oldDef,
|
|
|
|
//create local undefined values for module and exports,
|
|
//so that when files are evaled in this function they do not
|
|
//see the node values used for r.js
|
|
exports,
|
|
module;
|
|
|
|
/**
|
|
* Reset "global" build caches that are kept around between
|
|
* build layer builds. Useful to do when there are multiple
|
|
* top level requirejs.optimize() calls.
|
|
*/
|
|
require._cacheReset = function () {
|
|
//Stored raw text caches, used by browser use.
|
|
require._cachedRawText = {};
|
|
//Stored cached file contents for reuse in other layers.
|
|
require._cachedFileContents = {};
|
|
//Store which cached files contain a require definition.
|
|
require._cachedDefinesRequireUrls = {};
|
|
};
|
|
require._cacheReset();
|
|
|
|
/**
|
|
* Makes sure the URL is something that can be supported by the
|
|
* optimization tool.
|
|
* @param {String} url
|
|
* @returns {Boolean}
|
|
*/
|
|
require._isSupportedBuildUrl = function (url) {
|
|
//Ignore URLs with protocols, hosts or question marks; that means either network
|
|
//access is needed to fetch it or it is too dynamic. Note that
|
|
//on Windows, full paths are used for some urls, which include
|
|
//the drive, like c:/something, so need to test for something other
|
|
//than just a colon.
|
|
if (url.indexOf("://") === -1 && url.indexOf("?") === -1 &&
|
|
url.indexOf('empty:') !== 0 && url.indexOf('//') !== 0) {
|
|
return true;
|
|
} else {
|
|
if (!layer.ignoredUrls[url]) {
|
|
if (url.indexOf('empty:') === -1) {
|
|
logger.info('Cannot optimize network URL, skipping: ' + url);
|
|
}
|
|
layer.ignoredUrls[url] = true;
|
|
}
|
|
return false;
|
|
}
|
|
};
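//For example (comment only, hypothetical URLs): 'scripts/main.js' would be accepted
//above, while 'https://cdn.example.com/a.js', 'a.js?bust=1', '//cdn.example.com/a.js'
//and 'empty:' URLs are rejected and recorded in layer.ignoredUrls; the non-'empty:'
//ones are also logged as skipped network URLs.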
|
|
|
|
function normalizeUrlWithBase(context, moduleName, url) {
|
|
//Adjust the URL if it was not transformed to use baseUrl, but only
|
|
//if the URL is not already an absolute path.
|
|
if (require.jsExtRegExp.test(moduleName) &&
|
|
!absoluteUrlRegExp.test(url)) {
|
|
url = (context.config.dir || context.config.dirBaseUrl) + url;
|
|
}
|
|
return url;
|
|
}
|
|
|
|
//Overrides the new context call to add existing tracking features.
|
|
require.s.newContext = function (name) {
|
|
var context = oldNewContext(name),
|
|
oldEnable = context.enable,
|
|
moduleProto = context.Module.prototype,
|
|
oldInit = moduleProto.init,
|
|
oldCallPlugin = moduleProto.callPlugin;
|
|
|
|
//Only do this for the context used for building.
|
|
if (name === '_') {
|
|
//For build contexts, do everything sync
|
|
context.nextTick = function (fn) {
|
|
fn();
|
|
};
|
|
|
|
context.needFullExec = {};
|
|
context.fullExec = {};
|
|
context.plugins = {};
|
|
context.buildShimExports = {};
|
|
|
|
//Override the shim exports function generator to just
|
|
//spit out strings that can be used in the stringified
|
|
//build output.
|
|
context.makeShimExports = function (value) {
|
|
var fn;
|
|
if (context.config.wrapShim) {
|
|
fn = function () {
|
|
var str = 'return ';
|
|
// If it specifies an export that is just a global
|
|
// name, no dot for a `this.` and such, then also
|
|
// attach to the global, for `var a = {}` files
|
|
// where the function closure would hide that from
|
|
// the global object.
|
|
if (value.exports && value.exports.indexOf('.') === -1) {
|
|
str += 'root.' + value.exports + ' = ';
|
|
}
|
|
|
|
if (value.init) {
|
|
str += '(' + value.init.toString()
|
|
.replace(useStrictRegExp, '') + '.apply(this, arguments))';
|
|
}
|
|
if (value.init && value.exports) {
|
|
str += ' || ';
|
|
}
|
|
if (value.exports) {
|
|
str += value.exports;
|
|
}
|
|
str += ';';
|
|
return str;
|
|
};
|
|
} else {
|
|
fn = function () {
|
|
return '(function (global) {\n' +
|
|
' return function () {\n' +
|
|
' var ret, fn;\n' +
|
|
(value.init ?
|
|
(' fn = ' + value.init.toString()
|
|
.replace(useStrictRegExp, '') + ';\n' +
|
|
' ret = fn.apply(global, arguments);\n') : '') +
|
|
(value.exports ?
|
|
' return ret || global.' + value.exports + ';\n' :
|
|
' return ret;\n') +
|
|
' };\n' +
|
|
'}(this))';
|
|
};
|
|
}
|
|
|
|
return fn;
|
|
};
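//Illustrative (comment only, hypothetical shim config): with wrapShim enabled and a
//shim entry like { exports: 'Foo', init: function () { return Foo.noConflict(); } },
//the string generated above is roughly:
//
//  return root.Foo = (function () { return Foo.noConflict(); }.apply(this, arguments)) || Foo;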
|
|
|
|
context.enable = function (depMap, parent) {
|
|
var id = depMap.id,
|
|
parentId = parent && parent.map.id,
|
|
needFullExec = context.needFullExec,
|
|
fullExec = context.fullExec,
|
|
mod = getOwn(context.registry, id);
|
|
|
|
if (mod && !mod.defined) {
|
|
if (parentId && getOwn(needFullExec, parentId)) {
|
|
needFullExec[id] = depMap;
|
|
}
|
|
|
|
} else if ((getOwn(needFullExec, id) && falseProp(fullExec, id)) ||
|
|
(parentId && getOwn(needFullExec, parentId) &&
|
|
falseProp(fullExec, id))) {
|
|
context.require.undef(id);
|
|
}
|
|
|
|
return oldEnable.apply(context, arguments);
|
|
};
|
|
|
|
//Override load so that the file paths can be collected.
|
|
context.load = function (moduleName, url) {
|
|
/*jslint evil: true */
|
|
var contents, pluginBuilderMatch, builderName,
|
|
shim, shimExports;
|
|
|
|
//Do not mark the url as fetched if it is
|
|
//an empty: URL, used by the optimizer.
|
|
//In that case we need to be sure to call
|
|
//load() for each module that is mapped to
|
|
//empty: so that dependencies are satisfied
|
|
//correctly.
|
|
if (url.indexOf('empty:') === 0) {
|
|
delete context.urlFetched[url];
|
|
}
|
|
|
|
//Only handle urls that can be inlined, so that means avoiding some
|
|
//URLs like ones that require network access or may be too dynamic,
|
|
//like JSONP
|
|
if (require._isSupportedBuildUrl(url)) {
|
|
//Adjust the URL if it was not transformed to use baseUrl.
|
|
url = normalizeUrlWithBase(context, moduleName, url);
|
|
|
|
//Save the module name to path and path to module name mappings.
|
|
layer.buildPathMap[moduleName] = url;
|
|
layer.buildFileToModule[url] = moduleName;
|
|
|
|
if (hasProp(context.plugins, moduleName)) {
|
|
//plugins need to have their source evaled as-is.
|
|
context.needFullExec[moduleName] = true;
|
|
}
|
|
|
|
prim().start(function () {
|
|
if (hasProp(require._cachedFileContents, url) &&
|
|
(falseProp(context.needFullExec, moduleName) ||
|
|
getOwn(context.fullExec, moduleName))) {
|
|
contents = require._cachedFileContents[url];
|
|
|
|
//If it defines require, mark it so it can be hoisted.
|
|
//Done here and in the else below, before the
|
|
//else block removes code from the contents.
|
|
//Related to #263
|
|
if (!layer.existingRequireUrl && require._cachedDefinesRequireUrls[url]) {
|
|
layer.existingRequireUrl = url;
|
|
}
|
|
} else {
|
|
//Load the file contents, process for conditionals, then
|
|
//evaluate it.
|
|
return require._cacheReadAsync(url).then(function (text) {
|
|
contents = text;
|
|
|
|
if (context.config.cjsTranslate &&
|
|
(!context.config.shim || !lang.hasProp(context.config.shim, moduleName))) {
|
|
contents = commonJs.convert(url, contents);
|
|
}
|
|
|
|
//If there is a read filter, run it now.
|
|
if (context.config.onBuildRead) {
|
|
contents = context.config.onBuildRead(moduleName, url, contents);
|
|
}
|
|
|
|
contents = pragma.process(url, contents, context.config, 'OnExecute');
|
|
|
|
//Find out if the file contains a require() definition. Need to know
|
|
//this so we can inject plugins right after it, but before they are needed,
|
|
//and to make sure this file is first, so that define calls work.
|
|
try {
|
|
if (!layer.existingRequireUrl && parse.definesRequire(url, contents)) {
|
|
layer.existingRequireUrl = url;
|
|
require._cachedDefinesRequireUrls[url] = true;
|
|
}
|
|
} catch (e1) {
|
|
throw new Error('Parse error using esprima ' +
|
|
'for file: ' + url + '\n' + e1);
|
|
}
|
|
}).then(function () {
|
|
if (hasProp(context.plugins, moduleName)) {
|
|
//This is a loader plugin, check to see if it has a build extension,
|
|
//otherwise the plugin will act as the plugin builder too.
|
|
pluginBuilderMatch = pluginBuilderRegExp.exec(contents);
|
|
if (pluginBuilderMatch) {
|
|
//Load the plugin builder for the plugin contents.
|
|
builderName = context.makeModuleMap(pluginBuilderMatch[3],
|
|
context.makeModuleMap(moduleName),
|
|
null,
|
|
true).id;
|
|
return require._cacheReadAsync(context.nameToUrl(builderName));
|
|
}
|
|
}
|
|
return contents;
|
|
}).then(function (text) {
|
|
contents = text;
|
|
|
|
//Parse out the require and define calls.
|
|
//Do this even for plugins in case they have their own
|
|
//dependencies that may be separate to how the pluginBuilder works.
|
|
try {
|
|
if (falseProp(context.needFullExec, moduleName)) {
|
|
contents = parse(moduleName, url, contents, {
|
|
insertNeedsDefine: true,
|
|
has: context.config.has,
|
|
findNestedDependencies: context.config.findNestedDependencies
|
|
});
|
|
}
|
|
} catch (e2) {
|
|
throw new Error('Parse error using esprima ' +
|
|
'for file: ' + url + '\n' + e2);
|
|
}
|
|
|
|
require._cachedFileContents[url] = contents;
|
|
});
|
|
}
|
|
}).then(function () {
|
|
if (contents) {
|
|
eval(contents);
|
|
}
|
|
|
|
try {
|
|
//If have a string shim config, and this is
|
|
//a fully executed module, try to see if
|
|
//it created a variable in this eval scope
|
|
if (getOwn(context.needFullExec, moduleName)) {
|
|
shim = getOwn(context.config.shim, moduleName);
|
|
if (shim && shim.exports) {
|
|
shimExports = eval(shim.exports);
|
|
if (typeof shimExports !== 'undefined') {
|
|
context.buildShimExports[moduleName] = shimExports;
|
|
}
|
|
}
|
|
}
|
|
|
|
//Need to close out completion of this module
|
|
//so that listeners will get notified that it is available.
|
|
context.completeLoad(moduleName);
|
|
} catch (e) {
|
|
//Track which module could not complete loading.
|
|
if (!e.moduleTree) {
|
|
e.moduleTree = [];
|
|
}
|
|
e.moduleTree.push(moduleName);
|
|
throw e;
|
|
}
|
|
}).then(null, function (eOuter) {
|
|
|
|
if (!eOuter.fileName) {
|
|
eOuter.fileName = url;
|
|
}
|
|
throw eOuter;
|
|
}).end();
|
|
} else {
|
|
//With unsupported URLs still need to call completeLoad to
|
|
//finish loading.
|
|
context.completeLoad(moduleName);
|
|
}
|
|
};
|
|
|
|
//Marks a module as having a name, and optionally executes the
|
|
//callback, but only if it meets certain criteria.
|
|
context.execCb = function (name, cb, args, exports) {
|
|
var buildShimExports = getOwn(layer.context.buildShimExports, name);
|
|
|
|
if (buildShimExports) {
|
|
return buildShimExports;
|
|
} else if (cb.__requireJsBuild || getOwn(layer.context.needFullExec, name)) {
|
|
return cb.apply(exports, args);
|
|
}
|
|
return undefined;
|
|
};
|
|
|
|
moduleProto.init = function (depMaps) {
|
|
if (context.needFullExec[this.map.id]) {
|
|
lang.each(depMaps, lang.bind(this, function (depMap) {
|
|
if (typeof depMap === 'string') {
|
|
depMap = context.makeModuleMap(depMap,
|
|
(this.map.isDefine ? this.map : this.map.parentMap),
|
|
false, true);
|
|
}
|
|
|
|
if (!context.fullExec[depMap.id]) {
|
|
context.require.undef(depMap.id);
|
|
}
|
|
}));
|
|
}
|
|
|
|
return oldInit.apply(this, arguments);
|
|
};
|
|
|
|
moduleProto.callPlugin = function () {
|
|
var map = this.map,
|
|
pluginMap = context.makeModuleMap(map.prefix),
|
|
pluginId = pluginMap.id,
|
|
pluginMod = getOwn(context.registry, pluginId);
|
|
|
|
context.plugins[pluginId] = true;
|
|
context.needFullExec[pluginId] = map;
|
|
|
|
//If the module is not waiting to finish being defined,
|
|
//undef it and start over, to get full execution.
|
|
if (falseProp(context.fullExec, pluginId) && (!pluginMod || pluginMod.defined)) {
|
|
context.require.undef(pluginMap.id);
|
|
}
|
|
|
|
return oldCallPlugin.apply(this, arguments);
|
|
};
|
|
}
|
|
|
|
return context;
|
|
};
|
|
|
|
//Clear up the existing context so that the newContext modifications
|
|
//above will be active.
|
|
delete require.s.contexts._;
|
|
|
|
/** Reset state for each build layer pass. */
|
|
require._buildReset = function () {
|
|
var oldContext = require.s.contexts._;
|
|
|
|
//Clear up the existing context.
|
|
delete require.s.contexts._;
|
|
|
|
//Set up new context, so the layer object can hold onto it.
|
|
require({});
|
|
|
|
layer = require._layer = {
|
|
buildPathMap: {},
|
|
buildFileToModule: {},
|
|
buildFilePaths: [],
|
|
pathAdded: {},
|
|
modulesWithNames: {},
|
|
needsDefine: {},
|
|
existingRequireUrl: "",
|
|
ignoredUrls: {},
|
|
context: require.s.contexts._
|
|
};
|
|
|
|
//Return the previous context in case it is needed, like for
|
|
//the basic config object.
|
|
return oldContext;
|
|
};
|
|
|
|
require._buildReset();
|
|
|
|
//Override define() to catch modules that just define an object, so that
|
|
//a dummy define call is not put in the build file for them. They do
|
|
//not end up getting defined via context.execCb, so we need to catch them
|
|
//at the define call.
|
|
oldDef = define;
|
|
|
|
//This function signature does not have to be exact, just match what we
|
|
//are looking for.
|
|
define = function (name) {
|
|
if (typeof name === "string" && falseProp(layer.needsDefine, name)) {
|
|
layer.modulesWithNames[name] = true;
|
|
}
|
|
return oldDef.apply(require, arguments);
|
|
};
|
|
|
|
define.amd = oldDef.amd;
|
|
|
|
//Add some utilities for plugins
|
|
require._readFile = file.readFile;
|
|
require._fileExists = function (path) {
|
|
return file.exists(path);
|
|
};
|
|
|
|
//Called when execManager runs for a dependency. Used to figure out
|
|
//the order of execution.
|
|
require.onResourceLoad = function (context, map) {
|
|
var id = map.id,
|
|
url;
|
|
|
|
// Fix up any maps that need to be normalized as part of the fullExec
|
|
// plumbing for plugins to participate in the build.
|
|
if (context.plugins && lang.hasProp(context.plugins, id)) {
|
|
lang.eachProp(context.needFullExec, function(value, prop) {
|
|
// For plugin entries themselves, they do not have a map
|
|
// value in needFullExec, just a "true" entry.
|
|
if (value !== true && value.prefix === id && value.unnormalized) {
|
|
var map = context.makeModuleMap(value.originalName, value.parentMap);
|
|
context.needFullExec[map.id] = map;
|
|
}
|
|
});
|
|
}
|
|
|
|
//If build needed a full execution, indicate it
|
|
//has been done now. But only do it if the context is tracking
|
|
//that. Only valid for the context used in a build, not for
|
|
//other contexts being run, like for useLib, plain requirejs
|
|
//use in node/rhino.
|
|
if (context.needFullExec && getOwn(context.needFullExec, id)) {
|
|
context.fullExec[id] = map;
|
|
}
|
|
|
|
//A plugin.
|
|
if (map.prefix) {
|
|
if (falseProp(layer.pathAdded, id)) {
|
|
layer.buildFilePaths.push(id);
|
|
//For plugins the real path is not knowable, use the name
|
|
//for both module to file and file to module mappings.
|
|
layer.buildPathMap[id] = id;
|
|
layer.buildFileToModule[id] = id;
|
|
layer.modulesWithNames[id] = true;
|
|
layer.pathAdded[id] = true;
|
|
}
|
|
} else if (map.url && require._isSupportedBuildUrl(map.url)) {
|
|
//If the url has not been added to the layer yet, and it
|
|
//is from an actual file that was loaded, add it now.
|
|
url = normalizeUrlWithBase(context, id, map.url);
|
|
if (!layer.pathAdded[url] && getOwn(layer.buildPathMap, id)) {
|
|
//Remember the list of dependencies for this layer.
|
|
layer.buildFilePaths.push(url);
|
|
layer.pathAdded[url] = true;
|
|
}
|
|
}
|
|
};
|
|
|
|
//Called by output of the parse() function, when a file does not
|
|
//explicitly call define, probably just require, but the parse()
|
|
//function normalizes on define() so that dependency mapping and file
|
|
//ordering work correctly.
|
|
require.needsDefine = function (moduleName) {
|
|
layer.needsDefine[moduleName] = true;
|
|
};
|
|
};
|
|
});
|
|
/*jslint */
|
|
/*global define: false, console: false */
|
|
|
|
define('commonJs', ['env!env/file', 'parse'], function (file, parse) {
|
|
'use strict';
|
|
var commonJs = {
|
|
//Set to false if you do not want this file to log. Useful in environments
|
|
//like node where you want the work to happen without noise.
|
|
useLog: true,
|
|
|
|
convertDir: function (commonJsPath, savePath) {
|
|
var fileList, i,
|
|
jsFileRegExp = /\.js$/,
|
|
fileName, convertedFileName, fileContents;
|
|
|
|
//Get list of files to convert.
|
|
fileList = file.getFilteredFileList(commonJsPath, /\w/, true);
|
|
|
|
//Normalize on forward slashes and make sure the paths do not end in a slash.
|
|
commonJsPath = commonJsPath.replace(/\\/g, "/");
|
|
savePath = savePath.replace(/\\/g, "/");
|
|
if (commonJsPath.charAt(commonJsPath.length - 1) === "/") {
|
|
commonJsPath = commonJsPath.substring(0, commonJsPath.length - 1);
|
|
}
|
|
if (savePath.charAt(savePath.length - 1) === "/") {
|
|
savePath = savePath.substring(0, savePath.length - 1);
|
|
}
|
|
|
|
//Cycle through all the JS files and convert them.
|
|
if (!fileList || !fileList.length) {
|
|
if (commonJs.useLog) {
|
|
if (commonJsPath === "convert") {
|
|
//A request just to convert one file.
|
|
console.log('\n\n' + commonJs.convert(savePath, file.readFile(savePath)));
|
|
} else {
|
|
console.log("No files to convert in directory: " + commonJsPath);
|
|
}
|
|
}
|
|
} else {
|
|
for (i = 0; i < fileList.length; i++) {
|
|
fileName = fileList[i];
|
|
convertedFileName = fileName.replace(commonJsPath, savePath);
|
|
|
|
//Handle JS files.
|
|
if (jsFileRegExp.test(fileName)) {
|
|
fileContents = file.readFile(fileName);
|
|
fileContents = commonJs.convert(fileName, fileContents);
|
|
file.saveUtf8File(convertedFileName, fileContents);
|
|
} else {
|
|
//Just copy the file over.
|
|
file.copyFile(fileName, convertedFileName, true);
|
|
}
|
|
}
|
|
}
|
|
},
|
|
|
|
/**
|
|
* Does the actual file conversion.
|
|
*
|
|
* @param {String} fileName the name of the file.
|
|
*
|
|
* @param {String} fileContents the contents of a file :)
|
|
*
|
|
* @returns {String} the converted contents
|
|
*/
|
|
convert: function (fileName, fileContents) {
|
|
//Strip out comments.
|
|
try {
|
|
var preamble = '',
|
|
commonJsProps = parse.usesCommonJs(fileName, fileContents);
|
|
|
|
//First see if the module is not already RequireJS-formatted.
|
|
if (parse.usesAmdOrRequireJs(fileName, fileContents) || !commonJsProps) {
|
|
return fileContents;
|
|
}
|
|
|
|
if (commonJsProps.dirname || commonJsProps.filename) {
|
|
preamble = 'var __filename = module.uri || "", ' +
|
|
'__dirname = __filename.substring(0, __filename.lastIndexOf("/") + 1); ';
|
|
}
|
|
|
|
//Construct the wrapper boilerplate.
|
|
fileContents = 'define(function (require, exports, module) {' +
|
|
preamble +
|
|
fileContents +
|
|
'\n});\n';
|
|
|
|
} catch (e) {
|
|
console.log("commonJs.convert: COULD NOT CONVERT: " + fileName + ", so skipping it. Error was: " + e);
|
|
return fileContents;
|
|
}
|
|
|
|
return fileContents;
|
|
}
|
|
};
|
|
|
|
return commonJs;
|
|
});
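//Illustrative (comment only, hypothetical input): commonJs.convert('a.js',
//'module.exports = 1;') produces a wrapped module along the lines of
//
//  define(function (require, exports, module) {module.exports = 1;
//  });
//
//while files that already use AMD/RequireJS syntax are returned unchanged.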
|
|
/*jslint plusplus: true, nomen: true, regexp: true */
|
|
/*global define, requirejs, java, process, console */
|
|
|
|
|
|
define('build', function (require) {
|
|
'use strict';
|
|
|
|
var build,
|
|
lang = require('lang'),
|
|
prim = require('prim'),
|
|
logger = require('logger'),
|
|
file = require('env!env/file'),
|
|
parse = require('parse'),
|
|
optimize = require('optimize'),
|
|
pragma = require('pragma'),
|
|
transform = require('transform'),
|
|
requirePatch = require('requirePatch'),
|
|
env = require('env'),
|
|
commonJs = require('commonJs'),
|
|
SourceMapGenerator = require('source-map').SourceMapGenerator,
|
|
hasProp = lang.hasProp,
|
|
getOwn = lang.getOwn,
|
|
falseProp = lang.falseProp,
|
|
endsWithSemiColonRegExp = /;\s*$/,
|
|
endsWithSlashRegExp = /[\/\\]$/,
|
|
resourceIsModuleIdRegExp = /^[\w\/\\\.]+$/,
|
|
deepCopyProps = {
|
|
layer: true
|
|
};
|
|
|
|
//Deep copy a config object, but do not copy over the "layer" property,
|
|
//as it can be a deeply nested structure with a full requirejs context.
|
|
function copyConfig(obj) {
|
|
return lang.deeplikeCopy(obj, deepCopyProps);
|
|
}
|
|
|
|
prim.nextTick = function (fn) {
|
|
fn();
|
|
};
|
|
|
|
//Now map require to the outermost requirejs, now that we have
|
|
//local dependencies for this module. The rest of the require use is
|
|
//manipulating the requirejs loader.
|
|
require = requirejs;
|
|
|
|
//Caching function for performance. Attached to
|
|
//require so it can be reused in requirePatch.js. _cachedRawText
|
|
//set up by requirePatch.js
|
|
require._cacheReadAsync = function (path, encoding) {
|
|
var d;
|
|
|
|
if (lang.hasProp(require._cachedRawText, path)) {
|
|
d = prim();
|
|
d.resolve(require._cachedRawText[path]);
|
|
return d.promise;
|
|
} else {
|
|
return file.readFileAsync(path, encoding).then(function (text) {
|
|
require._cachedRawText[path] = text;
|
|
return text;
|
|
});
|
|
}
|
|
};
|
|
|
|
function makeBuildBaseConfig() {
|
|
return {
|
|
appDir: "",
|
|
pragmas: {},
|
|
paths: {},
|
|
optimize: "uglify",
|
|
optimizeCss: "standard.keepLines.keepWhitespace",
|
|
inlineText: true,
|
|
isBuild: true,
|
|
optimizeAllPluginResources: false,
|
|
findNestedDependencies: false,
|
|
preserveLicenseComments: true,
|
|
writeBuildTxt: true,
|
|
//Some builds can take a while, up the default limit.
|
|
waitSeconds: 30,
|
|
//By default, all files/directories are copied, unless
|
|
//they match this regexp, by default just excludes .folders
|
|
dirExclusionRegExp: file.dirExclusionRegExp,
|
|
_buildPathToModuleIndex: {}
|
|
};
|
|
}
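//Illustrative note (comment only): these defaults are the base that a user build
//profile is merged over, so a hypothetical minimal profile like
//
//  ({ baseUrl: 'src', name: 'main', out: 'dist/main.js' })
//
//still picks up optimize: 'uglify', preserveLicenseComments: true, waitSeconds: 30
//and the other values above.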
|
|
|
|
/**
|
|
* Some JS may not be valid if concatenated with other JS, in particular
|
|
* the style of omitting semicolons and relying on ASI. Add a semicolon in
|
|
* those cases.
|
|
*/
|
|
function addSemiColon(text, config) {
|
|
if (config.skipSemiColonInsertion || endsWithSemiColonRegExp.test(text)) {
|
|
return text;
|
|
} else {
|
|
return text + ";";
|
|
}
|
|
}
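//For example (comment only): addSemiColon('var a = 1', {}) returns 'var a = 1;', while
//text already ending in a semicolon, or a config with skipSemiColonInsertion set,
//is returned as-is.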
|
|
|
|
function endsWithSlash(dirName) {
|
|
if (dirName.charAt(dirName.length - 1) !== "/") {
|
|
dirName += "/";
|
|
}
|
|
return dirName;
|
|
}
|
|
|
|
function endsWithNewLine(text) {
|
|
if (text.charAt(text.length - 1) !== "\n") {
|
|
text += "\n";
|
|
}
|
|
return text;
|
|
}
|
|
|
|
//Method used by plugin writeFile calls, defined up here to avoid
|
|
//jslint warning about "making a function in a loop".
|
|
function makeWriteFile(namespace, layer) {
|
|
function writeFile(name, contents) {
|
|
logger.trace('Saving plugin-optimized file: ' + name);
|
|
file.saveUtf8File(name, contents);
|
|
}
|
|
|
|
writeFile.asModule = function (moduleName, fileName, contents) {
|
|
writeFile(fileName,
|
|
build.toTransport(namespace, moduleName, fileName, contents, layer));
|
|
};
|
|
|
|
return writeFile;
|
|
}
|
|
|
|
/**
|
|
* Appends singleContents to fileContents and returns the result. If a sourceMapGenerator
|
|
* is provided, adds singleContents to the source map.
|
|
*
|
|
* @param {string} fileContents - The file contents to which to append singleContents
|
|
* @param {string} singleContents - The additional contents to append to fileContents
|
|
* @param {string} path - An absolute path of a file whose name to use in the source map.
|
|
* The file need not actually exist if the code in singleContents is generated.
|
|
* @param {{out: ?string, baseUrl: ?string}} config - The build configuration object.
|
|
* @param {?{_buildPath: ?string}} module - An object with module information.
|
|
* @param {?SourceMapGenerator} sourceMapGenerator - An instance of Mozilla's SourceMapGenerator,
|
|
* or null if no source map is being generated.
|
|
* @returns {string} fileContents with singleContents appended
|
|
*/
|
|
function appendToFileContents(fileContents, singleContents, path, config, module, sourceMapGenerator) {
|
|
var refPath, sourceMapPath, resourcePath, pluginId, sourceMapLineNumber, lineCount, parts, i;
|
|
if (sourceMapGenerator) {
|
|
if (config.out) {
|
|
refPath = config.baseUrl;
|
|
} else if (module && module._buildPath) {
|
|
refPath = module._buildPath;
|
|
} else {
|
|
refPath = "";
|
|
}
|
|
parts = path.split('!');
|
|
if (parts.length === 1) {
|
|
//Not a plugin resource, fix the path
|
|
sourceMapPath = build.makeRelativeFilePath(refPath, path);
|
|
} else {
|
|
//Plugin resource. If it looks like just a plugin
|
|
//followed by a module ID, pull off the plugin
|
|
//and put it at the end of the name, otherwise
|
|
//just leave it alone.
|
|
pluginId = parts.shift();
|
|
resourcePath = parts.join('!');
|
|
if (resourceIsModuleIdRegExp.test(resourcePath)) {
|
|
sourceMapPath = build.makeRelativeFilePath(refPath, require.toUrl(resourcePath)) +
|
|
'!' + pluginId;
|
|
} else {
|
|
sourceMapPath = path;
|
|
}
|
|
}
|
|
|
|
sourceMapLineNumber = fileContents.split('\n').length - 1;
|
|
lineCount = singleContents.split('\n').length;
|
|
for (i = 1; i <= lineCount; i += 1) {
|
|
sourceMapGenerator.addMapping({
|
|
generated: {
|
|
line: sourceMapLineNumber + i,
|
|
column: 0
|
|
},
|
|
original: {
|
|
line: i,
|
|
column: 0
|
|
},
|
|
source: sourceMapPath
|
|
});
|
|
}
|
|
|
|
//Store the content of the original in the source
|
|
//map since other transforms later like minification
|
|
//can mess up translating back to the original
|
|
//source.
|
|
sourceMapGenerator.setSourceContent(sourceMapPath, singleContents);
|
|
}
|
|
fileContents += singleContents;
|
|
return fileContents;
|
|
}
|
|
|
|
/**
|
|
* Main API entry point into the build. The args argument can either be
|
|
* an array of arguments (like the ones passed on a command-line),
|
|
* or it can be a JavaScript object that has the format of a build profile
|
|
* file.
|
|
*
|
|
* If it is an object, then in addition to the normal properties allowed in
|
|
* a build profile file, the object should contain one other property:
|
|
*
|
|
* The object could also contain a "buildFile" property, which is a string
|
|
* that is the file path to a build profile that contains the rest
|
|
* of the build profile directives.
|
|
*
|
|
* This function does not return a status, it should throw an error if
|
|
* there is a problem completing the build.
|
|
*/
|
|
build = function (args) {
|
|
var buildFile, cmdConfig, errorMsg, errorStack, stackMatch, errorTree,
|
|
i, j, errorMod,
|
|
stackRegExp = /( {4}at[^\n]+)\n/,
|
|
standardIndent = ' ';
|
|
|
|
return prim().start(function () {
|
|
if (!args || lang.isArray(args)) {
|
|
if (!args || args.length < 1) {
|
|
logger.error("build.js buildProfile.js\n" +
|
|
"where buildProfile.js is the name of the build file (see example.build.js for hints on how to make a build file).");
|
|
return undefined;
|
|
}
|
|
|
|
//Next args can include a build file path as well as other build args.
|
|
//build file path comes first. If it does not contain an = then it is
|
|
//a build file path. Otherwise, just all build args.
|
|
if (args[0].indexOf("=") === -1) {
|
|
buildFile = args[0];
|
|
args.splice(0, 1);
|
|
}
|
|
|
|
//Remaining args are options to the build
|
|
cmdConfig = build.convertArrayToObject(args);
|
|
cmdConfig.buildFile = buildFile;
|
|
} else {
|
|
cmdConfig = args;
|
|
}
|
|
|
|
return build._run(cmdConfig);
|
|
}).then(null, function (e) {
|
|
var err;
|
|
|
|
errorMsg = e.toString();
|
|
errorTree = e.moduleTree;
|
|
stackMatch = stackRegExp.exec(errorMsg);
|
|
|
|
if (stackMatch) {
|
|
errorMsg += errorMsg.substring(0, stackMatch.index + stackMatch[0].length + 1);
|
|
}
|
|
|
|
//If a module tree that shows what module triggered the error,
|
|
//print it out.
|
|
if (errorTree && errorTree.length > 0) {
|
|
errorMsg += '\nIn module tree:\n';
|
|
|
|
for (i = errorTree.length - 1; i > -1; i--) {
|
|
errorMod = errorTree[i];
|
|
if (errorMod) {
|
|
for (j = errorTree.length - i; j > -1; j--) {
|
|
errorMsg += standardIndent;
|
|
}
|
|
errorMsg += errorMod + '\n';
|
|
}
|
|
}
|
|
|
|
logger.error(errorMsg);
|
|
}
|
|
|
|
errorStack = e.stack;
|
|
|
|
if (typeof args === 'string' && args.indexOf('stacktrace=true') !== -1) {
|
|
errorMsg += '\n' + errorStack;
|
|
} else {
|
|
if (!stackMatch && errorStack) {
|
|
//Just trim out the first "at" in the stack.
|
|
stackMatch = stackRegExp.exec(errorStack);
|
|
if (stackMatch) {
|
|
errorMsg += '\n' + stackMatch[0] || '';
|
|
}
|
|
}
|
|
}
|
|
|
|
err = new Error(errorMsg);
|
|
err.originalError = e;
|
|
throw err;
|
|
});
|
|
};
|
|
|
|
build._run = function (cmdConfig) {
|
|
var buildPaths, fileName, fileNames,
|
|
paths, i,
|
|
baseConfig, config,
|
|
modules, srcPath, buildContext,
|
|
destPath, moduleMap, parentModuleMap, context,
|
|
resources, resource, plugin, fileContents,
|
|
pluginProcessed = {},
|
|
buildFileContents = "",
|
|
pluginCollector = {};
|
|
|
|
return prim().start(function () {
|
|
var prop;
|
|
|
|
//Can now run the patches to require.js to allow it to be used for
|
|
//build generation. Do it here instead of at the top of the module
|
|
//because we want normal require behavior to load the build tool
|
|
//then want to switch to build mode.
|
|
requirePatch();
|
|
|
|
config = build.createConfig(cmdConfig);
|
|
paths = config.paths;
|
|
|
|
//Remove the previous build dir, in case it contains source transforms,
|
|
//like the ones done with onBuildRead and onBuildWrite.
|
|
if (config.dir && !config.keepBuildDir && file.exists(config.dir)) {
|
|
file.deleteFile(config.dir);
|
|
}
|
|
|
|
if (!config.out && !config.cssIn) {
|
|
//This is not just a one-off file build but a full build profile, with
|
|
//lots of files to process.
|
|
|
|
//First copy all the baseUrl content
|
|
file.copyDir((config.appDir || config.baseUrl), config.dir, /\w/, true);
|
|
|
|
//Adjust baseUrl if config.appDir is in play, and set up build output paths.
|
|
buildPaths = {};
|
|
if (config.appDir) {
|
|
//All the paths should be inside the appDir, so just adjust
|
|
//the paths to use the dirBaseUrl
|
|
for (prop in paths) {
|
|
if (hasProp(paths, prop)) {
|
|
buildPaths[prop] = paths[prop].replace(config.appDir, config.dir);
|
|
}
|
|
}
|
|
} else {
|
|
//If no appDir, then make sure to copy the other paths to this directory.
|
|
for (prop in paths) {
|
|
if (hasProp(paths, prop)) {
|
|
//Set up build path for each path prefix, but only do so
|
|
//if the path falls out of the current baseUrl
|
|
if (paths[prop].indexOf(config.baseUrl) === 0) {
|
|
buildPaths[prop] = paths[prop].replace(config.baseUrl, config.dirBaseUrl);
|
|
} else {
|
|
buildPaths[prop] = paths[prop] === 'empty:' ? 'empty:' : prop;
|
|
|
|
//Make sure source path is fully formed with baseUrl,
|
|
//if it is a relative URL.
|
|
srcPath = paths[prop];
|
|
if (srcPath.indexOf('/') !== 0 && srcPath.indexOf(':') === -1) {
|
|
srcPath = config.baseUrl + srcPath;
|
|
}
|
|
|
|
destPath = config.dirBaseUrl + buildPaths[prop];
|
|
|
|
//Skip empty: paths
|
|
if (srcPath !== 'empty:') {
|
|
//If the srcPath is a directory, copy the whole directory.
|
|
if (file.exists(srcPath) && file.isDirectory(srcPath)) {
|
|
//Copy files to build area. Copy all files (the /\w/ regexp)
|
|
file.copyDir(srcPath, destPath, /\w/, true);
|
|
} else {
|
|
//Try a .js extension
|
|
srcPath += '.js';
|
|
destPath += '.js';
|
|
file.copyFile(srcPath, destPath);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
//Figure out source file location for each module layer. Do this by seeding require
|
|
//with source area configuration. This is needed so that later the module layers
|
|
//can be manually copied over to the source area, since the build may be
|
|
//run multiple times and the above copyDir call only copies newer files.
|
|
require({
|
|
baseUrl: config.baseUrl,
|
|
paths: paths,
|
|
packagePaths: config.packagePaths,
|
|
packages: config.packages
|
|
});
|
|
buildContext = require.s.contexts._;
|
|
modules = config.modules;
|
|
|
|
if (modules) {
|
|
modules.forEach(function (module) {
|
|
if (module.name) {
|
|
module._sourcePath = buildContext.nameToUrl(module.name);
|
|
//If the module does not exist, and this is not a "new" module layer,
|
|
//as indicated by a true "create" property on the module, and
|
|
//it is not a plugin-loaded resource, and there is no
|
|
//'rawText' containing the module's source then throw an error.
|
|
if (!file.exists(module._sourcePath) && !module.create &&
|
|
module.name.indexOf('!') === -1 &&
|
|
(!config.rawText || !lang.hasProp(config.rawText, module.name))) {
|
|
throw new Error("ERROR: module path does not exist: " +
|
|
module._sourcePath + " for module named: " + module.name +
|
|
". Path is relative to: " + file.absPath('.'));
|
|
}
|
|
}
|
|
});
|
|
}
|
|
|
|
if (config.out) {
|
|
//Just set up the _buildPath for the module layer.
|
|
require(config);
|
|
if (!config.cssIn) {
|
|
config.modules[0]._buildPath = typeof config.out === 'function' ?
|
|
'FUNCTION' : config.out;
|
|
}
|
|
} else if (!config.cssIn) {
|
|
//Now set up the config for require to use the build area, and calculate the
|
|
//build file locations. Pass along any config info too.
|
|
baseConfig = {
|
|
baseUrl: config.dirBaseUrl,
|
|
paths: buildPaths
|
|
};
|
|
|
|
lang.mixin(baseConfig, config);
|
|
require(baseConfig);
|
|
|
|
if (modules) {
|
|
modules.forEach(function (module) {
|
|
if (module.name) {
|
|
module._buildPath = buildContext.nameToUrl(module.name, null);
|
|
|
|
//If buildPath and sourcePath are the same, throw since this
|
|
//would result in modifying source. This condition can happen
|
|
//with some more tricky paths: config and appDir/baseUrl
|
|
//setting, which is a sign of incorrect config.
|
|
if (module._buildPath === module._sourcePath &&
|
|
!config.allowSourceOverwrites) {
|
|
throw new Error('Module ID \'' + module.name +
|
|
'\' has a source path that is same as output path: ' +
|
|
module._sourcePath +
|
|
'. Stopping, config is malformed.');
|
|
}
|
|
|
|
// Copy the file, but only if it is not provided in rawText.
|
|
if (!module.create && (!config.rawText || !lang.hasProp(config.rawText, module.name))) {
|
|
file.copyFile(module._sourcePath, module._buildPath);
|
|
}
|
|
}
|
|
});
|
|
}
|
|
}
|
|
|
|
//Run CSS optimizations before doing JS module tracing, to allow
|
|
//things like text loader plugins loading CSS to get the optimized
|
|
//CSS.
|
|
if (config.optimizeCss && config.optimizeCss !== "none" && config.dir) {
|
|
buildFileContents += optimize.css(config.dir, config);
|
|
}
|
|
}).then(function() {
|
|
baseConfig = copyConfig(require.s.contexts._.config);
|
|
}).then(function () {
|
|
var actions = [];
|
|
|
|
if (modules) {
|
|
actions = modules.map(function (module, i) {
|
|
return function () {
|
|
//Save off buildPath to module index in a hash for quicker
|
|
//lookup later.
|
|
config._buildPathToModuleIndex[file.normalize(module._buildPath)] = i;
|
|
|
|
//Call require to calculate dependencies.
|
|
return build.traceDependencies(module, config, baseConfig)
|
|
.then(function (layer) {
|
|
module.layer = layer;
|
|
});
|
|
};
|
|
});
|
|
|
|
return prim.serial(actions);
|
|
}
|
|
}).then(function () {
|
|
var actions;
|
|
|
|
if (modules) {
|
|
//Now build up shadow layers for anything that should be excluded.
|
|
//Do this after tracing dependencies for each module, in case one
|
|
//of those modules ends up being one of the excluded values.
|
|
actions = modules.map(function (module) {
|
|
return function () {
|
|
if (module.exclude) {
|
|
module.excludeLayers = [];
|
|
return prim.serial(module.exclude.map(function (exclude, i) {
|
|
return function () {
|
|
//See if it is already in the list of modules.
|
|
//If not trace dependencies for it.
|
|
var found = build.findBuildModule(exclude, modules);
|
|
if (found) {
|
|
module.excludeLayers[i] = found;
|
|
} else {
|
|
return build.traceDependencies({name: exclude}, config, baseConfig)
|
|
.then(function (layer) {
|
|
module.excludeLayers[i] = { layer: layer };
|
|
});
|
|
}
|
|
};
|
|
}));
|
|
}
|
|
};
|
|
});
|
|
|
|
return prim.serial(actions);
|
|
}
|
|
}).then(function () {
|
|
if (modules) {
|
|
return prim.serial(modules.map(function (module) {
|
|
return function () {
|
|
if (module.exclude) {
|
|
//module.exclude is an array of module names. For each one,
|
|
//get the nested dependencies for it via a matching entry
|
|
//in the module.excludeLayers array.
|
|
module.exclude.forEach(function (excludeModule, i) {
|
|
var excludeLayer = module.excludeLayers[i].layer,
|
|
map = excludeLayer.buildFileToModule;
|
|
excludeLayer.buildFilePaths.forEach(function(filePath){
|
|
build.removeModulePath(map[filePath], filePath, module.layer);
|
|
});
|
|
});
|
|
}
|
|
if (module.excludeShallow) {
|
|
//module.excludeShallow is an array of module names.
|
|
//shallow exclusions are just that module itself, and not
|
|
//its nested dependencies.
|
|
module.excludeShallow.forEach(function (excludeShallowModule) {
|
|
var path = getOwn(module.layer.buildPathMap, excludeShallowModule);
|
|
if (path) {
|
|
build.removeModulePath(excludeShallowModule, path, module.layer);
|
|
}
|
|
});
|
|
}
|
|
|
|
//Flatten them and collect the build output for each module.
|
|
return build.flattenModule(module, module.layer, config).then(function (builtModule) {
|
|
var finalText, baseName;
|
|
//Save it to a temp file for now, in case there are other layers that
|
|
//contain optimized content that should not be included in later
|
|
//layer optimizations. See issue #56.
|
|
if (module._buildPath === 'FUNCTION') {
|
|
module._buildText = builtModule.text;
|
|
module._buildSourceMap = builtModule.sourceMap;
|
|
} else {
|
|
finalText = builtModule.text;
|
|
if (builtModule.sourceMap) {
|
|
baseName = module._buildPath.split('/');
|
|
baseName = baseName.pop();
|
|
finalText += '\n//# sourceMappingURL=' + baseName + '.map';
|
|
file.saveUtf8File(module._buildPath + '.map', builtModule.sourceMap);
|
|
}
|
|
file.saveUtf8File(module._buildPath + '-temp', finalText);
|
|
|
|
}
|
|
buildFileContents += builtModule.buildText;
|
|
});
|
|
};
|
|
}));
|
|
}
|
|
}).then(function () {
|
|
var moduleName, outOrigSourceMap,
|
|
bundlesConfig = {},
|
|
bundlesConfigOutFile = config.bundlesConfigOutFile;
|
|
|
|
if (modules) {
|
|
//Now move the build layers to their final position.
|
|
modules.forEach(function (module) {
|
|
var entryConfig,
|
|
finalPath = module._buildPath;
|
|
|
|
if (finalPath !== 'FUNCTION') {
|
|
if (file.exists(finalPath)) {
|
|
file.deleteFile(finalPath);
|
|
}
|
|
file.renameFile(finalPath + '-temp', finalPath);
|
|
|
|
//If bundles config should be written out, scan the
|
|
//built file for module IDs. Favor doing this reparse
|
|
//since tracking the IDs as the file is built has some
|
|
//edge cases around files that had more than one ID in
|
|
//them already, and likely loader plugin-written contents.
|
|
if (bundlesConfigOutFile) {
|
|
entryConfig = bundlesConfig[module.name] = [];
|
|
var bundleContents = file.readFile(finalPath);
|
|
var excludeMap = {};
|
|
excludeMap[module.name] = true;
|
|
var parsedIds = parse.getAllNamedDefines(bundleContents, excludeMap);
|
|
entryConfig.push.apply(entryConfig, parsedIds);
|
|
}
|
|
|
|
//And finally, if removeCombined is specified, remove
|
|
//any of the files that were used in this layer.
|
|
//Be sure not to remove other build layers.
|
|
if (config.removeCombined && !config.out) {
|
|
module.layer.buildFilePaths.forEach(function (path) {
|
|
var isLayer = modules.some(function (mod) {
|
|
return mod._buildPath === path;
|
|
}),
|
|
relPath = build.makeRelativeFilePath(config.dir, path);
|
|
|
|
if (file.exists(path) &&
|
|
// not a build layer target
|
|
!isLayer &&
|
|
// not outside the build directory
|
|
relPath.indexOf('..') !== 0) {
|
|
file.deleteFile(path);
|
|
}
|
|
});
|
|
}
|
|
}
|
|
|
|
//Signal layer is done
|
|
if (config.onModuleBundleComplete) {
|
|
config.onModuleBundleComplete(module.onCompleteData);
|
|
}
|
|
});
|
|
|
|
//Write out bundles config, if it is wanted.
|
|
if (bundlesConfigOutFile) {
|
|
var text = file.readFile(bundlesConfigOutFile);
|
|
text = transform.modifyConfig(text, function (config) {
|
|
if (!config.bundles) {
|
|
config.bundles = {};
|
|
}
|
|
|
|
lang.eachProp(bundlesConfig, function (value, prop) {
|
|
config.bundles[prop] = value;
|
|
});
|
|
|
|
return config;
|
|
});
|
|
|
|
file.saveUtf8File(bundlesConfigOutFile, text);
|
|
}
|
|
}
|
|
|
|
//If removeCombined in play, remove any empty directories that
|
|
//may now exist because of its use
|
|
if (config.removeCombined && !config.out && config.dir) {
|
|
file.deleteEmptyDirs(config.dir);
|
|
}
|
|
|
|
//Do other optimizations.
|
|
if (config.out && !config.cssIn) {
|
|
//Just need to worry about one JS file.
|
|
fileName = config.modules[0]._buildPath;
|
|
if (fileName === 'FUNCTION') {
|
|
outOrigSourceMap = config.modules[0]._buildSourceMap;
|
|
config._buildSourceMap = outOrigSourceMap;
|
|
config.modules[0]._buildText = optimize.js((config.modules[0].name ||
|
|
config.modules[0].include[0] ||
|
|
fileName) + '.build.js',
|
|
config.modules[0]._buildText,
|
|
null,
|
|
config);
|
|
if (config._buildSourceMap && config._buildSourceMap !== outOrigSourceMap) {
|
|
config.modules[0]._buildSourceMap = config._buildSourceMap;
|
|
config._buildSourceMap = null;
|
|
}
|
|
} else {
|
|
optimize.jsFile(fileName, null, fileName, config);
|
|
}
|
|
} else if (!config.cssIn) {
|
|
//Normal optimizations across modules.
|
|
|
|
//JS optimizations.
|
|
fileNames = file.getFilteredFileList(config.dir, /\.js$/, true);
|
|
fileNames.forEach(function (fileName) {
|
|
var cfg, override, moduleIndex;
|
|
|
|
//Generate the module name from the config.dir root.
|
|
moduleName = fileName.replace(config.dir, '');
|
|
//Get rid of the extension
|
|
moduleName = moduleName.substring(0, moduleName.length - 3);
|
|
|
|
//If there is an override for a specific layer build module,
|
|
//and this file is that module, mix in the override for use
|
|
//by optimize.jsFile.
|
|
moduleIndex = getOwn(config._buildPathToModuleIndex, fileName);
|
|
//Normalize, since getOwn could have returned undefined
|
|
moduleIndex = moduleIndex === 0 || moduleIndex > 0 ? moduleIndex : -1;
|
|
|
|
//Try to avoid extra work if the other files do not need to
|
|
//be read. Build layers should be processed at the very
|
|
//least for optimization.
|
|
if (moduleIndex > -1 || !config.skipDirOptimize ||
|
|
config.normalizeDirDefines === "all" ||
|
|
config.cjsTranslate) {
|
|
//Convert the file to transport format, but without a name
|
|
//inserted (by passing null for moduleName) since the files are
|
|
//standalone, one module per file.
|
|
fileContents = file.readFile(fileName);
|
|
|
|
|
|
//For builds, if wanting cjs translation, do it now, so that
|
|
//the individual modules can be loaded cross domain via
|
|
//plain script tags.
|
|
if (config.cjsTranslate &&
|
|
(!config.shim || !lang.hasProp(config.shim, moduleName))) {
|
|
fileContents = commonJs.convert(fileName, fileContents);
|
|
}
|
|
|
|
if (moduleIndex === -1) {
|
|
if (config.onBuildRead) {
|
|
fileContents = config.onBuildRead(moduleName,
|
|
fileName,
|
|
fileContents);
|
|
}
|
|
|
|
//Only do transport normalization if this is not a build
|
|
//layer (since it was already normalized) and if
|
|
//normalizeDirDefines indicated all should be done.
|
|
if (config.normalizeDirDefines === "all") {
|
|
fileContents = build.toTransport(config.namespace,
|
|
null,
|
|
fileName,
|
|
fileContents);
|
|
}
|
|
|
|
if (config.onBuildWrite) {
|
|
fileContents = config.onBuildWrite(moduleName,
|
|
fileName,
|
|
fileContents);
|
|
}
|
|
}
|
|
|
|
override = moduleIndex > -1 ?
|
|
config.modules[moduleIndex].override : null;
|
|
if (override) {
|
|
cfg = build.createOverrideConfig(config, override);
|
|
} else {
|
|
cfg = config;
|
|
}
|
|
|
|
if (moduleIndex > -1 || !config.skipDirOptimize) {
|
|
optimize.jsFile(fileName, fileContents, fileName, cfg, pluginCollector);
|
|
}
|
|
}
|
|
});
|
|
|
|
//Normalize all the plugin resources.
|
|
context = require.s.contexts._;
|
|
|
|
for (moduleName in pluginCollector) {
|
|
if (hasProp(pluginCollector, moduleName)) {
|
|
parentModuleMap = context.makeModuleMap(moduleName);
|
|
resources = pluginCollector[moduleName];
|
|
for (i = 0; i < resources.length; i++) {
|
|
resource = resources[i];
|
|
moduleMap = context.makeModuleMap(resource, parentModuleMap);
|
|
if (falseProp(context.plugins, moduleMap.prefix)) {
|
|
//Set the value in context.plugins so it
|
|
//will be evaluated as a full plugin.
|
|
context.plugins[moduleMap.prefix] = true;
|
|
|
|
//Do not bother if the plugin is not available.
|
|
if (!file.exists(require.toUrl(moduleMap.prefix + '.js'))) {
|
|
continue;
|
|
}
|
|
|
|
//Rely on the require in the build environment
|
|
//to be synchronous
|
|
context.require([moduleMap.prefix]);
|
|
|
|
//Now that the plugin is loaded, redo the moduleMap
|
|
//since the plugin will need to normalize part of the path.
|
|
moduleMap = context.makeModuleMap(resource, parentModuleMap);
|
|
}
|
|
|
|
//Only bother with plugin resources that can be processed
//by the plugin, via support of the writeFile method.
|
|
if (falseProp(pluginProcessed, moduleMap.id)) {
|
|
//Only do the work if the plugin was really loaded.
//Use internal access because the file may not
//really be loaded.
|
|
plugin = getOwn(context.defined, moduleMap.prefix);
|
|
if (plugin && plugin.writeFile) {
|
|
plugin.writeFile(
|
|
moduleMap.prefix,
|
|
moduleMap.name,
|
|
require,
|
|
makeWriteFile(
|
|
config.namespace
|
|
),
|
|
context.config
|
|
);
|
|
}
|
|
|
|
pluginProcessed[moduleMap.id] = true;
|
|
}
|
|
}
|
|
|
|
}
|
|
}
|
|
|
|
//console.log('PLUGIN COLLECTOR: ' + JSON.stringify(pluginCollector, null, " "));
|
|
|
|
|
|
//All module layers are done, write out the build.txt file.
|
|
if (config.writeBuildTxt) {
|
|
file.saveUtf8File(config.dir + "build.txt", buildFileContents);
|
|
}
|
|
}
|
|
|
|
//If there is just one CSS file to optimize, do that here.
|
|
if (config.cssIn) {
|
|
buildFileContents += optimize.cssFile(config.cssIn, config.out, config).buildText;
|
|
}
|
|
|
|
if (typeof config.out === 'function') {
|
|
config.out(config.modules[0]._buildText, config.modules[0]._buildSourceMap);
|
|
}
|
|
|
|
//Print out what was built into which layers.
|
|
if (buildFileContents) {
|
|
logger.info(buildFileContents);
|
|
return buildFileContents;
|
|
}
|
|
|
|
return '';
|
|
});
|
|
};
|
|
|
|
/**
* Converts a dotted command line arg name like "paths.foo" and its value
* into nested object form, so name = "paths.foo" and value = "../some/path"
* produce result.paths = { foo: '../some/path' }. It assumes the name=value
* splitting has already happened.
*/
|
|
function stringDotToObj(result, name, value) {
|
|
var parts = name.split('.');
|
|
|
|
parts.forEach(function (prop, i) {
|
|
if (i === parts.length - 1) {
|
|
result[prop] = value;
|
|
} else {
|
|
if (falseProp(result, prop)) {
|
|
result[prop] = {};
|
|
}
|
|
result = result[prop];
|
|
}
|
|
|
|
});
|
|
}
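//Illustrative use of stringDotToObj above; the values are hypothetical:
//
//  var result = {};
//  stringDotToObj(result, 'paths.foo', '../some/path');
//  //result is now { paths: { foo: '../some/path' } }, with the
//  //intermediate objects created as needed.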
build.objProps = {
|
|
paths: true,
|
|
wrap: true,
|
|
pragmas: true,
|
|
pragmasOnSave: true,
|
|
has: true,
|
|
hasOnSave: true,
|
|
uglify: true,
|
|
uglify2: true,
|
|
closure: true,
|
|
map: true,
|
|
throwWhen: true,
|
|
rawText: true
|
|
};
|
|
|
|
build.hasDotPropMatch = function (prop) {
|
|
var dotProp,
|
|
index = prop.indexOf('.');
|
|
|
|
if (index !== -1) {
|
|
dotProp = prop.substring(0, index);
|
|
return hasProp(build.objProps, dotProp);
|
|
}
|
|
return false;
|
|
};
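//For reference, a hypothetical illustration of hasDotPropMatch:
//
//  build.hasDotPropMatch('paths.foo'); //true, 'paths' is in build.objProps
//  build.hasDotPropMatch('baseUrl');   //false, no dot in the name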
|
|
|
|
/**
* Converts an array of "name=value" Strings into an object, where the
* property names on the object are the names from the array. Also converts
* the strings "true" and "false" to booleans for the values, and converts
* some comma-separated values into arrays.
* @param {Array} ary
* @returns {Object}
*/
|
|
build.convertArrayToObject = function (ary) {
|
|
var result = {}, i, separatorIndex, prop, value,
|
|
needArray = {
|
|
"include": true,
|
|
"exclude": true,
|
|
"excludeShallow": true,
|
|
"insertRequire": true,
|
|
"stubModules": true,
|
|
"deps": true,
|
|
"mainConfigFile": true,
|
|
"wrap.startFile": true,
|
|
"wrap.endFile": true
|
|
};
|
|
|
|
for (i = 0; i < ary.length; i++) {
|
|
separatorIndex = ary[i].indexOf("=");
|
|
if (separatorIndex === -1) {
|
|
throw "Malformed name/value pair: [" + ary[i] + "]. Format should be name=value";
|
|
}
|
|
|
|
value = ary[i].substring(separatorIndex + 1, ary[i].length);
|
|
if (value === "true") {
|
|
value = true;
|
|
} else if (value === "false") {
|
|
value = false;
|
|
}
|
|
|
|
prop = ary[i].substring(0, separatorIndex);
|
|
|
|
//Convert to array if necessary
|
|
if (getOwn(needArray, prop)) {
|
|
value = value.split(",");
|
|
}
|
|
|
|
if (build.hasDotPropMatch(prop)) {
|
|
stringDotToObj(result, prop, value);
|
|
} else {
|
|
result[prop] = value;
|
|
}
|
|
}
|
|
return result; //Object
|
|
};
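//Illustrative sketch of convertArrayToObject; the argument values are
//made up, not taken from a real build:
//
//  build.convertArrayToObject(['optimize=none', 'skipDirOptimize=true',
//                              'include=main,extra', 'paths.foo=../lib/foo']);
//  //-> { optimize: 'none', skipDirOptimize: true,
//  //     include: ['main', 'extra'], paths: { foo: '../lib/foo' } }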
|
|
|
|
build.makeAbsPath = function (path, absFilePath) {
|
|
if (!absFilePath) {
|
|
return path;
|
|
}
|
|
|
|
//Add abspath if necessary. If path starts with a slash or has a colon,
//then it is already an absolute path.
|
|
if (path.indexOf('/') !== 0 && path.indexOf(':') === -1) {
|
|
path = absFilePath +
|
|
(absFilePath.charAt(absFilePath.length - 1) === '/' ? '' : '/') +
|
|
path;
|
|
path = file.normalize(path);
|
|
}
|
|
return path.replace(lang.backSlashRegExp, '/');
|
|
};
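//Hypothetical examples of makeAbsPath; the paths are illustrative only:
//
//  build.makeAbsPath('js/app', '/projects/site');  //-> '/projects/site/js/app'
//  build.makeAbsPath('/already/abs.js', '/projects/site'); //unchanged
//  build.makeAbsPath('c:/win/app.js', '/projects/site');   //has a colon, unchanged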
|
|
|
|
build.makeAbsObject = function (props, obj, absFilePath) {
|
|
var i, prop;
|
|
if (obj) {
|
|
for (i = 0; i < props.length; i++) {
|
|
prop = props[i];
|
|
if (hasProp(obj, prop) && typeof obj[prop] === 'string') {
|
|
obj[prop] = build.makeAbsPath(obj[prop], absFilePath);
|
|
}
|
|
}
|
|
}
|
|
};
|
|
|
|
/**
|
|
* For any path in a possible config, make it absolute relative
|
|
* to the absFilePath passed in.
|
|
*/
|
|
build.makeAbsConfig = function (config, absFilePath) {
|
|
var props, prop, i;
|
|
|
|
props = ["appDir", "dir", "baseUrl"];
|
|
for (i = 0; i < props.length; i++) {
|
|
prop = props[i];
|
|
|
|
if (getOwn(config, prop)) {
|
|
//Add abspath if necessary, make sure these paths end in
|
|
//slashes
|
|
if (prop === "baseUrl") {
|
|
config.originalBaseUrl = config.baseUrl;
|
|
if (config.appDir) {
|
|
//If baseUrl with an appDir, the baseUrl is relative to
|
|
//the appDir, *not* the absFilePath. appDir and dir are
|
|
//made absolute before baseUrl, so this will work.
|
|
config.baseUrl = build.makeAbsPath(config.originalBaseUrl, config.appDir);
|
|
} else {
|
|
//The dir output baseUrl is the same as the regular baseUrl, both
//relative to the absFilePath.
|
|
config.baseUrl = build.makeAbsPath(config[prop], absFilePath);
|
|
}
|
|
} else {
|
|
config[prop] = build.makeAbsPath(config[prop], absFilePath);
|
|
}
|
|
|
|
config[prop] = endsWithSlash(config[prop]);
|
|
}
|
|
}
|
|
|
|
build.makeAbsObject((config.out === "stdout" ? ["cssIn"] : ["out", "cssIn"]),
|
|
config, absFilePath);
|
|
build.makeAbsObject(["startFile", "endFile"], config.wrap, absFilePath);
|
|
build.makeAbsObject(["externExportsPath"], config.closure, absFilePath);
|
|
};
|
|
|
|
/**
|
|
* Creates a relative path to targetPath from refPath.
|
|
* Only deals with file paths, not folders. If folders,
|
|
* make sure paths end in a trailing '/'.
|
|
*/
|
|
build.makeRelativeFilePath = function (refPath, targetPath) {
|
|
var i, dotLength, finalParts, length, targetParts, targetName,
|
|
refParts = refPath.split('/'),
|
|
hasEndSlash = endsWithSlashRegExp.test(targetPath),
|
|
dotParts = [];
|
|
|
|
targetPath = file.normalize(targetPath);
|
|
if (hasEndSlash && !endsWithSlashRegExp.test(targetPath)) {
|
|
targetPath += '/';
|
|
}
|
|
targetParts = targetPath.split('/');
|
|
//Pull off file name
|
|
targetName = targetParts.pop();
|
|
|
|
//Also pop off the ref file name to make the matches against
|
|
//targetParts equivalent.
|
|
refParts.pop();
|
|
|
|
length = refParts.length;
|
|
|
|
for (i = 0; i < length; i += 1) {
|
|
if (refParts[i] !== targetParts[i]) {
|
|
break;
|
|
}
|
|
}
|
|
|
|
//Now i is the index in which they diverge.
|
|
finalParts = targetParts.slice(i);
|
|
|
|
dotLength = length - i;
|
|
for (i = 0; i > -1 && i < dotLength; i += 1) {
|
|
dotParts.push('..');
|
|
}
|
|
|
|
return dotParts.join('/') + (dotParts.length ? '/' : '') +
|
|
finalParts.join('/') + (finalParts.length ? '/' : '') +
|
|
targetName;
|
|
};
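//Hypothetical illustration of makeRelativeFilePath; the file paths are
//made up:
//
//  build.makeRelativeFilePath('/www/build/app/main.js', '/www/src/lib/a.js');
//  //-> '../../src/lib/a.js', the target path relative to the directory
//  //holding the reference file.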
|
|
|
|
build.nestedMix = {
|
|
paths: true,
|
|
has: true,
|
|
hasOnSave: true,
|
|
pragmas: true,
|
|
pragmasOnSave: true
|
|
};
|
|
|
|
/**
|
|
* Mixes additional source config into target config, and merges some
|
|
* nested config, like paths, correctly.
|
|
*/
|
|
function mixConfig(target, source, skipArrays) {
|
|
var prop, value, isArray, targetValue;
|
|
|
|
for (prop in source) {
|
|
if (hasProp(source, prop)) {
|
|
//If the value of the property is a plain object, then
|
|
//allow a one-level-deep mixing of it.
|
|
value = source[prop];
|
|
isArray = lang.isArray(value);
|
|
if (typeof value === 'object' && value &&
|
|
!isArray && !lang.isFunction(value) &&
|
|
!lang.isRegExp(value)) {
|
|
|
|
// TODO: need to generalize this work, maybe also reuse
|
|
// the work done in requirejs configure, perhaps move to
|
|
// just a deep copy/merge overall. However, given the
|
|
// amount of observable change, wait for a dot release.
|
|
// This change is in relation to #645
|
|
if (prop === 'map') {
|
|
if (!target.map) {
|
|
target.map = {};
|
|
}
|
|
lang.deepMix(target.map, source.map);
|
|
} else {
|
|
target[prop] = lang.mixin({}, target[prop], value, true);
|
|
}
|
|
} else if (isArray) {
|
|
if (!skipArrays) {
|
|
// Some config, like packages, are arrays. For those,
|
|
// just merge the results.
|
|
targetValue = target[prop];
|
|
if (lang.isArray(targetValue)) {
|
|
target[prop] = targetValue.concat(value);
|
|
} else {
|
|
target[prop] = value;
|
|
}
|
|
}
|
|
} else {
|
|
target[prop] = value;
|
|
}
|
|
}
|
|
}
|
|
|
|
//Set up log level since it can affect if errors are thrown
|
|
//or caught and passed to errbacks while doing config setup.
|
|
if (lang.hasProp(target, 'logLevel')) {
|
|
logger.logLevel(target.logLevel);
|
|
}
|
|
}
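//Sketch of how mixConfig merges nested values; the example configs are
//hypothetical:
//
//  var target = { paths: { a: 'lib/a' }, include: ['main'] };
//  mixConfig(target, { paths: { b: 'lib/b' }, include: ['extra'] });
//  //target.paths is now { a: 'lib/a', b: 'lib/b' } and target.include is
//  //['main', 'extra']; passing skipArrays would leave arrays untouched.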
|
|
|
|
/**
* Converts a wrap.startFile or endFile to be start/end as a string.
* The startFile/endFile values can be arrays.
*/
|
|
function flattenWrapFile(config, keyName, absFilePath) {
|
|
var wrap = config.wrap,
|
|
keyFileName = keyName + 'File',
|
|
keyMapName = '__' + keyName + 'Map';
|
|
|
|
if (typeof wrap[keyName] !== 'string' && wrap[keyFileName]) {
|
|
wrap[keyName] = '';
|
|
if (typeof wrap[keyFileName] === 'string') {
|
|
wrap[keyFileName] = [wrap[keyFileName]];
|
|
}
|
|
wrap[keyMapName] = [];
|
|
wrap[keyFileName].forEach(function (fileName) {
|
|
var absPath = build.makeAbsPath(fileName, absFilePath),
|
|
fileText = endsWithNewLine(file.readFile(absPath));
|
|
wrap[keyMapName].push(function (fileContents, cfg, sourceMapGenerator) {
|
|
return appendToFileContents(fileContents, fileText, absPath, cfg, null, sourceMapGenerator);
|
|
});
|
|
wrap[keyName] += fileText;
|
|
});
|
|
} else if (wrap[keyName] === null || wrap[keyName] === undefined) {
|
|
//Allow missing one, just set to empty string.
|
|
wrap[keyName] = '';
|
|
} else if (typeof wrap[keyName] === 'string') {
|
|
wrap[keyName] = endsWithNewLine(wrap[keyName]);
|
|
wrap[keyMapName] = [
|
|
function (fileContents, cfg, sourceMapGenerator) {
|
|
var absPath = build.makeAbsPath("config-wrap-" + keyName + "-default.js", absFilePath);
|
|
return appendToFileContents(fileContents, wrap[keyName], absPath, cfg, null, sourceMapGenerator);
|
|
}
|
|
];
|
|
} else {
|
|
throw new Error('wrap.' + keyName + ' or wrap.' + keyFileName + ' malformed');
|
|
}
|
|
}
|
|
|
|
function normalizeWrapConfig(config, absFilePath) {
|
|
//Get any wrap text.
|
|
try {
|
|
if (config.wrap) {
|
|
if (config.wrap === true) {
|
|
//Use default values.
|
|
config.wrap = {
|
|
start: '(function () {\n',
|
|
end: '}());',
|
|
__startMap: [
|
|
function (fileContents, cfg, sourceMapGenerator) {
|
|
return appendToFileContents(fileContents, "(function () {\n",
|
|
build.makeAbsPath("config-wrap-start-default.js",
|
|
absFilePath), cfg, null,
|
|
sourceMapGenerator);
|
|
}
|
|
],
|
|
__endMap: [
|
|
function (fileContents, cfg, sourceMapGenerator) {
|
|
return appendToFileContents(fileContents, "}());",
|
|
build.makeAbsPath("config-wrap-end-default.js", absFilePath),
|
|
cfg, null, sourceMapGenerator);
|
|
}
|
|
]
|
|
};
|
|
} else {
|
|
flattenWrapFile(config, 'start', absFilePath);
|
|
flattenWrapFile(config, 'end', absFilePath);
|
|
}
|
|
}
|
|
} catch (wrapError) {
|
|
throw new Error('Malformed wrap config: ' + wrapError.toString());
|
|
}
|
|
}
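//For reference, the wrap config normalized above can take a few shapes;
//the values below are illustrative:
//
//  wrap: true                                 //default (function () { ... }()) wrapper
//  wrap: { start: '//start\n', end: '//end' } //inline strings
//  wrap: { startFile: 'parts/start.frag',     //files; each may also be an array
//          endFile: 'parts/end.frag' }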
|
|
|
|
/**
|
|
* Creates a config object for an optimization build.
|
|
* It will also read the build profile if it is available, to create
|
|
* the configuration.
|
|
*
|
|
* @param {Object} cfg config options that take priority
|
|
* over defaults and ones in the build file. These options could
|
|
* be from a command line, for instance.
|
|
*
|
|
* @returns {Object} the created config object.
|
|
*/
|
|
build.createConfig = function (cfg) {
|
|
/*jslint evil: true */
|
|
var buildFileContents, buildFileConfig, mainConfig,
|
|
mainConfigFile, mainConfigPath, buildFile, absFilePath,
|
|
config = {},
|
|
buildBaseConfig = makeBuildBaseConfig();
|
|
|
|
//Make sure all paths are relative to current directory.
|
|
absFilePath = file.absPath('.');
|
|
build.makeAbsConfig(cfg, absFilePath);
|
|
build.makeAbsConfig(buildBaseConfig, absFilePath);
|
|
|
|
lang.mixin(config, buildBaseConfig);
|
|
lang.mixin(config, cfg, true);
|
|
|
|
//Set up log level early since it can affect if errors are thrown
|
|
//or caught and passed to errbacks, even while constructing config.
|
|
if (lang.hasProp(config, 'logLevel')) {
|
|
logger.logLevel(config.logLevel);
|
|
}
|
|
|
|
if (config.buildFile) {
|
|
//A build file exists, load it to get more config.
|
|
buildFile = file.absPath(config.buildFile);
|
|
|
|
//Find the build file, and make sure it exists, if this is a build
|
|
//that has a build profile, and not just command line args with an in=path
|
|
if (!file.exists(buildFile)) {
|
|
throw new Error("ERROR: build file does not exist: " + buildFile);
|
|
}
|
|
|
|
absFilePath = config.baseUrl = file.absPath(file.parent(buildFile));
|
|
|
|
//Load build file options.
|
|
buildFileContents = file.readFile(buildFile);
|
|
try {
|
|
//Be a bit lenient about the file ending in a ; or ending with
//a //# sourceMappingUrl comment, mostly for compiled languages
//that create a config, like TypeScript.
|
|
buildFileContents = buildFileContents
|
|
.replace(/\/\/\#[^\n\r]+[\n\r]*$/, '')
|
|
.trim()
|
|
.replace(/;$/, '');
|
|
|
|
buildFileConfig = eval("(" + buildFileContents + ")");
|
|
build.makeAbsConfig(buildFileConfig, absFilePath);
|
|
|
|
//Mix in the config now so that items in mainConfigFile can
|
|
//be resolved relative to them if necessary, like if appDir
|
|
//is set here, but the baseUrl is in mainConfigFile. Will
|
|
//re-mix in the same build config later after mainConfigFile
|
|
//is processed, since build config should take priority.
|
|
mixConfig(config, buildFileConfig);
|
|
} catch (e) {
|
|
throw new Error("Build file " + buildFile + " is malformed: " + e);
|
|
}
|
|
}
|
|
|
|
mainConfigFile = config.mainConfigFile || (buildFileConfig && buildFileConfig.mainConfigFile);
|
|
if (mainConfigFile) {
|
|
if (typeof mainConfigFile === 'string') {
|
|
mainConfigFile = [mainConfigFile];
|
|
}
|
|
|
|
mainConfigFile.forEach(function (configFile) {
|
|
configFile = build.makeAbsPath(configFile, absFilePath);
|
|
if (!file.exists(configFile)) {
|
|
throw new Error(configFile + ' does not exist.');
|
|
}
|
|
try {
|
|
mainConfig = parse.findConfig(file.readFile(configFile)).config;
|
|
} catch (configError) {
|
|
throw new Error('The config in mainConfigFile ' +
|
|
configFile +
|
|
' cannot be used because it cannot be evaluated' +
|
|
' correctly while running in the optimizer. Try only' +
|
|
' using a config that is also valid JSON, or do not use' +
|
|
' mainConfigFile and instead copy the config values needed' +
|
|
' into a build file or command line arguments given to the optimizer.\n' +
|
|
'Source error from parsing: ' + configFile + ': ' + configError);
|
|
}
|
|
if (mainConfig) {
|
|
mainConfigPath = configFile.substring(0, configFile.lastIndexOf('/'));
|
|
|
|
//Add in some existing config, like appDir, since they can be
|
|
//used inside the configFile -- paths and baseUrl are
|
|
//relative to them.
|
|
if (config.appDir && !mainConfig.appDir) {
|
|
mainConfig.appDir = config.appDir;
|
|
}
|
|
|
|
//If no baseUrl, then use the directory holding the main config.
|
|
if (!mainConfig.baseUrl) {
|
|
mainConfig.baseUrl = mainConfigPath;
|
|
}
|
|
|
|
build.makeAbsConfig(mainConfig, mainConfigPath);
|
|
mixConfig(config, mainConfig);
|
|
}
|
|
});
|
|
}
|
|
|
|
//Mix in build file config, but only after mainConfig has been mixed in.
|
|
//Since this is a re-application, skip array merging.
|
|
if (buildFileConfig) {
|
|
mixConfig(config, buildFileConfig, true);
|
|
}
|
|
|
|
//Re-apply the override config values. Command line
|
|
//args should take precedence over build file values.
|
|
//Since this is a re-application, skip array merging.
|
|
mixConfig(config, cfg, true);
|
|
|
|
//Fix paths to full paths so that they can be adjusted consistently
//later to be in the output area.
|
|
lang.eachProp(config.paths, function (value, prop) {
|
|
if (lang.isArray(value)) {
|
|
throw new Error('paths fallback not supported in optimizer. ' +
|
|
'Please provide a build config path override ' +
|
|
'for ' + prop);
|
|
}
|
|
config.paths[prop] = build.makeAbsPath(value, config.baseUrl);
|
|
});
|
|
|
|
//Set final output dir
|
|
if (hasProp(config, "baseUrl")) {
|
|
if (config.appDir) {
|
|
if (!config.originalBaseUrl) {
|
|
throw new Error('Please set a baseUrl in the build config');
|
|
}
|
|
config.dirBaseUrl = build.makeAbsPath(config.originalBaseUrl, config.dir);
|
|
} else {
|
|
config.dirBaseUrl = config.dir || config.baseUrl;
|
|
}
|
|
//Make sure dirBaseUrl ends in a slash, since it is
|
|
//concatenated with other strings.
|
|
config.dirBaseUrl = endsWithSlash(config.dirBaseUrl);
|
|
}
|
|
|
|
if (config.bundlesConfigOutFile) {
|
|
if (!config.dir) {
|
|
throw new Error('bundlesConfigOutFile can only be used with optimizations ' +
|
|
'that use "dir".');
|
|
}
|
|
config.bundlesConfigOutFile = build.makeAbsPath(config.bundlesConfigOutFile, config.dir);
|
|
}
|
|
|
|
//If out=stdout, write output to STDOUT instead of a file.
|
|
if (config.out && config.out === 'stdout') {
|
|
config.out = function (content) {
|
|
var e = env.get();
|
|
if (e === 'rhino') {
|
|
var out = new java.io.PrintStream(java.lang.System.out, true, 'UTF-8');
|
|
out.println(content);
|
|
} else if (e === 'node') {
|
|
process.stdout.write(content, 'utf8');
|
|
} else {
|
|
console.log(content);
|
|
}
|
|
};
|
|
}
|
|
|
|
//Check for errors in config
|
|
if (config.main) {
|
|
throw new Error('"main" passed as an option, but the ' +
|
|
'supported option is called "name".');
|
|
}
|
|
if (config.out && !config.name && !config.modules && !config.include &&
|
|
!config.cssIn) {
|
|
throw new Error('Missing either a "name", "include" or "modules" ' +
|
|
'option');
|
|
}
|
|
if (config.cssIn) {
|
|
if (config.dir || config.appDir) {
|
|
throw new Error('cssIn is only for the output of single file ' +
|
|
'CSS optimizations and is not compatible with "dir" or "appDir" configuration.');
|
|
}
|
|
if (!config.out) {
|
|
throw new Error('"out" option missing.');
|
|
}
|
|
}
|
|
if (!config.cssIn && !config.baseUrl) {
|
|
//Just use the current directory as the baseUrl
|
|
config.baseUrl = './';
|
|
}
|
|
if (!config.out && !config.dir) {
|
|
throw new Error('Missing either an "out" or "dir" config value. ' +
|
|
'If using "appDir" for a full project optimization, ' +
|
|
'use "dir". If you want to optimize to one file, ' +
|
|
'use "out".');
|
|
}
|
|
if (config.appDir && config.out) {
|
|
throw new Error('"appDir" is not compatible with "out". Use "dir" ' +
|
|
'instead. appDir is used to copy whole projects, ' +
|
|
'where "out" with "baseUrl" is used to just ' +
|
|
'optimize to one file.');
|
|
}
|
|
if (config.out && config.dir) {
|
|
throw new Error('The "out" and "dir" options are incompatible.' +
|
|
' Use "out" if you are targeting a single file' +
|
|
' for optimization, and "dir" if you want the appDir' +
|
|
' or baseUrl directories optimized.');
|
|
}
|
|
|
|
|
|
if (config.dir) {
|
|
// Make sure the output dir is not set to a parent of the
|
|
// source dir or the same dir, as it will result in source
|
|
// code deletion.
|
|
if (!config.allowSourceOverwrites && (config.dir === config.baseUrl ||
|
|
config.dir === config.appDir ||
|
|
(config.baseUrl && build.makeRelativeFilePath(config.dir,
|
|
config.baseUrl).indexOf('..') !== 0) ||
|
|
(config.appDir &&
|
|
build.makeRelativeFilePath(config.dir, config.appDir).indexOf('..') !== 0))) {
|
|
throw new Error('"dir" is set to a parent or same directory as' +
|
|
' "appDir" or "baseUrl". This can result in' +
|
|
' the deletion of source code. Stopping. If' +
|
|
' you want to allow possible overwriting of' +
|
|
' source code, set "allowSourceOverwrites"' +
|
|
' to true in the build config, but do so at' +
|
|
' your own risk. In that case, you may want' +
|
|
' to also set "keepBuildDir" to true.');
|
|
}
|
|
}
|
|
|
|
if (config.insertRequire && !lang.isArray(config.insertRequire)) {
|
|
throw new Error('insertRequire should be a list of module IDs' +
|
|
' to insert in to a require([]) call.');
|
|
}
|
|
|
|
//Support older configs with uglify2 settings, but now that uglify1 has
|
|
//been removed, just translate it to 'uglify' settings.
|
|
if (config.optimize === 'uglify2') {
|
|
config.optimize = 'uglify';
|
|
}
|
|
if (config.uglify2) {
|
|
config.uglify = config.uglify2;
|
|
delete config.uglify2;
|
|
}
|
|
|
|
if (config.generateSourceMaps) {
|
|
if (config.preserveLicenseComments && !(config.optimize === 'none' || config.optimize === 'uglify')) {
|
|
throw new Error('Cannot use preserveLicenseComments and ' +
|
|
'generateSourceMaps together, unless optimize is set ' +
|
|
'to \'uglify\'. Either explicitly set preserveLicenseComments ' +
|
|
'to false (default is true) or turn off generateSourceMaps. ' +
|
|
'If you want source maps with license comments, see: ' +
|
|
'http://requirejs.org/docs/errors.html#sourcemapcomments');
|
|
} else if (config.optimize !== 'none' &&
|
|
config.optimize !== 'closure' &&
|
|
config.optimize !== 'uglify') {
|
|
//Allow optimize: none to pass, since it is useful when toggling
|
|
//minification on and off to debug something, and it implicitly
|
|
//works, since it does not need a source map.
|
|
throw new Error('optimize: "' + config.optimize +
|
|
'" does not support generateSourceMaps.');
|
|
}
|
|
}
|
|
|
|
if ((config.name || config.include) && !config.modules) {
|
|
//Just need to build one file, which may be part of a whole appDir/
//baseUrl copy, but it was specified via command line options, so the
//modules array setup could not be done there. Create a modules
//section for that case.
|
|
config.modules = [
|
|
{
|
|
name: config.name,
|
|
out: config.out,
|
|
create: config.create,
|
|
include: config.include,
|
|
exclude: config.exclude,
|
|
excludeShallow: config.excludeShallow,
|
|
insertRequire: config.insertRequire,
|
|
stubModules: config.stubModules
|
|
}
|
|
];
|
|
delete config.stubModules;
|
|
} else if (config.modules && config.out) {
|
|
throw new Error('If the "modules" option is used, then there ' +
|
|
'should be a "dir" option set and "out" should ' +
|
|
'not be used since "out" is only for single file ' +
|
|
'optimization output.');
|
|
} else if (config.modules && config.name) {
|
|
throw new Error('"name" and "modules" options are incompatible. ' +
|
|
'Either use "name" if doing a single file ' +
|
|
'optimization, or "modules" if you want to target ' +
|
|
'more than one file for optimization.');
|
|
}
|
|
|
|
if (config.out && !config.cssIn) {
|
|
//Just one file to optimize.
|
|
|
|
//Does not have a build file, so set up some defaults.
|
|
//Optimizing CSS should not be allowed, unless explicitly
|
|
//asked for on command line. In that case the only task is
|
|
//to optimize a CSS file.
|
|
if (!cfg.optimizeCss) {
|
|
config.optimizeCss = "none";
|
|
}
|
|
}
|
|
|
|
//Normalize cssPrefix
|
|
if (config.cssPrefix) {
|
|
//Make sure cssPrefix ends in a slash
|
|
config.cssPrefix = endsWithSlash(config.cssPrefix);
|
|
} else {
|
|
config.cssPrefix = '';
|
|
}
|
|
|
|
//Cycle through modules and normalize
|
|
if (config.modules && config.modules.length) {
|
|
config.modules.forEach(function (mod) {
|
|
if (lang.isArray(mod) || typeof mod === 'string' || !mod) {
|
|
throw new Error('modules config item is malformed: it should' +
|
|
' be an object with a \'name\' property.');
|
|
}
|
|
|
|
//Combine any local stubModules with global values.
|
|
if (config.stubModules) {
|
|
mod.stubModules = config.stubModules.concat(mod.stubModules || []);
|
|
}
|
|
|
|
//Create a hash lookup for the stubModules config to make lookup
|
|
//cheaper later.
|
|
if (mod.stubModules) {
|
|
mod.stubModules._byName = {};
|
|
mod.stubModules.forEach(function (id) {
|
|
mod.stubModules._byName[id] = true;
|
|
});
|
|
}
|
|
|
|
// Legacy command support, which allowed a single string ID
|
|
// for include.
|
|
if (typeof mod.include === 'string') {
|
|
mod.include = [mod.include];
|
|
}
|
|
|
|
//Allow wrap config in overrides, but normalize it.
|
|
if (mod.override) {
|
|
normalizeWrapConfig(mod.override, absFilePath);
|
|
}
|
|
});
|
|
}
|
|
|
|
normalizeWrapConfig(config, absFilePath);
|
|
|
|
//Do final input verification
|
|
if (config.context) {
|
|
throw new Error('The build argument "context" is not supported' +
|
|
' in a build. It should only be used in web' +
|
|
' pages.');
|
|
}
|
|
|
|
//Set up normalizeDirDefines. If not explicitly set: if optimize is
//"none" (or skipDirOptimize is on), set it to "skip", otherwise "all".
|
|
if (!hasProp(config, 'normalizeDirDefines')) {
|
|
if (config.optimize === 'none' || config.skipDirOptimize) {
|
|
config.normalizeDirDefines = 'skip';
|
|
} else {
|
|
config.normalizeDirDefines = 'all';
|
|
}
|
|
}
|
|
|
|
//Set file.fileExclusionRegExp if desired
|
|
if (hasProp(config, 'fileExclusionRegExp')) {
|
|
if (typeof config.fileExclusionRegExp === "string") {
|
|
file.exclusionRegExp = new RegExp(config.fileExclusionRegExp);
|
|
} else {
|
|
file.exclusionRegExp = config.fileExclusionRegExp;
|
|
}
|
|
} else if (hasProp(config, 'dirExclusionRegExp')) {
|
|
//Set file.exclusionRegExp from dirExclusionRegExp if provided; this
//is the old name for fileExclusionRegExp before 1.0.2, supported for
//backwards compatibility.
|
|
file.exclusionRegExp = config.dirExclusionRegExp;
|
|
}
|
|
|
|
//Track the deps, but in a different key, so that they are not loaded
|
|
//as part of config seeding before all config is in play (#648). Was
|
|
//going to merge this in with "include", but include is added after
|
|
//the "name" target. To preserve what r.js has done previously, make
|
|
//sure "deps" comes before the "name".
|
|
if (config.deps) {
|
|
config._depsInclude = config.deps;
|
|
}
//Remove things that may cause problems in the build.
|
|
//deps already merged above
|
|
delete config.deps;
|
|
delete config.jQuery;
|
|
delete config.enforceDefine;
|
|
delete config.urlArgs;
|
|
|
|
return config;
|
|
};
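//Illustrative call of createConfig; the option values are hypothetical:
//
//  var cfg = build.createConfig({ baseUrl: 'js', name: 'main',
//                                 out: 'built/main.js', optimize: 'none' });
//  //cfg has absolute baseUrl/out paths, a generated modules array for
//  //"main", and defaults such as optimizeCss: 'none' filled in.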
|
|
|
|
/**
|
|
* finds the module being built/optimized with the given moduleName,
|
|
* or returns null.
|
|
* @param {String} moduleName
|
|
* @param {Array} modules
|
|
* @returns {Object} the module object from the build profile, or null.
|
|
*/
|
|
build.findBuildModule = function (moduleName, modules) {
|
|
var i, module;
|
|
for (i = 0; i < modules.length; i++) {
|
|
module = modules[i];
|
|
if (module.name === moduleName) {
|
|
return module;
|
|
}
|
|
}
|
|
return null;
|
|
};
|
|
|
|
/**
|
|
* Removes a module name and path from a layer, if it is supposed to be
|
|
* excluded from the layer.
|
|
* @param {String} moduleName the name of the module
|
|
* @param {String} path the file path for the module
|
|
* @param {Object} layer the layer to remove the module/path from
|
|
*/
|
|
build.removeModulePath = function (module, path, layer) {
|
|
var index = layer.buildFilePaths.indexOf(path);
|
|
if (index !== -1) {
|
|
layer.buildFilePaths.splice(index, 1);
|
|
}
|
|
};
|
|
|
|
/**
|
|
* Uses the module build config object to trace the dependencies for the
|
|
* given module.
|
|
*
|
|
* @param {Object} module the module object from the build config info.
|
|
* @param {Object} config the build config object.
|
|
* @param {Object} [baseLoaderConfig] the base loader config to use for env resets.
|
|
*
|
|
* @returns {Object} layer information about what paths and modules should
|
|
* be in the flattened module.
|
|
*/
|
|
build.traceDependencies = function (module, config, baseLoaderConfig) {
|
|
var include, override, layer, context, oldContext,
|
|
rawTextByIds,
|
|
syncChecks = {
|
|
rhino: true,
|
|
node: true,
|
|
xpconnect: true
|
|
},
|
|
deferred = prim();
|
|
|
|
//Reset some state set up in requirePatch.js, and clean up require's
|
|
//current context.
|
|
oldContext = require._buildReset();
|
|
|
|
//Grab the reset layer and context after the reset, but keep the
|
|
//old config to reuse in the new context.
|
|
layer = require._layer;
|
|
context = layer.context;
|
|
|
|
//Put back basic config, use a fresh object for it.
|
|
if (baseLoaderConfig) {
|
|
require(copyConfig(baseLoaderConfig));
|
|
}
|
|
|
|
logger.trace("\nTracing dependencies for: " + (module.name ||
|
|
(typeof module.out === 'function' ? 'FUNCTION' : module.out)));
|
|
include = config._depsInclude || [];
|
|
include = include.concat(module.name && !module.create ? [module.name] : []);
|
|
if (module.include) {
|
|
include = include.concat(module.include);
|
|
}
|
|
|
|
//If there are overrides to basic config, set that up now.
|
|
if (module.override) {
|
|
if (baseLoaderConfig) {
|
|
override = build.createOverrideConfig(baseLoaderConfig, module.override);
|
|
} else {
|
|
override = copyConfig(module.override);
|
|
}
|
|
require(override);
|
|
}
|
|
|
|
//Now, populate the rawText cache with any values explicitly passed in
|
|
//via config.
|
|
rawTextByIds = require.s.contexts._.config.rawText;
|
|
if (rawTextByIds) {
|
|
lang.eachProp(rawTextByIds, function (contents, id) {
|
|
var url = require.toUrl(id) + '.js';
|
|
require._cachedRawText[url] = contents;
|
|
});
|
|
}
|
|
|
|
|
|
//Configure the callbacks to be called.
|
|
deferred.reject.__requireJsBuild = true;
|
|
|
|
//Use a wrapping function so can check for errors.
|
|
function includeFinished(value) {
|
|
//If in a sync build environment, check for errors here instead of
//in the then callback below, since some errors, like two IDs pointing
//to the same URL but only one anonymous ID, will leave the loader in
//an unresolved state, and a setTimeout cannot be used to check for a
//timeout.
|
|
var hasError = false;
|
|
if (syncChecks[env.get()]) {
|
|
try {
|
|
build.checkForErrors(context, layer);
|
|
} catch (e) {
|
|
hasError = true;
|
|
deferred.reject(e);
|
|
}
|
|
}
|
|
|
|
if (!hasError) {
|
|
deferred.resolve(value);
|
|
}
|
|
}
|
|
includeFinished.__requireJsBuild = true;
|
|
|
|
//Figure out module layer dependencies by calling require to do the work.
|
|
require(include, includeFinished, deferred.reject);
|
|
|
|
// If a sync env, then with the "two IDs to same anon module path"
|
|
// issue, the require never completes, need to check for errors
|
|
// here.
|
|
if (syncChecks[env.get()]) {
|
|
build.checkForErrors(context, layer);
|
|
}
|
|
|
|
return deferred.promise.then(function () {
|
|
//Reset config
|
|
if (module.override && baseLoaderConfig) {
|
|
require(copyConfig(baseLoaderConfig));
|
|
}
|
|
|
|
build.checkForErrors(context, layer);
|
|
|
|
return layer;
|
|
});
|
|
};
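//Sketch of how traceDependencies is consumed elsewhere in this file; the
//module object shown is hypothetical:
//
//  build.traceDependencies({ name: 'main' }, config, baseConfig)
//      .then(function (layer) {
//          //layer.buildFilePaths lists the files that belong in the
//          //flattened "main" layer, in dependency order.
//      });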
|
|
|
|
build.checkForErrors = function (context, layer) {
|
|
//Check to see if it all loaded. If not, then throw, and give
|
|
//a message on what is left.
|
|
var id, prop, mod, idParts, pluginId, pluginResources,
|
|
errMessage = '',
|
|
failedPluginMap = {},
|
|
failedPluginIds = [],
|
|
errIds = [],
|
|
errUrlMap = {},
|
|
errUrlConflicts = {},
|
|
hasErrUrl = false,
|
|
hasUndefined = false,
|
|
defined = context.defined,
|
|
registry = context.registry;
|
|
|
|
function populateErrUrlMap(id, errUrl, skipNew) {
|
|
// Loader plugins do not have an errUrl, so skip them.
|
|
if (!errUrl) {
|
|
return;
|
|
}
|
|
|
|
if (!skipNew) {
|
|
errIds.push(id);
|
|
}
|
|
|
|
if (errUrlMap[errUrl]) {
|
|
hasErrUrl = true;
|
|
//This error module has the same URL as another
|
|
//error module, could be misconfiguration.
|
|
if (!errUrlConflicts[errUrl]) {
|
|
errUrlConflicts[errUrl] = [];
|
|
//Store the original module that had the same URL.
|
|
errUrlConflicts[errUrl].push(errUrlMap[errUrl]);
|
|
}
|
|
errUrlConflicts[errUrl].push(id);
|
|
} else if (!skipNew) {
|
|
errUrlMap[errUrl] = id;
|
|
}
|
|
}
|
|
|
|
for (id in registry) {
|
|
if (hasProp(registry, id) && id.indexOf('_@r') !== 0) {
|
|
hasUndefined = true;
|
|
mod = getOwn(registry, id);
|
|
idParts = id.split('!');
|
|
pluginId = idParts[0];
|
|
|
|
if (id.indexOf('_unnormalized') === -1 && mod && mod.enabled) {
|
|
populateErrUrlMap(id, mod.map.url);
|
|
}
|
|
|
|
//Look for plugins that did not call load()
|
|
//But skip plugin IDs that were already inlined and called
|
|
//define() with a name.
|
|
if (!hasProp(layer.modulesWithNames, id) && idParts.length > 1) {
|
|
if (falseProp(failedPluginMap, pluginId)) {
|
|
failedPluginIds.push(pluginId);
|
|
}
|
|
pluginResources = failedPluginMap[pluginId];
|
|
if (!pluginResources) {
|
|
pluginResources = failedPluginMap[pluginId] = [];
|
|
}
|
|
pluginResources.push(id + (mod.error ? ': ' + mod.error : ''));
|
|
}
|
|
}
|
|
}
|
|
|
|
// If there are modules that are not defined/stuck in the registry,
// then check defined modules for URL overlap.
|
|
if (hasUndefined) {
|
|
for (id in defined) {
|
|
if (hasProp(defined, id) && id.indexOf('!') === -1) {
|
|
populateErrUrlMap(id, require.toUrl(id) + '.js', true);
|
|
}
|
|
}
|
|
}
|
|
|
|
if (errIds.length || failedPluginIds.length) {
|
|
if (failedPluginIds.length) {
|
|
errMessage += 'Loader plugin' +
|
|
(failedPluginIds.length === 1 ? '' : 's') +
|
|
' did not call ' +
|
|
'the load callback in the build:\n' +
|
|
failedPluginIds.map(function (pluginId) {
|
|
var pluginResources = failedPluginMap[pluginId];
|
|
return pluginId + ':\n ' + pluginResources.join('\n ');
|
|
}).join('\n') + '\n';
|
|
}
|
|
errMessage += 'Module loading did not complete for: ' + errIds.join(', ');
|
|
|
|
if (hasErrUrl) {
|
|
errMessage += '\nThe following modules share the same URL. This ' +
|
|
'could be a misconfiguration if that URL only has ' +
|
|
'one anonymous module in it:';
|
|
for (prop in errUrlConflicts) {
|
|
if (hasProp(errUrlConflicts, prop)) {
|
|
errMessage += '\n' + prop + ': ' +
|
|
errUrlConflicts[prop].join(', ');
|
|
}
|
|
}
|
|
}
|
|
throw new Error(errMessage);
|
|
}
|
|
};
|
|
|
|
build.createOverrideConfig = function (config, override) {
|
|
var cfg = copyConfig(config),
|
|
oride = copyConfig(override);
|
|
|
|
lang.eachProp(oride, function (value, prop) {
|
|
if (hasProp(build.objProps, prop)) {
|
|
//An object property, merge keys. Start a new object
|
|
//so that source object in config does not get modified.
|
|
cfg[prop] = {};
|
|
lang.mixin(cfg[prop], config[prop], true);
|
|
lang.mixin(cfg[prop], override[prop], true);
|
|
} else {
|
|
cfg[prop] = override[prop];
|
|
}
|
|
});
|
|
|
|
return cfg;
|
|
};
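//Hypothetical illustration of createOverrideConfig:
//
//  var cfg = build.createOverrideConfig(config, { optimize: 'none',
//                                                 paths: { a: 'empty:' } });
//  //cfg.optimize is 'none'; cfg.paths merges config.paths with the
//  //override's paths, leaving the original config object unmodified.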
|
|
|
|
/**
* Uses the module build config object to create a flattened version
* of the module, with deep dependencies included.
*
* @param {Object} module the module object from the build config info.
*
* @param {Object} layer the layer object returned from build.traceDependencies.
*
* @param {Object} config the build config object.
*
* @returns {Object} with two properties: "text", the text of the flattened
* module, and "buildText", a string of text representing which files were
* included in the flattened module text.
*/
|
|
build.flattenModule = function (module, layer, config) {
|
|
var fileContents, sourceMapGenerator,
|
|
sourceMapBase,
|
|
buildFileContents = '';
|
|
|
|
return prim().start(function () {
|
|
var reqIndex, currContents, fileForSourceMap,
|
|
moduleName, shim, packageName,
|
|
parts, builder, writeApi,
|
|
namespace, namespaceWithDot, stubModulesByName,
|
|
context = layer.context,
|
|
onLayerEnds = [],
|
|
onLayerEndAdded = {},
|
|
pkgsMainMap = {};
|
|
|
|
//Use override settings, particularly for pragmas
|
|
//Do this before the var readings since it reads config values.
|
|
if (module.override) {
|
|
config = build.createOverrideConfig(config, module.override);
|
|
}
|
|
|
|
namespace = config.namespace || '';
|
|
namespaceWithDot = namespace ? namespace + '.' : '';
|
|
stubModulesByName = (module.stubModules && module.stubModules._byName) || {};
|
|
|
|
//Start build output for the module.
|
|
module.onCompleteData = {
|
|
name: module.name,
|
|
path: (config.dir ? module._buildPath.replace(config.dir, "") : module._buildPath),
|
|
included: []
|
|
};
|
|
|
|
buildFileContents += "\n" +
|
|
module.onCompleteData.path +
|
|
"\n----------------\n";
|
|
|
|
//If there was an existing file with require in it, hoist to the top.
|
|
if (layer.existingRequireUrl) {
|
|
reqIndex = layer.buildFilePaths.indexOf(layer.existingRequireUrl);
|
|
if (reqIndex !== -1) {
|
|
layer.buildFilePaths.splice(reqIndex, 1);
|
|
layer.buildFilePaths.unshift(layer.existingRequireUrl);
|
|
}
|
|
}
|
|
|
|
if (config.generateSourceMaps) {
|
|
sourceMapBase = config.dir || config.baseUrl;
|
|
if (module._buildPath === 'FUNCTION') {
|
|
fileForSourceMap = (module.name || module.include[0] || 'FUNCTION') + '.build.js';
|
|
} else if (config.out) {
|
|
fileForSourceMap = module._buildPath.split('/').pop();
|
|
} else {
|
|
fileForSourceMap = module._buildPath.replace(sourceMapBase, '');
|
|
}
|
|
sourceMapGenerator = new SourceMapGenerator({
|
|
file: fileForSourceMap
|
|
});
|
|
}
|
|
|
|
//Create a reverse lookup for packages main module IDs to their package
|
|
//names, useful for knowing when to write out define() package main ID
|
|
//adapters.
|
|
lang.eachProp(layer.context.config.pkgs, function(value, prop) {
|
|
pkgsMainMap[value] = prop;
|
|
});
|
|
|
|
//Write the built module to disk, and build up the build output.
|
|
fileContents = "";
|
|
if (config.wrap && config.wrap.__startMap) {
|
|
config.wrap.__startMap.forEach(function (wrapFunction) {
|
|
fileContents = wrapFunction(fileContents, config, sourceMapGenerator);
|
|
});
|
|
}
|
|
|
|
return prim.serial(layer.buildFilePaths.map(function (path) {
|
|
return function () {
|
|
var singleContents = '';
|
|
|
|
moduleName = layer.buildFileToModule[path];
|
|
|
|
//If the moduleName is a package main, then hold on to the
|
|
//packageName in case an adapter needs to be written.
|
|
packageName = getOwn(pkgsMainMap, moduleName);
|
|
|
|
return prim().start(function () {
|
|
//Figure out if the module is a result of a build plugin, and if so,
|
|
//then delegate to that plugin.
|
|
parts = context.makeModuleMap(moduleName);
|
|
builder = parts.prefix && getOwn(context.defined, parts.prefix);
|
|
if (builder) {
|
|
if (builder.onLayerEnd && falseProp(onLayerEndAdded, parts.prefix)) {
|
|
onLayerEnds.push(builder);
|
|
onLayerEndAdded[parts.prefix] = true;
|
|
}
|
|
|
|
if (builder.write) {
|
|
writeApi = function (input) {
|
|
singleContents += "\n" + addSemiColon(input, config);
|
|
if (config.onBuildWrite) {
|
|
singleContents = config.onBuildWrite(moduleName, path, singleContents);
|
|
}
|
|
};
|
|
writeApi.asModule = function (moduleName, input) {
|
|
singleContents += "\n" +
|
|
addSemiColon(build.toTransport(namespace, moduleName, path, input, layer, {
|
|
useSourceUrl: layer.context.config.useSourceUrl
|
|
}), config);
|
|
if (config.onBuildWrite) {
|
|
singleContents = config.onBuildWrite(moduleName, path, singleContents);
|
|
}
|
|
};
|
|
|
|
builder.write(parts.prefix, parts.name, writeApi, {
|
|
name: module.onCompleteData.name,
|
|
path: module.onCompleteData.path
|
|
});
|
|
}
|
|
return;
|
|
} else {
|
|
return prim().start(function () {
|
|
if (hasProp(stubModulesByName, moduleName)) {
|
|
//Just want to insert a simple module definition instead
|
|
//of the source module. Useful for plugins that inline
|
|
//all their resources.
|
|
if (hasProp(layer.context.plugins, moduleName)) {
|
|
//Slightly different content for plugins, to indicate
|
|
//that dynamic loading will not work.
|
|
return 'define({load: function(id){throw new Error("Dynamic load not allowed: " + id);}});';
|
|
} else {
|
|
return 'define({});';
|
|
}
|
|
} else {
|
|
return require._cacheReadAsync(path);
|
|
}
|
|
}).then(function (text) {
|
|
var hasPackageName;
|
|
|
|
currContents = text;
|
|
|
|
if (config.cjsTranslate &&
|
|
(!config.shim || !lang.hasProp(config.shim, moduleName))) {
|
|
currContents = commonJs.convert(path, currContents);
|
|
}
|
|
|
|
if (config.onBuildRead) {
|
|
currContents = config.onBuildRead(moduleName, path, currContents);
|
|
}
|
|
|
|
if (packageName) {
|
|
hasPackageName = (packageName === parse.getNamedDefine(currContents));
|
|
}
|
|
|
|
if (namespace) {
|
|
currContents = pragma.namespace(currContents, namespace);
|
|
}
|
|
|
|
currContents = build.toTransport(namespace, moduleName, path, currContents, layer, {
|
|
useSourceUrl: config.useSourceUrl
|
|
});
|
|
|
|
if (packageName && !hasPackageName) {
|
|
currContents = addSemiColon(currContents, config) + '\n';
|
|
currContents += namespaceWithDot + "define('" +
|
|
packageName + "', ['" + moduleName +
|
|
"'], function (main) { return main; });\n";
|
|
}
|
|
|
|
if (config.onBuildWrite) {
|
|
currContents = config.onBuildWrite(moduleName, path, currContents);
|
|
}
|
|
|
|
//Semicolon is for files that are not well formed when
|
|
//concatenated with other content.
|
|
singleContents += addSemiColon(currContents, config);
|
|
});
|
|
}
|
|
}).then(function () {
|
|
var shimDeps, shortPath = path.replace(config.dir, "");
|
|
|
|
module.onCompleteData.included.push(shortPath);
|
|
buildFileContents += shortPath + "\n";
|
|
|
|
//Some files may not have declared a require module, and if so,
|
|
//put in a placeholder call so the require does not try to load them
|
|
//after the module is processed.
|
|
//If we have a name, but no defined module, then add in the placeholder.
|
|
if (moduleName && falseProp(layer.modulesWithNames, moduleName) && !config.skipModuleInsertion) {
|
|
shim = config.shim && (getOwn(config.shim, moduleName) || (packageName && getOwn(config.shim, packageName)));
|
|
if (shim) {
|
|
shimDeps = lang.isArray(shim) ? shim : shim.deps;
|
|
if (config.wrapShim) {
|
|
|
|
singleContents = '(function(root) {\n' +
|
|
namespaceWithDot + 'define("' + moduleName + '", ' +
|
|
(shimDeps && shimDeps.length ?
|
|
build.makeJsArrayString(shimDeps) + ', ' : '[], ') +
|
|
'function() {\n' +
|
|
' return (function() {\n' +
|
|
singleContents +
|
|
// Start with a \n in case last line is a comment
|
|
// in the singleContents, like a sourceURL comment.
|
|
'\n' + (shim.exportsFn ? shim.exportsFn() : '') +
|
|
'\n' +
|
|
' }).apply(root, arguments);\n' +
|
|
'});\n' +
|
|
'}(this));\n';
|
|
} else {
|
|
singleContents += '\n' + namespaceWithDot + 'define("' + moduleName + '", ' +
|
|
(shimDeps && shimDeps.length ?
|
|
build.makeJsArrayString(shimDeps) + ', ' : '') +
|
|
(shim.exportsFn ? shim.exportsFn() : 'function(){}') +
|
|
');\n';
|
|
}
|
|
} else {
|
|
singleContents += '\n' + namespaceWithDot + 'define("' + moduleName + '", function(){});\n';
|
|
}
|
|
}
|
|
|
|
//Add line break at end of file, instead of at beginning,
|
|
//so source map line numbers stay correct, but still allow
|
|
//for some space separation between files in case ASI issues
|
|
//for concatenation would cause an error otherwise.
|
|
singleContents += '\n';
|
|
|
|
//Add to the source map and to the final contents
|
|
fileContents = appendToFileContents(fileContents, singleContents, path, config, module,
|
|
sourceMapGenerator);
|
|
});
|
|
};
|
|
})).then(function () {
|
|
if (onLayerEnds.length) {
|
|
onLayerEnds.forEach(function (builder, index) {
|
|
var path;
|
|
if (typeof module.out === 'string') {
|
|
path = module.out;
|
|
} else if (typeof module._buildPath === 'string') {
|
|
path = module._buildPath;
|
|
}
|
|
builder.onLayerEnd(function (input) {
|
|
fileContents =
|
|
appendToFileContents(fileContents, "\n" + addSemiColon(input, config),
|
|
'onLayerEnd' + index + '.js', config, module, sourceMapGenerator);
|
|
}, {
|
|
name: module.name,
|
|
path: path
|
|
});
|
|
});
|
|
}
|
|
|
|
if (module.create) {
|
|
//The ID is for a created layer. Write out
|
|
//a module definition for it in case the
|
|
//built file is used with enforceDefine
|
|
//(#432)
|
|
fileContents =
|
|
appendToFileContents(fileContents, '\n' + namespaceWithDot + 'define("' + module.name +
|
|
'", function(){});\n', 'module-create.js', config, module,
|
|
sourceMapGenerator);
|
|
}
|
|
|
|
//Add a require at the end to kick start module execution, if that
|
|
//was desired. Usually this is only specified when using small shim
|
|
//loaders like almond.
|
|
if (module.insertRequire) {
|
|
fileContents =
|
|
appendToFileContents(fileContents, '\n' + namespaceWithDot + 'require(["' + module.insertRequire.join('", "') +
|
|
'"]);\n', 'module-insertRequire.js', config, module,
|
|
sourceMapGenerator);
|
|
}
|
|
});
|
|
}).then(function () {
|
|
if (config.wrap && config.wrap.__endMap) {
|
|
config.wrap.__endMap.forEach(function (wrapFunction) {
|
|
fileContents = wrapFunction(fileContents, config, sourceMapGenerator);
|
|
});
|
|
}
|
|
return {
|
|
text: fileContents,
|
|
buildText: buildFileContents,
|
|
sourceMap: sourceMapGenerator ?
|
|
JSON.stringify(sourceMapGenerator.toJSON(), null, ' ') :
|
|
undefined
|
|
};
|
|
});
|
|
};
|
|
|
|
//Converts a JS array of strings to a string representation.
//Not using JSON.stringify() for Rhino's sake.
|
|
build.makeJsArrayString = function (ary) {
|
|
return '["' + ary.map(function (item) {
|
|
//Escape any double quotes, backslashes
|
|
return lang.jsEscape(item);
|
|
}).join('","') + '"]';
|
|
};
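//For example (illustrative): build.makeJsArrayString(['a', 'b/c']) returns
//the string '["a","b/c"]', with any quotes or backslashes in the IDs escaped.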
|
|
|
|
build.toTransport = function (namespace, moduleName, path, contents, layer, options) {
|
|
var baseUrl = layer && layer.context.config.baseUrl;
|
|
|
|
function onFound(info) {
|
|
//Only mark this module as having a name if not a named module,
|
|
//or if a named module and the name matches expectations.
|
|
if (layer && (info.needsId || info.foundId === moduleName)) {
|
|
layer.modulesWithNames[moduleName] = true;
|
|
}
|
|
}
|
|
|
|
//Convert path to be a local one to the baseUrl, useful for
|
|
//useSourceUrl.
|
|
if (baseUrl) {
|
|
path = path.replace(baseUrl, '');
|
|
}
|
|
|
|
return transform.toTransport(namespace, moduleName, path, contents, onFound, options);
|
|
};
|
|
|
|
return build;
|
|
});
|
|
|
|
}
|
|
|
|
|
|
/**
|
|
* Sets the default baseUrl for requirejs to be the directory of the
* top level script.
|
|
*/
|
|
function setBaseUrl(fileName) {
|
|
//Use the file name's directory as the baseUrl if available.
|
|
dir = fileName.replace(/\\/g, '/');
|
|
if (dir.indexOf('/') !== -1) {
|
|
dir = dir.split('/');
|
|
dir.pop();
|
|
dir = dir.join('/');
|
|
//Make sure dir is JS-escaped, since it will be part of a JS string.
|
|
exec("require({baseUrl: '" + dir.replace(/[\\"']/g, '\\$&') + "'});");
|
|
}
|
|
}
|
|
|
|
function createRjsApi() {
|
|
//Create a method that will run the optimizer given an object
//config.
|
|
requirejs.optimize = function (config, callback, errback) {
|
|
if (!loadedOptimizedLib) {
|
|
loadLib();
|
|
loadedOptimizedLib = true;
|
|
}
|
|
|
|
//Create the function that will be called once build modules
|
|
//have been loaded.
|
|
var runBuild = function (build, logger, quit) {
|
|
//Make sure config has a log level, and if not,
|
|
//make it "silent" by default.
|
|
config.logLevel = config.hasOwnProperty('logLevel') ?
|
|
config.logLevel : logger.SILENT;
|
|
|
|
//Reset build internals first in case this is part
//of a long-running server process that could have
//thrown an exception and been left in a bad state. It is
//only defined after the first call though.
|
|
if (requirejs._buildReset) {
|
|
requirejs._buildReset();
|
|
requirejs._cacheReset();
|
|
}
|
|
|
|
function done(result) {
|
|
//And clean up, in case something else triggers
|
|
//a build in another pathway.
|
|
if (requirejs._buildReset) {
|
|
requirejs._buildReset();
|
|
requirejs._cacheReset();
|
|
}
|
|
|
|
// Ensure errors get propagated to the errback
|
|
if (result instanceof Error) {
|
|
throw result;
|
|
}
|
|
|
|
return result;
|
|
}
|
|
|
|
errback = errback || function (err) {
|
|
// Using console here since logger may have
// turned off error logging. Since quit is
// called, we want to be sure a message is printed.
|
|
console.log(err);
|
|
quit(1);
|
|
};
|
|
|
|
build(config).then(done, done).then(callback, errback);
|
|
};
|
|
|
|
requirejs({
|
|
context: 'build'
|
|
}, ['build', 'logger', 'env!env/quit'], runBuild);
|
|
};
|
|
|
|
requirejs.tools = {
|
|
useLib: function (contextName, callback) {
|
|
if (!callback) {
|
|
callback = contextName;
|
|
contextName = 'uselib';
|
|
}
|
|
|
|
if (!useLibLoaded[contextName]) {
|
|
loadLib();
|
|
useLibLoaded[contextName] = true;
|
|
}
|
|
|
|
var req = requirejs({
|
|
context: contextName
|
|
});
|
|
|
|
req(['build'], function () {
|
|
callback(req);
|
|
});
|
|
}
|
|
};
|
|
|
|
requirejs.define = define;
|
|
}
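//Illustrative use of the API set up above when r.js is required as a
//Node module; the build options shown are hypothetical:
//
//  var requirejs = require('requirejs');
//  requirejs.optimize({ baseUrl: 'js', name: 'main', out: 'built/main.js' },
//      function (buildResponse) { /* text listing the built layers */ },
//      function (err) { /* the build failed */ });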
//If in Node, and included via a require('requirejs'), just export the
//API and return early.
|
|
if (env === 'node' && reqMain !== module) {
|
|
setBaseUrl(path.resolve(reqMain ? reqMain.filename : '.'));
|
|
|
|
createRjsApi();
|
|
|
|
module.exports = requirejs;
|
|
return;
|
|
} else if (env === 'browser') {
|
|
//Only option is to use the API.
|
|
setBaseUrl(location.href);
|
|
createRjsApi();
|
|
return;
|
|
} else if ((env === 'rhino' || env === 'xpconnect') &&
|
|
//User sets up requirejsAsLib variable to indicate it is loaded
|
|
//via load() to be used as a library.
|
|
typeof requirejsAsLib !== 'undefined' && requirejsAsLib) {
|
|
//This script is loaded via rhino's load() method, expose the
|
|
//API and get out.
|
|
setBaseUrl(fileName);
|
|
createRjsApi();
|
|
return;
|
|
}
|
|
|
|
if (commandOption === 'o') {
|
|
//Do the optimizer work.
|
|
loadLib();
|
|
|
|
/*
|
|
* Create a build.js file that has the build options you want and pass that
|
|
* build file to this file to do the build. See example.build.js for more information.
|
|
*/
|
|
|
|
/*jslint strict: false, nomen: false */
|
|
/*global require: false */
|
|
|
|
require({
|
|
baseUrl: require.s.contexts._.config.baseUrl,
|
|
//Use a separate context than the default context so that the
|
|
//build can use the default context.
|
|
context: 'build',
|
|
catchError: {
|
|
define: true
|
|
}
|
|
}, ['env!env/args', 'env!env/quit', 'logger', 'build'],
|
|
function (args, quit, logger, build) {
|
|
build(args).then(function () {}, function (err) {
|
|
logger.error(err);
|
|
quit(1);
|
|
});
|
|
});
|
|
|
|
|
|
} else if (commandOption === 'v') {
|
|
console.log('r.js: ' + version +
|
|
', RequireJS: ' + this.requirejsVars.require.version +
|
|
', UglifyJS: 2.8.29');
|
|
} else if (commandOption === 'convert') {
|
|
loadLib();
|
|
|
|
this.requirejsVars.require(['env!env/args', 'commonJs', 'env!env/print'],
|
|
function (args, commonJs, print) {
|
|
|
|
var srcDir, outDir;
|
|
srcDir = args[0];
|
|
outDir = args[1];
|
|
|
|
if (!srcDir || !outDir) {
|
|
print('Usage: path/to/commonjs/modules output/dir');
|
|
return;
|
|
}
|
|
|
|
commonJs.convertDir(args[0], args[1]);
|
|
});
|
|
} else {
|
|
//Just run an app
|
|
|
|
//Load the bundled libraries for use in the app.
|
|
if (commandOption === 'lib') {
|
|
loadLib();
|
|
}
|
|
|
|
setBaseUrl(fileName);
|
|
|
|
if (exists(fileName)) {
|
|
exec(readFile(fileName), fileName);
|
|
} else {
|
|
showHelp();
|
|
}
|
|
}
|
|
|
|
}((typeof console !== 'undefined' ? console : undefined),
|
|
(typeof Packages !== 'undefined' || (typeof window === 'undefined' &&
|
|
typeof Components !== 'undefined' && Components.interfaces) ?
|
|
Array.prototype.slice.call(arguments, 0) : []),
|
|
(typeof readFile !== 'undefined' ? readFile : undefined)));
|