Import Upstream version 2.3.6

Lu zhiping 2022-06-27 15:39:18 +08:00
commit 42f2e65327
932 changed files with 149809 additions and 0 deletions

5
.github/CONTRIBUTING.md vendored Normal file

@@ -0,0 +1,5 @@
# Contributing to r.js
[See the requirejs.org contributing page](http://requirejs.org/docs/contributing.html).
If you have a question about a specific setup or using particular libraries or file layouts with requirejs/r.js, then it is best to ask that on [Stack Overflow using the requirejs tag](https://stackoverflow.com/questions/tagged/requirejs). New issues here are best used for bug reports that have test cases or examples.

127
.gitignore vendored Normal file

@@ -0,0 +1,127 @@
.DS_Store
.idea/
/r.js
build/jslib/uglifyjs/temp
build/tests/builds
build/tests/lib/amdefine/built.js
build/tests/lib/anonUmdInteriorModules/main-built.js
build/tests/lib/appDirSrcOverwrite/www-built
build/tests/lib/arrow/main-built.js
build/tests/lib/bundlesConfig/built
build/tests/lib/cjsTranslate/www-built
build/tests/lib/closureExterns/built
build/tests/lib/comments/built.js
build/tests/lib/comments/noPartialDupe/built
build/tests/lib/comments/unique/built.js
build/tests/lib/configPackageShim/built
build/tests/lib/cssComment138/main-built.css
build/tests/lib/cssDuplicates/main-built.css
build/tests/lib/cssKeepComments/main-built.css
build/tests/lib/cssKeepWhitespace/main-built.css
build/tests/lib/cssKeepLicense/main-license-built.css
build/tests/lib/cssKeepLicense/main-built.css
build/tests/lib/cssMediaQuery/main-built.css
build/tests/lib/cssPrefix/356/www-built
build/tests/lib/cssRemoveCombined/www-built
build/tests/lib/depsConfig/main-built.js
build/tests/lib/dormant213/main-built.js
build/tests/lib/dotpackage/built
build/tests/lib/dotTrim/built.js
build/tests/lib/dualLayerOverride/built
build/tests/lib/dynamicDefine/main-built.js
build/tests/lib/empty/built
build/tests/lib/hasOwnProperty/built.js
build/tests/lib/iife/main-built.js
build/tests/lib/insertRequire/main-built.js
build/tests/lib/intDefine/main-built.js
build/tests/lib/inlineDefineNoRequire/testmodule-built.js
build/tests/lib/jqueryConfig/main-built.js
build/tests/lib/keepAmdefine/built.js
build/tests/lib/mainConfigFile/basic/main-built.js
build/tests/lib/mainConfigFile/first/main-built.js
build/tests/lib/mainConfigFile/mergeConfig/main-built.js
build/tests/lib/mainConfigBaseUrl/www-built
build/tests/lib/mapConfigMix/a-built.js
build/tests/lib/moduleThenPlugin/built.js
build/tests/lib/modulesExclude/built
build/tests/lib/nameInsertion/built.js
build/tests/lib/nameInsertion/nested/built.js
build/tests/lib/namespace/foo.js
build/tests/lib/namespaceConfig/foo.js
build/tests/lib/namespaceMinified/foo.js
build/tests/lib/nested/main-built.js
build/tests/lib/nested/main-builtWithCE.js
build/tests/lib/nestedHas/main-built.js
build/tests/lib/nestedHas/main-builtNeedAll.js
build/tests/lib/nestedHas/main-builtNeedB.js
build/tests/lib/nestedHas/main-builtNeedC.js
build/tests/lib/nestedHas/main-builtNeedD.js
build/tests/lib/nestedHas/main-builtNested.js
build/tests/lib/noexports/main-built.js
build/tests/lib/nojQDupeDefine/main-built.js
build/tests/lib/nonStrict/main-built.js
build/tests/lib/nonUmdIdentifiers/main-built.js
build/tests/lib/onBuildAllDir/js-built
build/tests/lib/onBuildRead/main-built.js
build/tests/lib/onBuildWrite/main-built.js
build/tests/lib/override/wrap/built
build/tests/lib/packages/main-built.js
build/tests/lib/packagesNode/main-built.js
build/tests/lib/pathsNoCopy/js-built
build/tests/lib/pluginBuilder/main-built.js
build/tests/lib/pluginBuildUndef/out.js
build/tests/lib/pluginDepExec/main-built.js
build/tests/lib/pluginShimDep/main-built.js
build/tests/lib/plugins/main-built.js
build/tests/lib/plugins/main-builtPluginFirst.js
build/tests/lib/plugins/onLayerEnd/built
build/tests/lib/plugins/onLayerEnd/main-built.js
build/tests/lib/plugins/optimizeAllPluginResources/www-built
build/tests/lib/pragmas/override/built
build/tests/lib/pristineSrc/built
build/tests/lib/rawText/built.js
build/tests/lib/rawTextLongId/built.js
build/tests/lib/rawTextNameTarget/a-built.js
build/tests/lib/rawTextNameWholeProject/www-built
build/tests/lib/removeCombined/app-built
build/tests/lib/removeCombined/baseUrl-built
build/tests/lib/removeCombinedPaths/testcase/project/build/build_output
build/tests/lib/requireHoist/perLayer/built
build/tests/lib/rhino-186/built
build/tests/lib/semicolonInsert/a-built.js
build/tests/lib/shimBasicWrap/basic-tests-built.js
build/tests/lib/shimFakeDefine/main-built.js
build/tests/lib/shimWrapShort/main-built.js
build/tests/lib/sourcemap/www-built
build/tests/lib/sourcemap/onejs/www/js/built.js
build/tests/lib/sourcemap/onejs/www/js/built.js.map
build/tests/lib/sourcemap/twojs/www-built
build/tests/lib/sourcemapWrap/built
build/tests/lib/sourcemapComments/main-built.js
build/tests/lib/sourcemapComments/main-built.js.map
build/tests/lib/stubModules/create/foobar-built.js
build/tests/lib/stubModules/main-built.js
build/tests/lib/stubModules/perModule/built
build/tests/lib/transportBeforeMinify/www-built
build/tests/lib/typescriptConfig/main-built.js
build/tests/lib/uglifyMangleProperties/main-built.js
build/tests/lib/umd/main-built.js
build/tests/lib/umd2/built.js
build/tests/lib/umd4/app-built.js
build/tests/lib/umdNested/main-built.js
build/tests/lib/unicode/main-built.js
build/tests/lib/urlToEmpty/main-built.js
build/tests/lib/wrap/outBothArray.js
build/tests/lib/wrap/outOnlyEnd.js
build/tests/lib/wrap/outOnlyEndArray.js
build/tests/lib/wrap/outOnlyStart.js
build/tests/lib/wrap/outOnlyStartArray.js
build/tests/tools/doubleOptimize/built
build/tests/tools/override/node_modules
build/tests/tools/override/one-built.js
build/tests/transform/results
env/xpcshell
tests/node/node_modules
tests/node/embedded/node_modules
tests/rhino/main-built.js
tests/xpcshell/main-built.js

45
LICENSE Normal file

@@ -0,0 +1,45 @@
Copyright jQuery Foundation and other contributors, https://jquery.org/
This software consists of voluntary contributions made by many
individuals. For exact contribution history, see the revision history
available at https://github.com/requirejs/r.js
The following license applies to all parts of this software except as
documented below:
====
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
====
Copyright and related rights for sample code are waived via CC0. Sample
code is defined as all source code displayed within the prose of the
documentation.
CC0: http://creativecommons.org/publicdomain/zero/1.0/
====
Files located in the node_modules directory, and certain utilities used
to build or test the software in the tests, build/tests, env, lib directories,
are externally maintained libraries used by this software which have their own
licenses; we recommend you read them, as their terms may differ from the
terms above.

281
README.md Normal file

@@ -0,0 +1,281 @@
# r.js
A command line tool for running JavaScript scripts that use the
[Asynchronous Module Definition API (AMD)](https://github.com/amdjs/amdjs-api/blob/master/AMD.md)
for declaring and using JavaScript modules and regular JavaScript script files.
It is part of the [RequireJS project](http://requirejs.org), and works with
the RequireJS implementation of AMD.
r.js is a single script that has two major functions:
* Runs AMD-based projects [in Node](http://requirejs.org/docs/node.html), Nashorn, Rhino and xpcshell.
* Includes the [RequireJS Optimizer](http://requirejs.org/docs/optimization.html)
that combines scripts for optimal browser delivery.
# Installation
## Node
npm install -g requirejs
From then on, you can use `r.js` on the command line to run the optimizer.
## Nashorn/Rhino/Browser
Download the latest release from the
[RequireJS download page](http://requirejs.org/docs/download.html#rjs).
## xpcshell
[xpcshell](https://developer.mozilla.org/en-US/docs/XPConnect/xpcshell) support
was added in r.js version 2.1.5, so use that r.js version or later.
Download the latest release of r.js from the
[RequireJS download page](http://requirejs.org/docs/download.html#rjs).
## From this repo
r.js is made up of a series of modules that are built into one file for
distribution. The **dist** directory contains the built version of the
code. In the master branch, it should match the current state of the master
code.
If you are doing local modifications from a clone of this repo, you can run
the following command to generate an r.js at the root of this repo:
node dist.js
To generate an r.js that also gets copied to **dist** with a time stamp, run:
./copydist.js
# Running AMD-based projects
If your JS project's main application file is called main.js, then do
the following:
## Node
r.js main.js
Requires Node 0.4 or later.
r.js allows using Node modules installed via npm. For more info see the
[Use with Node](http://requirejs.org/docs/node.html) docs.
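As a minimal sketch of such a project (the file names and module IDs below are hypothetical, not part of this repo), a `main.js` entry point and one dependency might look like:
```
// main.js: entry point, run with `r.js main.js` (hypothetical example).
// Loads the AMD module in lib/greet.js and uses it.
require(['lib/greet'], function (greet) {
    console.log(greet('world'));
});
```
```
// lib/greet.js: an anonymous AMD module returning a single function.
define(function () {
    return function (name) {
        return 'Hello, ' + name + '!';
    };
});
```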
## Java
### Nashorn
As of r.js 2.1.16, r.js can run in [Nashorn](http://www.oracle.com/technetwork/articles/java/jf14-nashorn-2126515.html), Java 8+'s JavaScript engine, via the `jjs` command line tool that is installed with Java.
The general format of the command is:
```
jjs -scripting path/to/r.js -- [r.js command line arguments here]
```
Examples:
```
# Calling r.js to optimize a project using the build config in build.js
jjs -scripting path/to/r.js -- -o build.js
# Calling r.js to run AMD modules, where the main app program is main.js
jjs -scripting path/to/r.js -- main.js
```
All further examples will use the Node notation, but substitute the **r.js** references below with the command line structure mentioned above (`jjs -scripting path/to/r.js -- `).
### Rhino
Using Rhino requires some JAR files in the CLASSPATH for it to work:
* [rhino.jar](https://github.com/requirejs/r.js/blob/master/lib/rhino/js.jar?raw=true) from the [Rhino project](http://www.mozilla.org/rhino/).
* [compiler.jar](https://github.com/requirejs/r.js/blob/master/lib/closure/compiler.jar?raw=true) if you are using the optimizer and want to use
[Closure Compiler](http://code.google.com/closure/compiler/).
Download those files to your machine. To run r.js, you can use this type of
command:
### OS X/Linux/Unix:
java -classpath path/to/rhino/js.jar:path/to/closure/compiler.jar org.mozilla.javascript.tools.shell.Main -opt -1 r.js main.js
### Windows
java -classpath path/to/rhino/js.jar;path/to/closure/compiler.jar org.mozilla.javascript.tools.shell.Main -opt -1 r.js main.js
If you want to run it in the debugger, replace
org.mozilla.javascript.tools.shell.Main with
**org.mozilla.javascript.tools.debugger.Main**.
All further examples will use the Node notation, but substitute the **r.js** references below with the appropriate java command.
## xpcshell
To run the optimizer using a build config file or command line build options:
path/to/xpcshell path/to/r.js -o buildconfig.js
r.js can also be used as a library in another .js file run via xpcshell.
* [tests/xpcshell/run.js](https://github.com/requirejs/r.js/blob/master/tests/xpcshell/run.js):
shows how to load AMD modules by using r.js as a library.
* [tests/xpcshell/build.js](https://github.com/requirejs/r.js/blob/master/tests/xpcshell/build.js):
shows how to trigger the optimizer from within another .js file.
# Optimizer
The optimizer can be run by passing the **-o** command to r.js:
r.js -o path/to/buildconfig.js
See the [Optimization doc](http://requirejs.org/docs/optimization.html) for more
information on the optimizer.
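As a hedged, minimal sketch of a build config (the paths and module names are placeholders; the full option list is documented in `build/example.build.js` later in this commit):
```
// buildconfig.js: minimal single-file optimization (hypothetical paths).
({
    baseUrl: 'js',           // directory that holds the modules
    name: 'main',            // module whose dependency graph gets bundled
    out: 'js/main-built.js', // where to write the optimized file
    optimize: 'uglify'       // minify with the bundled UglifyJS
})
```
The same options can also be passed directly on the command line, for example
`r.js -o baseUrl=js name=main out=js/main-built.js`.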
If running in **Java**, be sure to grab the Rhino and Closure Compiler jar files in the lib/ directory, then run this command:
### OS X/Linux/Unix:
java -classpath path/to/rhino/js.jar:path/to/closure/compiler.jar org.mozilla.javascript.tools.shell.Main r.js -o path/to/buildconfig.js
### Windows
java -classpath path/to/rhino/js.jar;path/to/closure/compiler.jar org.mozilla.javascript.tools.shell.Main r.js -o path/to/buildconfig.js
## What makes it special
The optimizer is better than using a plain concatenation script because it runs
require.js as part of the optimization, so it knows how to:
* Use [Loader Plugins](http://requirejs.org/docs/plugins.html) to load non-script
dependencies and inline them in built files.
* [Name anonymous modules](http://requirejs.org/docs/api.html#modulename).
If your optimization step does not do this, and you use anonymous modules, you
will get errors running the built code.
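For example, an anonymous module in a hypothetical `foo/bar.js`:
```
// Source file: anonymous define(), no module ID.
define(['foo/baz'], function (baz) {
    return { baz: baz };
});
```
ends up in the built file with its module ID inserted by the optimizer, roughly:
```
// Built output: the optimizer has added the 'foo/bar' ID.
define('foo/bar', ['foo/baz'], function (baz) {
    return { baz: baz };
});
```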
# Other r.js commands
## Get Version
To get the version of r.js and the version of require.js used by r.js:
r.js -v
## Convert CommonJS modules
To convert a directory of CommonJS modules to ones that have define() wrappers:
r.js -convert path/to/commonjs/dir output/dir
Most, but not all, CommonJS modules can be converted to define()-wrapped modules
and still work.
However, there are some modules that may fail if:
* They use code branches like if/else or try/catch to call require(). There are
problems supporting this kind of dynamic module call in an async environment.
* Some kinds of circular dependencies will not work right. The kinds that fail
are normally very brittle and depend on the execution order of the dependent
modules.
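As a sketch of what the conversion produces (the define() wrapper matches the one
used by `build/jslib/commonJs.js` in this commit; the module body is a made-up
example, and the converter does not re-indent the original contents):
```
// Before: a CommonJS module.
var path = require('path');
exports.basename = function (p) {
    return path.basename(p);
};
```
```
// After `r.js -convert`: the same code wrapped in a define() call.
define(function (require, exports, module) {
var path = require('path');
exports.basename = function (p) {
    return path.basename(p);
};
});
```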
# License
MIT
# Code of Conduct
[jQuery Foundation Code of Conduct](https://jquery.org/conduct/).
# Directory layout
## Directory prerequisites
r.js assumes that some other projects are checked out as sibling
directories with specific names in order for the tests to pass.
So it is best to create the following directory structure with the following
git clone commands:
mkdir requirejs
cd requirejs
git clone git://github.com/requirejs/r.js.git
git clone git://github.com/requirejs/requirejs.git
git clone git://github.com/requirejs/text.git
So the `requirejs` and `text` directories should be siblings of the r.js
directory containing your clone of the r.js project.
## Directory details
The r.js project has the following directory layout:
* **dist.js**: the script that builds r.js
* **require.js**: the version of require.js to include in r.js
* **dist**: the directory containing releases of r.js
* **build**: The files that make up the optimizer. dist.js includes a list of
the files from this directory to build into r.js.
* **lib**: The Java libraries for Rhino and Closure Compiler. Only needed if using
Java/Rhino to run r.js
* **tests**: command line tests. Run them under Node and Rhino using the commands listed below (e.g. `node ../r.js all.js`)
dist.js takes the build/jslib/x.js file and injects the require.js and other
files from the build/jslib directory into it.
If you make changes to any of those files, you will need to run **node dist.js**
to generate a new r.js. Be sure to run it through the tests, using both Node
and Java/Rhino:
* node dist.js
* cd tests
* node ../r.js all.js
* java -classpath ../lib/rhino/js.jar:../lib/closure/compiler.jar org.mozilla.javascript.tools.shell.Main ../r.js all.js
* cd ../build/tests
* node ../../r.js all.js
* java -classpath ../../lib/rhino/js.jar:../../lib/closure/compiler.jar org.mozilla.javascript.tools.shell.Main ../../r.js all.js
For running the tests, create an env/xpcshell/ directory that contains
all the files xpcshell needs to run, including the xpcshell binary. Downloading
[a xulrunner nightly](http://ftp.mozilla.org/pub/mozilla.org/xulrunner/nightly/latest-mozilla-central/)
might work.
# Contributing code changes
See the [RequireJS Contributing](http://requirejs.org/docs/contributing.html)
page for info on how to contribute code/bug fixes to this project.
Use GitHub pull requests to point to code changes, although for larger changes,
contact the [requirejs mailing list](http://groups.google.com/group/requirejs)
to discuss them first.
# Included libraries
r.js includes modules from these projects:
* [Esprima](http://esprima.org/)
* [UglifyJS](https://github.com/mishoo/UglifyJS)
# Doing a release
To do a release of version 0.0.0:
* Make sure the right version of require.js is in the project.
* Modify build/jslib/x.js to update the r.js version number in two places.
* node dist.js
* Run the tests (see above). They should pass. :)
* mv r.js dist/r.js
* git commit -am "Release 0.0.0"
* git tag -am "Release 0.0.0" 0.0.0
* git push origin master
* git push --tags
Update the RequireJS download site to point to the latest release, then update
the [requirejs/requirejs-npm](https://github.com/requirejs/requirejs-npm) repo to have the latest
changes and publish the result to npm.
Make sure to keep `#!/usr/bin/env node` as the first line in bin/r.js in
the requirejs-npm repo.

23
build/build.js Normal file

@@ -0,0 +1,23 @@
/*
* Create a build.js file that has the build options you want and pass that
* build file to this file to do the build. See example.build.js for more information.
*/
/*jslint strict: false, nomen: false */
/*global require: false */
require({
baseUrl: require.s.contexts._.config.baseUrl,
//Use a separate context than the default context so that the
//build can use the default context.
context: 'build',
catchError: {
define: true
}
}, ['env!env/args', 'env!env/quit', 'logger', 'build'],
function (args, quit, logger, build) {
build(args).then(function () {}, function (err) {
logger.error(err);
quit(1);
});
});

4
build/buildebug.sh Executable file

@@ -0,0 +1,4 @@
#!/bin/sh
MYDIR=`cd \`dirname "$0"\`; pwd`
$MYDIR/../bin/xdebug $MYDIR/build.js "$@"

663
build/example.build.js Normal file

@@ -0,0 +1,663 @@
/*
* This is an example build file that demonstrates how to use the build system for
* require.js.
*
* THIS BUILD FILE WILL NOT WORK. It is referencing paths that probably
* do not exist on your machine. Just use it as a guide.
*
*
*/
({
//The top level directory that contains your app. If this option is used
//then it is assumed your scripts are in a subdirectory under this path.
//This option is not required. If it is not specified, then baseUrl
//below is the anchor point for finding things. If this option is specified,
//then all the files from the app directory will be copied to the dir:
//output area, and baseUrl will be assumed to be a relative path under
//this directory.
appDir: "some/path/",
//By default, all modules are located relative to this path. If baseUrl
//is not explicitly set, then all modules are loaded relative to
//the directory that holds the build file. If appDir is set, then
//baseUrl should be specified as relative to the appDir.
baseUrl: "./",
//By default all the configuration for optimization happens from the command
//line or by properties in the config file, and configuration that was
//passed to requirejs as part of the app's runtime "main" JS file is *not*
//considered. However, if you prefer the "main" JS file configuration
//to be read for the build so that you do not have to duplicate the values
//in a separate configuration, set this property to the location of that
//main JS file. The first requirejs({}), require({}), requirejs.config({}),
//or require.config({}) call found in that file will be used.
//As of 2.1.10, mainConfigFile can be an array of values, with the last
//value's config taking precedence over previous values in the array.
mainConfigFile: '../some/path/to/main.js',
//Set paths for modules. If relative paths, set relative to baseUrl above.
//If a special value of "empty:" is used for the path value, then that
//acts like mapping the path to an empty file. It allows the optimizer to
//resolve the dependency to a path, but then not include it in the output.
//Useful for mapping module names that point to resources on a CDN or other
//http: URL when running in the browser, but that should be skipped during an
//optimization because the file has no dependencies.
//e.g. if you wish to include `jquery` and `angularjs` from public CDNs,
//paths: { "jquery": "empty:", "angular": "empty:" }
paths: {
"foo.bar": "../scripts/foo/bar",
"baz": "../another/path/baz"
},
//Sets up a map of module IDs to other module IDs. For more details, see
//the http://requirejs.org/docs/api.html#config-map docs.
map: {},
//Configure CommonJS packages. See http://requirejs.org/docs/api.html#packages
//for more information.
packages: [],
//The directory path to save the output. If not specified, then
//the path will default to be a directory called "build" as a sibling
//to the build file. All relative paths are relative to the build file.
dir: "../some/path",
//As of RequireJS 2.0.2, the dir above will be deleted before the
//build starts again. If you have a big build and are not doing
//source transforms with onBuildRead/onBuildWrite, then you can
//set keepBuildDir to true to keep the previous dir. This allows for
//faster rebuilds, but it could lead to unexpected errors if the
//built code is transformed in some way.
keepBuildDir: false,
//If shim config is used in the app during runtime, duplicate the config
//here. Necessary if shim config is used, so that the shim's dependencies
//are included in the build. Using "mainConfigFile" is a better way to
//pass this information though, so that it is only listed in one place.
//However, if mainConfigFile is not an option, the shim config can be
//inlined in the build config.
shim: {},
//As of 2.1.11, shimmed dependencies can be wrapped in a define() wrapper
//to help when intermediate dependencies are AMD modules that have dependencies of their
//own. The canonical example is a project using Backbone, which depends on
//jQuery and Underscore. Shimmed dependencies that want Backbone available
//immediately will not see it in a build, since AMD compatible versions of
//Backbone will not execute the define() function until dependencies are
//ready. By wrapping those shimmed dependencies, this can be avoided, but
//it could introduce other errors if those shimmed dependencies use the
//global scope in weird ways, so it is not the default behavior to wrap.
//To use shim wrapping skipModuleInsertion needs to be false.
//More notes in http://requirejs.org/docs/api.html#config-shim
wrapShim: false,
//Used to inline i18n resources into the built file. If no locale
//is specified, i18n resources will not be inlined. Only one locale
//can be inlined for a build. Root bundles referenced by a build layer
//will be included in a build layer regardless of locale being set.
locale: "en-us",
//How to optimize (minify) all the JS files in the build output directory.
//Right now only the following values
//are supported:
//- "uglify": (default) uses UglifyJS to minify the code. Before version
//2.2, the uglify version was a 1.3.x release. With r.js 2.2, it is now
//a 2.x uglify release. Only supports ES5 syntax. For ES 2015 or later, use
//the "none" option instead.
//- "uglify2": in version 2.1.2+. Uses UglifyJS2. As of r.js 2.2, this
//is just an alias for "uglify" now that 2.2 just uses uglify 2.x.
//- "closure": uses Google's Closure Compiler in simple optimization
//mode to minify the code. Only available if running the optimizer using
//Java.
//- "closure.keepLines": Same as closure option, but keeps line returns
//in the minified files.
//- "none": no minification will be done. Use this setting if you are using
//ES 2015 or later syntax in your files, since the bundled UglifyJS only
//understands ES5 and earlier syntax. For ES2015 code, run a compliant
// minifier as a separate step after running r.js.
optimize: "uglify",
//Introduced in 2.1.2: If using "dir" for an output directory, normally the
//optimize setting is used to optimize the build bundles (the "modules"
//section of the config) and any other JS file in the directory. However, if
//the non-build bundle JS files will not be loaded after a build, you can
//skip the optimization of those files, to speed up builds. Set this value
//to true if you want to skip optimizing those other non-build bundle JS
//files.
skipDirOptimize: false,
//Introduced in 2.1.2 and considered experimental.
//If the minifier specified in the "optimize" option supports generating
//source maps for the minified code, then generate them. The source maps
//generated only translate minified JS to non-minified JS; they do not do
//anything magical for translating minified JS to transpiled source code.
//Currently only optimize: "uglify2" is supported when running in node or
//rhino, and if running in rhino, "closure" with a closure compiler jar
//build after r1592 (20111114 release).
//The source files will show up in a browser developer tool that supports
//source maps as ".js.src" files.
generateSourceMaps: false,
//Introduced in 2.1.1: If a full directory optimization ("dir" is used), and
//optimize is not "none", and skipDirOptimize is false, then normally all JS
//files in the directory will be minified, and this value is automatically
//set to "all". For JS files to properly work after a minification, the
//optimizer will parse for define() calls and insert any dependency arrays
//that are missing. However, this can be a bit slow if there are many/larger
//JS files. So this transport normalization is not done (automatically set
//to "skip") if optimize is set to "none". Cases where you may want to
//manually set this value:
//1) Optimizing later: if you plan on minifying the non-build bundle JS files
//after the optimizer runs (so not as part of running the optimizer), then
//you should explicitly set this value to "all".
//2) Optimizing, but not dynamically loading later: you want to do a full
//project optimization, but do not plan on dynamically loading non-build
//bundle JS files later. In this case, the normalization just slows down
//builds, so you can explicitly set this value to "skip".
//Finally, all build bundles (specified in the "modules" or "out" setting)
//automatically get normalization, so this setting does not apply to those
//files.
normalizeDirDefines: "skip",
//If using UglifyJS for script optimization, these config options can be
//used to pass configuration values to UglifyJS.
//In r.js 2.2, this is now just uglify2, so see the 'uglify2' section below
//for example options. For r.js pre-2.2, this was for setting uglify 1.3.x
//options.
uglify: {
},
//If using UglifyJS2 for script optimization, these config options can be
//used to pass configuration values to UglifyJS2. As of r.js 2.2, UglifyJS2
//is the only uglify option, so the config key can just be 'uglify' for
//r.js 2.2+.
//For possible `output` values see:
//https://github.com/mishoo/UglifyJS2#beautifier-options
//For possible `compress` values see:
//https://github.com/mishoo/UglifyJS2#compressor-options
uglify2: {
//Example of a specialized config. If you are fine
//with the default options, no need to specify
//any of these properties.
output: {
beautify: true
},
compress: {
sequences: false,
global_defs: {
DEBUG: false
}
},
warnings: true,
mangle: false
},
//If using Closure Compiler for script optimization, these config options
//can be used to configure Closure Compiler. See the documentation for
//Closure compiler for more information.
closure: {
CompilerOptions: {},
CompilationLevel: 'SIMPLE_OPTIMIZATIONS',
loggingLevel: 'WARNING',
externExportsPath: './extern.js'
},
//Allow CSS optimizations. Allowed values:
//- "standard": @import inlining and removal of comments, unnecessary
//whitespace and line returns.
//Removing line returns may have problems in IE, depending on the type
//of CSS.
//- "standard.keepLines": like "standard" but keeps line returns.
//- "none": skip CSS optimizations.
//- "standard.keepComments": keeps the file comments, but removes line
//returns. (r.js 1.0.8+)
//- "standard.keepComments.keepLines": keeps the file comments and line
//returns. (r.js 1.0.8+)
//- "standard.keepWhitespace": like "standard" but keeps unnecessary whitespace.
optimizeCss: "standard.keepLines.keepWhitespace",
//If optimizeCss is in use, a list of files to ignore for the @import
//inlining. The value of this option should be a string of comma separated
//CSS file names to ignore (like 'a.css,b.css'). The file names should match
//whatever strings are used in the @import calls.
cssImportIgnore: null,
//cssIn is typically used as a command line option. It can be used
//along with out to optimize a single CSS file.
cssIn: "path/to/main.css",
out: "path/to/css-optimized.css",
//If "out" is not in the same directory as "cssIn", and there is a relative
//url() in the cssIn file, use this to set a prefix URL to use. Only set it
//if you find a problem with incorrect relative URLs after optimization.
cssPrefix: "",
//Inlines the text for any text! dependencies, to avoid the separate
//async XMLHttpRequest calls to load those dependencies.
inlineText: true,
//Allow "use strict"; be included in the RequireJS files.
//Default is false because there are not many browsers that can properly
//process and give errors on code for ES5 strict mode,
//and there is a lot of legacy code that will not work in strict mode.
useStrict: false,
//Specify build pragmas. If the source files contain comments like so:
//>>excludeStart("fooExclude", pragmas.fooExclude);
//>>excludeEnd("fooExclude");
//Then the comments that start with //>> are the build pragmas.
//excludeStart/excludeEnd and includeStart/includeEnd work, and the
//pragmas value given to the includeStart or excludeStart lines
//is evaluated to see if the code between the Start and End pragma
//lines should be included or excluded. If you have a choice to use
//"has" code or pragmas, use "has" code instead. Pragmas are harder
//to read, but they can be a bit more flexible on code removal vs.
//has-based code, which must follow JavaScript language rules.
//Pragmas also remove code in non-minified source, where has branch
//trimming is only done if the code is minified via UglifyJS or
//Closure Compiler.
pragmas: {
fooExclude: true
},
//Same as "pragmas", but only applied once during the file save phase
//of an optimization. "pragmas" are applied both during the dependency
//mapping and file saving phases on an optimization. Some pragmas
//should not be processed during the dependency mapping phase of an
//operation, such as the pragma in the CoffeeScript loader plugin,
//which wants the CoffeeScript compiler during the dependency mapping
//phase, but once files are saved as plain JavaScript, the CoffeeScript
//compiler is no longer needed. In that case, pragmasOnSave would be used
//to exclude the compiler code during the save phase.
pragmasOnSave: {
//Just an example
excludeCoffeeScript: true
},
//Allows trimming of code branches that use has.js-based feature detection:
//https://github.com/phiggins42/has.js
//The code branch trimming only happens if minification with UglifyJS or
//Closure Compiler is done. For more information, see:
//http://requirejs.org/docs/optimization.html#hasjs
has: {
'function-bind': true,
'string-trim': false
},
//Similar to pragmasOnSave, but for has tests -- only applied during the
//file save phase of optimization, whereas "has" is applied to both
//dependency mapping and file save phases.
hasOnSave: {
'function-bind': true,
'string-trim': false
},
//Allows namespacing requirejs, require and define calls to a new name.
//This allows stronger assurances of getting a module space that will
//not interfere with others using a define/require AMD-based module
//system. The example below will rename define() calls to foo.define().
//See http://requirejs.org/docs/faq-advanced.html#rename for a more
//complete example.
namespace: 'foo',
//Skip processing for pragmas.
skipPragmas: false,
//If skipModuleInsertion is false, then files that do not use define()
//to define modules will get a define() placeholder inserted for them.
//Also, require.pause/resume calls will be inserted.
//Set it to true to avoid this. This is useful if you are building code that
//does not use require() in the built project or in the JS files, but you
//still want to use the optimization tool from RequireJS to concatenate modules
//together.
skipModuleInsertion: false,
//Specify modules to stub out in the optimized file. The optimizer will
//use the source version of these modules for dependency tracing and for
//plugin use, but when writing the text into an optimized bundle, these
//modules will get the following text instead:
//If the module is used as a plugin:
// define({load: function(id){throw new Error("Dynamic load not allowed: " + id);}});
//If just a plain module:
// define({});
//This is useful particularly for plugins that inline all their resources
//and use the default module resolution behavior (do *not* implement the
//normalize() method). In those cases, an AMD loader just needs to know
//that the module has a definition. These small stubs can be used instead of
//including the full source for a plugin.
stubModules: ['text', 'bar'],
//If it is not a one file optimization, scan through all .js files in the
//output directory for any plugin resource dependencies, and if the plugin
//supports optimizing them as separate files, optimize them. Can be a
//slower optimization. Only use if there are some plugins that use things
//like XMLHttpRequest that do not work across domains, but the built code
//will be placed on another domain.
optimizeAllPluginResources: false,
//Finds require() dependencies inside a require() or define call. By default
//this value is false, because those resources should be considered dynamic/runtime
//calls. However, for some optimization scenarios, it is desirable to
//include them in the build.
//Introduced in 1.0.3. Previous versions incorrectly found the nested calls
//by default.
findNestedDependencies: false,
//If set to true, any files that were combined into a build bundle will be
//removed from the output folder.
removeCombined: false,
//List the modules that will be optimized. All their immediate and deep
//dependencies will be included in the module's file when the build is
//done. If that module or any of its dependencies includes i18n bundles,
//only the root bundles will be included unless the locale: section is set above.
modules: [
//Just specifying a module name means that module will be converted into
//a built file that contains all of its dependencies. If that module or any
//of its dependencies includes i18n bundles, they may not be included in the
//built file unless the locale: section is set above.
{
name: "foo/bar/bop",
//create: true can be used to create the module layer at the given
//name, if it does not already exist in the source location. If
//there is a module at the source location with this name, then
//create: true is superfluous.
create: true,
//For build profiles that contain more than one modules entry,
//allow overrides for the properties that are set for the whole build,
//for example a different set of pragmas for this module.
//The override's value is an object that can
//contain any of the other build options in this file.
override: {
pragmas: {
fooExclude: true
}
}
},
//This module entry combines all the dependencies of foo/bar/bop and foo/bar/bee
//and any of their dependencies into one file.
{
name: "foo/bar/bop",
include: ["foo/bar/bee"]
},
//This module entry combines all the dependencies of foo/bar/bip into one file,
//but excludes foo/bar/bop and its dependencies from the built file. If you want
//to exclude a module that is also another module being optimized, it is more
//efficient if you define that module optimization entry before using it
//in an exclude array.
{
name: "foo/bar/bip",
exclude: [
"foo/bar/bop"
]
},
//This module entry shows how to specify a specific module be excluded
//from the built module file. excludeShallow means just exclude that
//specific module, but if that module has nested dependencies that are
//part of the built file, keep them in there. This is useful during
//development when you want to have a fast bundled set of modules, but
//just develop/debug one or two modules at a time.
{
name: "foo/bar/bin",
excludeShallow: [
"foo/bar/bot"
]
},
//This module entry shows the use of insertRequire (first available in 2.0):
{
name: "foo/baz",
insertRequire: ["foo/baz"]
}
],
//If the target module only calls define and does not call require() at the
//top level, and this build output is used with an AMD shim loader like
//almond, where the data-main script in the HTML page is replaced with just
//a script tag for the built file, then if there is no top-level require, no
//modules will execute. Specify insertRequire to have a require([]) call placed at
//the end of the file to trigger the execution of modules. More detail at
//https://github.com/requirejs/almond
//Note that insertRequire does not affect or add to the modules that are
//built into the build bundle. It just adds a require([]) call to the end
//of the built file for use during the runtime execution of the built code.
insertRequire: ['foo/bar/bop'],
//If you only intend to optimize a module (and its dependencies), with
//a single file as the output, you can specify the module options inline,
//instead of using the 'modules' section above. 'exclude',
//'excludeShallow', 'include' and 'insertRequire' are all allowed as siblings
//to name. The name of the optimized file is specified by 'out'.
name: "foo/bar/bop",
include: ["foo/bar/bee"],
insertRequire: ['foo/bar/bop'],
out: "path/to/optimized-file.js",
//An alternative to "include". Normally only used in a requirejs.config()
//call for a module used for mainConfigFile, since requirejs will read
//"deps" during runtime to do the equivalent of require(deps) to kick
//off some module loading.
deps: ["foo/bar/bee"],
//In RequireJS 2.0, "out" can be a function. For single JS file
//optimizations that are generated by calling requirejs.optimize(),
//using an out function means the optimized contents are not written to
//a file on disk, but instead passed to the out function:
out: function (text, sourceMapText) {
//Do what you want with the optimized text here.
//Starting in 2.1.10, if generateSourceMaps was set to true
//and optimize: 'uglify2' was used ('uglify' in r.js 2.2+), then the
//second argument to this function, sourceMapText, will be the text of
//the source map.
},
//In 2.0.12+: by setting "out" to "stdout", the optimized output is written
//to STDOUT. This can be useful for integrating r.js with other command-line
//tools. To avoid additional output, "logLevel: 4" should also be used.
out: "stdout",
//Wrap any build bundle in a start and end text specified by wrap.
//Use this to encapsulate the module code so that define/require are
//not globals. The end text can expose some globals from your file,
//making it easy to create stand-alone libraries that do not mandate
//the end user use requirejs.
wrap: {
start: "(function() {",
end: "}());"
},
//Another way to use wrap, but uses default wrapping of:
//(function() { + content + }());
wrap: true,
//Another way to use wrap, but uses file paths. This makes it easier
//to have the start text contain license information and the end text
//to contain the global variable exports, like
//window.myGlobal = requirejs('myModule');
//File paths are relative to the build file, or if running a command
//line build, the current directory.
wrap: {
startFile: "parts/start.frag",
endFile: "parts/end.frag"
},
//As of r.js 2.1.0, startFile and endFile can be arrays of files, and
//they will all be loaded and inserted at the start or end, respectively,
//of the build bundle.
wrap: {
startFile: ["parts/startOne.frag", "parts/startTwo.frag"],
endFile: ["parts/endOne.frag", "parts/endTwo.frag"]
},
//When the optimizer copies files from the source location to the
//destination directory, it will skip directories and files that start
//with a ".". If you want to copy .directories or certain .files, for
//instance if you keep some packages in a .packages directory, or copy
//over .htaccess files, you can set this to null. If you want to change
//the exclusion rules, change it to a different regexp. If the regexp
//matches, it means the directory will be excluded. This used to be
//called dirExclusionRegExp before the 1.0.2 release.
//As of 1.0.3, this value can also be a string that is converted to a
//RegExp via new RegExp().
fileExclusionRegExp: /^\./,
//By default, comments that have a license in them are preserved in the
//output when a minifier is used in the "optimize" option.
//However, for larger built files there could be a lot of license
//comments that may be better served by having a smaller comment
//at the top of the file that points to the list of all the licenses.
//This option will turn off the auto-preservation, but you will need to
//work out how best to surface the license information.
//NOTE: As of 2.1.7, if using xpcshell to run the optimizer, it cannot
//parse out comments since its native Reflect parser is used, and does
//not have the same comments option support as esprima.
preserveLicenseComments: true,
//Sets the logging level. It is a number. If you want "silent" running,
//set logLevel to 4. From the logger.js file:
//TRACE: 0,
//INFO: 1,
//WARN: 2,
//ERROR: 3,
//SILENT: 4
//Default is 0.
logLevel: 0,
//Introduced in 2.1.3: Some situations do not throw and stop the optimizer
//when an error occurs. However, you may want to have the optimizer stop
//on certain kinds of errors and you can configure those situations via
//this option.
throwWhen: {
//If there is an error calling the minifier for some JavaScript,
//instead of just skipping that file throw an error.
optimize: true
},
//A function that if defined will be called for every file read in the
//build that is done to trace JS dependencies. This allows transforms of
//the content.
onBuildRead: function (moduleName, path, contents) {
//Always return a value.
//This is just a contrived example.
return contents.replace(/foo/g, 'bar');
},
//A function that will be called for every write to an optimized bundle
//of modules. This allows transforms of the content before serialization.
onBuildWrite: function (moduleName, path, contents) {
//Always return a value.
//This is just a contrived example.
return contents.replace(/bar/g, 'foo');
},
//A function that is called for each JS module bundle that has been
//completed. This function is called after all module bundles have
//completed, but it is called for each bundle. A module bundle is a
//"modules" entry or if just a single file JS optimization, the
//optimized JS file.
//Introduced in r.js version 2.1.6
onModuleBundleComplete: function (data) {
/*
data.name: the bundle name.
data.path: the bundle path relative to the output directory.
data.included: an array of items included in the build bundle.
If a file path, it is relative to the output directory. Loader
plugin IDs are also included in this array, but depending
on the plugin, may or may not have something inlined in the
module bundle.
*/
},
//Introduced in 2.1.3: Seed raw text contents for the listed module IDs.
//These text contents will be used instead of doing a file IO call for
//those modules. Useful if some module ID contents are dynamically
//based on user input, which is common in web build tools.
rawText: {
'some/id': 'define(["another/id"], function () {});'
},
//Introduced in 2.0.2: if set to true, then the optimizer will add a
//define(require, exports, module) {}); wrapper around any file that seems
//to use commonjs/node module syntax (require, exports) without already
//calling define(). This is useful to reuse modules that came from
//or are loadable in an AMD loader that can load commonjs style modules
//in development as well as AMD modules, but need to have a built form
//that is only AMD. Note that this does *not* enable different module
//ID-to-file path logic, all the modules still have to be found using the
//requirejs-style configuration, it does not use node's node_modules nested
//path lookups.
cjsTranslate: true,
//Introduced in 2.0.2: a bit experimental.
//Each script in the build bundle will be turned into
//a JavaScript string with a //# sourceURL comment, and then wrapped in an
//eval call. This allows some browsers to see each evaled script as a
//separate script in the script debugger even though they are all combined
//in the same file. Some important limitations:
//1) Do not use in IE if conditional comments are turned on, it will cause
//errors:
//http://en.wikipedia.org/wiki/Conditional_comment#Conditional_comments_in_JScript
//2) It is only useful in optimize: 'none' scenarios. The goal is to allow
//easier built bundle debugging, which goes against minification desires.
useSourceUrl: true,
//Defines the loading time for modules. Depending on the complexity of the
//dependencies and the size of the involved libraries, increasing the wait
//interval may be required. Default is 7 seconds. Setting the value to 0
//disables the waiting interval.
waitSeconds: 7,
//Introduced in 2.1.9: normally r.js inserts a semicolon at the end of a
//file if there is not already one present, to avoid issues with
//concatenated files and automatic semicolon insertion (ASI) rules for
//JavaScript. It is a very blunt fix that is safe to do, but if you want to
//lint the build output, depending on the linter rules, it may not like it.
//Setting this option to true skips this insertion. However, by doing this,
//you take responsibility for making sure your concatenated code works with
//JavaScript's ASI rules, and that you use a minifier that understands when
//to insert semicolons to avoid ASI pitfalls.
skipSemiColonInsertion: false,
//Introduced in 2.1.10: if set to true, will not strip amdefine use:
//https://github.com/requirejs/amdefine
//Normally you should not need to set this. It is only a concern if using
//a built .js file from some other source, that may have included amdefine
//in the built input. If you get a build error like
//"undefined is not a function" and the file that generated the error
//references amdefine, then set this to true.
keepAmdefine: false,
//Introduced in 2.1.11. As part of fixing a bug to prevent possible
//overwrites of source code, https://github.com/jrburke/r.js/issues/444,
//it prevented some cases where generated source is used for a build, and
//it was OK to overwrite content in this source area as it was generated
//from another source area, and not allowing source overwrites meant taking
//another file copy hit. By setting this to true, it allows this sort of
//source code overwriting. However, use at your own risk, and be sure you
//have your configuration set correctly. For example, you may want to
//set "keepBuildDir" to true.
allowSourceOverwrites: false,
//Introduced in 2.2.0. Path to file to write out bundles config
//(http://requirejs.org/docs/api.html#config-bundles) found in the module
//layers built by the optimizer. The path is relative to the "dir" config's
//path. Only applies to full project optimization:
//http://requirejs.org/docs/optimization.html#wholeproject
//Only use if the optimized layers are grouped more intricately than just
//a simple optimization of main app entry points. The file path specified
//should be to one that has the top level requirejs.config() call that sets
//up the loader. If using "mainConfigFile", then this path likely should be
//the path to that file where it is placed in the "dir" output directory.
bundlesConfigOutFile: 'some/path/to/main.js',
//Introduced in 2.2.0. Default is true for compatibility with older
//releases. If set to false, r.js will not write a build.txt file in the
//"dir" directory when doing a full project optimization.
writeBuildTxt: true
})

4
build/jslib/blank.js Normal file

@@ -0,0 +1,4 @@
//Just a blank file to use when building the optimizer with the optimizer,
//so that the build does not attempt to inline some env modules,
//like Node's fs and path.

27
build/jslib/browser.js Normal file

@@ -0,0 +1,27 @@
//sloppy since eval enclosed with use strict causes problems if the source
//text is not strict-compliant.
/*jslint sloppy: true, evil: true */
/*global require, XMLHttpRequest */
(function () {
// Separate function to avoid eval pollution, same with arguments use.
function exec() {
eval(arguments[0]);
}
require.load = function (context, moduleName, url) {
var xhr = new XMLHttpRequest();
xhr.open('GET', url, true);
xhr.send();
xhr.onreadystatechange = function () {
if (xhr.readyState === 4) {
exec(xhr.responseText);
//Support anonymous modules.
context.completeLoad(moduleName);
}
};
};
}());


@@ -0,0 +1,7 @@
/*jslint strict: false */
/*global define: false, process: false */
define(function () {
//Always expect config via an API call
return [];
});


@@ -0,0 +1,7 @@
/*jslint strict: false */
/*global define: false, load: false */
//Just a stub for use with uglify's consolidator.js
define(function () {
return {};
});

168
build/jslib/browser/file.js Normal file

@@ -0,0 +1,168 @@
/*jslint sloppy: true, nomen: true */
/*global require, define, console, XMLHttpRequest, requirejs, location */
define(['prim'], function (prim) {
var file,
currDirRegExp = /^\.(\/|$)/;
function frontSlash(path) {
return path.replace(/\\/g, '/');
}
function exists(path) {
var status, xhr = new XMLHttpRequest();
//Oh yeah, that is right SYNC IO. Behold its glory
//and horrible blocking behavior.
xhr.open('HEAD', path, false);
xhr.send();
status = xhr.status;
return status === 200 || status === 304;
}
function mkDir(dir) {
console.log('mkDir is no-op in browser');
}
function mkFullDir(dir) {
console.log('mkFullDir is no-op in browser');
}
file = {
backSlashRegExp: /\\/g,
exclusionRegExp: /^\./,
getLineSeparator: function () {
return '/';
},
exists: function (fileName) {
return exists(fileName);
},
parent: function (fileName) {
var parts = fileName.split('/');
parts.pop();
return parts.join('/');
},
/**
* Gets the absolute file path as a string, normalized
* to using front slashes for path separators.
* @param {String} fileName
*/
absPath: function (fileName) {
var dir;
if (currDirRegExp.test(fileName)) {
dir = frontSlash(location.href);
if (dir.indexOf('/') !== -1) {
dir = dir.split('/');
//Pull off protocol and host, just want
//to allow paths (other build parts, like
//require._isSupportedBuildUrl do not support
//full URLs), but a full path from
//the root.
dir.splice(0, 3);
dir.pop();
dir = '/' + dir.join('/');
}
fileName = dir + fileName.substring(1);
}
return fileName;
},
normalize: function (fileName) {
return fileName;
},
isFile: function (path) {
return true;
},
isDirectory: function (path) {
return false;
},
getFilteredFileList: function (startDir, regExpFilters, makeUnixPaths) {
console.log('file.getFilteredFileList is no-op in browser');
},
copyDir: function (srcDir, destDir, regExpFilter, onlyCopyNew) {
console.log('file.copyDir is no-op in browser');
},
copyFile: function (srcFileName, destFileName, onlyCopyNew) {
console.log('file.copyFile is no-op in browser');
},
/**
* Renames a file. May fail if "to" already exists or is on another drive.
*/
renameFile: function (from, to) {
console.log('file.renameFile is no-op in browser');
},
/**
* Reads a *text* file.
*/
readFile: function (path, encoding) {
var xhr = new XMLHttpRequest();
//Oh yeah, that is right SYNC IO. Behold its glory
//and horrible blocking behavior.
xhr.open('GET', path, false);
xhr.send();
return xhr.responseText;
},
readFileAsync: function (path, encoding) {
var xhr = new XMLHttpRequest(),
d = prim();
xhr.open('GET', path, true);
xhr.send();
xhr.onreadystatechange = function () {
if (xhr.readyState === 4) {
if (xhr.status > 400) {
d.reject(new Error('Status: ' + xhr.status + ': ' + xhr.statusText));
} else {
d.resolve(xhr.responseText);
}
}
};
return d.promise;
},
saveUtf8File: function (fileName, fileContents) {
//summary: saves a *text* file using UTF-8 encoding.
file.saveFile(fileName, fileContents, "utf8");
},
saveFile: function (fileName, fileContents, encoding) {
requirejs.browser.saveFile(fileName, fileContents, encoding);
},
deleteFile: function (fileName) {
console.log('file.deleteFile is no-op in browser');
},
/**
* Deletes any empty directories under the given directory.
*/
deleteEmptyDirs: function (startDir) {
console.log('file.deleteEmptyDirs is no-op in browser');
}
};
return file;
});


@@ -0,0 +1,10 @@
/*jslint strict: false */
/*global define: false, console: false */
define(['./file'], function (file) {
function load(fileName) {
eval(file.readFile(fileName));
}
return load;
});


@@ -0,0 +1,4 @@
/*jslint strict: false */
/*global define: false */
define({});


@@ -0,0 +1,10 @@
/*jslint strict: false */
/*global define: false, console: false */
define(function () {
function print(msg) {
console.log(msg);
}
return print;
});


@@ -0,0 +1,6 @@
/*global process */
define(function () {
'use strict';
return function (code) {
};
});

2186
build/jslib/build.js Normal file

File diff suppressed because it is too large


@@ -0,0 +1,17 @@
/*jslint strict: false */
/*global Packages: false */
var commandLine = {};
(function () {
var runtime = Packages.java.lang.Runtime.getRuntime();
/**
* Executes a command on the command line. May not work right in
* Windows environments, except maybe via something like cygwin.
* @param {String} command the command to run on the command line.
*/
commandLine.exec = function (command) {
var process = runtime.exec(["/bin/sh", "-c", command]);
process.waitFor();
};
}());

98
build/jslib/commonJs.js Normal file

@@ -0,0 +1,98 @@
/*jslint */
/*global define: false, console: false */
define(['env!env/file', 'parse'], function (file, parse) {
'use strict';
var commonJs = {
//Set to false if you do not want this file to log. Useful in environments
//like node where you want the work to happen without noise.
useLog: true,
convertDir: function (commonJsPath, savePath) {
var fileList, i,
jsFileRegExp = /\.js$/,
fileName, convertedFileName, fileContents;
//Get list of files to convert.
fileList = file.getFilteredFileList(commonJsPath, /\w/, true);
//Normalize on front slashes and make sure the paths do not end in a slash.
commonJsPath = commonJsPath.replace(/\\/g, "/");
savePath = savePath.replace(/\\/g, "/");
if (commonJsPath.charAt(commonJsPath.length - 1) === "/") {
commonJsPath = commonJsPath.substring(0, commonJsPath.length - 1);
}
if (savePath.charAt(savePath.length - 1) === "/") {
savePath = savePath.substring(0, savePath.length - 1);
}
//Cycle through all the JS files and convert them.
if (!fileList || !fileList.length) {
if (commonJs.useLog) {
if (commonJsPath === "convert") {
//A request just to convert one file.
console.log('\n\n' + commonJs.convert(savePath, file.readFile(savePath)));
} else {
console.log("No files to convert in directory: " + commonJsPath);
}
}
} else {
for (i = 0; i < fileList.length; i++) {
fileName = fileList[i];
convertedFileName = fileName.replace(commonJsPath, savePath);
//Handle JS files.
if (jsFileRegExp.test(fileName)) {
fileContents = file.readFile(fileName);
fileContents = commonJs.convert(fileName, fileContents);
file.saveUtf8File(convertedFileName, fileContents);
} else {
//Just copy the file over.
file.copyFile(fileName, convertedFileName, true);
}
}
}
},
/**
* Does the actual file conversion.
*
* @param {String} fileName the name of the file.
*
* @param {String} fileContents the contents of a file :)
*
* @returns {String} the converted contents
*/
convert: function (fileName, fileContents) {
//Strip out comments.
try {
var preamble = '',
commonJsProps = parse.usesCommonJs(fileName, fileContents);
//First see if the module is not already RequireJS-formatted.
if (parse.usesAmdOrRequireJs(fileName, fileContents) || !commonJsProps) {
return fileContents;
}
if (commonJsProps.dirname || commonJsProps.filename) {
preamble = 'var __filename = module.uri || "", ' +
'__dirname = __filename.substring(0, __filename.lastIndexOf("/") + 1); ';
}
//Construct the wrapper boilerplate.
fileContents = 'define(function (require, exports, module) {' +
preamble +
fileContents +
'\n});\n';
} catch (e) {
console.log("commonJs.convert: COULD NOT CONVERT: " + fileName + ", so skipping it. Error was: " + e);
return fileContents;
}
return fileContents;
}
};
return commonJs;
});
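A minimal sketch of calling commonJs.convert() directly, assuming the 'commonJs' module id is available as it is elsewhere in this directory; the input source is invented for illustration.

define(['commonJs'], function (commonJs) {
    //A plain CommonJS module (hypothetical contents).
    var source = "var fs = require('fs');\nmodule.exports = function () { return fs; };";
    //convert() wraps it in define(function (require, exports, module) {...});
    //files that already use AMD, or use no CommonJS globals, come back unchanged.
    return commonJs.convert('example.js', source);
});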

48
build/jslib/env.js Normal file

@ -0,0 +1,48 @@
/*jslint strict: false */
/*global Packages: false, process: false, window: false, navigator: false,
document: false, define: false */
/**
* A plugin that modifies any /env/ path to be the right path based on
* the host environment. Right now only works for Node, Rhino and browser.
*/
(function () {
var pathRegExp = /(\/|^)env\/|\{env\}/,
env = 'unknown';
if (typeof process !== 'undefined' && process.versions && !!process.versions.node) {
env = 'node';
} else if (typeof Packages !== 'undefined') {
env = 'rhino';
} else if ((typeof navigator !== 'undefined' && typeof document !== 'undefined') ||
(typeof importScripts !== 'undefined' && typeof self !== 'undefined')) {
env = 'browser';
} else if (typeof Components !== 'undefined' && Components.classes && Components.interfaces) {
env = 'xpconnect';
}
define({
get: function () {
return env;
},
load: function (name, req, load, config) {
//Allow override in the config.
if (config.env) {
env = config.env;
}
name = name.replace(pathRegExp, function (match, prefix) {
if (match.indexOf('{') === -1) {
return prefix + env + '/';
} else {
return env;
}
});
req([name], function (mod) {
load(mod);
});
}
});
}());
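A short sketch of how the env! plugin above resolves a dependency; the module ids follow the env!env/&lt;name&gt; pattern used elsewhere in this directory.

//Under Node, 'env!env/file' is rewritten by the load() hook above to
//'node/file' before loading; under Rhino it becomes 'rhino/file', and so on.
//A config value of {env: 'node'} forces the choice regardless of detection.
define(['env!env/file'], function (file) {
    //file is whichever environment-specific implementation was loaded.
    return file;
});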

6709
build/jslib/esprima.js Normal file

File diff suppressed because one or more lines are too long


@ -0,0 +1,15 @@
/*global define, Reflect */
/*
* xpcshell has a smaller stack on linux and windows (1MB vs 9MB on mac),
* and the recursive nature of esprima can cause it to overflow pretty
* quickly. So favor its built-in Reflect parser:
* https://developer.mozilla.org/en-US/docs/SpiderMonkey/Parser_API
*/
define(['./esprima', 'env'], function (esprima, env) {
if (env.get() === 'xpconnect' && typeof Reflect !== 'undefined') {
return Reflect;
} else {
return esprima;
}
});

216
build/jslib/lang.js Normal file

@ -0,0 +1,216 @@
/*jslint plusplus: true */
/*global define, java */
define(function () {
'use strict';
var lang, isJavaObj,
hasOwn = Object.prototype.hasOwnProperty;
function hasProp(obj, prop) {
return hasOwn.call(obj, prop);
}
isJavaObj = function () {
return false;
};
//Rhino, but not Nashorn (detected by importPackage not existing)
//Can have some strange foreign objects.
if (typeof java !== 'undefined' && java.lang && java.lang.Object && typeof importPackage !== 'undefined') {
isJavaObj = function (obj) {
return obj instanceof java.lang.Object;
};
}
lang = {
backSlashRegExp: /\\/g,
ostring: Object.prototype.toString,
isArray: Array.isArray || function (it) {
return lang.ostring.call(it) === "[object Array]";
},
isFunction: function(it) {
return lang.ostring.call(it) === "[object Function]";
},
isRegExp: function(it) {
return it && it instanceof RegExp;
},
hasProp: hasProp,
//returns true if the object does not have an own property prop,
//or if it does, it is a falsy value.
falseProp: function (obj, prop) {
return !hasProp(obj, prop) || !obj[prop];
},
//gets own property value for given prop on object
getOwn: function (obj, prop) {
return hasProp(obj, prop) && obj[prop];
},
_mixin: function(dest, source, override){
var name;
for (name in source) {
if(source.hasOwnProperty(name) &&
(override || !dest.hasOwnProperty(name))) {
dest[name] = source[name];
}
}
return dest; // Object
},
/**
* mixin({}, obj1, obj2) is allowed. If the last argument is a boolean,
* then the source objects' properties are force-copied over to dest.
*/
mixin: function(dest){
var parameters = Array.prototype.slice.call(arguments),
override, i, l;
if (!dest) { dest = {}; }
if (parameters.length > 2 && typeof arguments[parameters.length-1] === 'boolean') {
override = parameters.pop();
}
for (i = 1, l = parameters.length; i < l; i++) {
lang._mixin(dest, parameters[i], override);
}
return dest; // Object
},
/**
* Does a deep mix of source into dest, where source values override
* dest values if a winner is needed.
* @param {Object} dest destination object that receives the mixed
* values.
* @param {Object} source source object contributing properties to mix
* in.
* @return {[Object]} returns dest object with the modification.
*/
deepMix: function(dest, source) {
lang.eachProp(source, function (value, prop) {
if (typeof value === 'object' && value &&
!lang.isArray(value) && !lang.isFunction(value) &&
!(value instanceof RegExp)) {
if (!dest[prop]) {
dest[prop] = {};
}
lang.deepMix(dest[prop], value);
} else {
dest[prop] = value;
}
});
return dest;
},
/**
* Does a type of deep copy. Do not give it anything fancy; it is best
* for basic object copies of objects that also work well as
* JSON-serialized things, or that have properties pointing to functions.
* For non-array/object values, just returns the same object.
* @param {Object} obj copy properties from this object
* @param {Object} [ignoredProps] optional object whose own properties
* are keys that should be ignored.
* @return {Object}
*/
deeplikeCopy: function (obj, ignoredProps) {
var type, result;
if (lang.isArray(obj)) {
result = [];
obj.forEach(function(value) {
result.push(lang.deeplikeCopy(value, ignoredProps));
});
return result;
}
type = typeof obj;
if (obj === null || obj === undefined || type === 'boolean' ||
type === 'string' || type === 'number' || lang.isFunction(obj) ||
lang.isRegExp(obj)|| isJavaObj(obj)) {
return obj;
}
//Anything else is an object, hopefully.
result = {};
lang.eachProp(obj, function(value, key) {
if (!ignoredProps || !hasProp(ignoredProps, key)) {
result[key] = lang.deeplikeCopy(value, ignoredProps);
}
});
return result;
},
delegate: (function () {
// boodman/crockford delegation w/ cornford optimization
function TMP() {}
return function (obj, props) {
TMP.prototype = obj;
var tmp = new TMP();
TMP.prototype = null;
if (props) {
lang.mixin(tmp, props);
}
return tmp; // Object
};
}()),
/**
* Helper function for iterating over an array. If the func returns
* a true value, it will break out of the loop.
*/
each: function each(ary, func) {
if (ary) {
var i;
for (i = 0; i < ary.length; i += 1) {
if (func(ary[i], i, ary)) {
break;
}
}
}
},
/**
* Cycles over properties in an object and calls a function for each
* property value. If the function returns a truthy value, then the
* iteration is stopped.
*/
eachProp: function eachProp(obj, func) {
var prop;
for (prop in obj) {
if (hasProp(obj, prop)) {
if (func(obj[prop], prop)) {
break;
}
}
}
},
//Similar to Function.prototype.bind, but the "this" object is specified
//first, since it is easier to read/figure out what "this" will be.
bind: function bind(obj, fn) {
return function () {
return fn.apply(obj, arguments);
};
},
//Escapes a content string to be a string that has characters escaped
//for inclusion as part of a JS string.
jsEscape: function (content) {
return content.replace(/(["'\\])/g, '\\$1')
.replace(/[\f]/g, "\\f")
.replace(/[\b]/g, "\\b")
.replace(/[\n]/g, "\\n")
.replace(/[\t]/g, "\\t")
.replace(/[\r]/g, "\\r");
}
};
return lang;
});
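A minimal sketch of the helpers above; the config-shaped objects are illustrative only.

define(['lang'], function (lang) {
    var defaults = {optimize: 'uglify', uglify: {mangle: true}},
        overrides = {optimize: 'none', uglify: {compress: false}};

    //Shallow mixin: a trailing boolean forces overwriting of existing keys,
    //so merged.uglify ends up being the overrides.uglify object itself.
    var merged = lang.mixin({}, defaults, overrides, true);

    //deepMix recurses into plain objects instead of replacing them, so
    //deep.uglify ends up as {mangle: true, compress: false}.
    var deep = lang.deepMix(lang.deeplikeCopy(defaults), overrides);

    //jsEscape makes a string safe to embed inside a generated JS string.
    return lang.jsEscape('He said "hi"\n');
});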

52
build/jslib/logger.js Normal file

@ -0,0 +1,52 @@
/*jslint nomen: false, strict: false */
/*global define: false */
define(['env!env/print'], function (print) {
var logger = {
TRACE: 0,
INFO: 1,
WARN: 2,
ERROR: 3,
SILENT: 4,
level: 0,
logPrefix: "",
logLevel: function( level ) {
this.level = level;
},
trace: function (message) {
if (this.level <= this.TRACE) {
this._print(message);
}
},
info: function (message) {
if (this.level <= this.INFO) {
this._print(message);
}
},
warn: function (message) {
if (this.level <= this.WARN) {
this._print(message);
}
},
error: function (message) {
if (this.level <= this.ERROR) {
this._print(message);
}
},
_print: function (message) {
this._sysPrint((this.logPrefix ? (this.logPrefix + " ") : "") + message);
},
_sysPrint: function (message) {
print(message);
}
};
return logger;
});
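A small usage sketch; the prefix and messages are made up.

define(['logger'], function (logger) {
    logger.logLevel(logger.WARN);   //silence TRACE and INFO messages
    logger.logPrefix = '[build]';
    logger.trace('not printed');
    logger.warn('low disk space');  //prints "[build] low disk space"
});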

172
build/jslib/node.js Normal file

@ -0,0 +1,172 @@
//Explicitly not strict since this file contains an eval call, and we do not want
//to enforce strict on code evaluated that way. See
//https://github.com/requirejs/r.js/issues/774
/*jslint regexp: false, sloppy: true*/
/*global require: false, define: false, requirejsVars: false, process: false */
/**
* This adapter assumes that x.js has loaded it and set up
* some variables. This adapter just allows limited RequireJS
* usage from within the requirejs directory. The general
* node adapter is r.js.
*/
(function () {
var nodeReq = requirejsVars.nodeRequire,
req = requirejsVars.require,
def = requirejsVars.define,
fs = nodeReq('fs'),
path = nodeReq('path'),
vm = nodeReq('vm'),
//In Node 0.7+ existsSync is on fs.
exists = fs.existsSync || path.existsSync,
hasOwn = Object.prototype.hasOwnProperty;
function hasProp(obj, prop) {
return hasOwn.call(obj, prop);
}
function syncTick(fn) {
fn();
}
function makeError(message, moduleName) {
var err = new Error(message);
err.requireModules = [moduleName];
return err;
}
//Supply an implementation that allows synchronous get of a module.
req.get = function (context, moduleName, relModuleMap, localRequire) {
if (moduleName === "require" || moduleName === "exports" || moduleName === "module") {
context.onError(makeError("Explicit require of " + moduleName + " is not allowed.", moduleName));
}
var ret, oldTick,
moduleMap = context.makeModuleMap(moduleName, relModuleMap, false, true);
//Normalize module name, if it contains . or ..
moduleName = moduleMap.id;
if (hasProp(context.defined, moduleName)) {
ret = context.defined[moduleName];
} else {
if (ret === undefined) {
//Make sure nextTick for this type of call is sync-based.
oldTick = context.nextTick;
context.nextTick = syncTick;
try {
if (moduleMap.prefix) {
//A plugin, call requirejs to handle it. Now that
//nextTick is syncTick, the require will complete
//synchronously.
localRequire([moduleMap.originalName]);
//Now that plugin is loaded, can regenerate the moduleMap
//to get the final, normalized ID.
moduleMap = context.makeModuleMap(moduleMap.originalName, relModuleMap, false, true);
moduleName = moduleMap.id;
} else {
//Try to dynamically fetch it.
req.load(context, moduleName, moduleMap.url);
//Enable the module
context.enable(moduleMap, relModuleMap);
}
//Break any cycles by requiring it normally, but this will
//finish synchronously
context.require([moduleName]);
//The above calls are sync, so can do the next thing safely.
ret = context.defined[moduleName];
} finally {
context.nextTick = oldTick;
}
}
}
return ret;
};
req.nextTick = function (fn) {
process.nextTick(fn);
};
//Add wrapper around the code so that it gets the requirejs
//API instead of the Node API, and it is done lexically so
//that it survives later execution.
req.makeNodeWrapper = function (contents) {
return '(function (require, requirejs, define) { ' +
contents +
'\n}(requirejsVars.require, requirejsVars.requirejs, requirejsVars.define));';
};
req.load = function (context, moduleName, url) {
var contents, err,
config = context.config;
if (config.shim[moduleName] && (!config.suppress || !config.suppress.nodeShim)) {
console.warn('Shim config not supported in Node, may or may not work. Detected ' +
'for module: ' + moduleName);
}
if (exists(url)) {
contents = fs.readFileSync(url, 'utf8');
contents = req.makeNodeWrapper(contents);
try {
vm.runInThisContext(contents, fs.realpathSync(url));
} catch (e) {
err = new Error('Evaluating ' + url + ' as module "' +
moduleName + '" failed with error: ' + e);
err.originalError = e;
err.moduleName = moduleName;
err.requireModules = [moduleName];
err.fileName = url;
return context.onError(err);
}
} else {
def(moduleName, function () {
//Get the original name, since relative requires may be
//resolved differently in node (issue #202). Also, if relative,
//make it relative to the URL of the item requesting it
//(issue #393)
var dirName,
map = hasProp(context.registry, moduleName) &&
context.registry[moduleName].map,
parentMap = map && map.parentMap,
originalName = map && map.originalName;
if (originalName.charAt(0) === '.' && parentMap) {
dirName = parentMap.url.split('/');
dirName.pop();
originalName = dirName.join('/') + '/' + originalName;
}
try {
return (context.config.nodeRequire || req.nodeRequire)(originalName);
} catch (e) {
err = new Error('Tried loading "' + moduleName + '" at ' +
url + ' then tried node\'s require("' +
originalName + '") and it failed ' +
'with error: ' + e);
err.originalError = e;
err.moduleName = originalName;
err.requireModules = [moduleName];
throw err;
}
});
}
//Support anonymous modules.
context.completeLoad(moduleName);
};
//Override to provide the function wrapper for define/require.
req.exec = function (text) {
/*jslint evil: true */
text = req.makeNodeWrapper(text);
return eval(text);
};
}());

14
build/jslib/node/args.js Normal file

@ -0,0 +1,14 @@
/*jslint strict: false */
/*global define: false, process: false */
define(function () {
//Do not return the "node" or "r.js" arguments
var args = process.argv.slice(2);
//Ignore any command option used for main x.js branching
if (args[0] && args[0].indexOf('-') === 0) {
args = args.slice(1);
}
return args;
});


@ -0,0 +1,7 @@
/*jslint strict: false */
/*global define: false, load: false */
//Needed so that rhino/assert can return a stub for uglify's consolidator.js
define(['assert'], function (assert) {
return assert;
});

304
build/jslib/node/file.js Normal file

@ -0,0 +1,304 @@
/*jslint plusplus: false, octal:false, strict: false */
/*global define: false, process: false */
define(['fs', 'path', 'prim'], function (fs, path, prim) {
var isWindows = process.platform === 'win32',
windowsDriveRegExp = /^[a-zA-Z]\:\/$/,
file;
function frontSlash(path) {
return path.replace(/\\/g, '/');
}
function exists(path) {
if (isWindows && path.charAt(path.length - 1) === '/' &&
path.charAt(path.length - 2) !== ':') {
path = path.substring(0, path.length - 1);
}
try {
fs.statSync(path);
return true;
} catch (e) {
return false;
}
}
function mkDir(dir) {
if (!exists(dir) && (!isWindows || !windowsDriveRegExp.test(dir))) {
fs.mkdirSync(dir, 511);
}
}
function mkFullDir(dir) {
var parts = dir.split('/'),
currDir = '',
first = true;
parts.forEach(function (part) {
//First part may be empty string if path starts with a slash.
currDir += part + '/';
first = false;
if (part) {
mkDir(currDir);
}
});
}
file = {
backSlashRegExp: /\\/g,
exclusionRegExp: /^\./,
getLineSeparator: function () {
return '/';
},
exists: function (fileName) {
return exists(fileName);
},
parent: function (fileName) {
var parts = fileName.split('/');
parts.pop();
return parts.join('/');
},
/**
* Gets the absolute file path as a string, normalized
* to using front slashes for path separators.
* @param {String} fileName
*/
absPath: function (fileName) {
return frontSlash(path.normalize(frontSlash(fs.realpathSync(fileName))));
},
normalize: function (fileName) {
return frontSlash(path.normalize(fileName));
},
isFile: function (path) {
return fs.statSync(path).isFile();
},
isDirectory: function (path) {
return fs.statSync(path).isDirectory();
},
getFilteredFileList: function (/*String*/startDir, /*RegExp*/regExpFilters, /*boolean?*/makeUnixPaths) {
//summary: Recurses startDir and finds matches to the files that match regExpFilters.include
//and do not match regExpFilters.exclude. Or just one regexp can be passed in for regExpFilters,
//and it will be treated as the "include" case.
//Ignores files/directories that start with a period (.) unless exclusionRegExp
//is set to another value.
var files = [], topDir, regExpInclude, regExpExclude, dirFileArray,
i, stat, filePath, ok, dirFiles, fileName;
topDir = startDir;
regExpInclude = regExpFilters.include || regExpFilters;
regExpExclude = regExpFilters.exclude || null;
if (file.exists(topDir)) {
dirFileArray = fs.readdirSync(topDir);
for (i = 0; i < dirFileArray.length; i++) {
fileName = dirFileArray[i];
filePath = path.join(topDir, fileName);
stat = fs.statSync(filePath);
if (stat.isFile()) {
if (makeUnixPaths) {
//Make sure we have a JS string.
if (filePath.indexOf("/") === -1) {
filePath = frontSlash(filePath);
}
}
ok = true;
if (regExpInclude) {
ok = filePath.match(regExpInclude);
}
if (ok && regExpExclude) {
ok = !filePath.match(regExpExclude);
}
if (ok && (!file.exclusionRegExp ||
!file.exclusionRegExp.test(fileName))) {
files.push(filePath);
}
} else if (stat.isDirectory() &&
(!file.exclusionRegExp || !file.exclusionRegExp.test(fileName))) {
dirFiles = this.getFilteredFileList(filePath, regExpFilters, makeUnixPaths);
//Do not use push.apply for dir listings, can hit limit of max number
//of arguments to a function call, #921.
dirFiles.forEach(function (dirFile) {
files.push(dirFile);
});
}
}
}
return files; //Array
},
copyDir: function (/*String*/srcDir, /*String*/destDir, /*RegExp?*/regExpFilter, /*boolean?*/onlyCopyNew) {
//summary: copies files from srcDir to destDir using the regExpFilter to determine if the
//file should be copied. Returns a list file name strings of the destinations that were copied.
regExpFilter = regExpFilter || /\w/;
//Normalize the directory names, but keep front slashes.
//path module on windows now returns backslashed paths.
srcDir = frontSlash(path.normalize(srcDir));
destDir = frontSlash(path.normalize(destDir));
var fileNames = file.getFilteredFileList(srcDir, regExpFilter, true),
copiedFiles = [], i, srcFileName, destFileName;
for (i = 0; i < fileNames.length; i++) {
srcFileName = fileNames[i];
destFileName = srcFileName.replace(srcDir, destDir);
if (file.copyFile(srcFileName, destFileName, onlyCopyNew)) {
copiedFiles.push(destFileName);
}
}
return copiedFiles.length ? copiedFiles : null; //Array or null
},
copyFile: function (/*String*/srcFileName, /*String*/destFileName, /*boolean?*/onlyCopyNew) {
//summary: copies srcFileName to destFileName. If onlyCopyNew is set, it only copies the file if
//srcFileName is newer than destFileName. Returns a boolean indicating if the copy occurred.
var parentDir;
//logger.trace("Src filename: " + srcFileName);
//logger.trace("Dest filename: " + destFileName);
//If onlyCopyNew is true, then compare dates and only copy if the src is newer
//than dest.
if (onlyCopyNew) {
if (file.exists(destFileName) && fs.statSync(destFileName).mtime.getTime() >= fs.statSync(srcFileName).mtime.getTime()) {
return false; //Boolean
}
}
//Make sure destination dir exists.
parentDir = path.dirname(destFileName);
if (!file.exists(parentDir)) {
mkFullDir(parentDir);
}
fs.writeFileSync(destFileName, fs.readFileSync(srcFileName, 'binary'), 'binary');
return true; //Boolean
},
/**
* Renames a file. May fail if "to" already exists or is on another drive.
*/
renameFile: function (from, to) {
return fs.renameSync(from, to);
},
/**
* Reads a *text* file.
*/
readFile: function (/*String*/path, /*String?*/encoding) {
if (encoding === 'utf-8') {
encoding = 'utf8';
}
if (!encoding) {
encoding = 'utf8';
}
var text = fs.readFileSync(path, encoding);
//Hmm, would not expect to get a BOM, but it seems to happen,
//remove it just in case.
if (text.indexOf('\uFEFF') === 0) {
text = text.substring(1, text.length);
}
return text;
},
readFileAsync: function (path, encoding) {
var d = prim();
try {
d.resolve(file.readFile(path, encoding));
} catch (e) {
d.reject(e);
}
return d.promise;
},
saveUtf8File: function (/*String*/fileName, /*String*/fileContents) {
//summary: saves a *text* file using UTF-8 encoding.
file.saveFile(fileName, fileContents, "utf8");
},
saveFile: function (/*String*/fileName, /*String*/fileContents, /*String?*/encoding) {
//summary: saves a *text* file.
var parentDir;
if (encoding === 'utf-8') {
encoding = 'utf8';
}
if (!encoding) {
encoding = 'utf8';
}
//Make sure destination directories exist.
parentDir = path.dirname(fileName);
if (!file.exists(parentDir)) {
mkFullDir(parentDir);
}
fs.writeFileSync(fileName, fileContents, encoding);
},
deleteFile: function (/*String*/fileName) {
//summary: deletes a file or directory if it exists.
var files, i, stat;
if (file.exists(fileName)) {
stat = fs.lstatSync(fileName);
if (stat.isDirectory()) {
files = fs.readdirSync(fileName);
for (i = 0; i < files.length; i++) {
this.deleteFile(path.join(fileName, files[i]));
}
fs.rmdirSync(fileName);
} else {
fs.unlinkSync(fileName);
}
}
},
/**
* Deletes any empty directories under the given directory.
*/
deleteEmptyDirs: function (startDir) {
var dirFileArray, i, fileName, filePath, stat;
if (file.exists(startDir)) {
dirFileArray = fs.readdirSync(startDir);
for (i = 0; i < dirFileArray.length; i++) {
fileName = dirFileArray[i];
filePath = path.join(startDir, fileName);
stat = fs.lstatSync(filePath);
if (stat.isDirectory()) {
file.deleteEmptyDirs(filePath);
}
}
//If directory is now empty, remove it.
if (fs.readdirSync(startDir).length === 0) {
file.deleteFile(startDir);
}
}
}
};
return file;
});
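A minimal sketch of the Node file adapter above; directory names are illustrative.

define(['env!env/file'], function (file) {
    //Recursively list .js files, honouring the include/exclude regexps and
    //skipping dot-files via file.exclusionRegExp.
    var jsFiles = file.getFilteredFileList('src', {
        include: /\.js$/,
        exclude: /\/tests\//
    }, true);

    //Copy only files newer than their existing destination copies.
    var copied = file.copyDir('src', 'www-built/src', /\w/, true);

    return {files: jsFiles, copied: copied || []};
});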

11
build/jslib/node/load.js Normal file

@ -0,0 +1,11 @@
/*jslint strict: false */
/*global define: false, console: false */
define(['fs'], function (fs) {
function load(fileName) {
var contents = fs.readFileSync(fileName, 'utf8');
process.compile(contents, fileName);
}
return load;
});


@ -0,0 +1,4 @@
/*jslint strict: false */
/*global define: false */
define({});

10
build/jslib/node/print.js Normal file

@ -0,0 +1,10 @@
/*jslint strict: false */
/*global define: false, console: false */
define(function () {
function print(msg) {
console.log(msg);
}
return print;
});

23
build/jslib/node/quit.js Normal file

@ -0,0 +1,23 @@
/*global process */
define(function () {
'use strict';
return function (code) {
var draining = 0;
var exit = function () {
if (draining === 0) {
process.exit(code);
} else {
draining -= 1;
}
};
if (process.stdout.bufferSize) {
draining += 1;
process.stdout.once('drain', exit);
}
if (process.stderr.bufferSize) {
draining += 1;
process.stderr.once('drain', exit);
}
exit();
};
});
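A small sketch of how the quit module is used to exit cleanly; the error handling shown is illustrative.

define(['env!env/quit', 'logger'], function (quit, logger) {
    return function fail(err) {
        logger.error(err.toString());
        //quit() waits for a 'drain' event on any stream that still has
        //buffered output before calling process.exit(1).
        quit(1);
    };
});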

474
build/jslib/optimize.js Normal file

@ -0,0 +1,474 @@
/*jslint plusplus: true, nomen: true, regexp: true */
/*global define: false */
define([ 'lang', 'logger', 'env!env/optimize', 'env!env/file', 'parse',
'pragma', 'uglifyjs',
'source-map'],
function (lang, logger, envOptimize, file, parse,
pragma, uglify,
sourceMap) {
'use strict';
var optimize,
cssImportRegExp = /\@import\s+(url\()?\s*([^);]+)\s*(\))?([\w, ]*)(;)?/ig,
cssCommentImportRegExp = /\/\*[^\*]*@import[^\*]*\*\//g,
cssUrlRegExp = /\url\(\s*([^\)]+)\s*\)?/g,
protocolRegExp = /^\w+:/,
SourceMapGenerator = sourceMap.SourceMapGenerator,
SourceMapConsumer = sourceMap.SourceMapConsumer,
es5PlusGuidance = 'If the source uses ES2015 or later syntax, please pass "optimize: \'none\'" to r.js and use an ES2015+ compatible minifier after running r.js. The included UglifyJS only understands ES5 or earlier syntax.';
/**
* If an URL from a CSS url value contains start/end quotes, remove them.
* This is not done in the regexp, since my regexp fu is not that strong,
* and the CSS spec allows for ' and " in the URL if they are backslash escaped.
* @param {String} url
*/
function cleanCssUrlQuotes(url) {
//Make sure we are not ending in whitespace.
//Not very confident of the css regexps above that there will not be ending
//whitespace.
url = url.replace(/\s+$/, "");
if (url.charAt(0) === "'" || url.charAt(0) === "\"") {
url = url.substring(1, url.length - 1);
}
return url;
}
function fixCssUrlPaths(fileName, path, contents, cssPrefix) {
return contents.replace(cssUrlRegExp, function (fullMatch, urlMatch) {
var firstChar, hasProtocol, parts, i,
fixedUrlMatch = cleanCssUrlQuotes(urlMatch);
fixedUrlMatch = fixedUrlMatch.replace(lang.backSlashRegExp, "/");
//Only do the work for relative URLs. Skip things that start with / or #, or have
//a protocol.
firstChar = fixedUrlMatch.charAt(0);
hasProtocol = protocolRegExp.test(fixedUrlMatch);
if (firstChar !== "/" && firstChar !== "#" && !hasProtocol) {
//It is a relative URL, tack on the cssPrefix and path prefix
urlMatch = cssPrefix + path + fixedUrlMatch;
} else if (!hasProtocol) {
logger.trace(fileName + "\n URL not a relative URL, skipping: " + urlMatch);
}
//Collapse .. and .
parts = urlMatch.split("/");
for (i = parts.length - 1; i > 0; i--) {
if (parts[i] === ".") {
parts.splice(i, 1);
} else if (parts[i] === "..") {
if (i !== 0 && parts[i - 1] !== "..") {
parts.splice(i - 1, 2);
i -= 1;
}
}
}
return "url(" + parts.join("/") + ")";
});
}
/**
* Inlines nested stylesheets that have @import calls in them.
* @param {String} fileName the file name
* @param {String} fileContents the file contents
* @param {String} cssImportIgnore comma delimited string of files to ignore
* @param {String} cssPrefix string to be prefixed before relative URLs
* @param {Object} included an object used to track the files already imported
*/
function flattenCss(fileName, fileContents, cssImportIgnore, cssPrefix, included, topLevel) {
//Find the last slash in the name.
fileName = fileName.replace(lang.backSlashRegExp, "/");
var endIndex = fileName.lastIndexOf("/"),
//Make a file path based on the last slash.
//If there is no slash, it must be just a file name; use empty string then.
filePath = (endIndex !== -1) ? fileName.substring(0, endIndex + 1) : "",
//store a list of merged files
importList = [],
skippedList = [];
//First make a pass by removing any commented out @import calls.
fileContents = fileContents.replace(cssCommentImportRegExp, '');
//Make sure we have a delimited ignore list to make matching faster
if (cssImportIgnore && cssImportIgnore.charAt(cssImportIgnore.length - 1) !== ",") {
cssImportIgnore += ",";
}
fileContents = fileContents.replace(cssImportRegExp, function (fullMatch, urlStart, importFileName, urlEnd, mediaTypes) {
//Only process media type "all" or empty media type rules.
if (mediaTypes && ((mediaTypes.replace(/^\s\s*/, '').replace(/\s\s*$/, '')) !== "all")) {
skippedList.push(fileName);
return fullMatch;
}
importFileName = cleanCssUrlQuotes(importFileName);
//Ignore the file import if it is part of an ignore list.
if (cssImportIgnore && cssImportIgnore.indexOf(importFileName + ",") !== -1) {
return fullMatch;
}
//Make sure we have a unix path for the rest of the operation.
importFileName = importFileName.replace(lang.backSlashRegExp, "/");
try {
//if a relative path, then tack on the filePath.
//If it is not a relative path, then the readFile below will fail,
//and we will just skip that import.
var fullImportFileName = importFileName.charAt(0) === "/" ? importFileName : filePath + importFileName,
importContents = file.readFile(fullImportFileName),
importEndIndex, importPath, flat;
//Skip the file if it has already been included.
if (included[fullImportFileName]) {
return '';
}
included[fullImportFileName] = true;
//Make sure to flatten any nested imports.
flat = flattenCss(fullImportFileName, importContents, cssImportIgnore, cssPrefix, included);
importContents = flat.fileContents;
if (flat.importList.length) {
importList.push.apply(importList, flat.importList);
}
if (flat.skippedList.length) {
skippedList.push.apply(skippedList, flat.skippedList);
}
//Make the full import path
importEndIndex = importFileName.lastIndexOf("/");
//Make a file path based on the last slash.
//If there is no slash, it must be just a file name; use empty string then.
importPath = (importEndIndex !== -1) ? importFileName.substring(0, importEndIndex + 1) : "";
//fix url() on relative import (#5)
importPath = importPath.replace(/^\.\//, '');
//Modify URL paths to match the path represented by this file.
importContents = fixCssUrlPaths(importFileName, importPath, importContents, cssPrefix);
importList.push(fullImportFileName);
return importContents;
} catch (e) {
logger.warn(fileName + "\n Cannot inline css import, skipping: " + importFileName);
return fullMatch;
}
});
if (cssPrefix && topLevel) {
//Modify URL paths to match the path represented by this file.
fileContents = fixCssUrlPaths(fileName, '', fileContents, cssPrefix);
}
return {
importList : importList,
skippedList: skippedList,
fileContents : fileContents
};
}
optimize = {
/**
* Optimizes a file that contains JavaScript content. Optionally collects
* plugin resources mentioned in a file, and then passes the content
* through a minifier if one is specified via config.optimize.
*
* @param {String} fileName the name of the file to optimize
* @param {String} fileContents the contents to optimize. If this is
* a null value, then fileName will be used to read the fileContents.
* @param {String} outFileName the name of the file to use for the
* saved optimized content.
* @param {Object} config the build config object.
* @param {Array} [pluginCollector] storage for any plugin resources
* found.
*/
jsFile: function (fileName, fileContents, outFileName, config, pluginCollector) {
if (!fileContents) {
fileContents = file.readFile(fileName);
}
fileContents = optimize.js(fileName, fileContents, outFileName, config, pluginCollector);
file.saveUtf8File(outFileName, fileContents);
},
/**
* Optimizes a file that contains JavaScript content. Optionally collects
* plugin resources mentioned in a file, and then passes the content
* through a minifier if one is specified via config.optimize.
*
* @param {String} fileName the name of the file that matches the
* fileContents.
* @param {String} fileContents the string of JS to optimize.
* @param {Object} [config] the build config object.
* @param {Array} [pluginCollector] storage for any plugin resources
* found.
*/
js: function (fileName, fileContents, outFileName, config, pluginCollector) {
var optFunc, optConfig,
parts = (String(config.optimize)).split('.'),
optimizerName = parts[0],
keepLines = parts[1] === 'keepLines',
licenseContents = '';
config = config || {};
//Apply pragmas/namespace renaming
fileContents = pragma.process(fileName, fileContents, config, 'OnSave', pluginCollector);
//Optimize the JS files if asked.
if (optimizerName && optimizerName !== 'none') {
optFunc = envOptimize[optimizerName] || optimize.optimizers[optimizerName];
if (!optFunc) {
throw new Error('optimizer with name of "' +
optimizerName +
'" not found for this environment');
}
optConfig = config[optimizerName] || {};
if (config.generateSourceMaps) {
optConfig.generateSourceMaps = !!config.generateSourceMaps;
optConfig._buildSourceMap = config._buildSourceMap;
}
try {
if (config.preserveLicenseComments) {
//Pull out any license comments for prepending after optimization.
try {
licenseContents = parse.getLicenseComments(fileName, fileContents);
} catch (e) {
throw new Error('Cannot parse file: ' + fileName + ' for comments. Skipping it. Error is:\n' + e.toString());
}
}
if (config.generateSourceMaps && licenseContents) {
optConfig.preamble = licenseContents;
licenseContents = '';
}
fileContents = licenseContents + optFunc(fileName,
fileContents,
outFileName,
keepLines,
optConfig);
if (optConfig._buildSourceMap && optConfig._buildSourceMap !== config._buildSourceMap) {
config._buildSourceMap = optConfig._buildSourceMap;
}
} catch (e) {
if (config.throwWhen && config.throwWhen.optimize) {
throw e;
} else {
logger.error(e);
}
}
} else {
if (config._buildSourceMap) {
config._buildSourceMap = null;
}
}
return fileContents;
},
/**
* Optimizes one CSS file, inlining @import calls, stripping comments, and
* optionally removing line returns.
* @param {String} fileName the path to the CSS file to optimize
* @param {String} outFileName the path to save the optimized file.
* @param {Object} config the config object with the optimizeCss and
* cssImportIgnore options.
*/
cssFile: function (fileName, outFileName, config) {
//Read in the file. Make sure we have a JS string.
var originalFileContents = file.readFile(fileName),
flat = flattenCss(fileName, originalFileContents, config.cssImportIgnore, config.cssPrefix, {}, true),
//Do not use the flattened CSS if there was one that was skipped.
fileContents = flat.skippedList.length ? originalFileContents : flat.fileContents,
startIndex, endIndex, buildText, comment;
if (flat.skippedList.length) {
logger.warn('Cannot inline @imports for ' + fileName +
',\nthe following files had media queries in them:\n' +
flat.skippedList.join('\n'));
}
//Do comment removal.
try {
if (config.optimizeCss.indexOf(".keepComments") === -1) {
startIndex = 0;
//Get rid of comments.
while ((startIndex = fileContents.indexOf("/*", startIndex)) !== -1) {
endIndex = fileContents.indexOf("*/", startIndex + 2);
if (endIndex === -1) {
throw "Improper comment in CSS file: " + fileName;
}
comment = fileContents.substring(startIndex, endIndex);
if (config.preserveLicenseComments &&
(comment.indexOf('license') !== -1 ||
comment.indexOf('opyright') !== -1 ||
comment.indexOf('(c)') !== -1)) {
//Keep the comment, just increment the startIndex
startIndex = endIndex;
} else {
fileContents = fileContents.substring(0, startIndex) + fileContents.substring(endIndex + 2, fileContents.length);
startIndex = 0;
}
}
}
//Get rid of newlines.
if (config.optimizeCss.indexOf(".keepLines") === -1) {
fileContents = fileContents.replace(/[\r\n]/g, " ");
fileContents = fileContents.replace(/\s+/g, " ");
fileContents = fileContents.replace(/\{\s/g, "{");
fileContents = fileContents.replace(/\s\}/g, "}");
} else {
//Remove multiple empty lines.
fileContents = fileContents.replace(/(\r\n)+/g, "\r\n");
fileContents = fileContents.replace(/(\n)+/g, "\n");
}
//Remove unnecessary whitespace
if (config.optimizeCss.indexOf(".keepWhitespace") === -1) {
//Remove leading and trailing whitespace from lines
fileContents = fileContents.replace(/^[ \t]+/gm, "");
fileContents = fileContents.replace(/[ \t]+$/gm, "");
//Remove whitespace after semicolon, colon, curly brackets and commas
fileContents = fileContents.replace(/(;|:|\{|}|,)[ \t]+/g, "$1");
//Remove whitespace before opening curly brackets
fileContents = fileContents.replace(/[ \t]+(\{)/g, "$1");
//Truncate double whitespace
fileContents = fileContents.replace(/([ \t])+/g, "$1");
//Remove empty lines
fileContents = fileContents.replace(/^[ \t]*[\r\n]/gm,'');
}
} catch (e) {
fileContents = originalFileContents;
logger.error("Could not optimized CSS file: " + fileName + ", error: " + e);
}
file.saveUtf8File(outFileName, fileContents);
//text output to stdout and/or written to build.txt file
buildText = "\n"+ outFileName.replace(config.dir, "") +"\n----------------\n";
flat.importList.push(fileName);
buildText += flat.importList.map(function(path){
return path.replace(config.dir, "");
}).join("\n");
return {
importList: flat.importList,
buildText: buildText +"\n"
};
},
/**
* Optimizes CSS files, inlining @import calls, stripping comments, and
* optionally removing line returns.
* @param {String} startDir the path to the top level directory
* @param {Object} config the config object with the optimizeCss and
* cssImportIgnore options.
*/
css: function (startDir, config) {
var buildText = "",
importList = [],
shouldRemove = config.dir && config.removeCombined,
i, fileName, result, fileList;
if (config.optimizeCss.indexOf("standard") !== -1) {
fileList = file.getFilteredFileList(startDir, /\.css$/, true);
if (fileList) {
for (i = 0; i < fileList.length; i++) {
fileName = fileList[i];
logger.trace("Optimizing (" + config.optimizeCss + ") CSS file: " + fileName);
result = optimize.cssFile(fileName, fileName, config);
buildText += result.buildText;
if (shouldRemove) {
result.importList.pop();
importList = importList.concat(result.importList);
}
}
}
if (shouldRemove) {
importList.forEach(function (path) {
if (file.exists(path)) {
file.deleteFile(path);
}
});
}
}
return buildText;
},
optimizers: {
uglify: function (fileName, fileContents, outFileName, keepLines, config) {
var result, existingMap, resultMap, finalMap, sourceIndex,
uconfig = {},
existingMapPath = outFileName + '.map',
baseName = fileName && fileName.split('/').pop();
config = config || {};
lang.mixin(uconfig, config, true);
uconfig.fromString = true;
if (config.preamble) {
uconfig.output = {preamble: config.preamble};
}
if (config.generateSourceMaps && (outFileName || config._buildSourceMap)) {
uconfig.outSourceMap = baseName + '.map';
if (config._buildSourceMap) {
existingMap = JSON.parse(config._buildSourceMap);
uconfig.inSourceMap = existingMap;
} else if (file.exists(existingMapPath)) {
uconfig.inSourceMap = existingMapPath;
existingMap = JSON.parse(file.readFile(existingMapPath));
}
}
logger.trace("Uglify file: " + fileName);
try {
//var tempContents = fileContents.replace(/\/\/\# sourceMappingURL=.*$/, '');
result = uglify.minify(fileContents, uconfig, baseName + '.src.js');
if (uconfig.outSourceMap && result.map) {
resultMap = result.map;
if (!existingMap && !config._buildSourceMap) {
file.saveFile(outFileName + '.src.js', fileContents);
}
fileContents = result.code;
if (config._buildSourceMap) {
config._buildSourceMap = resultMap;
} else {
file.saveFile(outFileName + '.map', resultMap);
}
} else {
fileContents = result.code;
}
} catch (e) {
var errorString = e.toString();
var isSyntaxError = /SyntaxError/.test(errorString);
throw new Error('Cannot uglify file: ' + fileName +
'. Skipping it. Error is:\n' + errorString +
(isSyntaxError ? '\n\n' + es5PlusGuidance : ''));
}
return fileContents;
}
}
};
return optimize;
});
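A minimal sketch of driving the CSS optimizer above directly; the paths are made up, and only the option names come from this file.

define(['optimize'], function (optimize) {
    var result = optimize.cssFile('www/css/main.css', 'www-built/css/main.css', {
        optimizeCss: 'standard',       //also accepts .keepComments/.keepLines/.keepWhitespace suffixes
        cssImportIgnore: 'ie.css',     //comma-delimited @imports to leave in place
        cssPrefix: '',                 //prefix applied to relative url() paths
        preserveLicenseComments: true, //keep comments mentioning license/copyright/(c)
        dir: 'www-built/'
    });
    //result.importList lists the inlined files; result.buildText is the
    //human-readable summary that ends up in build.txt.
    return result;
});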

11
build/jslib/opto.build.js Normal file

@ -0,0 +1,11 @@
({
baseUrl: '.',
optimize: 'none',
paths: {
fs: 'blank',
path: 'blank'
},
skipModuleInsertion: true,
name: '../build',
out: 'optotext.js'
})

1074
build/jslib/parse.js Normal file

File diff suppressed because it is too large

268
build/jslib/pragma.js Normal file

@ -0,0 +1,268 @@
/*jslint regexp: true, plusplus: true */
/*global define: false */
define(['parse', 'logger'], function (parse, logger) {
'use strict';
function Temp() {}
function create(obj, mixin) {
Temp.prototype = obj;
var temp = new Temp(), prop;
//Avoid any extra memory hanging around
Temp.prototype = null;
if (mixin) {
for (prop in mixin) {
if (mixin.hasOwnProperty(prop) && !temp.hasOwnProperty(prop)) {
temp[prop] = mixin[prop];
}
}
}
return temp; // Object
}
var pragma = {
conditionalRegExp: /(exclude|include)Start\s*\(\s*["'](\w+)["']\s*,(.*)\)/,
useStrictRegExp: /(^|[^{]\r?\n)['"]use strict['"];/g,
hasRegExp: /has\s*\(\s*['"]([^'"]+)['"]\s*\)/g,
configRegExp: /(^|[^\.])(requirejs|require)(\.config)\s*\(/g,
nsWrapRegExp: /\/\*requirejs namespace: true \*\//,
apiDefRegExp: /var requirejs,\s*require,\s*define;/,
defineCheckRegExp: /typeof(\s+|\s*\(\s*)define(\s*\))?\s*===?\s*["']function["']\s*&&\s*define\s*\.\s*amd/g,
defineStringCheckRegExp: /typeof\s+define\s*===?\s*["']function["']\s*&&\s*define\s*\[\s*["']amd["']\s*\]/g,
defineTypeFirstCheckRegExp: /\s*["']function["']\s*==(=?)\s*typeof\s+define\s*&&\s*define\s*\.\s*amd/g,
defineJQueryRegExp: /typeof\s+define\s*===?\s*["']function["']\s*&&\s*define\s*\.\s*amd\s*&&\s*define\s*\.\s*amd\s*\.\s*jQuery/g,
defineHasRegExp: /typeof\s+define\s*==(=)?\s*['"]function['"]\s*&&\s*typeof\s+define\.amd\s*==(=)?\s*['"]object['"]\s*&&\s*define\.amd/g,
defineTernaryRegExp: /typeof\s+define\s*===?\s*['"]function["']\s*&&\s*define\s*\.\s*amd\s*\?\s*define/,
defineExistsRegExp: /\s+typeof\s+define\s*!==?\s*['"]undefined["']\s*/,
defineExistsAndAmdRegExp: /typeof\s+define\s*!==?\s*['"]undefined["']\s*&&\s*define\s*\.\s*amd\s*/,
amdefineRegExp: /if\s*\(\s*typeof define\s*\!==\s*['"]function['"]\s*\)\s*\{\s*[^\{\}]+amdefine[^\{\}]+\}/g,
removeStrict: function (contents, config) {
return config.useStrict ? contents : contents.replace(pragma.useStrictRegExp, '$1');
},
namespace: function (fileContents, ns, onLifecycleName) {
if (ns) {
//Namespace require/define calls
fileContents = fileContents.replace(pragma.configRegExp, '$1' + ns + '.$2$3(');
fileContents = parse.renameNamespace(fileContents, ns);
//Namespace define ternary use:
fileContents = fileContents.replace(pragma.defineTernaryRegExp,
"typeof " + ns + ".define === 'function' && " + ns + ".define.amd ? " + ns + ".define");
//Namespace define jquery use:
fileContents = fileContents.replace(pragma.defineJQueryRegExp,
"typeof " + ns + ".define === 'function' && " + ns + ".define.amd && " + ns + ".define.amd.jQuery");
//Namespace has.js define use:
fileContents = fileContents.replace(pragma.defineHasRegExp,
"typeof " + ns + ".define === 'function' && typeof " + ns + ".define.amd === 'object' && " + ns + ".define.amd");
//Namespace async.js define use:
fileContents = fileContents.replace(pragma.defineExistsAndAmdRegExp,
"typeof " + ns + ".define !== 'undefined' && " + ns + ".define.amd");
//Namespace define checks.
//Do these ones last, since they are a subset of the more specific
//checks above.
fileContents = fileContents.replace(pragma.defineCheckRegExp,
"typeof " + ns + ".define === 'function' && " + ns + ".define.amd");
fileContents = fileContents.replace(pragma.defineStringCheckRegExp,
"typeof " + ns + ".define === 'function' && " + ns + ".define['amd']");
fileContents = fileContents.replace(pragma.defineTypeFirstCheckRegExp,
"'function' === typeof " + ns + ".define && " + ns + ".define.amd");
fileContents = fileContents.replace(pragma.defineExistsRegExp,
"typeof " + ns + ".define !== 'undefined'");
//Check for require.js with the require/define definitions
if (pragma.apiDefRegExp.test(fileContents) &&
fileContents.indexOf("if (!" + ns + " || !" + ns + ".requirejs)") === -1) {
//Wrap the file contents in a typeof check, and a function
//to contain the API globals.
fileContents = "var " + ns + ";(function () { if (!" + ns + " || !" + ns + ".requirejs) {\n" +
"if (!" + ns + ") { " + ns + ' = {}; } else { require = ' + ns + '; }\n' +
fileContents +
"\n" +
ns + ".requirejs = requirejs;" +
ns + ".require = require;" +
ns + ".define = define;\n" +
"}\n}());";
}
//Finally, if the file wants a special wrapper because it ties
//in to the requirejs internals in a way that would not fit
//the above matches, do that. Look for /*requirejs namespace: true*/
if (pragma.nsWrapRegExp.test(fileContents)) {
//Remove the pragma.
fileContents = fileContents.replace(pragma.nsWrapRegExp, '');
//Alter the contents.
fileContents = '(function () {\n' +
'var require = ' + ns + '.require,' +
'requirejs = ' + ns + '.requirejs,' +
'define = ' + ns + '.define;\n' +
fileContents +
'\n}());';
}
}
return fileContents;
},
/**
* processes the fileContents for some //>> conditional statements
*/
process: function (fileName, fileContents, config, onLifecycleName, pluginCollector) {
/*jslint evil: true */
var foundIndex = -1, startIndex = 0, lineEndIndex, conditionLine,
matches, type, marker, condition, isTrue, endRegExp, endMatches,
endMarkerIndex, shouldInclude, startLength, lifecycleHas, deps,
i, dep, moduleName, collectorMod,
lifecyclePragmas, pragmas = config.pragmas, hasConfig = config.has,
//Legacy arg defined to help in dojo conversion script. Remove later
//when dojo no longer needs conversion:
kwArgs = pragmas;
//Mix in a specific lifecycle scoped object, to allow targeting
//some pragmas/has tests to only when files are saved, or at different
//lifecycle events. Do not bother with kwArgs in this section, since
//the old dojo kwArgs were for all points in the build lifecycle.
if (onLifecycleName) {
lifecyclePragmas = config['pragmas' + onLifecycleName];
lifecycleHas = config['has' + onLifecycleName];
if (lifecyclePragmas) {
pragmas = create(pragmas || {}, lifecyclePragmas);
}
if (lifecycleHas) {
hasConfig = create(hasConfig || {}, lifecycleHas);
}
}
//Replace has references if desired
if (hasConfig) {
fileContents = fileContents.replace(pragma.hasRegExp, function (match, test) {
if (hasConfig.hasOwnProperty(test)) {
return !!hasConfig[test];
}
return match;
});
}
if (!config.skipPragmas) {
while ((foundIndex = fileContents.indexOf("//>>", startIndex)) !== -1) {
//Found a conditional. Get the conditional line.
lineEndIndex = fileContents.indexOf("\n", foundIndex);
if (lineEndIndex === -1) {
lineEndIndex = fileContents.length - 1;
}
//Increment startIndex past the line so the next conditional search can be done.
startIndex = lineEndIndex + 1;
//Break apart the conditional.
conditionLine = fileContents.substring(foundIndex, lineEndIndex + 1);
matches = conditionLine.match(pragma.conditionalRegExp);
if (matches) {
type = matches[1];
marker = matches[2];
condition = matches[3];
isTrue = false;
//See if the condition is true.
try {
isTrue = !!eval("(" + condition + ")");
} catch (e) {
throw "Error in file: " +
fileName +
". Conditional comment: " +
conditionLine +
" failed with this error: " + e;
}
//Find the endpoint marker.
endRegExp = new RegExp('\\/\\/\\>\\>\\s*' + type + 'End\\(\\s*[\'"]' + marker + '[\'"]\\s*\\)', "g");
endMatches = endRegExp.exec(fileContents.substring(startIndex, fileContents.length));
if (endMatches) {
endMarkerIndex = startIndex + endRegExp.lastIndex - endMatches[0].length;
//Find the next line return based on the match position.
lineEndIndex = fileContents.indexOf("\n", endMarkerIndex);
if (lineEndIndex === -1) {
lineEndIndex = fileContents.length - 1;
}
//Should we include the segment?
shouldInclude = ((type === "exclude" && !isTrue) || (type === "include" && isTrue));
//Remove the conditional comments, and optionally remove the content inside
//the conditional comments.
startLength = startIndex - foundIndex;
fileContents = fileContents.substring(0, foundIndex) +
(shouldInclude ? fileContents.substring(startIndex, endMarkerIndex) : "") +
fileContents.substring(lineEndIndex + 1, fileContents.length);
//Move startIndex to foundIndex, since that is the new position in the file
//where we need to look for more conditionals in the next while loop pass.
startIndex = foundIndex;
} else {
throw "Error in file: " +
fileName +
". Cannot find end marker for conditional comment: " +
conditionLine;
}
}
}
}
//If need to find all plugin resources to optimize, do that now,
//before namespacing, since the namespacing will change the API
//names.
//If there is a plugin collector, scan the file for plugin resources.
if (config.optimizeAllPluginResources && pluginCollector) {
try {
deps = parse.findDependencies(fileName, fileContents);
if (deps.length) {
for (i = 0; i < deps.length; i++) {
dep = deps[i];
if (dep.indexOf('!') !== -1) {
moduleName = dep.split('!')[0];
collectorMod = pluginCollector[moduleName];
if (!collectorMod) {
collectorMod = pluginCollector[moduleName] = [];
}
collectorMod.push(dep);
}
}
}
} catch (eDep) {
logger.error('Parse error looking for plugin resources in ' +
fileName + ', skipping.');
}
}
//Strip amdefine use for node-shared modules.
if (!config.keepAmdefine) {
fileContents = fileContents.replace(pragma.amdefineRegExp, '');
}
//Do namespacing
if (onLifecycleName === 'OnSave' && config.namespace) {
fileContents = pragma.namespace(fileContents, config.namespace, onLifecycleName);
}
return pragma.removeStrict(fileContents, config);
}
};
return pragma;
});
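A short sketch of input source that pragma.process() above rewrites; the pragma name 'excludeDebug' and the has() test 'trace-logging' are invented for illustration.

//>>excludeStart("excludeDebug", pragmas.excludeDebug);
console.log('debug-only diagnostics');
//>>excludeEnd("excludeDebug");
if (has('trace-logging')) {
    console.log('trace');
}
//With config.pragmas = {excludeDebug: true} the block between the markers is
//removed, and with config.has = {'trace-logging': false} the has() call is
//replaced by false so a later minifier can drop the dead branch.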

195
build/jslib/prim.js Normal file

@ -0,0 +1,195 @@
/**
* prim 0.0.1 Copyright (c) 2012-2014, The Dojo Foundation All Rights Reserved.
* Available via the MIT or new BSD license.
* see: http://github.com/requirejs/prim for details
*/
/*global setImmediate, process, setTimeout, define, module */
//Set prim.hideResolutionConflict = true to allow "resolution-races"
//in promise-tests to pass.
//Since the goal of prim is to be a small impl for trusted code, it is
//more important to normally throw in this case so that we can find
//logic errors quicker.
var prim;
(function () {
'use strict';
var op = Object.prototype,
hasOwn = op.hasOwnProperty;
function hasProp(obj, prop) {
return hasOwn.call(obj, prop);
}
/**
* Helper function for iterating over an array. If the func returns
* a true value, it will break out of the loop.
*/
function each(ary, func) {
if (ary) {
var i;
for (i = 0; i < ary.length; i += 1) {
if (ary[i]) {
func(ary[i], i, ary);
}
}
}
}
function check(p) {
if (hasProp(p, 'e') || hasProp(p, 'v')) {
if (!prim.hideResolutionConflict) {
throw new Error('Prim promise already resolved: ' +
JSON.stringify(p));
}
return false;
}
return true;
}
function notify(ary, value) {
prim.nextTick(function () {
each(ary, function (item) {
item(value);
});
});
}
prim = function prim() {
var p,
ok = [],
fail = [];
return (p = {
callback: function (yes, no) {
if (no) {
p.errback(no);
}
if (hasProp(p, 'v')) {
prim.nextTick(function () {
yes(p.v);
});
} else {
ok.push(yes);
}
},
errback: function (no) {
if (hasProp(p, 'e')) {
prim.nextTick(function () {
no(p.e);
});
} else {
fail.push(no);
}
},
finished: function () {
return hasProp(p, 'e') || hasProp(p, 'v');
},
rejected: function () {
return hasProp(p, 'e');
},
resolve: function (v) {
if (check(p)) {
p.v = v;
notify(ok, v);
}
return p;
},
reject: function (e) {
if (check(p)) {
p.e = e;
notify(fail, e);
}
return p;
},
start: function (fn) {
p.resolve();
return p.promise.then(fn);
},
promise: {
then: function (yes, no) {
var next = prim();
p.callback(function (v) {
try {
if (yes && typeof yes === 'function') {
v = yes(v);
}
if (v && v.then) {
v.then(next.resolve, next.reject);
} else {
next.resolve(v);
}
} catch (e) {
next.reject(e);
}
}, function (e) {
var err;
try {
if (!no || typeof no !== 'function') {
next.reject(e);
} else {
err = no(e);
if (err && err.then) {
err.then(next.resolve, next.reject);
} else {
next.resolve(err);
}
}
} catch (e2) {
next.reject(e2);
}
});
return next.promise;
},
fail: function (no) {
return p.promise.then(null, no);
},
end: function () {
p.errback(function (e) {
throw e;
});
}
}
});
};
prim.serial = function (ary) {
var result = prim().resolve().promise;
each(ary, function (item) {
result = result.then(function () {
return item();
});
});
return result;
};
prim.nextTick = typeof setImmediate === 'function' ? setImmediate :
(typeof process !== 'undefined' && process.nextTick ?
process.nextTick : (typeof setTimeout !== 'undefined' ?
function (fn) {
setTimeout(fn, 0);
} : function (fn) {
fn();
}));
if (typeof define === 'function' && define.amd) {
define(function () { return prim; });
} else if (typeof module !== 'undefined' && module.exports) {
module.exports = prim;
}
}());
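A minimal sketch of the prim API as it is used throughout the build; the file names are made up.

define(['prim', 'env!env/file'], function (prim, file) {
    function readConfig(path) {
        var d = prim();
        try {
            d.resolve(JSON.parse(file.readFile(path)));
        } catch (e) {
            d.reject(e);
        }
        return d.promise;
    }

    //serial() runs the functions one after another and resolves with the
    //value returned by the final step.
    return prim.serial([
        function () { return readConfig('a.json'); },
        function () { return readConfig('b.json'); }
    ]).then(function (lastConfig) {
        return lastConfig;
    });
});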

523
build/jslib/requirePatch.js Normal file

@ -0,0 +1,523 @@
/*
* This file patches require.js to communicate with the build system.
*/
//Using sloppy since this uses eval for some code like plugins,
//which may not be strict mode compliant. So if use strict is used
//below they will have strict rules applied and may cause an error.
/*jslint sloppy: true, nomen: true, plusplus: true, regexp: true */
/*global require, define: true */
//NOT asking for require as a dependency since the goal is to modify the
//global require below
define([ 'env!env/file', 'pragma', 'parse', 'lang', 'logger', 'commonJs', 'prim'], function (
file,
pragma,
parse,
lang,
logger,
commonJs,
prim
) {
var allowRun = true,
hasProp = lang.hasProp,
falseProp = lang.falseProp,
getOwn = lang.getOwn,
// Used to strip out use strict from toString()'d functions for the
// shim config since they will explicitly want to not be bound by strict,
// but some envs, notably xpcshell, add a use strict.
useStrictRegExp = /['"]use strict['"];/g,
//Absolute path if starts with /, \, or x:
absoluteUrlRegExp = /^[\/\\]|^\w:/;
//Turn off throwing on resolution conflict, that was just an older prim
//idea about finding errors early, but does not comply with how promises
//should operate.
prim.hideResolutionConflict = true;
//This method should be called when the patches to require should take hold.
return function () {
if (!allowRun) {
return;
}
allowRun = false;
var layer,
pluginBuilderRegExp = /(["']?)pluginBuilder(["']?)\s*[=\:]\s*["']([^'"\s]+)["']/,
oldNewContext = require.s.newContext,
oldDef,
//create local undefined values for module and exports,
//so that when files are evaled in this function they do not
//see the node values used for r.js
exports,
module;
/**
* Reset "global" build caches that are kept around between
* build layer builds. Useful to do when there are multiple
* top level requirejs.optimize() calls.
*/
require._cacheReset = function () {
//Stored raw text caches, used by browser use.
require._cachedRawText = {};
//Stored cached file contents for reuse in other layers.
require._cachedFileContents = {};
//Store which cached files contain a require definition.
require._cachedDefinesRequireUrls = {};
};
require._cacheReset();
/**
* Makes sure the URL is something that can be supported by the
* optimization tool.
* @param {String} url
* @returns {Boolean}
*/
require._isSupportedBuildUrl = function (url) {
//Ignore URLs with protocols, hosts or question marks, means either network
//access is needed to fetch it or it is too dynamic. Note that
//on Windows, full paths are used for some urls, which include
//the drive, like c:/something, so need to test for something other
//than just a colon.
if (url.indexOf("://") === -1 && url.indexOf("?") === -1 &&
url.indexOf('empty:') !== 0 && url.indexOf('//') !== 0) {
return true;
} else {
if (!layer.ignoredUrls[url]) {
if (url.indexOf('empty:') === -1) {
logger.info('Cannot optimize network URL, skipping: ' + url);
}
layer.ignoredUrls[url] = true;
}
return false;
}
};
function normalizeUrlWithBase(context, moduleName, url) {
//Adjust the URL if it was not transformed to use baseUrl, but only
//if the URL is not already an absolute path.
if (require.jsExtRegExp.test(moduleName) &&
!absoluteUrlRegExp.test(url)) {
url = (context.config.dir || context.config.dirBaseUrl) + url;
}
return url;
}
//Overrides the new context call to add existing tracking features.
require.s.newContext = function (name) {
var context = oldNewContext(name),
oldEnable = context.enable,
moduleProto = context.Module.prototype,
oldInit = moduleProto.init,
oldCallPlugin = moduleProto.callPlugin;
//Only do this for the context used for building.
if (name === '_') {
//For build contexts, do everything sync
context.nextTick = function (fn) {
fn();
};
context.needFullExec = {};
context.fullExec = {};
context.plugins = {};
context.buildShimExports = {};
//Override the shim exports function generator to just
//spit out strings that can be used in the stringified
//build output.
context.makeShimExports = function (value) {
var fn;
if (context.config.wrapShim) {
fn = function () {
var str = 'return ';
// If it specifies an export that is just a global
// name, no dot for a `this.` and such, then also
// attach to the global, for `var a = {}` files
// where the function closure would hide that from
// the global object.
if (value.exports && value.exports.indexOf('.') === -1) {
str += 'root.' + value.exports + ' = ';
}
if (value.init) {
str += '(' + value.init.toString()
.replace(useStrictRegExp, '') + '.apply(this, arguments))';
}
if (value.init && value.exports) {
str += ' || ';
}
if (value.exports) {
str += value.exports;
}
str += ';';
return str;
};
} else {
fn = function () {
return '(function (global) {\n' +
' return function () {\n' +
' var ret, fn;\n' +
(value.init ?
(' fn = ' + value.init.toString()
.replace(useStrictRegExp, '') + ';\n' +
' ret = fn.apply(global, arguments);\n') : '') +
(value.exports ?
' return ret || global.' + value.exports + ';\n' :
' return ret;\n') +
' };\n' +
'}(this))';
};
}
return fn;
};
context.enable = function (depMap, parent) {
var id = depMap.id,
parentId = parent && parent.map.id,
needFullExec = context.needFullExec,
fullExec = context.fullExec,
mod = getOwn(context.registry, id);
if (mod && !mod.defined) {
if (parentId && getOwn(needFullExec, parentId)) {
needFullExec[id] = depMap;
}
} else if ((getOwn(needFullExec, id) && falseProp(fullExec, id)) ||
(parentId && getOwn(needFullExec, parentId) &&
falseProp(fullExec, id))) {
context.require.undef(id);
}
return oldEnable.apply(context, arguments);
};
//Override load so that the file paths can be collected.
context.load = function (moduleName, url) {
/*jslint evil: true */
var contents, pluginBuilderMatch, builderName,
shim, shimExports;
//If the url is an empty: URL, used by the optimizer, make sure
//it is not left marked as fetched: we need to be sure to call
//load() for each module that is mapped to
//empty: so that dependencies are satisfied
//correctly.
if (url.indexOf('empty:') === 0) {
delete context.urlFetched[url];
}
//Only handle urls that can be inlined, so that means avoiding some
//URLs like ones that require network access or may be too dynamic,
//like JSONP
if (require._isSupportedBuildUrl(url)) {
//Adjust the URL if it was not transformed to use baseUrl.
url = normalizeUrlWithBase(context, moduleName, url);
//Save the module name to path and path to module name mappings.
layer.buildPathMap[moduleName] = url;
layer.buildFileToModule[url] = moduleName;
if (hasProp(context.plugins, moduleName)) {
//plugins need to have their source evaled as-is.
context.needFullExec[moduleName] = true;
}
prim().start(function () {
if (hasProp(require._cachedFileContents, url) &&
(falseProp(context.needFullExec, moduleName) ||
getOwn(context.fullExec, moduleName))) {
contents = require._cachedFileContents[url];
//If it defines require, mark it so it can be hoisted.
//Done here and in the else below, before the
//else block removes code from the contents.
//Related to #263
if (!layer.existingRequireUrl && require._cachedDefinesRequireUrls[url]) {
layer.existingRequireUrl = url;
}
} else {
//Load the file contents, process for conditionals, then
//evaluate it.
return require._cacheReadAsync(url).then(function (text) {
contents = text;
if (context.config.cjsTranslate &&
(!context.config.shim || !lang.hasProp(context.config.shim, moduleName))) {
contents = commonJs.convert(url, contents);
}
//If there is a read filter, run it now.
if (context.config.onBuildRead) {
contents = context.config.onBuildRead(moduleName, url, contents);
}
contents = pragma.process(url, contents, context.config, 'OnExecute');
//Find out if the file contains a require() definition. Need to know
//this so we can inject plugins right after it, but before they are needed,
//and to make sure this file is first, so that define calls work.
try {
if (!layer.existingRequireUrl && parse.definesRequire(url, contents)) {
layer.existingRequireUrl = url;
require._cachedDefinesRequireUrls[url] = true;
}
} catch (e1) {
throw new Error('Parse error using esprima ' +
'for file: ' + url + '\n' + e1);
}
}).then(function () {
if (hasProp(context.plugins, moduleName)) {
//This is a loader plugin, check to see if it has a build extension,
//otherwise the plugin will act as the plugin builder too.
pluginBuilderMatch = pluginBuilderRegExp.exec(contents);
if (pluginBuilderMatch) {
//Load the plugin builder for the plugin contents.
builderName = context.makeModuleMap(pluginBuilderMatch[3],
context.makeModuleMap(moduleName),
null,
true).id;
return require._cacheReadAsync(context.nameToUrl(builderName));
}
}
return contents;
}).then(function (text) {
contents = text;
//Parse out the require and define calls.
//Do this even for plugins in case they have their own
//dependencies that may be separate from how the pluginBuilder works.
try {
if (falseProp(context.needFullExec, moduleName)) {
contents = parse(moduleName, url, contents, {
insertNeedsDefine: true,
has: context.config.has,
findNestedDependencies: context.config.findNestedDependencies
});
}
} catch (e2) {
throw new Error('Parse error using esprima ' +
'for file: ' + url + '\n' + e2);
}
require._cachedFileContents[url] = contents;
});
}
}).then(function () {
if (contents) {
eval(contents);
}
try {
//If have a string shim config, and this is
//a fully executed module, try to see if
//it created a variable in this eval scope
if (getOwn(context.needFullExec, moduleName)) {
shim = getOwn(context.config.shim, moduleName);
if (shim && shim.exports) {
shimExports = eval(shim.exports);
if (typeof shimExports !== 'undefined') {
context.buildShimExports[moduleName] = shimExports;
}
}
}
//Need to close out completion of this module
//so that listeners will get notified that it is available.
context.completeLoad(moduleName);
} catch (e) {
//Track which module could not complete loading.
if (!e.moduleTree) {
e.moduleTree = [];
}
e.moduleTree.push(moduleName);
throw e;
}
}).then(null, function (eOuter) {
if (!eOuter.fileName) {
eOuter.fileName = url;
}
throw eOuter;
}).end();
} else {
//With unsupported URLs still need to call completeLoad to
//finish loading.
context.completeLoad(moduleName);
}
};
//Marks the module as having a name, and optionally executes the
//callback, but only if it meets certain criteria.
context.execCb = function (name, cb, args, exports) {
var buildShimExports = getOwn(layer.context.buildShimExports, name);
if (buildShimExports) {
return buildShimExports;
} else if (cb.__requireJsBuild || getOwn(layer.context.needFullExec, name)) {
return cb.apply(exports, args);
}
return undefined;
};
moduleProto.init = function (depMaps) {
if (context.needFullExec[this.map.id]) {
lang.each(depMaps, lang.bind(this, function (depMap) {
if (typeof depMap === 'string') {
depMap = context.makeModuleMap(depMap,
(this.map.isDefine ? this.map : this.map.parentMap),
false, true);
}
if (!context.fullExec[depMap.id]) {
context.require.undef(depMap.id);
}
}));
}
return oldInit.apply(this, arguments);
};
moduleProto.callPlugin = function () {
var map = this.map,
pluginMap = context.makeModuleMap(map.prefix),
pluginId = pluginMap.id,
pluginMod = getOwn(context.registry, pluginId);
context.plugins[pluginId] = true;
context.needFullExec[pluginId] = map;
//If the module is not waiting to finish being defined,
//undef it and start over, to get full execution.
if (falseProp(context.fullExec, pluginId) && (!pluginMod || pluginMod.defined)) {
context.require.undef(pluginMap.id);
}
return oldCallPlugin.apply(this, arguments);
};
}
return context;
};
//Clear up the existing context so that the newContext modifications
//above will be active.
delete require.s.contexts._;
/** Reset state for each build layer pass. */
require._buildReset = function () {
var oldContext = require.s.contexts._;
//Clear up the existing context.
delete require.s.contexts._;
//Set up new context, so the layer object can hold onto it.
require({});
layer = require._layer = {
buildPathMap: {},
buildFileToModule: {},
buildFilePaths: [],
pathAdded: {},
modulesWithNames: {},
needsDefine: {},
existingRequireUrl: "",
ignoredUrls: {},
context: require.s.contexts._
};
//Return the previous context in case it is needed, like for
//the basic config object.
return oldContext;
};
require._buildReset();
//Override define() to catch modules that just define an object, so that
//a dummy define call is not put in the build file for them. They do
//not end up getting defined via context.execCb, so we need to catch them
//at the define call.
oldDef = define;
//This function signature does not have to be exact, just match what we
//are looking for.
define = function (name) {
if (typeof name === "string" && falseProp(layer.needsDefine, name)) {
layer.modulesWithNames[name] = true;
}
return oldDef.apply(require, arguments);
};
define.amd = oldDef.amd;
//Add some utilities for plugins
require._readFile = file.readFile;
require._fileExists = function (path) {
return file.exists(path);
};
//Called when execManager runs for a dependency. Used to figure out
//the order of execution.
require.onResourceLoad = function (context, map) {
var id = map.id,
url;
// Fix up any maps that need to be normalized as part of the fullExec
// plumbing for plugins to participate in the build.
if (context.plugins && lang.hasProp(context.plugins, id)) {
lang.eachProp(context.needFullExec, function(value, prop) {
// For plugin entries themselves, they do not have a map
// value in needFullExec, just a "true" entry.
if (value !== true && value.prefix === id && value.unnormalized) {
var map = context.makeModuleMap(value.originalName, value.parentMap);
context.needFullExec[map.id] = map;
}
});
}
//If build needed a full execution, indicate it
//has been done now. But only do it if the context is tracking
//that. Only valid for the context used in a build, not for
//other contexts being run, like for useLib, plain requirejs
//use in node/rhino.
if (context.needFullExec && getOwn(context.needFullExec, id)) {
context.fullExec[id] = map;
}
//A plugin.
if (map.prefix) {
if (falseProp(layer.pathAdded, id)) {
layer.buildFilePaths.push(id);
//For plugins the real path is not knowable, use the name
//for both module to file and file to module mappings.
layer.buildPathMap[id] = id;
layer.buildFileToModule[id] = id;
layer.modulesWithNames[id] = true;
layer.pathAdded[id] = true;
}
} else if (map.url && require._isSupportedBuildUrl(map.url)) {
//If the url has not been added to the layer yet, and it
//is from an actual file that was loaded, add it now.
url = normalizeUrlWithBase(context, id, map.url);
if (!layer.pathAdded[url] && getOwn(layer.buildPathMap, id)) {
//Remember the list of dependencies for this layer.
layer.buildFilePaths.push(url);
layer.pathAdded[url] = true;
}
}
};
//Called by the output of the parse() function when a file does not
//explicitly call define(), probably just require(); the parse()
//function normalizes on define() so that dependency mapping and
//file ordering work correctly.
require.needsDefine = function (moduleName) {
layer.needsDefine[moduleName] = true;
};
};
});
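The makeShimExports override above turns each shim config entry into a string that gets pasted into the stringified build output. As a hedged illustration only (not part of the upstream file), the sketch below reproduces the wrapShim branch as a stand-alone function and shows the kind of string it emits; the `Backbone` shim entry and the local useStrictRegExp are assumptions made for the example.

```javascript
// Stand-alone sketch of the wrapShim string generation shown above.
// The shim entry and the regexp below are assumptions for illustration.
var useStrictRegExp = /['"]use strict['"];/g;

function wrapShimExportString(value) {
    var str = 'return ';
    // Attach plain global names to `root` as well, mirroring the override.
    if (value.exports && value.exports.indexOf('.') === -1) {
        str += 'root.' + value.exports + ' = ';
    }
    if (value.init) {
        str += '(' + value.init.toString()
            .replace(useStrictRegExp, '') + '.apply(this, arguments))';
    }
    if (value.init && value.exports) {
        str += ' || ';
    }
    if (value.exports) {
        str += value.exports;
    }
    return str + ';';
}

console.log(wrapShimExportString({
    exports: 'Backbone',
    init: function () { return this.Backbone.noConflict(); }
}));
// Prints roughly:
// return root.Backbone = (function () { return this.Backbone.noConflict(); }
//     .apply(this, arguments)) || Backbone;
```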

13
build/jslib/rhino.js Normal file
View File

@ -0,0 +1,13 @@
/*global require: false, java: false, load: false */
(function () {
'use strict';
require.load = function (context, moduleName, url) {
load(url);
//Support anonymous modules.
context.completeLoad(moduleName);
};
}());

15
build/jslib/rhino/args.js Normal file
View File

@ -0,0 +1,15 @@
/*jslint strict: false */
/*global define: false, process: false */
var jsLibRhinoArgs = (typeof rhinoArgs !== 'undefined' && rhinoArgs) || [].concat(Array.prototype.slice.call(arguments, 0));
define(function () {
var args = jsLibRhinoArgs;
//Ignore any command option used for main x.js branching
if (args[0] && args[0].indexOf('-') === 0) {
args = args.slice(1);
}
return args;
});

View File

@ -0,0 +1,7 @@
/*jslint strict: false */
/*global define: false, load: false */
//Just a stub for use with uglify's consolidator.js
define(function () {
return {};
});

295
build/jslib/rhino/file.js Normal file
View File

@ -0,0 +1,295 @@
//Helper functions to deal with file I/O.
/*jslint plusplus: false */
/*global java: false, define: false */
define(['prim'], function (prim) {
var file = {
backSlashRegExp: /\\/g,
exclusionRegExp: /^\./,
getLineSeparator: function () {
return file.lineSeparator;
},
lineSeparator: java.lang.System.getProperty("line.separator"), //Java String
exists: function (fileName) {
return (new java.io.File(fileName)).exists();
},
parent: function (fileName) {
return file.absPath((new java.io.File(fileName)).getParentFile());
},
normalize: function (fileName) {
return file.absPath(fileName);
},
isFile: function (path) {
return (new java.io.File(path)).isFile();
},
isDirectory: function (path) {
return (new java.io.File(path)).isDirectory();
},
/**
* Gets the absolute file path as a string, normalized
* to using front slashes for path separators.
* @param {java.io.File|String} fileObj
*/
absPath: function (fileObj) {
if (typeof fileObj === "string") {
fileObj = new java.io.File(fileObj);
}
return (fileObj.getCanonicalPath() + "").replace(file.backSlashRegExp, "/");
},
getFilteredFileList: function (/*String*/startDir, /*RegExp*/regExpFilters, /*boolean?*/makeUnixPaths, /*boolean?*/startDirIsJavaObject) {
//summary: Recurses startDir and finds matches to the files that match regExpFilters.include
//and do not match regExpFilters.exclude. Or just one regexp can be passed in for regExpFilters,
//and it will be treated as the "include" case.
//Ignores files/directories that start with a period (.) unless exclusionRegExp
//is set to another value.
var files = [], topDir, regExpInclude, regExpExclude, dirFileArray,
i, fileObj, filePath, ok, dirFiles;
topDir = startDir;
if (!startDirIsJavaObject) {
topDir = new java.io.File(startDir);
}
regExpInclude = regExpFilters.include || regExpFilters;
regExpExclude = regExpFilters.exclude || null;
if (topDir.exists()) {
dirFileArray = topDir.listFiles();
for (i = 0; i < dirFileArray.length; i++) {
fileObj = dirFileArray[i];
if (fileObj.isFile()) {
filePath = fileObj.getPath();
if (makeUnixPaths) {
//Make sure we have a JS string.
filePath = String(filePath);
if (filePath.indexOf("/") === -1) {
filePath = filePath.replace(/\\/g, "/");
}
}
ok = true;
if (regExpInclude) {
ok = filePath.match(regExpInclude);
}
if (ok && regExpExclude) {
ok = !filePath.match(regExpExclude);
}
if (ok && (!file.exclusionRegExp ||
!file.exclusionRegExp.test(fileObj.getName()))) {
files.push(filePath);
}
} else if (fileObj.isDirectory() &&
(!file.exclusionRegExp || !file.exclusionRegExp.test(fileObj.getName()))) {
dirFiles = this.getFilteredFileList(fileObj, regExpFilters, makeUnixPaths, true);
//Do not use push.apply for dir listings, can hit limit of max number
//of arguments to a function call, #921.
dirFiles.forEach(function (dirFile) {
files.push(dirFile);
});
}
}
}
return files; //Array
},
copyDir: function (/*String*/srcDir, /*String*/destDir, /*RegExp?*/regExpFilter, /*boolean?*/onlyCopyNew) {
//summary: copies files from srcDir to destDir using the regExpFilter to determine if the
//file should be copied. Returns a list of file name strings of the destinations that were copied.
regExpFilter = regExpFilter || /\w/;
var fileNames = file.getFilteredFileList(srcDir, regExpFilter, true),
copiedFiles = [], i, srcFileName, destFileName;
for (i = 0; i < fileNames.length; i++) {
srcFileName = fileNames[i];
destFileName = srcFileName.replace(srcDir, destDir);
if (file.copyFile(srcFileName, destFileName, onlyCopyNew)) {
copiedFiles.push(destFileName);
}
}
return copiedFiles.length ? copiedFiles : null; //Array or null
},
copyFile: function (/*String*/srcFileName, /*String*/destFileName, /*boolean?*/onlyCopyNew) {
//summary: copies srcFileName to destFileName. If onlyCopyNew is set, it only copies the file if
//srcFileName is newer than destFileName. Returns a boolean indicating if the copy occurred.
var destFile = new java.io.File(destFileName), srcFile, parentDir,
srcChannel, destChannel;
//logger.trace("Src filename: " + srcFileName);
//logger.trace("Dest filename: " + destFileName);
//If onlyCopyNew is true, then compare dates and only copy if the src is newer
//than dest.
if (onlyCopyNew) {
srcFile = new java.io.File(srcFileName);
if (destFile.exists() && destFile.lastModified() >= srcFile.lastModified()) {
return false; //Boolean
}
}
//Make sure destination dir exists.
parentDir = destFile.getParentFile();
if (!parentDir.exists()) {
if (!parentDir.mkdirs()) {
throw "Could not create directory: " + parentDir.getCanonicalPath();
}
}
//Java's version of copy file.
srcChannel = new java.io.FileInputStream(srcFileName).getChannel();
destChannel = new java.io.FileOutputStream(destFileName).getChannel();
destChannel.transferFrom(srcChannel, 0, srcChannel.size());
srcChannel.close();
destChannel.close();
return true; //Boolean
},
/**
* Renames a file. May fail if "to" already exists or is on another drive.
*/
renameFile: function (from, to) {
return (new java.io.File(from)).renameTo((new java.io.File(to)));
},
readFile: function (/*String*/path, /*String?*/encoding) {
//A file read function that can deal with BOMs
encoding = encoding || "utf-8";
var fileObj = new java.io.File(path),
input = new java.io.BufferedReader(new java.io.InputStreamReader(new java.io.FileInputStream(fileObj), encoding)),
stringBuffer, line;
try {
stringBuffer = new java.lang.StringBuffer();
line = input.readLine();
// Byte Order Mark (BOM) - The Unicode Standard, version 3.0, page 324
// http://www.unicode.org/faq/utf_bom.html
// Note that when we use utf-8, the BOM should appear as "EF BB BF", but it doesn't due to this bug in the JDK:
// http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4508058
if (line && line.length() && line.charAt(0) === 0xfeff) {
// Eat the BOM, since we've already found the encoding on this file,
// and we plan to concatenate this buffer with others; the BOM should
// only appear at the top of a file.
line = line.substring(1);
}
while (line !== null) {
stringBuffer.append(line);
stringBuffer.append(file.lineSeparator);
line = input.readLine();
}
//Make sure we return a JavaScript string and not a Java string.
return String(stringBuffer.toString()); //String
} finally {
input.close();
}
},
readFileAsync: function (path, encoding) {
var d = prim();
try {
d.resolve(file.readFile(path, encoding));
} catch (e) {
d.reject(e);
}
return d.promise;
},
saveUtf8File: function (/*String*/fileName, /*String*/fileContents) {
//summary: saves a file using UTF-8 encoding.
file.saveFile(fileName, fileContents, "utf-8");
},
saveFile: function (/*String*/fileName, /*String*/fileContents, /*String?*/encoding) {
//summary: saves a file.
var outFile = new java.io.File(fileName), outWriter, parentDir, os;
parentDir = outFile.getAbsoluteFile().getParentFile();
if (!parentDir.exists()) {
if (!parentDir.mkdirs()) {
throw "Could not create directory: " + parentDir.getAbsolutePath();
}
}
if (encoding) {
outWriter = new java.io.OutputStreamWriter(new java.io.FileOutputStream(outFile), encoding);
} else {
outWriter = new java.io.OutputStreamWriter(new java.io.FileOutputStream(outFile));
}
os = new java.io.BufferedWriter(outWriter);
try {
//If in Nashorn, need to coerce the JS string to a Java string so that
//the writer.write method dispatch correctly detects the type.
if (typeof importPackage !== 'undefined') {
os.write(fileContents);
} else {
os.write(new java.lang.String(fileContents));
}
} finally {
os.close();
}
},
deleteFile: function (/*String*/fileName) {
//summary: deletes a file or directory if it exists.
var fileObj = new java.io.File(fileName), files, i;
if (fileObj.exists()) {
if (fileObj.isDirectory()) {
files = fileObj.listFiles();
for (i = 0; i < files.length; i++) {
this.deleteFile(files[i]);
}
}
fileObj["delete"]();
}
},
/**
* Deletes any empty directories under the given directory.
* The startDirIsJavaObject parameter is private to this implementation's
* recursion needs.
*/
deleteEmptyDirs: function (startDir, startDirIsJavaObject) {
var topDir = startDir,
dirFileArray, i, fileObj;
if (!startDirIsJavaObject) {
topDir = new java.io.File(startDir);
}
if (topDir.exists()) {
dirFileArray = topDir.listFiles();
for (i = 0; i < dirFileArray.length; i++) {
fileObj = dirFileArray[i];
if (fileObj.isDirectory()) {
file.deleteEmptyDirs(fileObj, true);
}
}
//If the directory is empty now, delete it.
if (topDir.listFiles().length === 0) {
file.deleteFile(String(topDir.getPath()));
}
}
}
};
return file;
});
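For orientation, here is a minimal usage sketch of the file helper defined above. It assumes a Rhino/Nashorn shell where the `java` global exists, that the module is reachable as 'env!env/file' (the binding used by rhino/optimize.js below), and that a console shim is present (x.js defines one when missing); the paths are invented.

```javascript
// Hypothetical usage of the Rhino file helper; paths are assumptions.
define(['env!env/file'], function (file) {
    // Copy only .js files, and only those newer than their destination.
    var copied = file.copyDir('src', 'dist', /\.js$/, true) || [];
    copied.forEach(function (dest) {
        console.log('copied: ' + dest);
    });

    // Read a file (UTF-8 by default, BOM-aware) and write it back out.
    var text = file.readFile('dist/main.js');
    file.saveUtf8File('dist/main.js', '//built\n' + text);

    return copied.length;
});
```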

View File

@ -0,0 +1,6 @@
/*jslint strict: false */
/*global define: false, load: false */
define(function () {
return load;
});

View File

@ -0,0 +1,186 @@
/*jslint sloppy: true, plusplus: true */
/*global define, java, Packages, com */
define(['logger', 'env!env/file'], function (logger, file) {
//Add .reduce to Rhino so UglifyJS can run in Rhino,
//inspired by https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/Array/reduce
//but rewritten for brevity, and to be good enough for use by UglifyJS.
if (!Array.prototype.reduce) {
Array.prototype.reduce = function (fn /*, initialValue */) {
var i = 0,
length = this.length,
accumulator;
if (arguments.length >= 2) {
accumulator = arguments[1];
} else {
if (length) {
while (!(i in this)) {
i++;
}
accumulator = this[i++];
}
}
for (; i < length; i++) {
if (i in this) {
accumulator = fn.call(undefined, accumulator, this[i], i, this);
}
}
return accumulator;
};
}
var JSSourceFilefromCode, optimize,
mapRegExp = /"file":"[^"]+"/;
//Bind to Closure compiler, but if it is not available, do not sweat it.
try {
// Try older closure compiler that worked on Java 6
JSSourceFilefromCode = java.lang.Class.forName('com.google.javascript.jscomp.JSSourceFile').getMethod('fromCode', [java.lang.String, java.lang.String]);
} catch (e) {
try {
// Try for newer closure compiler that needs Java 7+
JSSourceFilefromCode = java.lang.Class.forName('com.google.javascript.jscomp.SourceFile').getMethod('fromCode', [java.lang.String, java.lang.String]);
} catch (e) {
try {
// Try Nashorn style
var stringClass = Java.type("java.lang.String").class;
JSSourceFilefromCode = Java.type("com.google.javascript.jscomp.SourceFile").class.getMethod("fromCode", [stringClass, stringClass]);
} catch (e) {}
}
}
//Helper for closure compiler, because of weird Java-JavaScript interactions.
function closurefromCode(filename, content) {
return JSSourceFilefromCode.invoke(null, [filename, content]);
}
function getFileWriter(fileName, encoding) {
var outFile = new java.io.File(fileName), outWriter, parentDir;
parentDir = outFile.getAbsoluteFile().getParentFile();
if (!parentDir.exists()) {
if (!parentDir.mkdirs()) {
throw "Could not create directory: " + parentDir.getAbsolutePath();
}
}
if (encoding) {
outWriter = new java.io.OutputStreamWriter(new java.io.FileOutputStream(outFile), encoding);
} else {
outWriter = new java.io.OutputStreamWriter(new java.io.FileOutputStream(outFile));
}
return new java.io.BufferedWriter(outWriter);
}
optimize = {
closure: function (fileName, fileContents, outFileName, keepLines, config) {
config = config || {};
var result, mappings, optimized, compressed, baseName, writer,
outBaseName, outFileNameMap, outFileNameMapContent,
srcOutFileName, concatNameMap,
jscomp = Packages.com.google.javascript.jscomp,
flags = Packages.com.google.common.flags,
//Set up source input
jsSourceFile = closurefromCode(String(fileName), String(fileContents)),
sourceListArray = new java.util.ArrayList(),
externList = new java.util.ArrayList(),
options, option, FLAG_compilation_level, compiler, externExportsPath,
Compiler = Packages.com.google.javascript.jscomp.Compiler,
CommandLineRunner = Packages.com.google.javascript.jscomp.CommandLineRunner;
logger.trace("Minifying file: " + fileName);
baseName = (new java.io.File(fileName)).getName();
//Set up options
options = new jscomp.CompilerOptions();
for (option in config.CompilerOptions) {
// options are false by default and jslint wanted an if statement in this for loop
if (config.CompilerOptions[option]) {
options[option] = config.CompilerOptions[option];
}
}
options.prettyPrint = keepLines || options.prettyPrint;
FLAG_compilation_level = jscomp.CompilationLevel[config.CompilationLevel || 'SIMPLE_OPTIMIZATIONS'];
FLAG_compilation_level.setOptionsForCompilationLevel(options);
if (config.generateSourceMaps) {
mappings = new java.util.ArrayList();
mappings.add(new com.google.javascript.jscomp.SourceMap.LocationMapping(fileName, baseName + ".src.js"));
options.setSourceMapLocationMappings(mappings);
options.setSourceMapOutputPath(fileName + ".map");
}
//If we need to pass an externs file to Closure so that it does not create aliases
//for certain symbols, do so here.
externList.addAll(CommandLineRunner.getDefaultExterns());
if (config.externExportsPath) {
externExportsPath = config.externExportsPath;
externList.add(jscomp.SourceFile.fromFile(externExportsPath));
}
//Trigger the compiler
Compiler.setLoggingLevel(Packages.java.util.logging.Level[config.loggingLevel || 'WARNING']);
compiler = new Compiler();
//fill the sourceListArray; we need the ArrayList because the only overload of compile
//accepting the getDefaultExterns return value (a List) also wants the sources as a List
sourceListArray.add(jsSourceFile);
result = compiler.compile(externList, sourceListArray, options);
if (result.success) {
optimized = String(compiler.toSource());
if (config.generateSourceMaps && result.sourceMap && outFileName) {
outBaseName = (new java.io.File(outFileName)).getName();
srcOutFileName = outFileName + ".src.js";
outFileNameMap = outFileName + ".map";
//If previous .map file exists, move it to the ".src.js"
//location. Need to update the sourceMappingURL part in the
//src.js file too.
if (file.exists(outFileNameMap)) {
concatNameMap = outFileNameMap.replace(/\.map$/, '.src.js.map');
file.saveFile(concatNameMap, file.readFile(outFileNameMap));
file.saveFile(srcOutFileName,
fileContents.replace(/\/\# sourceMappingURL=(.+).map/,
'/# sourceMappingURL=$1.src.js.map'));
} else {
file.saveUtf8File(srcOutFileName, fileContents);
}
writer = getFileWriter(outFileNameMap, "utf-8");
result.sourceMap.appendTo(writer, outFileName);
writer.close();
//Not sure how better to do this, but right now the .map file
//leaks the full OS path in the "file" property. Manually
//modify it to not do that.
file.saveFile(outFileNameMap,
file.readFile(outFileNameMap).replace(mapRegExp, '"file":"' + baseName + '"'));
fileContents = optimized + "\n//# sourceMappingURL=" + outBaseName + ".map";
} else {
fileContents = optimized;
}
return fileContents;
} else {
throw new Error('Cannot closure compile file: ' + fileName + '. Skipping it.');
}
return fileContents;
}
};
return optimize;
});
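The closure() function above is normally reached through the optimizer's `optimize: 'closure'` setting rather than called directly. A hedged sketch of a build profile that would exercise it under Rhino follows; the option names mirror the optimizer's documented closure options, while the paths and values are assumptions.

```javascript
// Hypothetical build profile; run under Rhino with Closure Compiler on
// the classpath, in the style of build/tests/allj.sh further down:
//   java -classpath js.jar:compiler.jar \
//       org.mozilla.javascript.tools.shell.Main ../../r.js -o build.js
({
    baseUrl: 'js',
    name: 'main',
    out: 'built/main.js',
    optimize: 'closure',
    generateSourceMaps: false,
    closure: {
        CompilerOptions: {},
        CompilationLevel: 'SIMPLE_OPTIMIZATIONS',
        loggingLevel: 'WARNING'
    }
})
```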

View File

@ -0,0 +1,6 @@
/*jslint strict: false */
/*global define: false, print: false */
define(function () {
return print;
});

View File

@ -0,0 +1,7 @@
/*global quit */
define(function () {
'use strict';
return function (code) {
return quit(code);
};
});

3056
build/jslib/source-map.js Normal file

File diff suppressed because it is too large

438
build/jslib/transform.js Normal file
View File

@ -0,0 +1,438 @@
/*global define */
define([ './esprimaAdapter', './parse', 'logger', 'lang'],
function (esprima, parse, logger, lang) {
'use strict';
var transform,
baseIndentRegExp = /^([ \t]+)/,
indentRegExp = /\{[\r\n]+([ \t]+)/,
keyRegExp = /^[_A-Za-z]([A-Za-z\d_]*)$/,
bulkIndentRegExps = {
'\n': /\n/g,
'\r\n': /\r\n/g
};
function applyIndent(str, indent, lineReturn) {
var regExp = bulkIndentRegExps[lineReturn];
return str.replace(regExp, '$&' + indent);
}
transform = {
toTransport: function (namespace, moduleName, path, contents, onFound, options) {
options = options || {};
var astRoot, contentLines, modLine,
foundAnon,
scanCount = 0,
scanReset = false,
defineInfos = [],
applySourceUrl = function (contents) {
if (options.useSourceUrl) {
contents = 'eval("' + lang.jsEscape(contents) +
'\\n//# sourceURL=' + (path.indexOf('/') === 0 ? '' : '/') +
path +
'");\n';
}
return contents;
};
try {
astRoot = esprima.parse(contents, {
loc: true
});
} catch (e) {
logger.trace('toTransport skipping ' + path + ': ' +
e.toString());
return contents;
}
//Find the define calls and their position in the files.
parse.traverse(astRoot, function (node) {
var args, firstArg, firstArgLoc, factoryNode,
needsId, depAction, foundId, init,
sourceUrlData, range,
namespaceExists = false;
// If this is a bundle script with a define variable declaration,
// do not parse any further at this level. It is likely a built
// layer produced by some other tool.
if (node.type === 'VariableDeclarator' &&
node.id && node.id.name === 'define' &&
node.id.type === 'Identifier') {
init = node.init;
if (init && init.callee &&
init.callee.type === 'CallExpression' &&
init.callee.callee &&
init.callee.callee.type === 'Identifier' &&
init.callee.callee.name === 'require' &&
init.callee.arguments && init.callee.arguments.length === 1 &&
init.callee.arguments[0].type === 'Literal' &&
init.callee.arguments[0].value &&
init.callee.arguments[0].value.indexOf('amdefine') !== -1) {
// the var define = require('amdefine')(module) case,
// keep going in that case.
} else {
return false;
}
}
namespaceExists = namespace &&
node.type === 'CallExpression' &&
node.callee && node.callee.object &&
node.callee.object.type === 'Identifier' &&
node.callee.object.name === namespace &&
node.callee.property.type === 'Identifier' &&
node.callee.property.name === 'define';
if (namespaceExists || parse.isDefineNodeWithArgs(node)) {
//The arguments are where it's at.
args = node.arguments;
if (!args || !args.length) {
return;
}
firstArg = args[0];
firstArgLoc = firstArg.loc;
if (args.length === 1) {
if (firstArg.type === 'Identifier') {
//The define(factory) case, but
//only allow it if one Identifier arg,
//to limit impact of false positives.
needsId = true;
depAction = 'empty';
} else if (parse.isFnExpression(firstArg)) {
//define(function(){})
factoryNode = firstArg;
needsId = true;
depAction = 'scan';
} else if (firstArg.type === 'ObjectExpression') {
//define({});
needsId = true;
depAction = 'skip';
} else if (firstArg.type === 'Literal' &&
typeof firstArg.value === 'number') {
//define(12345);
needsId = true;
depAction = 'skip';
} else if (firstArg.type === 'UnaryExpression' &&
firstArg.operator === '-' &&
firstArg.argument &&
firstArg.argument.type === 'Literal' &&
typeof firstArg.argument.value === 'number') {
//define(-12345);
needsId = true;
depAction = 'skip';
} else if (firstArg.type === 'MemberExpression' &&
firstArg.object &&
firstArg.property &&
firstArg.property.type === 'Identifier') {
//define(this.key);
needsId = true;
depAction = 'empty';
}
} else if (firstArg.type === 'ArrayExpression') {
//define([], ...);
needsId = true;
depAction = 'skip';
} else if (firstArg.type === 'Literal' &&
typeof firstArg.value === 'string') {
//define('string', ....)
//Already has an ID.
needsId = false;
if (args.length === 2 &&
parse.isFnExpression(args[1])) {
//Needs dependency scanning.
factoryNode = args[1];
depAction = 'scan';
} else {
depAction = 'skip';
}
} else {
//Unknown define entity, keep looking, even
//in the subtree for this node.
return;
}
range = {
foundId: foundId,
needsId: needsId,
depAction: depAction,
namespaceExists: namespaceExists,
node: node,
defineLoc: node.loc,
firstArgLoc: firstArgLoc,
factoryNode: factoryNode,
sourceUrlData: sourceUrlData
};
//Only transform defines that do not have IDs. If a define has an
//ID but no dependency array, assume it is something like a
//phonegap implementation with its own internal define that
//cannot handle dependency array constructs; and if it is a
//named module, it has already been set to transport form.
if (range.needsId) {
if (foundAnon) {
logger.trace(path + ' has more than one anonymous ' +
'define. May be a built file from another ' +
'build system, like Ender. Skipping normalization.');
defineInfos = [];
return false;
} else {
foundAnon = range;
defineInfos.push(range);
}
} else if (depAction === 'scan') {
scanCount += 1;
if (scanCount > 1) {
//Just go back to an array that just has the
//anon one, since this is an already optimized
//file like the phonegap one.
if (!scanReset) {
defineInfos = foundAnon ? [foundAnon] : [];
scanReset = true;
}
} else {
defineInfos.push(range);
}
}
}
});
if (!defineInfos.length) {
return applySourceUrl(contents);
}
//Reverse the matches, need to start from the bottom of
//the file to modify it, so that the ranges are still true
//further up.
defineInfos.reverse();
contentLines = contents.split('\n');
modLine = function (loc, contentInsertion) {
var startIndex = loc.start.column,
//start.line is 1-based, not 0 based.
lineIndex = loc.start.line - 1,
line = contentLines[lineIndex];
contentLines[lineIndex] = line.substring(0, startIndex) +
contentInsertion +
line.substring(startIndex,
line.length);
};
defineInfos.forEach(function (info) {
var deps,
contentInsertion = '',
depString = '';
//Do the modifications "backwards", in other words, start with the
//one that is farthest down and work up, so that the ranges in the
//defineInfos still apply. So that means deps, id, then namespace.
if (info.needsId && moduleName) {
contentInsertion += "'" + moduleName + "',";
}
if (info.depAction === 'scan') {
deps = parse.getAnonDepsFromNode(info.factoryNode);
if (deps.length) {
depString = '[' + deps.map(function (dep) {
return "'" + dep + "'";
}) + ']';
} else {
depString = '[]';
}
depString += ',';
if (info.factoryNode) {
//Already have a named module, need to insert the
//dependencies after the name.
modLine(info.factoryNode.loc, depString);
} else {
contentInsertion += depString;
}
}
if (contentInsertion) {
modLine(info.firstArgLoc, contentInsertion);
}
//Do namespace last so that it does not mess up the ranges
//used above.
if (namespace && !info.namespaceExists) {
modLine(info.defineLoc, namespace + '.');
}
//Notify any listener for the found info
if (onFound) {
onFound(info);
}
});
contents = contentLines.join('\n');
return applySourceUrl(contents);
},
/**
* Modify the contents of a require.config/requirejs.config call. This
* call will LOSE any existing comments that are in the config string.
*
* @param {String} fileContents String that may contain a config call
* @param {Function} onConfig Function called when the first config
* call is found. It will be passed an Object which is the current
* config, and the onConfig function should return an Object to use
* as the config.
* @return {String} the fileContents with the config changes applied.
*/
modifyConfig: function (fileContents, onConfig) {
var details = parse.findConfig(fileContents),
config = details.config;
if (config) {
config = onConfig(config);
if (config) {
return transform.serializeConfig(config,
fileContents,
details.range[0],
details.range[1],
{
quote: details.quote
});
}
}
return fileContents;
},
serializeConfig: function (config, fileContents, start, end, options) {
//Calculate base level of indent
var indent, match, configString, outDentRegExp,
baseIndent = '',
startString = fileContents.substring(0, start),
existingConfigString = fileContents.substring(start, end),
lineReturn = existingConfigString.indexOf('\r') === -1 ? '\n' : '\r\n',
lastReturnIndex = startString.lastIndexOf('\n');
//Get the basic amount of indent for the require config call.
if (lastReturnIndex === -1) {
lastReturnIndex = 0;
}
match = baseIndentRegExp.exec(startString.substring(lastReturnIndex + 1, start));
if (match && match[1]) {
baseIndent = match[1];
}
//Calculate internal indentation for config
match = indentRegExp.exec(existingConfigString);
if (match && match[1]) {
indent = match[1];
}
if (!indent || indent.length < baseIndent) {
indent = ' ';
} else {
indent = indent.substring(baseIndent.length);
}
outDentRegExp = new RegExp('(' + lineReturn + ')' + indent, 'g');
configString = transform.objectToString(config, {
indent: indent,
lineReturn: lineReturn,
outDentRegExp: outDentRegExp,
quote: options && options.quote
});
//Add in the base indenting level.
configString = applyIndent(configString, baseIndent, lineReturn);
return startString + configString + fileContents.substring(end);
},
/**
* Tries converting a JS object to a string. This is a best-effort
* conversion tailored to the type of config expected in a loader config
* call: own (hasOwnProperty) fields, strings, numbers, arrays and
* functions, with no recursively referenced structures.
* @param {Object} obj the object to convert
* @param {Object} options options object with the following values:
* {String} indent the indentation to use for each level
* {String} lineReturn the type of line return to use
* {outDentRegExp} outDentRegExp the regexp to use to outdent functions
* {String} quote the quote type to use, ' or ". Optional. Default is "
* @param {String} totalIndent the total indent to print for this level
* @return {String} a string representation of the object.
*/
objectToString: function (obj, options, totalIndent) {
var startBrace, endBrace, nextIndent,
first = true,
value = '',
lineReturn = options.lineReturn,
indent = options.indent,
outDentRegExp = options.outDentRegExp,
quote = options.quote || '"';
totalIndent = totalIndent || '';
nextIndent = totalIndent + indent;
if (obj === null) {
value = 'null';
} else if (obj === undefined) {
value = 'undefined';
} else if (typeof obj === 'number' || typeof obj === 'boolean') {
value = obj;
} else if (typeof obj === 'string') {
//Use double quotes in case the config may also work as JSON.
value = quote + lang.jsEscape(obj) + quote;
} else if (lang.isArray(obj)) {
lang.each(obj, function (item, i) {
value += (i !== 0 ? ',' + lineReturn : '' ) +
nextIndent +
transform.objectToString(item,
options,
nextIndent);
});
startBrace = '[';
endBrace = ']';
} else if (lang.isFunction(obj) || lang.isRegExp(obj)) {
//The outdent regexp just helps pretty up the conversion
//in Node. Rhino strips comments and uses a different
//indent scheme for Function toString, so it is not really
//helpful there.
value = obj.toString().replace(outDentRegExp, '$1');
} else {
//An object
lang.eachProp(obj, function (v, prop) {
value += (first ? '': ',' + lineReturn) +
nextIndent +
(keyRegExp.test(prop) ? prop : quote + lang.jsEscape(prop) + quote )+
': ' +
transform.objectToString(v,
options,
nextIndent);
first = false;
});
startBrace = '{';
endBrace = '}';
}
if (startBrace) {
value = startBrace +
lineReturn +
value +
lineReturn + totalIndent +
endBrace;
}
return value;
}
};
return transform;
});
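As a hedged illustration of the two public entry points above, the sketch below names an anonymous define() via toTransport and rewrites a config call via modifyConfig. Only the function signatures come from the file above; the 'transform' module id, the sample inputs, and the commented output are assumptions.

```javascript
// Hypothetical driver for transform.toTransport and transform.modifyConfig.
define(['transform'], function (transform) {
    // Give an anonymous define() an id and a scanned dependency array.
    var named = transform.toTransport(null, 'app/main', 'app/main.js',
        "define(function (require) { var a = require('a'); return a; });");
    // named now looks roughly like:
    //   define('app/main',['require','a'],function (require) { ... });

    // Rewrite an existing config call. Per the doc comment above, any
    // comments inside the original config string are lost.
    var updated = transform.modifyConfig(
        "requirejs.config({ baseUrl: 'js' });",
        function (config) {
            config.paths = { jquery: 'lib/jquery' };
            return config;
        });

    return { named: named, updated: updated };
});
```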

10709
build/jslib/uglifyjs.js Normal file

File diff suppressed because it is too large

View File

@ -0,0 +1,63 @@
Sets up uglifyjs for use in the optimizer.
Current embedded version: 2.8.29, source-map 0.5.6
Steps:

    ./generate.sh

Then update this file with the uglifyjs version fetched.

* UPDATE VERSION NUMBERS IN X.JS
* Confirm the `raw` array in combine.js is correct.
THINGS TO CHECK:
* Compare node_modules/uglify-js/tools/node.js and what
is put in last part of the combined file.
* REMOVE these functions from the end:
* readReservedFile
* exports.readReservedFile,
* exports.readDefaultReservedFile,
* exports.simple_glob
REMOVE this section:
```javascript
// workaround for tty output truncation upon process.exit()
[process.stdout, process.stderr].forEach(function(stream){
if (stream._handle && stream._handle.setBlocking)
stream._handle.setBlocking(true);
});
```
ALSO REMOVE this section:
```javascript
var path = require("path");
var fs = require("fs");
var UglifyJS = exports;
var FILES = exports.FILES = [
"../lib/utils.js",
"../lib/ast.js",
"../lib/parse.js",
"../lib/transform.js",
"../lib/scope.js",
"../lib/output.js",
"../lib/compress.js",
"../lib/sourcemap.js",
"../lib/mozilla-ast.js",
"../lib/propmangle.js",
"./exports.js",
].map(function(file){
return require.resolve(file);
});
new Function("MOZ_SourceMap", "exports", FILES.map(function(file){
return rjsFile.readFile(file, "utf8");
}).join("\n\n"))(
require("source-map"),
UglifyJS
);
```
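The version numbers called out in this checklist live both here and in x.js. As a purely hypothetical convenience (not part of the upstream tooling), a check along these lines could catch a missed update after running generate.sh; the path and the expected string are assumptions.

```javascript
// Hypothetical post-generate sanity check, run from the repo root.
var fs = require('fs');
var expected = '2.8.29'; // the version these notes say is embedded
var xjs = fs.readFileSync('build/jslib/x.js', 'utf8');
if (xjs.indexOf('UglifyJS: ' + expected) === -1) {
    throw new Error('x.js does not mention UglifyJS ' + expected +
        '; update its version string after regenerating uglifyjs.js.');
}
console.log('x.js UglifyJS version string looks consistent.');
```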

View File

@ -0,0 +1,47 @@
/*jslint node: true, nomen: true */
var exportContents, exportIndex,
fs = require('fs'),
path = require('path'),
pkgDir = path.join(__dirname, 'temp', 'node_modules', 'uglify-js'),
pre = fs.readFileSync(__dirname + '/pre.txt', 'utf8'),
raw = [
"utils.js",
"ast.js",
"parse.js",
"transform.js",
"scope.js",
"output.js",
"compress.js",
"sourcemap.js",
"mozilla-ast.js",
"propmangle.js",
"../tools/exports.js"
].map(function (filePath) {
return fs.readFileSync(path.join(pkgDir, 'lib', filePath), 'utf8');
}).join('\n'),
post = fs.readFileSync(__dirname + '/post.txt', 'utf8'),
toolContents = fs.readFileSync(path.join(pkgDir, 'tools', 'node.js'), 'utf8');
exportContents = toolContents.replace(/UglifyJS\./g, 'exports.');
// Modify some things for the embedding:
exportContents = exportContents.replace(/fs\.readFileSync/g, 'rjsFile.readFile');
exportContents = exportContents.replace(/fs\.writeFileSync/g, 'rjsFile.writeFile');
exportContents = exportContents.replace('exports.minify = function(files, options) {', 'exports.minify = function(files, options, name) {');
exportContents = exportContents.replace('filename: options.fromString ? i : file,', 'filename: options.fromString ? name : file,');
// Node 0.10/0.12 do not like the addFile function declaration with the "use strict"
// that is used near that declaration, but not at the top of the file.
// https://github.com/requirejs/r.js/pull/929
exportContents = exportContents.replace(/function addFile\(/, 'var addFile = function(');
fs.writeFileSync(__dirname + '/../uglifyjs.js', [
pre,
raw,
'AST_Node.warn_function = function(txt) { logger.error("uglifyjs WARN: " + txt); };',
exportContents,
post
].join('\n'), 'utf8');

View File

@ -0,0 +1,11 @@
/*jslint node: true, regexp: true */
var fs = require('fs'),
path = require('path'),
fileName = process.argv[2],
umdStartRegExp = /\(function webpackUniversalModuleDefinition\(root, factory\) \{/;
var fileContents = fs.readFileSync(fileName, 'utf8');
fileContents = fileContents.replace(umdStartRegExp, '$&\nvar exports, module;');
fs.writeFileSync(fileName, fileContents, 'utf8');

View File

@ -0,0 +1,13 @@
#!/bin/bash
rm -rf ./temp
mkdir temp
cd temp
mkdir node_modules
npm install uglify-js@2.8.29
node_modules/.bin/uglifyjs --self -b -o raw.js
node ../combine.js
cp -r node_modules/source-map/dist/source-map.js ../../source-map.js
node ../fix-sourcemap.js ../../source-map.js

View File

@ -0,0 +1,2 @@
});

View File

@ -0,0 +1,3 @@
//Distributed under the BSD license:
//Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
define(['exports', 'source-map', 'logger', 'env!env/file'], function (exports, MOZ_SourceMap, logger, rjsFile) {

461
build/jslib/x.js Normal file
View File

@ -0,0 +1,461 @@
/**
* @license r.js 2.3.6 Copyright jQuery Foundation and other contributors.
* Released under MIT license, http://github.com/requirejs/r.js/LICENSE
*/
/*
* This is a bootstrap script to allow running RequireJS from the command line
* in either a Java/Rhino or Node environment. It is modified by the top-level
* dist.js file to inject other files to completely enable this file. It is
* the shell of the r.js file.
*/
/*jslint evil: true, nomen: true, sloppy: true */
/*global readFile: true, process: false, Packages: false, print: false,
console: false, java: false, module: false, requirejsVars, navigator,
document, importScripts, self, location, Components, FileUtils */
var requirejs, require, define, xpcUtil;
(function (console, args, readFileFunc) {
var fileName, env, fs, vm, path, exec, rhinoContext, dir, nodeRequire,
nodeDefine, exists, reqMain, loadedOptimizedLib, existsForNode, Cc, Ci,
version = '2.3.6',
jsSuffixRegExp = /\.js$/,
commandOption = '',
useLibLoaded = {},
//Used by jslib/rhino/args.js
rhinoArgs = args,
//Used by jslib/xpconnect/args.js
xpconnectArgs = args,
readFile = typeof readFileFunc !== 'undefined' ? readFileFunc : null;
function showHelp() {
console.log('See https://github.com/requirejs/r.js for usage.');
}
if (typeof process !== 'undefined' && process.versions && !!process.versions.node) {
env = 'node';
//Get the fs module via Node's require before it
//gets replaced. Used in require/node.js
fs = require('fs');
vm = require('vm');
path = require('path');
//In Node 0.7+ existsSync is on fs.
existsForNode = fs.existsSync || path.existsSync;
nodeRequire = require;
nodeDefine = define;
reqMain = require.main;
//Temporarily hide require and define to allow require.js to define
//them.
require = undefined;
define = undefined;
readFile = function (path) {
return fs.readFileSync(path, 'utf8');
};
exec = function (string, name) {
return vm.runInThisContext(this.requirejsVars.require.makeNodeWrapper(string),
name ? fs.realpathSync(name) : '');
};
exists = function (fileName) {
return existsForNode(fileName);
};
fileName = process.argv[2];
if (fileName && fileName.indexOf('-') === 0) {
commandOption = fileName.substring(1);
fileName = process.argv[3];
}
} else if (typeof Packages !== 'undefined') {
env = 'rhino';
fileName = args[0];
if (fileName && fileName.indexOf('-') === 0) {
commandOption = fileName.substring(1);
fileName = args[1];
}
//Exec/readFile differs between Rhino and Nashorn. Rhino has an
//importPackage where Nashorn does not, so branch on that. This is a
//coarser check -- detecting readFile existence might also be enough for
//this spot. However, sticking with importPackage to keep it the same
//as other Rhino/Nashorn detection branches.
if (typeof importPackage !== 'undefined') {
rhinoContext = Packages.org.mozilla.javascript.ContextFactory.getGlobal().enterContext();
exec = function (string, name) {
return rhinoContext.evaluateString(this, string, name, 0, null);
};
} else {
exec = function (string, name) {
load({ script: string, name: name});
};
readFile = readFully;
}
exists = function (fileName) {
return (new java.io.File(fileName)).exists();
};
//Define a console.log for easier logging. Don't
//get fancy though.
if (typeof console === 'undefined') {
console = {
log: function () {
print.apply(undefined, arguments);
}
};
}
} else if ((typeof navigator !== 'undefined' && typeof document !== 'undefined') ||
(typeof importScripts !== 'undefined' && typeof self !== 'undefined')) {
env = 'browser';
readFile = function (path) {
return fs.readFileSync(path, 'utf8');
};
exec = function (string) {
return eval(string);
};
exists = function () {
console.log('x.js exists not applicable in browser env');
return false;
};
} else if (typeof Components !== 'undefined' && Components.classes && Components.interfaces) {
env = 'xpconnect';
Components.utils['import']('resource://gre/modules/FileUtils.jsm');
Cc = Components.classes;
Ci = Components.interfaces;
fileName = args[0];
if (fileName && fileName.indexOf('-') === 0) {
commandOption = fileName.substring(1);
fileName = args[1];
}
xpcUtil = {
isWindows: ('@mozilla.org/windows-registry-key;1' in Cc),
cwd: function () {
return FileUtils.getFile("CurWorkD", []).path;
},
//Remove . and .. from paths, normalize on front slashes
normalize: function (path) {
//There has to be an easier way to do this.
var i, part, ary,
firstChar = path.charAt(0);
if (firstChar !== '/' &&
firstChar !== '\\' &&
path.indexOf(':') === -1) {
//A relative path. Use the current working directory.
path = xpcUtil.cwd() + '/' + path;
}
ary = path.replace(/\\/g, '/').split('/');
for (i = 0; i < ary.length; i += 1) {
part = ary[i];
if (part === '.') {
ary.splice(i, 1);
i -= 1;
} else if (part === '..') {
ary.splice(i - 1, 2);
i -= 2;
}
}
return ary.join('/');
},
xpfile: function (path) {
var fullPath;
try {
fullPath = xpcUtil.normalize(path);
if (xpcUtil.isWindows) {
fullPath = fullPath.replace(/\//g, '\\');
}
return new FileUtils.File(fullPath);
} catch (e) {
throw new Error((fullPath || path) + ' failed: ' + e);
}
},
readFile: function (/*String*/path, /*String?*/encoding) {
//A file read function that can deal with BOMs
encoding = encoding || "utf-8";
var inStream, convertStream,
readData = {},
fileObj = xpcUtil.xpfile(path);
//XPCOM, you so crazy
try {
inStream = Cc['@mozilla.org/network/file-input-stream;1']
.createInstance(Ci.nsIFileInputStream);
inStream.init(fileObj, 1, 0, false);
convertStream = Cc['@mozilla.org/intl/converter-input-stream;1']
.createInstance(Ci.nsIConverterInputStream);
convertStream.init(inStream, encoding, inStream.available(),
Ci.nsIConverterInputStream.DEFAULT_REPLACEMENT_CHARACTER);
convertStream.readString(inStream.available(), readData);
return readData.value;
} catch (e) {
throw new Error((fileObj && fileObj.path || '') + ': ' + e);
} finally {
if (convertStream) {
convertStream.close();
}
if (inStream) {
inStream.close();
}
}
}
};
readFile = xpcUtil.readFile;
exec = function (string) {
return eval(string);
};
exists = function (fileName) {
return xpcUtil.xpfile(fileName).exists();
};
//Define a console.log for easier logging. Don't
//get fancy though.
if (typeof console === 'undefined') {
console = {
log: function () {
print.apply(undefined, arguments);
}
};
}
}
//INSERT require.js
this.requirejsVars = {
require: require,
requirejs: require,
define: define
};
if (env === 'browser') {
//INSERT build/jslib/browser.js
} else if (env === 'rhino') {
//INSERT build/jslib/rhino.js
} else if (env === 'node') {
this.requirejsVars.nodeRequire = nodeRequire;
require.nodeRequire = nodeRequire;
//INSERT build/jslib/node.js
} else if (env === 'xpconnect') {
//INSERT build/jslib/xpconnect.js
}
//Support a default file name to execute. Useful for hosted envs
//like Joyent where it defaults to a server.js as the only executed
//script. But only do it if this is not an optimization run.
if (commandOption !== 'o' && (!fileName || !jsSuffixRegExp.test(fileName))) {
fileName = 'main.js';
}
/**
* Loads the library files that can be used for the optimizer, or for other
* tasks.
*/
function loadLib() {
//INSERT LIB
}
/**
* Sets the default baseUrl for requirejs to be directory of top level
* script.
*/
function setBaseUrl(fileName) {
//Use the file name's directory as the baseUrl if available.
dir = fileName.replace(/\\/g, '/');
if (dir.indexOf('/') !== -1) {
dir = dir.split('/');
dir.pop();
dir = dir.join('/');
//Make sure dir is JS-escaped, since it will be part of a JS string.
exec("require({baseUrl: '" + dir.replace(/[\\"']/g, '\\$&') + "'});");
}
}
function createRjsApi() {
//Create a method that will run the optimizer given an object
//config.
requirejs.optimize = function (config, callback, errback) {
if (!loadedOptimizedLib) {
loadLib();
loadedOptimizedLib = true;
}
//Create the function that will be called once build modules
//have been loaded.
var runBuild = function (build, logger, quit) {
//Make sure config has a log level, and if not,
//make it "silent" by default.
config.logLevel = config.hasOwnProperty('logLevel') ?
config.logLevel : logger.SILENT;
//Reset build internals first in case this is part
//of a long-running server process that could have
//exceptioned out in a bad state. It is only defined
//after the first call though.
if (requirejs._buildReset) {
requirejs._buildReset();
requirejs._cacheReset();
}
function done(result) {
//And clean up, in case something else triggers
//a build in another pathway.
if (requirejs._buildReset) {
requirejs._buildReset();
requirejs._cacheReset();
}
// Ensure errors get propagated to the errback
if (result instanceof Error) {
throw result;
}
return result;
}
errback = errback || function (err) {
// Using console here since logger may have
// turned off error logging. Since quit is
// called, we want to be sure a message is printed.
console.log(err);
quit(1);
};
build(config).then(done, done).then(callback, errback);
};
requirejs({
context: 'build'
}, ['build', 'logger', 'env!env/quit'], runBuild);
};
requirejs.tools = {
useLib: function (contextName, callback) {
if (!callback) {
callback = contextName;
contextName = 'uselib';
}
if (!useLibLoaded[contextName]) {
loadLib();
useLibLoaded[contextName] = true;
}
var req = requirejs({
context: contextName
});
req(['build'], function () {
callback(req);
});
}
};
requirejs.define = define;
}
//If in Node, and included via a require('requirejs'), just export and
//THROW IT ON THE GROUND!
if (env === 'node' && reqMain !== module) {
setBaseUrl(path.resolve(reqMain ? reqMain.filename : '.'));
createRjsApi();
module.exports = requirejs;
return;
} else if (env === 'browser') {
//Only option is to use the API.
setBaseUrl(location.href);
createRjsApi();
return;
} else if ((env === 'rhino' || env === 'xpconnect') &&
//User sets up requirejsAsLib variable to indicate it is loaded
//via load() to be used as a library.
typeof requirejsAsLib !== 'undefined' && requirejsAsLib) {
//This script is loaded via rhino's load() method, expose the
//API and get out.
setBaseUrl(fileName);
createRjsApi();
return;
}
if (commandOption === 'o') {
//Do the optimizer work.
loadLib();
//INSERT build/build.js
} else if (commandOption === 'v') {
console.log('r.js: ' + version +
', RequireJS: ' + this.requirejsVars.require.version +
', UglifyJS: 2.8.29');
} else if (commandOption === 'convert') {
loadLib();
this.requirejsVars.require(['env!env/args', 'commonJs', 'env!env/print'],
function (args, commonJs, print) {
var srcDir, outDir;
srcDir = args[0];
outDir = args[1];
if (!srcDir || !outDir) {
print('Usage: path/to/commonjs/modules output/dir');
return;
}
commonJs.convertDir(args[0], args[1]);
});
} else {
//Just run an app
//Load the bundled libraries for use in the app.
if (commandOption === 'lib') {
loadLib();
}
setBaseUrl(fileName);
if (exists(fileName)) {
exec(readFile(fileName), fileName);
} else {
showHelp();
}
}
}((typeof console !== 'undefined' ? console : undefined),
(typeof Packages !== 'undefined' || (typeof window === 'undefined' &&
typeof Components !== 'undefined' && Components.interfaces) ?
Array.prototype.slice.call(arguments, 0) : []),
(typeof readFile !== 'undefined' ? readFile : undefined)));
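The createRjsApi() block above is what makes `require('requirejs')` usable as a library in Node via requirejs.optimize(config, callback, errback). Below is a minimal sketch of driving it that way; the file names and the `optimize: 'none'` choice are assumptions.

```javascript
// Hypothetical Node script using r.js through the API set up in
// createRjsApi(); paths are placeholders.
var requirejs = require('requirejs');

var config = {
    baseUrl: 'www/js',
    name: 'main',
    out: 'www-built/main.js',
    optimize: 'none'            // keep the layer readable for inspection
};

requirejs.optimize(config, function (buildReport) {
    // buildReport is the text summary of what went into the layer.
    console.log(buildReport);
}, function (err) {
    console.error(err);
    process.exit(1);
});
```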

14
build/jslib/xpconnect.js Normal file
View File

@ -0,0 +1,14 @@
/*jslint */
/*global require, load */
(function () {
'use strict';
require.load = function (context, moduleName, url) {
load(url);
//Support anonymous modules.
context.completeLoad(moduleName);
};
}());

View File

@ -0,0 +1,15 @@
/*jslint strict: false */
/*global define, xpconnectArgs */
var jsLibXpConnectArgs = (typeof xpconnectArgs !== 'undefined' && xpconnectArgs) || [].concat(Array.prototype.slice.call(arguments, 0));
define(function () {
var args = jsLibXpConnectArgs;
//Ignore any command option used for main x.js branching
if (args[0] && args[0].indexOf('-') === 0) {
args = args.slice(1);
}
return args;
});

View File

@ -0,0 +1,7 @@
/*jslint strict: false */
/*global define: false, load: false */
//Just a stub for use with uglify's consolidator.js
define(function () {
return {};
});

View File

@ -0,0 +1,257 @@
//Helper functions to deal with file I/O.
/*jslint plusplus: false */
/*global define, Components, xpcUtil */
define(['prim'], function (prim) {
var file,
Cc = Components.classes,
Ci = Components.interfaces,
//Depends on xpcUtil which is set up in x.js
xpfile = xpcUtil.xpfile;
function mkFullDir(dirObj) {
//1 is DIRECTORY_TYPE, 511 is 0777 permissions
if (!dirObj.exists()) {
dirObj.create(1, 511);
}
}
file = {
backSlashRegExp: /\\/g,
exclusionRegExp: /^\./,
getLineSeparator: function () {
return file.lineSeparator;
},
lineSeparator: ('@mozilla.org/windows-registry-key;1' in Cc) ?
'\r\n' : '\n',
exists: function (fileName) {
return xpfile(fileName).exists();
},
parent: function (fileName) {
return xpfile(fileName).parent;
},
normalize: function (fileName) {
return file.absPath(fileName);
},
isFile: function (path) {
return xpfile(path).isFile();
},
isDirectory: function (path) {
return xpfile(path).isDirectory();
},
/**
* Gets the absolute file path as a string, normalized
* to using front slashes for path separators.
* @param {nsIFile|String} fileObj
*/
absPath: function (fileObj) {
if (typeof fileObj === "string") {
fileObj = xpfile(fileObj);
}
return fileObj.path;
},
getFilteredFileList: function (/*String*/startDir, /*RegExp*/regExpFilters, /*boolean?*/makeUnixPaths, /*boolean?*/startDirIsObject) {
//summary: Recurses startDir and finds the files that match regExpFilters.include
//and do not match regExpFilters.exclude. Alternatively, a single regexp can be passed in for regExpFilters,
//and it will be treated as the "include" case.
//Ignores files/directories that start with a period (.) unless exclusionRegExp
//is set to another value.
var files = [], topDir, regExpInclude, regExpExclude, dirFileArray,
fileObj, filePath, ok, dirFiles;
topDir = startDir;
if (!startDirIsObject) {
topDir = xpfile(startDir);
}
regExpInclude = regExpFilters.include || regExpFilters;
regExpExclude = regExpFilters.exclude || null;
if (topDir.exists()) {
dirFileArray = topDir.directoryEntries;
while (dirFileArray.hasMoreElements()) {
fileObj = dirFileArray.getNext().QueryInterface(Ci.nsILocalFile);
if (fileObj.isFile()) {
filePath = fileObj.path;
if (makeUnixPaths) {
if (filePath.indexOf("/") === -1) {
filePath = filePath.replace(/\\/g, "/");
}
}
ok = true;
if (regExpInclude) {
ok = filePath.match(regExpInclude);
}
if (ok && regExpExclude) {
ok = !filePath.match(regExpExclude);
}
if (ok && (!file.exclusionRegExp ||
!file.exclusionRegExp.test(fileObj.leafName))) {
files.push(filePath);
}
} else if (fileObj.isDirectory() &&
(!file.exclusionRegExp || !file.exclusionRegExp.test(fileObj.leafName))) {
dirFiles = this.getFilteredFileList(fileObj, regExpFilters, makeUnixPaths, true);
//Do not use push.apply for dir listings, can hit limit of max number
//of arguments to a function call, #921.
dirFiles.forEach(function (dirFile) {
files.push(dirFile);
});
}
}
}
return files; //Array
},
copyDir: function (/*String*/srcDir, /*String*/destDir, /*RegExp?*/regExpFilter, /*boolean?*/onlyCopyNew) {
//summary: copies files from srcDir to destDir using the regExpFilter to determine if the
//file should be copied. Returns a list of file name strings of the destinations that were copied.
regExpFilter = regExpFilter || /\w/;
var fileNames = file.getFilteredFileList(srcDir, regExpFilter, true),
copiedFiles = [], i, srcFileName, destFileName;
for (i = 0; i < fileNames.length; i += 1) {
srcFileName = fileNames[i];
destFileName = srcFileName.replace(srcDir, destDir);
if (file.copyFile(srcFileName, destFileName, onlyCopyNew)) {
copiedFiles.push(destFileName);
}
}
return copiedFiles.length ? copiedFiles : null; //Array or null
},
copyFile: function (/*String*/srcFileName, /*String*/destFileName, /*boolean?*/onlyCopyNew) {
//summary: copies srcFileName to destFileName. If onlyCopyNew is set, it only copies the file if
//srcFileName is newer than destFileName. Returns a boolean indicating if the copy occurred.
var destFile = xpfile(destFileName),
srcFile = xpfile(srcFileName);
//logger.trace("Src filename: " + srcFileName);
//logger.trace("Dest filename: " + destFileName);
//If onlyCopyNew is true, then compare dates and only copy if the src is newer
//than dest.
if (onlyCopyNew) {
if (destFile.exists() && destFile.lastModifiedTime >= srcFile.lastModifiedTime) {
return false; //Boolean
}
}
srcFile.copyTo(destFile.parent, destFile.leafName);
return true; //Boolean
},
/**
* Renames a file. May fail if "to" already exists or is on another drive.
*/
renameFile: function (from, to) {
var toFile = xpfile(to);
return xpfile(from).moveTo(toFile.parent, toFile.leafName);
},
readFile: xpcUtil.readFile,
readFileAsync: function (path, encoding) {
var d = prim();
try {
d.resolve(file.readFile(path, encoding));
} catch (e) {
d.reject(e);
}
return d.promise;
},
saveUtf8File: function (/*String*/fileName, /*String*/fileContents) {
//summary: saves a file using UTF-8 encoding.
file.saveFile(fileName, fileContents, "utf-8");
},
saveFile: function (/*String*/fileName, /*String*/fileContents, /*String?*/encoding) {
var outStream, convertStream,
fileObj = xpfile(fileName);
mkFullDir(fileObj.parent);
try {
outStream = Cc['@mozilla.org/network/file-output-stream;1']
.createInstance(Ci.nsIFileOutputStream);
//511 is decimal for 0777
outStream.init(fileObj, 0x02 | 0x08 | 0x20, 511, 0);
convertStream = Cc['@mozilla.org/intl/converter-output-stream;1']
.createInstance(Ci.nsIConverterOutputStream);
convertStream.init(outStream, encoding, 0, 0);
convertStream.writeString(fileContents);
} catch (e) {
throw new Error((fileObj && fileObj.path || '') + ': ' + e);
} finally {
if (convertStream) {
convertStream.close();
}
if (outStream) {
outStream.close();
}
}
},
deleteFile: function (/*String*/fileName) {
//summary: deletes a file or directory if it exists.
var fileObj = xpfile(fileName);
if (fileObj.exists()) {
fileObj.remove(true);
}
},
/**
* Deletes any empty directories under the given directory.
* The startDirIsObject argument is private to this implementation's
* recursion needs.
*/
deleteEmptyDirs: function (startDir, startDirIsObject) {
var topDir = startDir,
dirFileArray, fileObj;
if (!startDirIsObject) {
topDir = xpfile(startDir);
}
if (topDir.exists()) {
dirFileArray = topDir.directoryEntries;
while (dirFileArray.hasMoreElements()) {
fileObj = dirFileArray.getNext().QueryInterface(Ci.nsILocalFile);
if (fileObj.isDirectory()) {
file.deleteEmptyDirs(fileObj, true);
}
}
//If the directory is empty now, delete it.
dirFileArray = topDir.directoryEntries;
if (!dirFileArray.hasMoreElements()) {
file.deleteFile(topDir.path);
}
}
}
};
return file;
});
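Editor's note: as a reading aid for the file module above, here is a minimal usage sketch of those helpers. The module id 'env!env/file', the wrapping define, and every path are assumptions for illustration only; they are not part of the imported source.

//Editor's sketch (hypothetical paths and wrapper): exercises the helpers defined above.
define(['env!env/file'], function (file) {
    'use strict';
    //Copy every .js file from a source tree into a build output tree,
    //only overwriting files whose source is newer (onlyCopyNew = true).
    var copied = file.copyDir('/tmp/src', '/tmp/www-built', /\.js$/, true) || [];

    //Write a UTF-8 report; saveFile creates missing parent directories via mkFullDir.
    file.saveUtf8File('/tmp/www-built/build.txt', copied.join('\n'));

    //Prune any directories left empty after filtering.
    file.deleteEmptyDirs('/tmp/www-built');

    return copied;
});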

View File

@ -0,0 +1,6 @@
/*jslint strict: false */
/*global define: false, load: false */
define(function () {
return load;
});

View File

@ -0,0 +1 @@
define({});

View File

@ -0,0 +1,6 @@
/*jslint strict: false */
/*global define: false, print: false */
define(function () {
return print;
});

View File

@ -0,0 +1,7 @@
/*global quit */
define(function () {
'use strict';
return function (code) {
return quit(code);
};
});

42
build/tests/all.js Normal file
View File

@ -0,0 +1,42 @@
/**
* Run the tests in Node with this command:
* node ../../r.js all.js
*/
/*jslint plusplus: false, strict: false */
/*global require: false, doh: false, skipDohSetup: true */
//A hack to doh to avoid dojo setup stuff in doh/runner.js
skipDohSetup = true;
//Set baseUrl for default context, but use a different context
//to run the tests, since at least one test run clears out the
//default context between each run.
require({
baseUrl: '../jslib/'
});
//Run the tests in a different context.
require({
baseUrl: '../jslib/',
paths: {
tests: '../tests'
},
context: 'test'
}, [
'../../tests/doh/runner.js',
'env!../../tests/doh/_{env}Runner.js',
'tests/convert',
'tests/parse',
'tests/pragma',
'tests/transform',
'tests/buildUtils',
//Build tests should be last in case they alter the environment
//in a weird way.
'tests/builds'
], function () {
//Show final report.
doh.run();
});
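Editor's note: a small aside on the context option used above. Passing a config with a context name gives the tests an isolated module registry, so they cannot clobber the default context. The sketch below is an editor's illustration using the documented require.config multiversion form, reusing the baseUrl and paths values from this file; it is not part of the imported source.

//Editor's sketch: an isolated loader context, separate from the default one.
var testRequire = require.config({
    context: 'test',
    baseUrl: '../jslib/',
    paths: { tests: '../tests' }
});
testRequire(['tests/convert'], function (convert) {
    //Modules loaded here are tracked separately from the default context.
});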

3
build/tests/allj.sh Executable file
View File

@ -0,0 +1,3 @@
set -e
java -classpath ../../lib/rhino/js.jar:../../lib/closure/compiler.jar org.mozilla.javascript.tools.shell.Main -opt -1 ../../r.js all.js

3
build/tests/alljnashorn.sh Executable file
View File

@ -0,0 +1,3 @@
set -e
jjs -cp ../../lib/closure/compiler.jar -scripting ../../r.js -- all.js

24
build/tests/alln.sh Executable file
View File

@ -0,0 +1,24 @@
#Stop after any error
set -e
rm -rf ./builds/
echo "Running tests embedded in Node"
echo "=============================="
node nodeOptimize.js
rm -rf ./builds/
node nodeOptimizeNoCallback.js
rm -rf ./builds/
node nodeAll.js
rm -rf ./builds/
echo "Running tests via bootstrap"
echo "=============================="
node ../../r.js all.js
echo "Testing stdout, result should be: define("main",{name:"main"});"
echo "=============================="
node ../../r.js -o lib/stdout/build.js
echo ""

3
build/tests/allxpc.sh Executable file
View File

@ -0,0 +1,3 @@
set -e
../../env/xpcshell/xpcshell ../../r.js all.js

View File

@ -0,0 +1,7 @@
//A simple build file using the tests directory for requirejs
{
baseUrl: "../../../requirejs/tests/anon",
optimize: "none",
out: "builds/anonSimple.js",
include: ["magenta", "red", "blue", "green", "yellow"]
}

149
build/tests/buildUtils.js Normal file
View File

@ -0,0 +1,149 @@
/*global define, doh */
define(['build'], function (build) {
'use strict';
doh.register("toTransport",
[
function toTransport(t) {
var bad1 = 'this.define(field, value, {_resolve: false});',
bad2 = 'xdefine(fields, callback);',
bad3 = 'this.define(function () {});',
bad4 = 'define(fields, callback);',
bad5 = 'define(a[0]);',
bad6 = '(function () {\n' +
' (function () {\n' +
' var module = { exports: {} }, exports = module.exports;\n' +
' (function (name, context, definition) {\n' +
' if (typeof module != \'undefined\' && module.exports) module.exports = definition()\n' +
' else if (typeof define == \'function\' && define.amd) define(definition)\n' +
' else context[name] = definition()\n' +
' })(\'qwery\', this, function () {\n' +
' });\n' +
' }());\n' +
' (function () {\n' +
' var module = { exports: {} }, exports = module.exports;\n' +
' (function (name, context, definition) {\n' +
' if (typeof module != \'undefined\' && module.exports) module.exports = definition()\n' +
' else if (typeof define == \'function\' && define.amd) define(definition)\n' +
' else context[name] = definition()\n' +
' })(\'bonzo\', this, function () {\n' +
' });\n' +
' }());\n' +
'}());',
good1 = 'if (typeof define === "function" && define.amd) {\n' +
' define(definition);\n' +
'}',
goodExpected1 = 'if (typeof define === "function" && define.amd) {\n' +
' define(\'good/1\',definition);\n' +
'}',
good2 = '// define([\'bad\'], function () {});\n' +
'define([\'foo\'], function () {});',
goodExpected2 = '// define([\'bad\'], function () {});\n' +
'define(\'good/2\',[\'foo\'], function () {});',
multi = 'define("foo", function (require) { var bar = require("bar"); });\n' +
'define("bar", function (require) { var foo = require("foo"); });\n',
multiAnonWrapped = '(function (root, factory) {\n' +
' if (typeof define === \'function\' && define.amd) {\n' +
' define([\'b\'], factory);\n' +
' } else {\n' +
' // Browser globals\n' +
' root.amdWeb = factory(root.b);\n' +
' }\n' +
'}(this, function (b) {\n' +
' var stored = {};\n' +
' function define(id, func) { stored[id] = func();}\n' +
' define("foo", function (require) { var bar = require("bar"); });\n' +
' define("bar", function (require) { var foo = require("foo"); });\n' +
' return stored.bar;\n' +
'}));',
multiAnonWrappedExpected = '(function (root, factory) {\n' +
' if (typeof define === \'function\' && define.amd) {\n' +
' define(\'multiAnonWrapped\',[\'b\'], factory);\n' +
' } else {\n' +
' // Browser globals\n' +
' root.amdWeb = factory(root.b);\n' +
' }\n' +
'}(this, function (b) {\n' +
' var stored = {};\n' +
' function define(id, func) { stored[id] = func();}\n' +
' define("foo", function (require) { var bar = require("bar"); });\n' +
' define("bar", function (require) { var foo = require("foo"); });\n' +
' return stored.bar;\n' +
'}));',
good3 = 'define(\n' +
' // a comment\n' +
' [\n' +
' "some/dep"\n' +
' ],\nfunction (dep) {});',
goodExpected3 = 'define(\n' +
' // a comment\n' +
' \'good/3\',[\n' +
' "some/dep"\n' +
' ],\nfunction (dep) {});',
good4 = 'define(this.key)',
goodExpected4 = 'define(\'good/4\',this.key)',
good5 = 'if ("function" === typeof define && define.amd) {\n' +
' define(function (require) {\n' +
' return {\n' +
' name: "five",\n' +
' six: require("./six")\n' +
' };\n' +
' });\n' +
'}',
goodExpected5 = 'if ("function" === typeof define && define.amd) {\n' +
' foo.define(\'good/5\',[\'require\',\'./six\'],function (require) {\n' +
' return {\n' +
' name: "five",\n' +
' six: require("./six")\n' +
' };\n' +
' });\n' +
'}';
t.is(bad1, build.toTransport('', 'bad/1', 'bad1', bad1));
t.is(bad2, build.toTransport('', 'bad/2', 'bad2', bad2));
t.is(bad3, build.toTransport('', 'bad/3', 'bad3', bad3));
t.is(bad4, build.toTransport('', 'bad/4', 'bad4', bad4));
t.is(bad5, build.toTransport('', 'bad/5', 'bad5', bad5));
t.is(bad6, build.toTransport('', 'bad/6', 'bad6', bad6));
t.is(goodExpected1, build.toTransport('', 'good/1', 'good1', good1));
t.is(goodExpected2, build.toTransport('', 'good/2', 'good2', good2));
t.is(multi, build.toTransport('', 'multi', 'multi', multi));
t.is(multiAnonWrappedExpected, build.toTransport('',
'multiAnonWrapped', 'multiAnonWrapped', multiAnonWrapped));
t.is(goodExpected3, build.toTransport('', 'good/3', 'good3', good3));
t.is(goodExpected4, build.toTransport('', 'good/4', 'good4', good4));
t.is(goodExpected5, build.toTransport('foo', 'good/5', 'good5', good5));
}
]);
doh.run();
doh.register("makeRelativeFilePath",
[
function makeRelativeFilePath(t) {
t.is('sibling.js',
build.makeRelativeFilePath('/some/other/www-built/js/main.js',
'/some/other/www-built/js/sibling.js'));
t.is('sub/thing/other.js',
build.makeRelativeFilePath('/some/other/www-built/js/main.js',
'/some/other/www-built/js/sub/thing/other.js'));
t.is('../parent/thing/other.js',
build.makeRelativeFilePath('/some/other/www-built/js/main.js',
'/some/other/www-built/parent/thing/other.js'));
t.is('../../../Applications/foo/',
build.makeRelativeFilePath('/Users/some/thing/',
'/Applications/foo/'));
t.is('modules/player.js',
build.makeRelativeFilePath('/some/other/www-built/js/app/main.js',
'/some/other/www-built/js/main/lib/../../app/modules/player.js'));
}
]);
doh.run();
});

2914
build/tests/builds.js Normal file

File diff suppressed because it is too large

View File

@ -0,0 +1,18 @@
//A simple build file using the circular tests for requirejs
({
baseUrl: "../../../requirejs/tests",
optimize: "none",
dir: "builds/circular",
modules: [
{
name: "two"
},
{
name: "funcTwo"
},
{
name: "funcThree"
}
]
})

47
build/tests/convert.js Normal file
View File

@ -0,0 +1,47 @@
/*jslint */
/*global doh: false, define: false */
define(['commonJs'], function (commonJs) {
'use strict';
doh.register(
"convert",
[
function commonJsConvert(t) {
var source1 = 'define("fake", {lol: "you guise"});',
source2 = 'define("fake", [],\nfunction(){\nreturn{lol : \'you guise\'};\n});',
source3 = 'exports.name = "foo";',
expected3 = 'define(function (require, exports, module) {exports.name = "foo";\n});\n',
source4 = 'module.exports = "foo";',
expected4 = 'define(function (require, exports, module) {module.exports = "foo";\n});\n',
source5 = 'var a = require("a");\nexports.name = a;',
expected5 = 'define(function (require, exports, module) {var a = require("a");\nexports.name = a;\n});\n',
source6 = 'exports.name = __dirname;',
expected6 = 'define(function (require, exports, module) {' +
'var __filename = module.uri || "", __dirname = __filename.substring(0, __filename.lastIndexOf("/") + 1); ' +
'exports.name = __dirname;\n});\n',
source7 = 'exports.name = __filename;',
expected7 = 'define(function (require, exports, module) {' +
'var __filename = module.uri || "", __dirname = __filename.substring(0, __filename.lastIndexOf("/") + 1); ' +
'exports.name = __filename;\n});\n',
source8 = 'var MyModule = module.exports = "foo";',
expected8 = 'define(function (require, exports, module) {var MyModule = module.exports = "foo";\n});\n';
t.is(source1, commonJs.convert('fake.js', source1));
t.is(source2, commonJs.convert('fake.js', source2));
t.is(expected3, commonJs.convert('source3', source3));
t.is(expected4, commonJs.convert('source4', source4));
t.is(expected5, commonJs.convert('source5', source5));
t.is(expected6, commonJs.convert('source6', source6));
t.is(expected7, commonJs.convert('source7', source7));
t.is(expected8, commonJs.convert('source8', source8));
}
]
);
doh.run();
});
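Editor's note: for readers skimming the expectations above, commonJs.convert takes a file name and a source string and, when the source looks like CommonJS code, returns it wrapped in a define(function (require, exports, module) {...}) shell. A minimal sketch follows, reusing the test's own 'commonJs' module id; the sample source string is made up.

//Editor's sketch: wrapping a made-up CommonJS snippet, matching the tests above.
define(['commonJs'], function (commonJs) {
    'use strict';
    var src = 'exports.answer = require("./math").add(2, 2);';
    //Returns the source wrapped as:
    //define(function (require, exports, module) { ...original source... });
    return commonJs.convert('sample.js', src);
});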

6
build/tests/css.build.js Normal file
View File

@ -0,0 +1,6 @@
//A simple build file for testing css optimizations.
({
appDir: "./css",
dir: "builds/css",
optimize: "none"
})

View File

@ -0,0 +1,4 @@
/* These are common styles */
body {
background: blue url(img/body.png) bottom center;
}

View File

@ -0,0 +1,15 @@
@import url('common/common.css');
@import url('relative/rel1.css');
/**
*
* This is the master css file.
*
*/
nav {
color: red;
background-image: url("nav.png");
}
#footer {
background-image: url("../img/footer.png") repeat-x;
}

View File

@ -0,0 +1,3 @@
#nest {
background-image: url('../../../img/bar.png');
}

View File

@ -0,0 +1,3 @@
/* tests issue #5 */
@import url(./rel_sub.css);
@import url("./nested/nest1.css");

View File

@ -0,0 +1,3 @@
.relsub {
background-image: url(../../img/foo.png);
}

View File

@ -0,0 +1,7 @@
@import url("../master.css");
.sub {
background: transparent url(../../img/sub.png);
/*Some comment in here*/
position: relative;
}

View File

@ -0,0 +1,29 @@
body {
background: blue url(../common/img/body.png) bottom center;
}
.relsub {
background-image: url(../../img/foo.png);
}
#nest {
background-image: url(../../img/bar.png);
}
nav {
color: red;
background-image: url(../nav.png);
}
#footer {
background-image: url(../../img/footer.png) repeat-x;
}
.sub {
background: transparent url(../../img/sub.png);
position: relative;
}

3
build/tests/end.frag Normal file
View File

@ -0,0 +1,3 @@
window.one = requirejs("one");
}());

View File

@ -0,0 +1,23 @@
define("tres",
[],function() {
return {
name: "tres"
};
}
);
define("uno",
["dos", "tres"],
function(dos, tres) {
return {
name: "uno",
doSomething: function() {
return {
dosName: dos.name,
tresName: tres.name
};
}
};
}
);

View File

@ -0,0 +1,12 @@
//A simple build file using the tests directory for requirejs
{
baseUrl: "../../../requirejs/tests/exports",
inlineText: false,
dir: "builds/exports",
optimize: "none",
modules: [
{
name: "simpleReturn"
}
]
}

View File

@ -0,0 +1,10 @@
({
baseUrl: "./",
optimize: "none",
name: "hasTestModule",
out: "builds/hasTestModule.js",
has: {
aTrueValue: true,
aFalseValue: false
}
})

View File

@ -0,0 +1,30 @@
/**
* Just a test module that tests has replacements. Not usable on its own.
*/
define(function (require) {
var foo, bar, baz, blurp, bat, blip;
if (has("aTrueValue")) {
foo = "is true";
} else {
foo = "is false";
}
if (has("aFalseValue")) {
bar = "is true";
} else {
bar = "is false";
}
if (has("some skipped value")) {
baz = "what";
} else {
baz = "ever";
}
blurp = has("aTrueValue") ? "OK" : "FAIL";
bat = has ('aFalseValue') ? "FAIL" : "OK";
blip = has("some skipped value") ? "what" : "ever";
return foo + bar + baz + blurp + bat + blip;
});
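Editor's note: a rough sketch of what the has() replacement driven by hasTestModule.build.js above is meant to produce. This is an editor's illustration, not the literal r.js build output; the inserted module id and the trimmed body are assumptions.

//Editor's illustration only: with has: { aTrueValue: true, aFalseValue: false },
//the optimizer rewrites the configured has() tests into boolean literals so a
//minifier can later drop the dead branches; flags missing from the config are
//left as has() calls in the real output.
define('hasTestModule', [], function () {
    var foo, bat;
    if (true) {                  //was: if (has("aTrueValue"))
        foo = "is true";
    } else {
        foo = "is false";
    }
    bat = false ? "FAIL" : "OK"; //was: has ('aFalseValue') ? "FAIL" : "OK"
    return foo + bat;
});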

View File

@ -0,0 +1,69 @@
/*jslint strict: false*/
/*global require: false, console: false */
//If you install requirejs via npm, replace this line with require('requirejs')
var requirejs = require('../../../r.js'),
http = require('http'),
fs = require('fs'),
host = '127.0.0.1',
port = 4304,
config;
//Set up the config passed to the optimizer
config = {
baseUrl: 'scripts',
paths: {
//Put the path to require.js in here, leaving off .js,
//since it is a module ID path mapping. For final deployment,
//if a smaller AMD loader is desired, no dynamic
//loading needs to be done, and loader plugins are not
//in use, point this path at that smaller file instead. One
//possibility could be the one at:
//https://github.com/ajaxorg/ace/blob/master/build_support/mini_require.js
requireLib: '../../../../require'
},
//Uncomment this line if uglify minification is not wanted.
//optimize: 'none',
//Specify the optimization target. Choose the requireLib,
//so that it is first in the output, then include the main.js
//for this project.
name: 'requireLib',
include: ['main'],
//Uncomment this if you want to debug three.js by itself
//excludeShallow: ['three'],
out: 'scripts/main-built.js'
};
function respond(res, code, contents) {
res.writeHead(code, {
'Content-Type': (code === 200 ? 'application/javascript;charset=UTF-8' : 'text/plain'),
'Content-Length': contents.length
});
res.write(contents, 'utf8');
res.end();
}
http.createServer(function (req, res) {
req.on('close', function (err) {
res.end();
});
req.on('end', function () {
//Does not matter what the request is,
//the answer is always OPTIMIZED JS!
requirejs.optimize(config, function (buildResponse) {
//buildResponse is just a text output of the modules
//included. Load the built file for the contents.
var contents = fs.readFileSync(config.out, 'utf8');
respond(res, 200, contents);
}, function (e) {
//As of r.js 2.1.2, errors are returned via an errback
respond(res, 500, e.toString());
});
});
}).listen(port, host);
console.log('Server running at http://' + host + ':' + port + '/');
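Editor's note: for comparison with the HTTP-server wrapper above, a minimal standalone sketch of driving requirejs.optimize from Node. The baseUrl, name, and out values here are placeholders, not taken from this repository; the optimize(config, callback, errback) call shape matches the one used in httpBuild.js.

//Editor's sketch with placeholder config values.
var requirejs = require('../../../r.js');

requirejs.optimize({
    baseUrl: 'scripts',
    name: 'main',
    out: 'scripts/main-built.js',
    optimize: 'none'
}, function (buildResponse) {
    //buildResponse is a text report of the modules that were included;
    //the built file itself is written to the config.out path.
    console.log(buildResponse);
}, function (err) {
    //As noted above, since r.js 2.1.2 build errors arrive via this errback.
    console.error(err);
});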

View File

@ -0,0 +1,28 @@
<!DOCTYPE html>
<html>
<head>
<title></title>
</head>
<body>
<h1>One script via RequireJS optimizer</h1>
<p><strong>Be sure to start up the server: node httpBuild.js</strong></p>
<p>Demonstrates how to use the RequireJS optimizer to only load
one script in the page using Node as the server to serve the optimized
script. Using the "excludeShallow" build config option inside httpBuild.js
allows debugging one script separate from the rest of the optimized file.</p>
<p>See httpBuild.js in the same directory as this file for more information.</p>
<hr>
<!-- Successful load will print something after the hr tag -->
<!-- httpBuild.js will return optimized content no matter what the URL
is below. For deployment, replace this script tag with one that
loads main-built.js, or whatever the config.out name value is
inside httpBuild.js -->
<script src="http://127.0.0.1:4304/main.js"></script>
</body>
</html>

View File

@ -0,0 +1,18 @@
//Set the baseUrl for scripts, for use
//when individually debugging files via
//excludeShallow in httpBuild.js
require.config({
baseUrl: 'scripts'
});
require(['one', 'two'], function (one, two) {
var html = "<b>Success!</b> One's name is: " + one.name +
", two's name is: " + two.name +
", three's name is: " + two.threeName,
node = document.createElement('div');
node.innerHTML = html;
document.getElementsByTagName('body')[0].appendChild(node);
});

View File

@ -0,0 +1,3 @@
define({
name: 'one'
});

View File

@ -0,0 +1,5 @@
define(function () {
return {
name: 'three'
};
});

View File

@ -0,0 +1,7 @@
define(function (require) {
var three = require('three');
return {
name: 'two',
threeName: three.name
};
});

17
build/tests/i18n.build.js Normal file
View File

@ -0,0 +1,17 @@
//A simple build file using the tests directory for requirejs
{
appDir: "../../../requirejs",
baseUrl: "tests/i18n",
inlineText: false,
dir: "builds/i18n",
locale: "en-us-surfer",
optimize: "none",
paths: {
"i18n": "../../../i18n/i18n"
},
modules: [
{
name: "testModule"
}
]
}

View File

@ -0,0 +1,8 @@
//A simple build file using the tests directory for requirejs
({
baseUrl: "../../../requirejs/tests/plugins/",
optimize: "none",
name: "earth",
include: ["prime/earth"],
out: "builds/indexPlugin.js"
})

View File

@ -0,0 +1,11 @@
{
appDir: 'output/generated',
baseUrl: '.',
optimize: 'none',
dir: 'output',
keepBuildDir: true,
allowSourceOverwrites: true,
modules: [{
name: 'main'
}]
}

View File

@ -0,0 +1,14 @@
define('c',{
name: 'c'
});
define('b',['c'], function (c) {
});
require(['b'], function(){});
define("main", function(){});

View File

@ -0,0 +1,3 @@
define(['c'], function (c) {
});

View File

@ -0,0 +1,4 @@
define({
name: 'c'
});

View File

@ -0,0 +1,2 @@
require(['b'], function(){});

Some files were not shown because too many files have changed in this diff