initial commit
This commit is contained in:
20
node_modules/commoner/LICENSE
generated
vendored
Normal file
20
node_modules/commoner/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
Copyright (c) 2013 Ben Newman <bn@cs.stanford.edu>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
379
node_modules/commoner/README.md
generated
vendored
Normal file
379
node_modules/commoner/README.md
generated
vendored
Normal file
@@ -0,0 +1,379 @@
|
||||
Commoner [](https://travis-ci.org/benjamn/commoner)
|
||||
---
|
||||
|
||||
Commoner makes it easy to write scripts that flexibly and efficiently
|
||||
transpile any dialect of JavaScript into a directory structure of
|
||||
Node-compatible CommonJS module files.
|
||||
|
||||
This task is made possible by
|
||||
|
||||
1. a declarative syntax for defining how module source code should be
|
||||
found and processed,
|
||||
2. the use of [promises](https://github.com/kriskowal/q) to manage an
|
||||
asynchronous build pipeline, and
|
||||
3. never rebuilding modules that have already been built.
|
||||
|
||||
The output files can be required seamlessly by Node, or served by any
|
||||
static file server, or bundled together using a tool such as
|
||||
[Browserify](https://github.com/substack/node-browserify),
|
||||
[WrapUp](https://github.com/kamicane/wrapup), or
|
||||
[Stitch](https://github.com/sstephenson/stitch) for delivery to a web
|
||||
browser.
|
||||
|
||||
If you pass the `--relativize` option, Commoner also takes care to rewrite
|
||||
all `require` calls to use [relative module
|
||||
identifiers](http://wiki.commonjs.org/wiki/Modules/1.1#Module_Identifiers),
|
||||
so that the output files can be installed into any subdirectory of a
|
||||
larger project, and external tools do not have to give special treatment
|
||||
to top-level modules (or even know which modules are top-level and which
|
||||
are nested).
|
||||
|
||||
Commoner was derived from an earlier, more opinionated tool called
|
||||
[Brigade](https://github.com/benjamn/brigade) that provided additional
|
||||
support for packaging modules together into multiple non-overlapping
|
||||
bundles. Commoner grew out of the realization that many tools already
|
||||
exist for bundling CommonJS modules, but that fewer tools focus on getting
|
||||
to that point.
|
||||
|
||||
Installation
|
||||
---
|
||||
|
||||
From NPM:
|
||||
|
||||
npm install commoner
|
||||
|
||||
From GitHub:
|
||||
|
||||
cd path/to/node_modules
|
||||
git clone git://github.com/reactjs/commoner.git
|
||||
cd commoner
|
||||
npm install .
|
||||
|
||||
Usage
|
||||
---
|
||||
|
||||
Here's the output of `bin/commonize --help`:
|
||||
```
|
||||
Usage: commonize [options] <source directory> <output directory> [<module ID> [<module ID> ...]]
|
||||
|
||||
Options:
|
||||
|
||||
-h, --help output usage information
|
||||
-V, --version output the version number
|
||||
-c, --config [file] JSON configuration file (no file means STDIN)
|
||||
-w, --watch Continually rebuild
|
||||
-x, --extension <js | coffee | ...> File extension to assume when resolving module identifiers
|
||||
--relativize Rewrite all module identifiers to be relative
|
||||
--follow-requires Scan modules for required dependencies
|
||||
--cache-dir <directory> Alternate directory to use for disk cache
|
||||
--no-cache-dir Disable the disk cache
|
||||
--source-charset <utf8 | win1252 | ...> Charset of source (default: utf8)
|
||||
--output-charset <utf8 | win1252 | ...> Charset of output (default: utf8)
|
||||
```
|
||||
|
||||
In a single sentence: the `commonize` command finds modules with the given
|
||||
module identifiers in the source directory and places a processed copy of
|
||||
each module into the output directory, along with processed copies of all
|
||||
required modules.
|
||||
|
||||
If you do not provide any module identifiers, `commonize` will process all
|
||||
files that it can find under the source directory that have the preferred
|
||||
file extension (`.js` by default). If your source files have a file
|
||||
extension other than `.js`, use the `-x` or `--extension` option to
|
||||
specify it. For example, `--extension coffee` to find `.coffee` files.
|
||||
|
||||
Output
|
||||
---
|
||||
|
||||
Commoner prints various status messages to `STDERR`, so that you can see
|
||||
what it's doing, or figure out why it's not doing what you thought it
|
||||
would do.
|
||||
|
||||
The only information it prints to `STDOUT` is a JSON array of module
|
||||
identifiers, which includes the identifiers passed on the command line and
|
||||
all their dependencies. This array contains no duplicates.
|
||||
|
||||
Internally, each module that Commoner generates has a hash computed from
|
||||
the module's identifier, source code, and processing steps. Since this
|
||||
hash can be computed before processing takes place, Commoner is able to
|
||||
avoid processing a module if it has ever previously processed the same
|
||||
module in the same way.
|
||||
|
||||
If you dig into [the
|
||||
code](https://github.com/reactjs/commoner/blob/5e7f65cab2/lib/context.js#L94),
|
||||
you'll find that Commoner maintains a cache directory (by default,
|
||||
`~/.commoner/module-cache/`) containing files with names like
|
||||
`9ffc5c853aac07bc106da1dc1b2486903ca688bf.js`. When Commoner is about to
|
||||
process a module, it checks its hash against the file names in this
|
||||
directory. If no match is found, processing procedes and the resulting
|
||||
file is written to the cache directory with a new hash. If the appropriate
|
||||
hash file is already present in the cache directory, however, Commoner
|
||||
merely creates a hard link between the hash file and a file with a more
|
||||
meaningful name in the output directory.
|
||||
|
||||
When you pass the `--watch` flag to `bin/commonize`, Commoner avoids
|
||||
exiting after the first build and instead watches for changes to
|
||||
previously read files, printing a new JSON array of module identifiers to
|
||||
`STDOUT` each time rebuilding finishes. Thanks to the caching of processed
|
||||
modules, the time taken to rebuild is roughly proportional to the number
|
||||
of modified files.
|
||||
|
||||
Customization
|
||||
---
|
||||
|
||||
The `bin/commonize` script is actually quite simple, and you can write
|
||||
similar scripts yourself. Let's have a look:
|
||||
```js
|
||||
#!/usr/bin/env node
|
||||
|
||||
require("commoner").resolve(function(id) {
|
||||
var context = this;
|
||||
|
||||
return context.getProvidedP().then(function(idToPath) {
|
||||
// If a module declares its own identifier using @providesModule
|
||||
// then that identifier will be a key in the idToPath object.
|
||||
if (idToPath.hasOwnProperty(id))
|
||||
return context.readFileP(idToPath[id]);
|
||||
});
|
||||
|
||||
}, function(id) {
|
||||
// Otherwise assume the identifier maps directly to a filesystem path.
|
||||
// The readModuleP method simply appends the preferred file extension
|
||||
// (usually .js) to the given module identifier and opens that file.
|
||||
return this.readModuleP(id);
|
||||
});
|
||||
```
|
||||
The scriptable interface of the `commoner` module abstracts away many of
|
||||
the annoyances of writing a command-line script. In particular, you don't
|
||||
have to do any parsing of command-line arguments, and you don't have to
|
||||
worry about installing any dependencies other than `commoner` in your
|
||||
`$NODE_PATH`.
|
||||
|
||||
What you are responsible for, at a minimum, is telling Commoner how to
|
||||
find the source of a module given a module identifier, and you do this by
|
||||
passing callback functions to `require("commoner").resolve`. The script
|
||||
above uses two strategies that will be tried in sequence: first, it calls
|
||||
the helper function `this.getProvidedP` to retrieve an object mapping
|
||||
identifiers to file paths (more about this below); and, if that doesn't
|
||||
work, it falls back to interpreting the identifier as a path relative to
|
||||
the source directory.
|
||||
|
||||
Now, you might not care about `this.getProvidedP`. It's really just a
|
||||
proof of concept that Commoner can support modules that declare their own
|
||||
identifiers using the `// @providesModule <identifier>` syntax, and I
|
||||
included it by default because it doesn't make a difference unless you
|
||||
decide to use `@providesModule`. If you don't like it, you could write an
|
||||
even simpler script:
|
||||
```js
|
||||
#!/usr/bin/env node
|
||||
|
||||
require("commoner").resolve(function(id) {
|
||||
return this.readModuleP(id);
|
||||
});
|
||||
```
|
||||
The point is, it's entirely up to you to define how module identifiers are
|
||||
interpreted. In fact, the source you return doesn't even have to be valid
|
||||
JavaScript. It could be [CoffeeScript](http://coffeescript.org/), or
|
||||
[LESS](http://lesscss.org/), or whatever language you prefer to write by
|
||||
hand. Commoner doesn't care what your source code looks like, because
|
||||
Commoner allows you to define arbitrary build steps to turn that source
|
||||
code into plain old CommonJS.
|
||||
|
||||
Let's consider the example of using LESS to write dynamic CSS
|
||||
modules. First, let's apply what we already know to give special meaning
|
||||
to `.less` files:
|
||||
```js
|
||||
#!/usr/bin/env node
|
||||
|
||||
require("commoner").resolve(function(id) {
|
||||
if (isLess(id))
|
||||
return this.readFileP(id);
|
||||
}, function(id) {
|
||||
return this.readModuleP(id);
|
||||
});
|
||||
|
||||
function isLess(id) {
|
||||
return /\.less$/i.test(id);
|
||||
}
|
||||
```
|
||||
All this really accomplishes is to avoid appending the `.js` file
|
||||
extension to identifiers that already have the `.less` extension.
|
||||
|
||||
Now we need to make sure the contents of `.less` files somehow get
|
||||
transformed into plain old CommonJS, and for that we need
|
||||
`require("commoner").process`:
|
||||
```js
|
||||
require("commoner").resolve(function(id) {
|
||||
if (isLess(id))
|
||||
return this.readFileP(id);
|
||||
}, function(id) {
|
||||
return this.readModuleP(id);
|
||||
}).process(function(id, source) {
|
||||
if (isLess(id))
|
||||
return compileLessToJs(source);
|
||||
return source;
|
||||
});
|
||||
```
|
||||
How should `compileLessToJs` be implemented? At a high level, I propose
|
||||
that we generate a CommonJS module that will append a new `<style>` tag to
|
||||
the `<head>` the first time the module is required. This suggests to me
|
||||
that we need to take the CSS generated by LESS and somehow embed it as a
|
||||
string in a CommonJS module with a small amount of boilerplate JS.
|
||||
|
||||
Here's a first attempt:
|
||||
```js
|
||||
function compileLessToJs(less) {
|
||||
var css = require("less").render(less);
|
||||
return 'require("css").add(' + JSON.stringify(css) + ");";
|
||||
}
|
||||
```
|
||||
Implementing a `css` module with an appropriate `add` method is an
|
||||
exercise that I will leave to the reader (hint: you may find [this
|
||||
StackOverflow answer](http://stackoverflow.com/a/524721/128454) useful).
|
||||
|
||||
This almost works, but there's one problem: `require("less").render` does
|
||||
not actually return a string! For better or worse, it passes the compiled
|
||||
CSS to a callback function, which would make our task extremely painful
|
||||
*if Commoner were not deeply committed to supporting asynchronous
|
||||
processing*.
|
||||
|
||||
Commoner uses promises for asynchronous control flow, so we need to return
|
||||
a promise if we can't return a string immediately. The easiest way to make
|
||||
a promise is to call `this.makePromise` in the following style:
|
||||
```js
|
||||
#!/usr/bin/env node
|
||||
|
||||
require("commoner").resolve(function(id) {
|
||||
if (isLess(id))
|
||||
return this.readFileP(id);
|
||||
}, function(id) {
|
||||
return this.readModuleP(id);
|
||||
}).process(function(id, source) {
|
||||
if (isLess(id)) {
|
||||
return this.makePromise(function(nodeStyleCallback) {
|
||||
compileLessToJs(source, nodeStyleCallback);
|
||||
});
|
||||
}
|
||||
return source;
|
||||
});
|
||||
|
||||
function compileLessToJs(less, callback) {
|
||||
require("less").render(less, function(err, css) {
|
||||
callback(err, 'require("css").add(' + JSON.stringify(css) + ");")
|
||||
});
|
||||
}
|
||||
```
|
||||
And we're done! This example was admittedly pretty involved, but if you
|
||||
followed it to the end you now have all the knowledge you need to write
|
||||
source files like `sidebar.less` and require them from other modules by
|
||||
invoking `require("sidebar.less")`. (And, by the way, embedding dynamic
|
||||
CSS modules in your JavaScript turns out to be an excellent idea.)
|
||||
|
||||
Generating multiple files from one source module
|
||||
---
|
||||
|
||||
Commoner is not limited to generating just one output file from each
|
||||
source module. For example, if you want to follow best practices for
|
||||
producing source maps, you probably want to create a `.map.json` file
|
||||
corresponding to every `.js` file that you compile.
|
||||
|
||||
Recall that normally your `.process` callback returns a string (or a
|
||||
promise for a string) whose contents will be written as a `.js` file in
|
||||
the output directory. To write more than one file, just return an object
|
||||
whose keys are the file extensions of the files you want to write, and
|
||||
whose values are either strings or promises for strings representing the
|
||||
desired contents of those files.
|
||||
|
||||
Here's an example of generating two different files for every source
|
||||
module, one called `<id>.map.json` and the other called `<id>.js`:
|
||||
```js
|
||||
require("commoner").resolve(function(id) {
|
||||
return this.readModuleP(id);
|
||||
}).process(function(id, source) {
|
||||
var result = compile(source);
|
||||
return {
|
||||
".map.json": JSON.stringify(result.sourceMap),
|
||||
".js": [
|
||||
result.code,
|
||||
"//# sourceMappingURL=" + id + ".map.json"
|
||||
].join("\n")
|
||||
};
|
||||
});
|
||||
```
|
||||
|
||||
Note that
|
||||
```js
|
||||
return {
|
||||
".js": source
|
||||
};
|
||||
```
|
||||
would be equivalent to
|
||||
```js
|
||||
return source;
|
||||
```
|
||||
so you only have to return an object when you want to generate multiple
|
||||
files. However, the `.js` key is mandatory when returning an object.
|
||||
|
||||
For your convenience, if you have a sequence of multiple processing
|
||||
functions, the values of the object returned from each step will be
|
||||
resolved before the object is passed along to the next processing
|
||||
function, so you can be sure all the values are strings (instead of
|
||||
promises) at the beginning of the next processing function.
|
||||
|
||||
Configuration
|
||||
---
|
||||
|
||||
Of course, not all customization requires modifying code. Most of the
|
||||
time, in fact, configuration has more to do with providing different
|
||||
dynamic values to the same code.
|
||||
|
||||
For that kind of configuration, you don't need to modify your Commoner
|
||||
script at all, because Commoner scripts accept a flag called `--config`
|
||||
that can either specify a JSON file or (if `--config` is given without a
|
||||
file name) read a string of JSON from `STDIN`.
|
||||
|
||||
Examples:
|
||||
|
||||
bin/commonize source/ output/ main --config release.json
|
||||
bin/commonize source/ output/ main --config debug.json
|
||||
echo '{"debug":false}' | bin/commonize source/ output/ main --config
|
||||
echo '{"debug":true}' | bin/commonize source/ output/ main --config /dev/stdin
|
||||
|
||||
This configuration object is exposed to the `.resolve` and `.process`
|
||||
callbacks as `this.config`. So, for example, if you wanted to implement
|
||||
minification as a processing step, you might do it like this:
|
||||
```js
|
||||
require("commoner").resolve(function(id) {
|
||||
return this.readModule(id);
|
||||
}).process(function(id, source) {
|
||||
if (this.config.debug)
|
||||
return source;
|
||||
return minify(source);
|
||||
});
|
||||
```
|
||||
Perhaps the coolest thing about the configuration object is that Commoner
|
||||
generates a recursive hash of all its properties and their values which is
|
||||
then incorporated into every module hash. This means that two modules with
|
||||
the same identifier and identical source code and processing steps will
|
||||
have distinct hashes if built using different configuration objects.
|
||||
|
||||
Custom Options
|
||||
---
|
||||
|
||||
You can define custom options for your script by using the `option` function.
|
||||
|
||||
```js
|
||||
require("commoner").resolve(function(id) {
|
||||
return this.readModule(id);
|
||||
}).option(
|
||||
'--custom-option',
|
||||
'This is a custom option.'
|
||||
).process(function(id, source) {
|
||||
if (this.options.customOption) {
|
||||
source = doCustomThing(source);
|
||||
}
|
||||
return source;
|
||||
});
|
||||
```
|
||||
|
||||
For more information of the options object available inside the `process` function see [Commander](https://github.com/visionmedia/commander.js).
|
||||
21
node_modules/commoner/bin/commonize
generated
vendored
Executable file
21
node_modules/commoner/bin/commonize
generated
vendored
Executable file
@@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
require("commoner").resolve(function(id) {
|
||||
var context = this;
|
||||
|
||||
return context.getProvidedP().then(function(idToPath) {
|
||||
// If a module declares its own identifier using @providesModule
|
||||
// then that identifier will be a key in the idToPath object.
|
||||
if (idToPath.hasOwnProperty(id))
|
||||
return context.readFileP(idToPath[id]);
|
||||
});
|
||||
|
||||
}, function(id) {
|
||||
// Otherwise assume the identifier maps directly to a filesystem path.
|
||||
return this.readModuleP(id);
|
||||
|
||||
}).process(function(id, source) {
|
||||
// As a simple example of a processing step, make sure the file ends
|
||||
// with exactly one newline character.
|
||||
return source.replace(/\s+$/m, "\n");
|
||||
});
|
||||
117
node_modules/commoner/lib/cache.js
generated
vendored
Normal file
117
node_modules/commoner/lib/cache.js
generated
vendored
Normal file
@@ -0,0 +1,117 @@
|
||||
var assert = require("assert");
|
||||
var Q = require("q");
|
||||
var fs = require("fs");
|
||||
var path = require("path");
|
||||
var util = require("./util");
|
||||
var EventEmitter = require("events").EventEmitter;
|
||||
var hasOwn = Object.prototype.hasOwnProperty;
|
||||
|
||||
/**
|
||||
* ReadFileCache is an EventEmitter subclass that caches file contents in
|
||||
* memory so that subsequent calls to readFileP return the same contents,
|
||||
* regardless of any changes in the underlying file.
|
||||
*/
|
||||
function ReadFileCache(sourceDir, charset) {
|
||||
assert.ok(this instanceof ReadFileCache);
|
||||
assert.strictEqual(typeof sourceDir, "string");
|
||||
|
||||
this.charset = charset;
|
||||
|
||||
EventEmitter.call(this);
|
||||
|
||||
Object.defineProperties(this, {
|
||||
sourceDir: { value: sourceDir },
|
||||
sourceCache: { value: {} }
|
||||
});
|
||||
}
|
||||
|
||||
util.inherits(ReadFileCache, EventEmitter);
|
||||
var RFCp = ReadFileCache.prototype;
|
||||
|
||||
/**
|
||||
* Read a file from the cache if possible, else from disk.
|
||||
*/
|
||||
RFCp.readFileP = function(relativePath) {
|
||||
var cache = this.sourceCache;
|
||||
|
||||
relativePath = path.normalize(relativePath);
|
||||
|
||||
return hasOwn.call(cache, relativePath)
|
||||
? cache[relativePath]
|
||||
: this.noCacheReadFileP(relativePath);
|
||||
};
|
||||
|
||||
/**
|
||||
* Read (or re-read) a file without using the cache.
|
||||
*
|
||||
* The new contents are stored in the cache for any future calls to
|
||||
* readFileP.
|
||||
*/
|
||||
RFCp.noCacheReadFileP = function(relativePath) {
|
||||
relativePath = path.normalize(relativePath);
|
||||
|
||||
var added = !hasOwn.call(this.sourceCache, relativePath);
|
||||
var promise = this.sourceCache[relativePath] = util.readFileP(
|
||||
path.join(this.sourceDir, relativePath), this.charset);
|
||||
|
||||
if (added) {
|
||||
this.emit("added", relativePath);
|
||||
}
|
||||
|
||||
return promise;
|
||||
};
|
||||
|
||||
/**
|
||||
* If you have reason to believe the contents of a file have changed, call
|
||||
* this method to re-read the file and compare the new contents to the
|
||||
* cached contents. If the new contents differ from the contents of the
|
||||
* cache, the "changed" event will be emitted.
|
||||
*/
|
||||
RFCp.reportPossiblyChanged = function(relativePath) {
|
||||
var self = this;
|
||||
var cached = self.readFileP(relativePath);
|
||||
var fresh = self.noCacheReadFileP(relativePath);
|
||||
|
||||
Q.spread([
|
||||
cached.catch(orNull),
|
||||
fresh.catch(orNull)
|
||||
], function(oldData, newData) {
|
||||
if (oldData !== newData) {
|
||||
self.emit("changed", relativePath);
|
||||
}
|
||||
}).done();
|
||||
};
|
||||
|
||||
/**
|
||||
* Invoke the given callback for all files currently known to the
|
||||
* ReadFileCache, and invoke it in the future when any new files become
|
||||
* known to the cache.
|
||||
*/
|
||||
RFCp.subscribe = function(callback, context) {
|
||||
for (var relativePath in this.sourceCache) {
|
||||
if (hasOwn.call(this.sourceCache, relativePath)) {
|
||||
callback.call(context || null, relativePath);
|
||||
}
|
||||
}
|
||||
|
||||
this.on("added", function(relativePath) {
|
||||
callback.call(context || null, relativePath);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Avoid memory leaks by removing listeners and emptying the cache.
|
||||
*/
|
||||
RFCp.clear = function() {
|
||||
this.removeAllListeners();
|
||||
|
||||
for (var relativePath in this.sourceCache) {
|
||||
delete this.sourceCache[relativePath];
|
||||
}
|
||||
};
|
||||
|
||||
function orNull(err) {
|
||||
return null;
|
||||
}
|
||||
|
||||
exports.ReadFileCache = ReadFileCache;
|
||||
384
node_modules/commoner/lib/commoner.js
generated
vendored
Normal file
384
node_modules/commoner/lib/commoner.js
generated
vendored
Normal file
@@ -0,0 +1,384 @@
|
||||
var assert = require("assert");
|
||||
var path = require("path");
|
||||
var fs = require("fs");
|
||||
var Q = require("q");
|
||||
var iconv = require("iconv-lite");
|
||||
var ReadFileCache = require("./cache").ReadFileCache;
|
||||
var Watcher = require("./watcher").Watcher;
|
||||
var contextModule = require("./context");
|
||||
var BuildContext = contextModule.BuildContext;
|
||||
var PreferredFileExtension = contextModule.PreferredFileExtension;
|
||||
var ModuleReader = require("./reader").ModuleReader;
|
||||
var output = require("./output");
|
||||
var DirOutput = output.DirOutput;
|
||||
var StdOutput = output.StdOutput;
|
||||
var util = require("./util");
|
||||
var log = util.log;
|
||||
var Ap = Array.prototype;
|
||||
var each = Ap.forEach;
|
||||
|
||||
// Better stack traces for promises.
|
||||
Q.longStackSupport = true;
|
||||
|
||||
function Commoner() {
|
||||
var self = this;
|
||||
assert.ok(self instanceof Commoner);
|
||||
|
||||
Object.defineProperties(self, {
|
||||
customVersion: { value: null, writable: true },
|
||||
customOptions: { value: [] },
|
||||
resolvers: { value: [] },
|
||||
processors: { value: [] }
|
||||
});
|
||||
}
|
||||
|
||||
var Cp = Commoner.prototype;
|
||||
|
||||
Cp.version = function(version) {
|
||||
this.customVersion = version;
|
||||
return this; // For chaining.
|
||||
};
|
||||
|
||||
// Add custom command line options
|
||||
Cp.option = function() {
|
||||
this.customOptions.push(Ap.slice.call(arguments));
|
||||
return this; // For chaining.
|
||||
};
|
||||
|
||||
// A resolver is a function that takes a module identifier and returns
|
||||
// the unmodified source of the corresponding module, either as a string
|
||||
// or as a promise for a string.
|
||||
Cp.resolve = function() {
|
||||
each.call(arguments, function(resolver) {
|
||||
assert.strictEqual(typeof resolver, "function");
|
||||
this.resolvers.push(resolver);
|
||||
}, this);
|
||||
|
||||
return this; // For chaining.
|
||||
};
|
||||
|
||||
// A processor is a function that takes a module identifier and a string
|
||||
// representing the source of the module and returns a modified version of
|
||||
// the source, either as a string or as a promise for a string.
|
||||
Cp.process = function(processor) {
|
||||
each.call(arguments, function(processor) {
|
||||
assert.strictEqual(typeof processor, "function");
|
||||
this.processors.push(processor);
|
||||
}, this);
|
||||
|
||||
return this; // For chaining.
|
||||
};
|
||||
|
||||
Cp.buildP = function(options, roots) {
|
||||
var self = this;
|
||||
var sourceDir = options.sourceDir;
|
||||
var outputDir = options.outputDir;
|
||||
var readFileCache = new ReadFileCache(sourceDir, options.sourceCharset);
|
||||
var waiting = 0;
|
||||
var output = outputDir
|
||||
? new DirOutput(outputDir)
|
||||
: new StdOutput;
|
||||
|
||||
if (self.watch) {
|
||||
new Watcher(readFileCache).on("changed", function(file) {
|
||||
log.err(file + " changed; rebuilding...", "yellow");
|
||||
rebuild();
|
||||
});
|
||||
}
|
||||
|
||||
function outputModules(modules) {
|
||||
// Note that output.outputModules comes pre-bound.
|
||||
modules.forEach(output.outputModule);
|
||||
return modules;
|
||||
}
|
||||
|
||||
function finish(result) {
|
||||
rebuild.ing = false;
|
||||
|
||||
if (waiting > 0) {
|
||||
waiting = 0;
|
||||
process.nextTick(rebuild);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function rebuild() {
|
||||
if (rebuild.ing) {
|
||||
waiting += 1;
|
||||
return;
|
||||
}
|
||||
|
||||
rebuild.ing = true;
|
||||
|
||||
var context = new BuildContext(options, readFileCache);
|
||||
|
||||
if (self.preferredFileExtension)
|
||||
context.setPreferredFileExtension(
|
||||
self.preferredFileExtension);
|
||||
|
||||
context.setCacheDirectory(self.cacheDir);
|
||||
|
||||
context.setIgnoreDependencies(self.ignoreDependencies);
|
||||
|
||||
context.setRelativize(self.relativize);
|
||||
|
||||
context.setUseProvidesModule(self.useProvidesModule);
|
||||
|
||||
return new ModuleReader(
|
||||
context,
|
||||
self.resolvers,
|
||||
self.processors
|
||||
).readMultiP(context.expandIdsOrGlobsP(roots))
|
||||
.then(context.ignoreDependencies ? pass : collectDepsP)
|
||||
.then(outputModules)
|
||||
.then(outputDir ? printModuleIds : pass)
|
||||
.then(finish, function(err) {
|
||||
log.err(err.stack);
|
||||
|
||||
if (!self.watch) {
|
||||
// If we're not building with --watch, throw the error
|
||||
// so that cliBuildP can call process.exit(-1).
|
||||
throw err;
|
||||
}
|
||||
|
||||
finish();
|
||||
});
|
||||
}
|
||||
|
||||
return (
|
||||
// If outputDir is falsy, we can't (and don't need to) mkdirP it.
|
||||
outputDir ? util.mkdirP : Q
|
||||
)(outputDir).then(rebuild);
|
||||
};
|
||||
|
||||
function pass(modules) {
|
||||
return modules;
|
||||
}
|
||||
|
||||
function collectDepsP(rootModules) {
|
||||
var modules = [];
|
||||
var seenIds = {};
|
||||
|
||||
function traverse(module) {
|
||||
if (seenIds.hasOwnProperty(module.id))
|
||||
return Q(modules);
|
||||
seenIds[module.id] = true;
|
||||
|
||||
return module.getRequiredP().then(function(reqs) {
|
||||
return Q.all(reqs.map(traverse));
|
||||
}).then(function() {
|
||||
modules.push(module);
|
||||
return modules;
|
||||
});
|
||||
}
|
||||
|
||||
return Q.all(rootModules.map(traverse)).then(
|
||||
function() { return modules });
|
||||
}
|
||||
|
||||
function printModuleIds(modules) {
|
||||
log.out(JSON.stringify(modules.map(function(module) {
|
||||
return module.id;
|
||||
})));
|
||||
|
||||
return modules;
|
||||
}
|
||||
|
||||
Cp.forceResolve = function(forceId, source) {
|
||||
this.resolvers.unshift(function(id) {
|
||||
if (id === forceId)
|
||||
return source;
|
||||
});
|
||||
};
|
||||
|
||||
Cp.cliBuildP = function() {
|
||||
var version = this.customVersion || require("../package.json").version;
|
||||
return Q.spread([this, version], cliBuildP);
|
||||
};
|
||||
|
||||
function cliBuildP(commoner, version) {
|
||||
var options = require("commander");
|
||||
var workingDir = process.cwd();
|
||||
var sourceDir = workingDir;
|
||||
var outputDir = null;
|
||||
var roots;
|
||||
|
||||
options.version(version)
|
||||
.usage("[options] <source directory> <output directory> [<module ID> [<module ID> ...]]")
|
||||
.option("-c, --config [file]", "JSON configuration file (no file or - means STDIN)")
|
||||
.option("-w, --watch", "Continually rebuild")
|
||||
.option("-x, --extension <js | coffee | ...>",
|
||||
"File extension to assume when resolving module identifiers")
|
||||
.option("--relativize", "Rewrite all module identifiers to be relative")
|
||||
.option("--follow-requires", "Scan modules for required dependencies")
|
||||
.option("--use-provides-module", "Respect @providesModules pragma in files")
|
||||
.option("--cache-dir <directory>", "Alternate directory to use for disk cache")
|
||||
.option("--no-cache-dir", "Disable the disk cache")
|
||||
.option("--source-charset <utf8 | win1252 | ...>",
|
||||
"Charset of source (default: utf8)")
|
||||
.option("--output-charset <utf8 | win1252 | ...>",
|
||||
"Charset of output (default: utf8)");
|
||||
|
||||
commoner.customOptions.forEach(function(customOption) {
|
||||
options.option.apply(options, customOption);
|
||||
});
|
||||
|
||||
options.parse(process.argv.slice(0));
|
||||
|
||||
var pfe = new PreferredFileExtension(options.extension || "js");
|
||||
|
||||
// TODO Decide whether passing options to buildP via instance
|
||||
// variables is preferable to passing them as arguments.
|
||||
commoner.preferredFileExtension = pfe;
|
||||
commoner.watch = options.watch;
|
||||
commoner.ignoreDependencies = !options.followRequires;
|
||||
commoner.relativize = options.relativize;
|
||||
commoner.useProvidesModule = options.useProvidesModule;
|
||||
commoner.sourceCharset = normalizeCharset(options.sourceCharset);
|
||||
commoner.outputCharset = normalizeCharset(options.outputCharset);
|
||||
|
||||
function fileToId(file) {
|
||||
file = absolutePath(workingDir, file);
|
||||
assert.ok(fs.statSync(file).isFile(), file);
|
||||
return pfe.trim(path.relative(sourceDir, file));
|
||||
}
|
||||
|
||||
var args = options.args.slice(0);
|
||||
var argc = args.length;
|
||||
if (argc === 0) {
|
||||
if (options.config === true) {
|
||||
log.err("Cannot read --config from STDIN when reading " +
|
||||
"source from STDIN");
|
||||
process.exit(-1);
|
||||
}
|
||||
|
||||
sourceDir = workingDir;
|
||||
outputDir = null;
|
||||
roots = ["<stdin>"];
|
||||
commoner.forceResolve("<stdin>", util.readFromStdinP());
|
||||
|
||||
// Ignore dependencies because we wouldn't know how to find them.
|
||||
commoner.ignoreDependencies = true;
|
||||
|
||||
} else {
|
||||
var first = absolutePath(workingDir, args[0]);
|
||||
var stats = fs.statSync(first);
|
||||
|
||||
if (argc === 1) {
|
||||
var firstId = fileToId(first);
|
||||
sourceDir = workingDir;
|
||||
outputDir = null;
|
||||
roots = [firstId];
|
||||
commoner.forceResolve(
|
||||
firstId,
|
||||
util.readFileP(first, commoner.sourceCharset)
|
||||
);
|
||||
|
||||
// Ignore dependencies because we wouldn't know how to find them.
|
||||
commoner.ignoreDependencies = true;
|
||||
|
||||
} else if (stats.isDirectory(first)) {
|
||||
sourceDir = first;
|
||||
outputDir = absolutePath(workingDir, args[1]);
|
||||
roots = args.slice(2);
|
||||
if (roots.length === 0)
|
||||
roots.push(commoner.preferredFileExtension.glob());
|
||||
|
||||
} else {
|
||||
options.help();
|
||||
process.exit(-1);
|
||||
}
|
||||
}
|
||||
|
||||
commoner.cacheDir = null;
|
||||
if (options.cacheDir === false) {
|
||||
// Received the --no-cache-dir option, so disable the disk cache.
|
||||
} else if (typeof options.cacheDir === "string") {
|
||||
commoner.cacheDir = absolutePath(workingDir, options.cacheDir);
|
||||
} else if (outputDir) {
|
||||
// The default cache directory lives inside the output directory.
|
||||
commoner.cacheDir = path.join(outputDir, ".module-cache");
|
||||
}
|
||||
|
||||
var promise = getConfigP(
|
||||
workingDir,
|
||||
options.config
|
||||
).then(function(config) {
|
||||
var cleanOptions = {};
|
||||
|
||||
options.options.forEach(function(option) {
|
||||
var name = util.camelize(option.name());
|
||||
if (options.hasOwnProperty(name)) {
|
||||
cleanOptions[name] = options[name];
|
||||
}
|
||||
});
|
||||
|
||||
cleanOptions.version = version;
|
||||
cleanOptions.config = config;
|
||||
cleanOptions.sourceDir = sourceDir;
|
||||
cleanOptions.outputDir = outputDir;
|
||||
cleanOptions.sourceCharset = commoner.sourceCharset;
|
||||
cleanOptions.outputCharset = commoner.outputCharset;
|
||||
|
||||
return commoner.buildP(cleanOptions, roots);
|
||||
});
|
||||
|
||||
if (!commoner.watch) {
|
||||
// If we're building from the command line without --watch, any
|
||||
// build errors should immediately terminate the process with a
|
||||
// non-zero error code.
|
||||
promise = promise.catch(function(err) {
|
||||
log.err(err.stack);
|
||||
process.exit(-1);
|
||||
});
|
||||
}
|
||||
|
||||
return promise;
|
||||
}
|
||||
|
||||
// Canonicalize a user-supplied charset name (e.g. "UTF-8", "win 1252")
// by stripping spaces/hyphens and lowercasing, defaulting to "utf8".
// Fails fast if iconv-lite does not recognize the resulting encoding.
function normalizeCharset(charset) {
    var normalized;

    if (charset) {
        normalized = charset.replace(/[- ]/g, "").toLowerCase();
    }

    if (!normalized) {
        normalized = "utf8";
    }

    assert.ok(iconv.encodingExists(normalized),
              "Unrecognized charset: " + normalized);

    return normalized;
}
|
||||
|
||||
// Resolve `pathToJoin` against `workingDir` unless it is already an
// absolute path. Falsy inputs (null, "") are returned unchanged.
function absolutePath(workingDir, pathToJoin) {
    if (!pathToJoin) {
        return pathToJoin;
    }

    var dir = path.normalize(workingDir);
    var target = path.normalize(pathToJoin);

    // TODO: use path.isAbsolute when Node < 0.10 is unsupported
    return path.resolve(target) === target
        ? target
        : path.join(dir, target);
}
|
||||
|
||||
// Load build configuration as a promise: {} when --config was absent,
// JSON parsed from STDIN when --config points at standard input, or the
// parsed contents of the named JSON file otherwise.
function getConfigP(workingDir, configFile) {
    if (typeof configFile === "undefined") {
        // No --config option given at all.
        return Q({});
    }

    var stdinAliases = [
        true, // --config is present but has no argument
        "<stdin>",
        "-",
        path.sep + path.join("dev", "stdin")
    ];

    // indexOf uses strict equality, so `true` only matches the
    // argument-less --config case.
    if (stdinAliases.indexOf(configFile) >= 0) {
        return util.readJsonFromStdinP(
            1000, // Time limit in milliseconds before warning displayed.
            "Expecting configuration from STDIN (pass --config <file> " +
                "if stuck here)...",
            "yellow"
        );
    }

    return util.readJsonFileP(absolutePath(workingDir, configFile));
}
|
||||
|
||||
exports.Commoner = Commoner;
|
||||
265
node_modules/commoner/lib/context.js
generated
vendored
Normal file
265
node_modules/commoner/lib/context.js
generated
vendored
Normal file
@@ -0,0 +1,265 @@
|
||||
var assert = require("assert");
|
||||
var path = require("path");
|
||||
var Q = require("q");
|
||||
var util = require("./util");
|
||||
var spawn = require("child_process").spawn;
|
||||
var ReadFileCache = require("./cache").ReadFileCache;
|
||||
var grepP = require("./grep");
|
||||
var glob = require("glob");
|
||||
var env = process.env;
|
||||
|
||||
// Immutable bag of per-build state shared by resolvers and processors.
// `options` is frozen so that its deepHash stays valid for the whole
// build; `optionsHash` feeds into every module's cache key.
function BuildContext(options, readFileCache) {
    assert.ok(this instanceof BuildContext);
    assert.ok(readFileCache instanceof ReadFileCache);

    if (!options) {
        options = {};
    } else {
        assert.strictEqual(typeof options, "object");
    }

    Object.freeze(options);

    Object.defineProperties(this, {
        readFileCache: { value: readFileCache },
        config: { value: options.config },
        options: { value: options },
        optionsHash: { value: util.deepHash(options) }
    });
}
|
||||
|
||||
var BCp = BuildContext.prototype;
|
||||
|
||||
// Convenience wrapper so processors can build promises from node-style
// callbacks without importing ./util themselves.
BCp.makePromise = function(callback, context) {
    return util.makePromise(callback, context);
};

// Spawn a child process and resolve (never reject) with a two-element
// array [err, stdout]. `err` is undefined on success, or an object
// { code, text } when the exit code is nonzero or anything was written
// to stderr. kwargs may supply `cwd` and `stdin` (written to the
// child's standard input and then closed).
BCp.spawnP = function(command, args, kwargs) {
    args = args || [];
    kwargs = kwargs || {};

    var deferred = Q.defer();

    var outs = [];
    var errs = [];

    var options = {
        stdio: "pipe",
        env: env
    };

    if (kwargs.cwd) {
        options.cwd = kwargs.cwd;
    }

    var child = spawn(command, args, options);

    child.stdout.on("data", function(data) {
        outs.push(data);
    });

    child.stderr.on("data", function(data) {
        errs.push(data);
    });

    child.on("close", function(code) {
        // `var err` is hoisted to function scope, so when neither
        // condition holds it remains undefined in the resolution below.
        if (errs.length > 0 || code !== 0) {
            var err = {
                code: code,
                text: errs.join("")
            };
        }

        deferred.resolve([err, outs.join("")]);
    });

    // kwargs has already been defaulted to {}, so the && guard here is
    // purely defensive.
    var stdin = kwargs && kwargs.stdin;
    if (stdin) {
        child.stdin.end(stdin);
    }

    return deferred.promise;
};
|
||||
|
||||
// Each setter below installs a non-writable own property; calling it on
// the prototype (as done immediately after each definition) establishes
// a default that individual BuildContext instances can shadow by
// calling the same setter on themselves.

BCp.setIgnoreDependencies = function(value) {
    Object.defineProperty(this, "ignoreDependencies", {
        value: Boolean(value)
    });
};

// This default can be overridden by individual BuildContext instances.
BCp.setIgnoreDependencies(false);

BCp.setRelativize = function(value) {
    Object.defineProperty(this, "relativize", {
        value: Boolean(value)
    });
};

// This default can be overridden by individual BuildContext instances.
BCp.setRelativize(false);

BCp.setUseProvidesModule = function(value) {
    Object.defineProperty(this, "useProvidesModule", {
        value: Boolean(value)
    });
};

// This default can be overridden by individual BuildContext instances.
BCp.setUseProvidesModule(false);

BCp.setCacheDirectory = function(dir) {
    if (dir) {
        assert.strictEqual(typeof dir, "string");
    }

    // A falsy argument disables the disk cache.
    Object.defineProperty(this, "cacheDir", {
        value: dir || null
    });
};

// This default can be overridden by individual BuildContext instances.
BCp.setCacheDirectory(null);
|
||||
|
||||
// Represents the file extension (e.g. "js") that takes precedence when
// resolving module identifiers against files on disk.
function PreferredFileExtension(ext) {
    assert.strictEqual(typeof ext, "string");
    assert.ok(this instanceof PreferredFileExtension);
    Object.defineProperty(this, "extension", {
        value: ext.toLowerCase()
    });
}

var PFEp = PreferredFileExtension.prototype;

// True when `file` ends with this extension (case-insensitive).
PFEp.check = function(file) {
    var last = file.split(".").pop();
    return last.toLowerCase() === this.extension;
};

// Strip the preferred extension (and its dot) from `file`, if present;
// otherwise return `file` unchanged.
PFEp.trim = function(file) {
    if (!this.check(file)) {
        return file;
    }
    var extLen = this.extension.length + 1;
    return file.slice(0, file.length - extLen);
};

// Glob pattern matching every file with the preferred extension.
PFEp.glob = function() {
    return "**/*." + this.extension;
};
|
||||
|
||||
exports.PreferredFileExtension = PreferredFileExtension;
|
||||
|
||||
// Install the PreferredFileExtension instance this context uses when
// turning module identifiers into file names (and back).
BCp.setPreferredFileExtension = function(pfe) {
    assert.ok(pfe instanceof PreferredFileExtension);
    Object.defineProperty(this, "preferredFileExtension", { value: pfe });
};

// This default (".js") can be overridden by individual BuildContext
// instances, e.g. via the --extension command-line option.
BCp.setPreferredFileExtension(new PreferredFileExtension("js"));
|
||||
|
||||
// Expand a mixed list of module identifiers and glob patterns into a
// deduplicated list of valid module identifiers, preserving first-seen
// order. Invalid identifiers produced by glob expansion are dropped.
// (Fix: removed the unused local `context`; `this` is passed to map as
// its thisArg instead.)
BCp.expandIdsOrGlobsP = function(idsOrGlobs) {
    return Q.all(
        idsOrGlobs.map(this.expandSingleIdOrGlobP, this)
    ).then(function(listOfListsOfIDs) {
        var result = [];
        var seen = {};

        util.flatten(listOfListsOfIDs).forEach(function(id) {
            if (!seen.hasOwnProperty(id)) {
                seen[id] = true;
                if (util.isValidModuleId(id))
                    result.push(id);
            }
        });

        return result;
    });
};
|
||||
|
||||
// Expand one identifier-or-glob. Literal module identifiers pass
// through untouched; anything else is treated as a glob relative to the
// source directory, with hidden/temporary files filtered out and the
// preferred extension trimmed from each match.
BCp.expandSingleIdOrGlobP = function(idOrGlob) {
    var context = this;

    return util.makePromise(function(callback) {
        // If idOrGlob already looks like an acceptable identifier,
        // don't try to expand it.
        if (util.isValidModuleId(idOrGlob)) {
            callback(null, [idOrGlob]);
            return;
        }

        var globOptions = {
            cwd: context.readFileCache.sourceDir
        };

        glob(idOrGlob, globOptions, function(err, files) {
            if (err) {
                callback(err);
                return;
            }

            var ids = [];
            files.forEach(function(file) {
                if (!context.isHiddenFile(file)) {
                    ids.push(context.preferredFileExtension.trim(file));
                }
            });

            callback(null, ids);
        });
    });
};
|
||||
|
||||
// Read the source of module `id` by appending the preferred file
// extension and going through the shared read-file cache.
BCp.readModuleP = function(id) {
    var file = id + "." + this.preferredFileExtension.extension;
    return this.readFileCache.readFileP(file);
};
|
||||
|
||||
// Read an arbitrary file (relative to the source directory) through the
// shared read-file cache.
BCp.readFileP = function(file) {
    return this.readFileCache.readFileP(file);
};

// Text editors such as VIM and Emacs often create temporary swap files
// that should be ignored: dotfiles and names ending in "~".
var hiddenExp = /^\.|~$/;
BCp.isHiddenFile = function(file) {
    return hiddenExp.test(path.basename(file));
};

// Grep the whole source tree for @providesModule pragmas and resolve
// with a map from declared module id to relative file path. Memoized
// (no arguments, so the grep runs at most once per context).
BCp.getProvidedP = util.cachedMethod(function() {
    var context = this;
    var pattern = "@providesModule\\s+\\S+";

    return grepP(
        pattern,
        context.readFileCache.sourceDir
    ).then(function(pathToMatch) {
        var idToPath = {};

        // NOTE: the `path` parameter below shadows the `path` module
        // for the body of this callback.
        Object.keys(pathToMatch).sort().forEach(function(path) {
            if (context.isHiddenFile(path))
                return;

            var id = pathToMatch[path].split(/\s+/).pop();

            // If we're about to overwrite an existing module identifier,
            // make sure the corresponding path ends with the preferred
            // file extension. This allows @providesModule directives in
            // .coffee files, for example, but prevents .js~ temporary
            // files from taking precedence over actual .js files.
            if (!idToPath.hasOwnProperty(id) ||
                context.preferredFileExtension.check(path))
                idToPath[id] = path;
        });

        return idToPath;
    });
});
|
||||
|
||||
// Matches "@providesModule <id>" and captures the declared identifier.
// NOTE(review): only literal spaces are accepted between the pragma and
// the id here, whereas the grep pattern in getProvidedP uses \s+ —
// confirm the difference is intentional.
var providesExp = /@providesModule[ ]+(\S+)/;

// Return the module identifier declared by an @providesModule pragma in
// `source`, or null when no pragma is present.
BCp.getProvidedId = function(source) {
    var match = providesExp.exec(source);
    return match ? match[1] : match;
};
|
||||
|
||||
exports.BuildContext = BuildContext;
|
||||
49
node_modules/commoner/lib/grep.js
generated
vendored
Normal file
49
node_modules/commoner/lib/grep.js
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
var assert = require("assert");
|
||||
var path = require("path");
|
||||
var Q = require("q");
|
||||
var fs = require("graceful-fs");
|
||||
var util = require("./util");
|
||||
var readdir = Q.denodeify(fs.readdir);
|
||||
var lstat = Q.denodeify(fs.lstat);
|
||||
|
||||
// Recursively walk `dir`, grepping every non-directory entry for
// `pattern`. Resolves with a flat array of { path, match } records.
function processDirP(pattern, dir) {
    return readdir(dir).then(function(entries) {
        var childPs = entries.map(function(entry) {
            var child = path.join(dir, entry);
            return lstat(child).then(function(stat) {
                if (stat.isDirectory()) {
                    return processDirP(pattern, child);
                }
                return processFileP(pattern, child);
            });
        });

        return Q.all(childPs).then(function(results) {
            return util.flatten(results);
        });
    });
}
|
||||
|
||||
// Grep a single file for `pattern`. Resolves with a one-element array
// containing { path, match } when the pattern occurs, else [].
function processFileP(pattern, file) {
    return util.readFileP(file).then(function(contents) {
        var match = new RegExp(pattern, 'g').exec(contents);
        if (!match) {
            return [];
        }
        return [{
            path: file,
            match: match[0]
        }];
    });
}
|
||||
|
||||
module.exports = function(pattern, sourceDir) {
|
||||
assert.strictEqual(typeof pattern, "string");
|
||||
|
||||
return processDirP(pattern, sourceDir).then(function(results) {
|
||||
var pathToMatch = {};
|
||||
|
||||
results.forEach(function(result) {
|
||||
pathToMatch[path.relative(
|
||||
sourceDir,
|
||||
result.path
|
||||
).split("\\").join("/")] = result.match;
|
||||
});
|
||||
|
||||
return pathToMatch;
|
||||
});
|
||||
};
|
||||
58
node_modules/commoner/lib/output.js
generated
vendored
Normal file
58
node_modules/commoner/lib/output.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
var assert = require("assert");
|
||||
var util = require("./util");
|
||||
var log = util.log;
|
||||
|
||||
// Abstract base class for build outputs. Binding outputModule in the
// constructor lets instances hand it around as a free function.
function AbstractOutput() {
    assert.ok(this instanceof AbstractOutput);
    Object.defineProperties(this, {
        outputModule: { value: this.outputModule.bind(this) }
    });
}

var AOp = AbstractOutput.prototype;
exports.AbstractOutput = AbstractOutput;

// Subclasses must override this to consume one built module.
AOp.outputModule = function(module) {
    throw new Error("not implemented");
};

// Output strategy that prints each module's source to standard out.
function StdOutput() {
    assert.ok(this instanceof StdOutput);
    AbstractOutput.call(this);
}

var SOp = util.inherits(StdOutput, AbstractOutput);
exports.StdOutput = StdOutput;

SOp.outputModule = function(module) {
    log.out(module.source);
};

// Output strategy that writes each module's files under `outputDir`.
function DirOutput(outputDir) {
    assert.ok(this instanceof DirOutput);
    assert.strictEqual(typeof outputDir, "string");
    AbstractOutput.call(this);

    Object.defineProperties(this, {
        outputDir: { value: outputDir }
    });
}

var DOp = util.inherits(DirOutput, AbstractOutput);
exports.DirOutput = DirOutput;

// Returns the promise from writeVersionP so callers can await the write.
DOp.outputModule = function(module) {
    return module.writeVersionP(this.outputDir);
};

// Output strategy for tests: builds run, nothing is emitted.
function TestOutput() {
    assert.ok(this instanceof TestOutput);
    AbstractOutput.call(this);
}

var TOp = util.inherits(TestOutput, AbstractOutput);
exports.TestOutput = TestOutput;

TOp.outputModule = function(module) {
    // Swallow any output.
};
|
||||
326
node_modules/commoner/lib/reader.js
generated
vendored
Normal file
326
node_modules/commoner/lib/reader.js
generated
vendored
Normal file
@@ -0,0 +1,326 @@
|
||||
var assert = require("assert");
|
||||
var path = require("path");
|
||||
var fs = require("fs");
|
||||
var Q = require("q");
|
||||
var iconv = require("iconv-lite");
|
||||
var createHash = require("crypto").createHash;
|
||||
var detective = require("detective");
|
||||
var util = require("./util");
|
||||
var BuildContext = require("./context").BuildContext;
|
||||
var slice = Array.prototype.slice;
|
||||
|
||||
// Compute the unique set of module identifiers required by `source`,
// resolving each require() argument relative to the requiring module's
// own id.
function getRequiredIDs(id, source) {
    var seen = {};
    detective(source).forEach(function (dep) {
        var absolute = path.normalize(path.join(id, "..", dep));
        seen[absolute] = true;
    });
    return Object.keys(seen);
}
|
||||
|
||||
// Reads and processes module sources. The constructor folds the
// context's options hash plus the string form of every resolver and
// processor callback into a single SHA-1 `salt`, so any change to the
// pipeline invalidates cached module builds. Hashing order matters:
// resolvers first, then processors.
function ModuleReader(context, resolvers, processors) {
    var self = this;
    assert.ok(self instanceof ModuleReader);
    assert.ok(context instanceof BuildContext);
    assert.ok(resolvers instanceof Array);
    assert.ok(processors instanceof Array);

    var hash = createHash("sha1").update(context.optionsHash + "\0");

    // Mix `salt` and each callback's source text into the running hash
    // and return the flattened callback list.
    function hashCallbacks(salt) {
        hash.update(salt + "\0");

        var cbs = util.flatten(slice.call(arguments, 1));

        cbs.forEach(function(cb) {
            assert.strictEqual(typeof cb, "function");
            hash.update(cb + "\0");
        });

        return cbs;
    }

    // warnMissingModule is appended as the resolver of last resort.
    resolvers = hashCallbacks("resolvers", resolvers, warnMissingModule);

    var procArgs = [processors];
    if (context.relativize && !context.ignoreDependencies)
        procArgs.push(require("./relative").getProcessor(self));
    processors = hashCallbacks("processors", procArgs);

    Object.defineProperties(self, {
        context: { value: context },
        idToHash: { value: {} },
        resolvers: { value: resolvers },
        processors: { value: processors },
        salt: { value: hash.digest("hex") }
    });
}
|
||||
|
||||
ModuleReader.prototype = {
|
||||
// Resolve module `id` to its source text by trying each registered
// resolver in order (the list is copied and reversed so .pop() yields
// them first-to-last). A resolver "wins" by returning (or resolving to)
// a string; any other result or a rejection falls through to the next
// resolver. Memoized per id.
getSourceP: util.cachedMethod(function(id) {
    var context = this.context;
    var copy = this.resolvers.slice(0).reverse();
    assert.ok(copy.length > 0, "no source resolvers registered");

    function tryNextResolverP() {
        var resolve = copy.pop();

        try {
            // `resolve` may be undefined once the list is exhausted;
            // Q(undefined) then resolves the chain with undefined.
            var promise = Q(resolve && resolve.call(context, id));
        } catch (e) {
            promise = Q.reject(e);
        }

        // Both non-string results and rejections advance to the next
        // resolver; only a string source ends the search.
        return resolve ? promise.then(function(result) {
            if (typeof result === "string")
                return result;
            return tryNextResolverP();
        }, tryNextResolverP) : promise;
    }

    return tryNextResolverP();
}),
|
||||
|
||||
getCanonicalIdP: util.cachedMethod(function(id) {
|
||||
var reader = this;
|
||||
if (reader.context.useProvidesModule) {
|
||||
return reader.getSourceP(id).then(function(source) {
|
||||
return reader.context.getProvidedId(source) || id;
|
||||
});
|
||||
} else {
|
||||
return Q(id);
|
||||
}
|
||||
}),
|
||||
|
||||
// Read, hash, and build the module named `id`. The content hash mixes
// the reader's salt, so pipeline changes rebuild everything. Memoized
// per requested id.
readModuleP: util.cachedMethod(function(id) {
    var reader = this;

    return reader.getSourceP(id).then(function(source) {
        if (reader.context.useProvidesModule) {
            // If the source contains a @providesModule declaration, treat
            // that declaration as canonical. Note that the Module object
            // returned by readModuleP might have an .id property whose
            // value differs from the original id parameter.
            id = reader.context.getProvidedId(source) || id;
        }

        assert.strictEqual(typeof source, "string");

        // Content hash: kind tag, canonical id, pipeline salt, and the
        // length-prefixed source text.
        var hash = createHash("sha1")
            .update("module\0")
            .update(id + "\0")
            .update(reader.salt + "\0")
            .update(source.length + "\0" + source)
            .digest("hex");

        if (reader.idToHash.hasOwnProperty(id)) {
            // Ensure that the same module identifier is not
            // provided by distinct modules.
            assert.strictEqual(
                reader.idToHash[id], hash,
                "more than one module named " +
                    JSON.stringify(id));
        } else {
            reader.idToHash[id] = hash;
        }

        return reader.buildModuleP(id, hash, source);
    });
}),
|
||||
|
||||
buildModuleP: util.cachedMethod(function(id, hash, source) {
|
||||
var reader = this;
|
||||
return reader.processOutputP(
|
||||
id, hash, source
|
||||
).then(function(output) {
|
||||
return new Module(reader, id, hash, output);
|
||||
});
|
||||
}, function(id, hash, source) {
|
||||
return hash;
|
||||
}),
|
||||
|
||||
processOutputP: function(id, hash, source) {
|
||||
var reader = this;
|
||||
var cacheDir = reader.context.cacheDir;
|
||||
var manifestDir = cacheDir && path.join(cacheDir, "manifest");
|
||||
var charset = reader.context.options.outputCharset;
|
||||
|
||||
function buildP() {
|
||||
var promise = Q(source);
|
||||
|
||||
reader.processors.forEach(function(build) {
|
||||
promise = promise.then(function(input) {
|
||||
return util.waitForValuesP(
|
||||
build.call(reader.context, id, input)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
return promise.then(function(output) {
|
||||
if (typeof output === "string") {
|
||||
output = { ".js": output };
|
||||
} else {
|
||||
assert.strictEqual(typeof output, "object");
|
||||
}
|
||||
|
||||
return util.waitForValuesP(output);
|
||||
|
||||
}).then(function(output) {
|
||||
util.log.err(
|
||||
"built Module(" + JSON.stringify(id) + ")",
|
||||
"cyan"
|
||||
);
|
||||
|
||||
return output;
|
||||
|
||||
}).catch(function(err) {
|
||||
// Provide additional context for uncaught build errors.
|
||||
util.log.err("Error while reading module " + id + ":");
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
if (manifestDir) {
|
||||
return util.mkdirP(manifestDir).then(function(manifestDir) {
|
||||
var manifestFile = path.join(manifestDir, hash + ".json");
|
||||
|
||||
return util.readJsonFileP(manifestFile).then(function(manifest) {
|
||||
Object.keys(manifest).forEach(function(key) {
|
||||
var cacheFile = path.join(cacheDir, manifest[key]);
|
||||
manifest[key] = util.readFileP(cacheFile);
|
||||
});
|
||||
|
||||
return util.waitForValuesP(manifest, true);
|
||||
|
||||
}).catch(function(err) {
|
||||
return buildP().then(function(output) {
|
||||
var manifest = {};
|
||||
|
||||
Object.keys(output).forEach(function(key) {
|
||||
var cacheFile = manifest[key] = hash + key;
|
||||
var fullPath = path.join(cacheDir, cacheFile);
|
||||
|
||||
if (charset) {
|
||||
fs.writeFileSync(fullPath, iconv.encode(output[key], charset))
|
||||
} else {
|
||||
fs.writeFileSync(fullPath, output[key], "utf8");
|
||||
}
|
||||
});
|
||||
|
||||
fs.writeFileSync(
|
||||
manifestFile,
|
||||
JSON.stringify(manifest),
|
||||
"utf8"
|
||||
);
|
||||
|
||||
return output;
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
return buildP();
|
||||
},
|
||||
|
||||
readMultiP: function(ids) {
|
||||
var reader = this;
|
||||
|
||||
return Q(ids).all().then(function(ids) {
|
||||
if (ids.length === 0)
|
||||
return ids; // Shortcut.
|
||||
|
||||
var modulePs = ids.map(reader.readModuleP, reader);
|
||||
return Q(modulePs).all().then(function(modules) {
|
||||
var seen = {};
|
||||
var result = [];
|
||||
|
||||
modules.forEach(function(module) {
|
||||
if (!seen.hasOwnProperty(module.id)) {
|
||||
seen[module.id] = true;
|
||||
result.push(module);
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
});
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
exports.ModuleReader = ModuleReader;
|
||||
|
||||
// Resolver of last resort. A missing module may be a false positive
// (e.g. a require detected inside dead code), so we only warn instead
// of aborting the build; the stub source returned here turns the
// problem into a run-time error if the module is ever actually required.
function warnMissingModule(id) {
    util.log.err(
        "unable to resolve module " + JSON.stringify(id) + "; false positive?",
        "yellow");

    var message = "nonexistent module required: " + id;
    return "throw new Error(" + JSON.stringify(message) + ");";
}
|
||||
|
||||
// Immutable record describing one fully-built module: its processed
// output (keyed by extension), its primary ".js" source, and the ids of
// the modules that source requires.
function Module(reader, id, hash, output) {
    assert.ok(this instanceof Module);
    assert.ok(reader instanceof ModuleReader);
    assert.strictEqual(typeof output, "object");

    // Every module must have a ".js" entry in its output.
    var source = output[".js"];
    assert.strictEqual(typeof source, "string");

    Object.defineProperties(this, {
        reader: { value: reader },
        id: { value: id },
        hash: { value: hash }, // TODO Remove?
        deps: { value: getRequiredIDs(id, source) },
        source: { value: source },
        output: { value: output }
    });
}
|
||||
|
||||
Module.prototype = {
|
||||
getRequiredP: function() {
|
||||
return this.reader.readMultiP(this.deps);
|
||||
},
|
||||
|
||||
writeVersionP: function(outputDir) {
|
||||
var id = this.id;
|
||||
var hash = this.hash;
|
||||
var output = this.output;
|
||||
var cacheDir = this.reader.context.cacheDir;
|
||||
var charset = this.reader.context.options.outputCharset;
|
||||
|
||||
return Q.all(Object.keys(output).map(function(key) {
|
||||
var outputFile = path.join(outputDir, id + key);
|
||||
|
||||
function writeCopy() {
|
||||
if (charset) {
|
||||
fs.writeFileSync(outputFile, iconv.encode(output[key], charset));
|
||||
} else {
|
||||
fs.writeFileSync(outputFile, output[key], "utf8");
|
||||
}
|
||||
return outputFile;
|
||||
}
|
||||
|
||||
if (cacheDir) {
|
||||
var cacheFile = path.join(cacheDir, hash + key);
|
||||
return util.linkP(cacheFile, outputFile)
|
||||
// If the hard linking fails, the cache directory
|
||||
// might be on a different device, so fall back to
|
||||
// writing a copy of the file (slightly slower).
|
||||
.catch(writeCopy);
|
||||
}
|
||||
|
||||
return util.mkdirP(path.dirname(outputFile)).then(writeCopy);
|
||||
}));
|
||||
},
|
||||
|
||||
// Human-readable representation used in log messages.
toString: function() {
    return "Module(" + JSON.stringify(this.id) + ")";
},

// Resolve a (possibly relative) required id against this module's id.
resolveId: function(id) {
    return util.absolutize(this.id, id);
}
|
||||
};
|
||||
87
node_modules/commoner/lib/relative.js
generated
vendored
Normal file
87
node_modules/commoner/lib/relative.js
generated
vendored
Normal file
@@ -0,0 +1,87 @@
|
||||
var assert = require("assert");
|
||||
var Q = require("q");
|
||||
var path = require("path");
|
||||
var util = require("./util");
|
||||
var recast = require("recast");
|
||||
var n = recast.types.namedTypes;
|
||||
|
||||
// Rewrites require() identifiers into relative form. `reader` may be
// null (no canonicalization) or a ModuleReader used to honor
// @providesModule declarations while absolutizing ids.
function Relativizer(reader) {
    assert.ok(this instanceof Relativizer);

    // Keep the require("./reader") lazy: it only runs when reader is
    // non-null, exactly as in the original short-circuit.
    if (reader !== null) {
        assert.ok(reader instanceof require("./reader").ModuleReader);
    }

    Object.defineProperties(this, {
        reader: { value: reader }
    });
}
|
||||
|
||||
var Rp = Relativizer.prototype;
|
||||
|
||||
exports.getProcessor = function(reader) {
|
||||
var relativizer = new Relativizer(reader);
|
||||
return function(id, input) {
|
||||
return relativizer.processSourceP(id, input);
|
||||
};
|
||||
};
|
||||
|
||||
// Parse the module's ".js" output with recast, rewrite the string
// argument of every one-argument require(...) call to its relative
// form, and reprint. Resolves with the (mutated) output object.
Rp.processSourceP = function(id, input) {
    var relativizer = this;
    // Accept either a bare source string or an output object keyed by
    // extension; normalize to the object form.
    var output = typeof input === "string" ? {
        ".js": input
    } : input;

    return Q(output[".js"]).then(function(source) {
        var promises = [];
        var ast = recast.parse(source);

        // Queue an async rewrite of one require() string literal; the
        // AST node is mutated in place once the new value resolves.
        function fixRequireP(literal) {
            promises.push(relativizer.relativizeP(
                id, literal.value
            ).then(function(newValue) {
                return literal.value = newValue;
            }));
        }

        recast.visit(ast, {
            visitCallExpression: function(path) {
                var args = path.value.arguments;
                var callee = path.value.callee;

                // Only plain `require("<string>")` calls are rewritten;
                // computed or multi-argument calls are left alone.
                if (n.Identifier.check(callee) &&
                    callee.name === "require" &&
                    args.length === 1) {
                    var arg = args[0];
                    if (n.Literal.check(arg) &&
                        typeof arg.value === "string") {
                        fixRequireP(arg);
                    }
                }

                this.traverse(path);
            }
        });

        // Wait for every rewrite before reprinting the mutated AST.
        return Q.all(promises).then(function() {
            output[".js"] = recast.print(ast).code;
            return output;
        });
    });
};
|
||||
|
||||
// Absolutize `requiredId` against `moduleId`, then (when a reader is
// available) translate the result to its canonical @providesModule id.
Rp.absolutizeP = function(moduleId, requiredId) {
    var absoluteId = util.absolutize(moduleId, requiredId);

    return this.reader
        ? this.reader.getCanonicalIdP(absoluteId)
        : Q(absoluteId);
};
|
||||
|
||||
// Compute the relative identifier that module `moduleId` should use to
// require `requiredId`.
Rp.relativizeP = function(moduleId, requiredId) {
    var absoluteP = this.absolutizeP(moduleId, requiredId);

    return absoluteP.then(function(absoluteId) {
        return util.relativize(moduleId, absoluteId);
    });
};
|
||||
370
node_modules/commoner/lib/util.js
generated
vendored
Normal file
370
node_modules/commoner/lib/util.js
generated
vendored
Normal file
@@ -0,0 +1,370 @@
|
||||
var assert = require("assert");
|
||||
var path = require("path");
|
||||
var fs = require("graceful-fs");
|
||||
var Q = require("q");
|
||||
var createHash = require("crypto").createHash;
|
||||
var mkdirp = require("mkdirp");
|
||||
var iconv = require("iconv-lite");
|
||||
var Ap = Array.prototype;
|
||||
var slice = Ap.slice;
|
||||
var join = Ap.join;
|
||||
|
||||
// The graceful-fs module attempts to limit the total number of open files
|
||||
// by queueing fs operations, but it doesn't know about all open files, so
|
||||
// we set the limit somewhat lower than the default to provide a healthy
|
||||
// buffer against EMFILE (too many open files) errors.
|
||||
fs.MAX_OPEN = 512;
|
||||
|
||||
// Adapt a node-style callback API to a promise. `callback` receives a
// finish(err, result) function and runs on the next tick, so the
// promise is always returned before any work happens. Synchronous
// throws inside `callback` reject the promise instead of escaping.
function makePromise(callback, context) {
    var deferred = Q.defer();

    var finish = function(err, result) {
        if (err) {
            deferred.reject(err);
        } else {
            deferred.resolve(result);
        }
    };

    process.nextTick(function() {
        try {
            callback.call(context || null, finish);
        } catch (err) {
            finish(err);
        }
    });

    return deferred.promise;
}
exports.makePromise = makePromise;
|
||||
|
||||
exports.cachedMethod = function(fn, keyFn) {
|
||||
var p = require("private").makeAccessor();
|
||||
|
||||
function wrapper() {
|
||||
var priv = p(this);
|
||||
var cache = priv.cache || (priv.cache = {});
|
||||
var args = arguments;
|
||||
var key = keyFn
|
||||
? keyFn.apply(this, args)
|
||||
: join.call(args, "\0");
|
||||
return cache.hasOwnProperty(key)
|
||||
? cache[key]
|
||||
: cache[key] = fn.apply(this, args);
|
||||
}
|
||||
|
||||
wrapper.originalFn = fn;
|
||||
|
||||
return wrapper;
|
||||
};
|
||||
|
||||
// Read `file` as a promise for its text. With a `charset`, the raw
// buffer is decoded via iconv-lite; otherwise the file is read directly
// as UTF-8.
function readFileP(file, charset) {
    if (!charset) {
        return makePromise(function(callback) {
            fs.readFile(file, "utf8", callback);
        });
    }

    return makePromise(function(callback) {
        fs.readFile(file, function(err, data) {
            if (err) {
                callback(err);
            } else {
                callback(null, iconv.decode(data, charset));
            }
        });
    });
}
exports.readFileP = readFileP;
|
||||
|
||||
exports.readJsonFileP = function(file) {
|
||||
return readFileP(file).then(function(json) {
|
||||
return JSON.parse(json);
|
||||
});
|
||||
};
|
||||
|
||||
// Recursively create `dir` (like mkdir -p), resolving with the
// directory path itself.
function mkdirP(dir) {
    return makePromise(function(callback) {
        mkdirp(dir, function(err) {
            callback(err, dir);
        });
    });
}
exports.mkdirP = mkdirP;

// Read all of standard input as UTF-8, resolving when STDIN ends. If
// nothing arrives within `timeLimit` ms (default 1000), `message` (or a
// generic warning) is logged in `color` — the promise keeps waiting
// either way.
function readFromStdinP(timeLimit, message, color) {
    var deferred = Q.defer();
    var ins = [];

    timeLimit = timeLimit || 1000;
    var timeout = setTimeout(function() {
        log.err(
            message || ("Warning: still waiting for STDIN after " +
                        timeLimit + "ms"),
            color || "yellow"
        );
    }, timeLimit);

    try {
        // On Windows, just accessing process.stdin throws an exception
        // when no standard input has been provided. For consistency with
        // other platforms, log the error but continue waiting (until
        // killed) for the nonexistent input.
        var stdin = process.stdin;
    } catch (err) {
        log.err(err);
    }

    if (stdin) {
        stdin.resume();
        stdin.setEncoding("utf8");

        stdin.on("data", function(data) {
            ins.push(data);
        }).on("end", function() {
            clearTimeout(timeout);
            deferred.resolve(ins.join(""));
        });
    }

    return deferred.promise;
}
exports.readFromStdinP = readFromStdinP;
|
||||
|
||||
exports.readJsonFromStdinP = function(timeLimit) {
|
||||
return readFromStdinP(timeLimit).then(function(input) {
|
||||
return JSON.parse(input);
|
||||
});
|
||||
};
|
||||
|
||||
// Produce a stable SHA-1 hex digest for a JSON-ish value. Object keys
// are hashed in sorted order, so insertion order does not matter.
// Function-valued nested properties are silently skipped, but a
// function at the root is an error. Primitives hash their string
// representation, so e.g. 42 and "42" yield the same digest.
function deepHash(val) {
    var hash = createHash("sha1");
    var type = val === null ? "null" : typeof val;

    if (type === "function") {
        assert.ok(false, "cannot hash function objects");
    } else if (type === "object") {
        Object.keys(val).sort().forEach(function(key) {
            // Silently ignore nested methods, but nevertheless
            // complain above if the root value is a function.
            if (typeof val[key] !== "function") {
                hash.update(key + "\0")
                    .update(deepHash(val[key]));
            }
        });
    } else {
        hash.update(val + "");
    }

    return hash.digest("hex");
}
|
||||
exports.deepHash = deepHash;
|
||||
|
||||
exports.existsP = function(fullPath) {
|
||||
return makePromise(function(callback) {
|
||||
fs.exists(fullPath, function(exists) {
|
||||
callback(null, exists);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
// Synchronously writes `content` (coerced to a string) to the open file
// descriptor `fd`, then closes the descriptor — even if the write
// fails. Resolves to the string that was written.
function writeFdP(fd, content) {
    return makePromise(function(callback) {
        content += "";
        // Buffer.from replaces the deprecated `new Buffer(string, enc)`
        // constructor (DEP0005); same bytes, no deprecation warning.
        var buffer = Buffer.from(content, "utf8");
        var length = fs.writeSync(fd, buffer, 0, buffer.length, null);
        // A short write would silently truncate the output; fail loudly.
        assert.strictEqual(length, buffer.length);
        callback(null, content);
    }).finally(function() {
        // Always release the descriptor, success or failure.
        fs.closeSync(fd);
    });
}
exports.writeFdP = writeFdP;
|
||||
|
||||
// Opens `file` with fs.open and resolves to the file descriptor.
// Defaults to mode "w+" (read/write, create or truncate).
function openFileP(file, mode) {
    return makePromise(function(callback) {
        fs.open(file, mode || "w+", callback);
    });
}
exports.openFileP = openFileP;

// Like openFileP, but rejects if the file already exists.
function openExclusiveP(file) {
    // The 'x' in "wx+" means the file must be newly created.
    return openFileP(file, "wx+");
}
exports.openExclusiveP = openExclusiveP;
|
||||
|
||||
// Copies srcFile to dstFile by streaming, resolving to dstFile once the
// write stream has flushed everything ("finish").
exports.copyP = function(srcFile, dstFile) {
    return makePromise(function(callback) {
        var reader = fs.createReadStream(srcFile);

        // Shared failure path for both streams. Stream errors should
        // carry an Error object, but synthesize a descriptive one just
        // in case so the rejection is never empty.
        function onError(err) {
            callback(err || new Error(
                "error in util.copyP(" +
                JSON.stringify(srcFile) + ", " +
                JSON.stringify(dstFile) + ")"
            ));
        }

        // Route errors from both the reader and the writer to onError;
        // only the writer's "finish" signals a completed copy.
        reader.on("error", onError).pipe(
            fs.createWriteStream(dstFile)
        ).on("finish", function() {
            callback(null, dstFile);
        }).on("error", onError);
    });
};
|
||||
|
||||
// Even though they use synchronous operations to avoid race conditions,
// linkP and unlinkP have promise interfaces, for consistency. Note that
// this means the operation will not happen until at least the next tick
// of the event loop, but it will be atomic when it happens.

// Hard-links srcFile to dstFile, creating dstFile's directory first and
// replacing any existing dstFile. Resolves to dstFile.
exports.linkP = function(srcFile, dstFile) {
    return mkdirP(path.dirname(dstFile)).then(function() {
        // link(2) fails if the target already exists, so clear it first.
        if (fs.existsSync(dstFile))
            fs.unlinkSync(dstFile);
        fs.linkSync(srcFile, dstFile);
        return dstFile;
    });
};
|
||||
|
||||
// Removes `file` if it exists (no error when absent), resolving to the
// file path. Synchronous internally; see the note above linkP.
exports.unlinkP = function(file) {
    return makePromise(function(callback) {
        try {
            if (fs.existsSync(file))
                fs.unlinkSync(file);
            callback(null, file);
        } catch (err) {
            // e.g. permission errors from unlinkSync.
            callback(err);
        }
    });
};
|
||||
|
||||
// ANSI terminal color/style escape sequences. Written with \x1b (hex
// ESC) rather than the original \033 octal escapes, which are a syntax
// error in strict mode and ES modules; the resulting strings are
// byte-for-byte identical.
var colors = {
    bold: "\x1b[1m",
    red: "\x1b[31m",
    green: "\x1b[32m",
    yellow: "\x1b[33m",
    cyan: "\x1b[36m",
    reset: "\x1b[0m"
};
|
||||
|
||||
// For each color (except "reset"), export a helper that wraps text in
// that color's escape code and resets afterwards, e.g. exports.red("!").
Object.keys(colors).forEach(function(name) {
    if (name === "reset") {
        return;
    }
    exports[name] = function(text) {
        return colors[name] + text + colors.reset;
    };
});
|
||||
|
||||
// Minimal console logger with optional ANSI coloring.
var log = exports.log = {
    // Writes trimmed text to stdout, wrapped in `color` when it is one
    // of the known color names; unknown colors are printed plain.
    out: function(text, color) {
        var line = (text + "").trim();
        if (colors.hasOwnProperty(color)) {
            line = colors[color] + line + colors.reset;
        }
        process.stdout.write(line + "\n");
    },

    // Writes trimmed text to stderr; unknown colors fall back to red,
    // so errors are always colorized.
    err: function(text, color) {
        var line = (text + "").trim();
        if (!colors.hasOwnProperty(color)) {
            color = "red";
        }
        process.stderr.write(colors[color] + line + colors.reset + "\n");
    }
};
|
||||
|
||||
// Anything that is not an ASCII letter or hyphen is not slug-safe.
var slugExp = /[^a-z\-]/ig;

// Converts a module ID into an identifier-ish slug by replacing every
// non-letter, non-hyphen character with an underscore.
exports.idToSlug = function(id) {
    return id.replace(slugExp, "_");
};

// Valid module IDs consist of letters, digits, spaces, hyphens,
// underscores, slashes and dots.
var moduleIdExp = /^[ a-z0-9\-_\/\.]+$/i;

// "<stdin>" is special-cased for code piped in on standard input.
exports.isValidModuleId = function(id) {
    return id === "<stdin>" || moduleIdExp.test(id);
};
|
||||
|
||||
var objToStr = Object.prototype.toString;
var arrStr = objToStr.call([]);

// Recursively flattens arbitrarily nested arrays into a single flat
// array (holes in sparse arrays are skipped). A non-array root value is
// returned unchanged; the optional `into` accumulator is used during
// recursion and returned when present.
function flatten(value, into) {
    var isArray = objToStr.call(value) === arrStr;

    if (!isArray) {
        if (!into) {
            // Non-array root: hand the value straight back.
            return value;
        }
        into.push(value);
        return into;
    }

    into = into || [];
    var len = value.length;
    for (var i = 0; i < len; ++i) {
        if (i in value) { // Skip holes.
            flatten(value[i], into);
        }
    }
    return into;
}
|
||||
exports.flatten = flatten;
|
||||
|
||||
// Classical prototype inheritance: replaces ctor.prototype with an
// object backed by base.prototype, restoring a (non-enumerable)
// `constructor` reference. Returns the new prototype object.
exports.inherits = function(ctor, base) {
    return ctor.prototype = Object.create(base.prototype, {
        constructor: { value: ctor }
    });
};
|
||||
|
||||
// Resolves requiredId against the directory of moduleId when it is
// relative (starts with "."); absolute-style IDs pass through. The
// result is normalized and uses forward slashes on every platform.
function absolutize(moduleId, requiredId) {
    var resolved = requiredId.charAt(0) === "."
        ? path.join(moduleId, "..", requiredId)
        : requiredId;
    return path.normalize(resolved).replace(/\\/g, '/');
}
|
||||
exports.absolutize = absolutize;
|
||||
|
||||
// Converts requiredId into an ID relative to moduleId, always prefixed
// with "./" or "../" and using forward slashes.
function relativize(moduleId, requiredId) {
    requiredId = absolutize(moduleId, requiredId);

    // IDs that come back already relative are kept as-is; everything
    // else is relativized against moduleId's directory.
    if (requiredId.charAt(0) !== ".") {
        requiredId = path.relative(
            path.join(moduleId, ".."),
            requiredId
        );
    }

    // Guarantee an explicit leading "./" so the ID reads as relative.
    if (requiredId.charAt(0) !== ".")
        requiredId = "./" + requiredId;

    return requiredId.replace(/\\/g, '/');
}
exports.relativize = relativize;
|
||||
|
||||
// Resolves every (own, enumerable) value of `obj`, returning a promise
// for an object mapping each key to its settled value. When makeCopy is
// true the results go into a fresh object; otherwise `obj` is mutated
// in place. Non-object inputs pass through unchanged.
function waitForValuesP(obj, makeCopy) {
    // typeof null === "object", so check null explicitly; previously a
    // null argument crashed in Object.keys below.
    if (obj === null || typeof obj !== "object")
        return Q(obj);

    var result = makeCopy ? {} : obj;
    var keys = Object.keys(obj);
    if (keys.length === 0)
        return Q(result);

    return Q.all(keys.map(function(key) {
        return obj[key];
    })).then(function(values) {
        // Q.all preserves order, so values[i] corresponds to keys[i].
        for (var i = values.length - 1; i >= 0; --i)
            result[keys[i]] = values[i];
        return result;
    });
}
exports.waitForValuesP = waitForValuesP;
|
||||
|
||||
// Converts a hyphenated name to camelCase, e.g. "foo-bar" -> "fooBar":
// each hyphen is dropped and the character after it is upcased. A
// trailing hyphen (no following character) is left untouched.
function camelize(hyphenated) {
    return hyphenated.replace(/-(.)/g, function(match, letter) {
        return letter.toUpperCase();
    });
}
|
||||
exports.camelize = camelize;
|
||||
255
node_modules/commoner/lib/watcher.js
generated
vendored
Normal file
255
node_modules/commoner/lib/watcher.js
generated
vendored
Normal file
@@ -0,0 +1,255 @@
|
||||
var assert = require("assert");
|
||||
var path = require("path");
|
||||
var fs = require("graceful-fs");
|
||||
var spawn = require("child_process").spawn;
|
||||
var Q = require("q");
|
||||
var EventEmitter = require("events").EventEmitter;
|
||||
var ReadFileCache = require("./cache").ReadFileCache;
|
||||
var util = require("./util");
|
||||
var hasOwn = Object.prototype.hasOwnProperty;
|
||||
|
||||
// Emits "changed" events for files under readFileCache.sourceDir by
// combining a recursive directory watcher (DirWatcher, below) with the
// cache's own change reports.
function Watcher(readFileCache, persistent) {
    assert.ok(this instanceof Watcher);
    assert.ok(this instanceof EventEmitter);
    assert.ok(readFileCache instanceof ReadFileCache);

    // During tests (and only during tests), persistent === false so that
    // the test suite can actually finish and exit.
    if (typeof persistent === "undefined") {
        persistent = true;
    }

    EventEmitter.call(this);

    var self = this;
    var sourceDir = readFileCache.sourceDir;
    var dirWatcher = new DirWatcher(sourceDir, persistent);

    // Expose collaborators as read-only properties.
    Object.defineProperties(self, {
        sourceDir: { value: sourceDir },
        readFileCache: { value: readFileCache },
        dirWatcher: { value: dirWatcher }
    });

    // Watch everything the readFileCache already knows about, and any new
    // files added in the future.
    readFileCache.subscribe(function(relativePath) {
        self.watch(relativePath);
    });

    // Forward cache-level change notifications to this watcher's own
    // listeners.
    readFileCache.on("changed", function(relativePath) {
        self.emit("changed", relativePath);
    });

    // Directory events are ignored until the DirWatcher's initial scan
    // finishes (dirWatcher.ready); afterwards each event funnels into
    // the memoized per-file handler (see Wp.getFileHandler).
    function handleDirEvent(event, relativePath) {
        if (self.dirWatcher.ready) {
            self.getFileHandler(relativePath)(event);
        }
    }

    dirWatcher.on("added", function(relativePath) {
        handleDirEvent("added", relativePath);
    }).on("deleted", function(relativePath) {
        handleDirEvent("deleted", relativePath);
    }).on("changed", function(relativePath) {
        handleDirEvent("changed", relativePath);
    });
}
|
||||
|
||||
util.inherits(Watcher, EventEmitter);
var Wp = Watcher.prototype;

// Begins watching the directory containing relativePath (DirWatcher
// works at directory granularity, hence the path.dirname).
Wp.watch = function(relativePath) {
    this.dirWatcher.add(path.dirname(path.join(
        this.sourceDir, relativePath)));
};

// Cached read of a source file, delegated to the ReadFileCache.
Wp.readFileP = function(relativePath) {
    return this.readFileCache.readFileP(relativePath);
};

// Cache-bypassing read, delegated to the ReadFileCache.
Wp.noCacheReadFileP = function(relativePath) {
    return this.readFileCache.noCacheReadFileP(relativePath);
};

// One handler per relative path (memoized via util.cachedMethod); any
// event simply asks the cache to re-examine the file, and the cache
// emits "changed" only if the contents actually differ.
Wp.getFileHandler = util.cachedMethod(function(relativePath) {
    var self = this;
    return function handler(event) {
        self.readFileCache.reportPossiblyChanged(relativePath);
    };
});
|
||||
|
||||
// Maps any error to null; shaped like a promise rejection handler.
// NOTE(review): appears unused in this portion of the file — verify
// against the rest of watcher.js before removing.
function orNull(err) {
    return null;
}

// Stops watching: closes every underlying fs.FSWatcher.
Wp.close = function() {
    this.dirWatcher.close();
};
|
||||
|
||||
/**
 * DirWatcher code adapted from Jeffrey Lin's original implementation:
 * https://github.com/jeffreylin/jsx_transformer_fun/blob/master/dirWatcher.js
 *
 * Invariant: this only watches the dir inode, not the actual path.
 * That means the dir can't be renamed and swapped with another dir.
 */
function DirWatcher(inputPath, persistent) {
    assert.ok(this instanceof DirWatcher);

    var self = this;
    var absPath = path.resolve(inputPath);

    if (!fs.statSync(absPath).isDirectory()) {
        // (Fixed: the original message was missing the space before "is".)
        throw new Error(inputPath + " is not a directory!");
    }

    EventEmitter.call(self);

    // "ready" is deferred to the next tick (below) so callers can attach
    // listeners before any events fire.
    self.ready = false;
    self.on("ready", function(){
        self.ready = true;
    });

    Object.defineProperties(self, {
        // Map of absDirPaths to fs.FSWatcher objects from fs.watch().
        watchers: { value: {} },
        // Map of absDirPaths to their last-seen readdir listings.
        dirContents: { value: {} },
        rootPath: { value: absPath },
        persistent: { value: !!persistent }
    });

    process.nextTick(function() {
        self.add(absPath);
        self.emit("ready");
    });
}

util.inherits(DirWatcher, EventEmitter);
var DWp = DirWatcher.prototype;
|
||||
|
||||
// Starts watching absDirPath (idempotent per path) and recursively adds
// every subdirectory found beneath it.
DWp.add = function(absDirPath) {
    var self = this;
    if (hasOwn.call(self.watchers, absDirPath)) {
        return;
    }

    // Any fs.watch event triggers a readdir-based diff of the directory.
    self.watchers[absDirPath] = fs.watch(absDirPath, {
        persistent: this.persistent
    }).on("change", function(event, filename) {
        self.updateDirContents(absDirPath, event, filename);
    });

    // Update internal dir contents.
    self.updateDirContents(absDirPath);

    // Since we've never seen this path before, recursively add child
    // directories of this path. TODO: Don't do fs.readdirSync on the
    // same dir twice in a row. We already do an fs.statSync in
    // this.updateDirContents() and we're just going to do another one
    // here...
    fs.readdirSync(absDirPath).forEach(function(filename) {
        var filepath = path.join(absDirPath, filename);

        // Look for directories.
        if (fs.statSync(filepath).isDirectory()) {
            self.add(filepath);
        }
    });
};
|
||||
|
||||
// Re-reads absDirPath, diffs the listing against the last one we saw,
// and emits "added"/"deleted"/"changed" events (paths relative to
// rootPath). Also starts watching subdirectories that appeared and
// forgets state for names that vanished. fsWatchReportedFilename is the
// filename fs.watch reported for this event, if any.
DWp.updateDirContents = function(absDirPath, event, fsWatchReportedFilename) {
    var self = this;

    if (!hasOwn.call(self.dirContents, absDirPath)) {
        self.dirContents[absDirPath] = [];
    }

    var oldContents = self.dirContents[absDirPath];
    var newContents = fs.readdirSync(absDirPath);

    // Set-diff the two listings: names only in the old listing were
    // deleted; names only in the new listing were added.
    var deleted = {};
    var added = {};

    oldContents.forEach(function(filename) {
        deleted[filename] = true;
    });

    newContents.forEach(function(filename) {
        if (hasOwn.call(deleted, filename)) {
            delete deleted[filename];
        } else {
            added[filename] = true;
        }
    });

    var deletedNames = Object.keys(deleted);
    deletedNames.forEach(function(filename) {
        self.emit(
            "deleted",
            path.relative(
                self.rootPath,
                path.join(absDirPath, filename)
            )
        );
    });

    var addedNames = Object.keys(added);
    addedNames.forEach(function(filename) {
        self.emit(
            "added",
            path.relative(
                self.rootPath,
                path.join(absDirPath, filename)
            )
        );
    });

    // So changed is not deleted or added?
    // (A name fs.watch reported that appears in both listings must have
    // been modified in place.)
    if (fsWatchReportedFilename &&
        !hasOwn.call(deleted, fsWatchReportedFilename) &&
        !hasOwn.call(added, fsWatchReportedFilename))
    {
        self.emit(
            "changed",
            path.relative(
                self.rootPath,
                path.join(absDirPath, fsWatchReportedFilename)
            )
        );
    }

    // If any of the things removed were directories, remove their watchers.
    // If a dir was moved, hopefully two changed events fired?
    // 1) event in dir where it was removed
    // 2) event in dir where it was moved to (added)
    // NOTE(review): the FSWatcher is dropped from the map without
    // calling .close() on it — looks like a watcher leak; confirm
    // whether the OS tears it down when the directory disappears.
    deletedNames.forEach(function(filename) {
        var filepath = path.join(absDirPath, filename);
        delete self.dirContents[filepath];
        delete self.watchers[filepath];
    });

    // if any of the things added were directories, recursively deal with them
    addedNames.forEach(function(filename) {
        var filepath = path.join(absDirPath, filename);
        if (fs.existsSync(filepath) &&
            fs.statSync(filepath).isDirectory())
        {
            self.add(filepath);
            // mighttttttt need a self.updateDirContents() here in case
            // we're somehow adding a path that replaces another one...
        }
    });

    // Update state of internal dir contents.
    self.dirContents[absDirPath] = newContents;
};
|
||||
|
||||
// Closes every fs.FSWatcher this DirWatcher has opened, letting the
// process exit even when the watchers were persistent.
DWp.close = function() {
    var watchers = this.watchers;
    Object.keys(watchers).forEach(function(dirPath) {
        watchers[dirPath].close();
    });
};
|
||||
|
||||
exports.Watcher = Watcher;
|
||||
16
node_modules/commoner/main.js
generated
vendored
Normal file
16
node_modules/commoner/main.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
var path = require("path");
|
||||
var Commoner = require("./lib/commoner").Commoner;
|
||||
exports.Commoner = Commoner;
|
||||
|
||||
// Registers a top-level convenience entry point (exports.version, etc.)
// that creates a fresh Commoner, forwards its arguments to the named
// configuration method, kicks off a command-line build, and returns the
// instance for chaining.
function defCallback(name) {
    exports[name] = function() {
        var instance = new Commoner();
        instance[name].apply(instance, arguments);
        instance.cliBuildP();
        return instance;
    };
}

defCallback("version");
defCallback("resolve");
defCallback("process");
|
||||
15
node_modules/commoner/node_modules/graceful-fs/LICENSE
generated
vendored
Normal file
15
node_modules/commoner/node_modules/graceful-fs/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
The ISC License
|
||||
|
||||
Copyright (c) 2011-2022 Isaac Z. Schlueter, Ben Noordhuis, and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
143
node_modules/commoner/node_modules/graceful-fs/README.md
generated
vendored
Normal file
143
node_modules/commoner/node_modules/graceful-fs/README.md
generated
vendored
Normal file
@@ -0,0 +1,143 @@
|
||||
# graceful-fs
|
||||
|
||||
graceful-fs functions as a drop-in replacement for the fs module,
|
||||
making various improvements.
|
||||
|
||||
The improvements are meant to normalize behavior across different
|
||||
platforms and environments, and to make filesystem access more
|
||||
resilient to errors.
|
||||
|
||||
## Improvements over [fs module](https://nodejs.org/api/fs.html)
|
||||
|
||||
* Queues up `open` and `readdir` calls, and retries them once
|
||||
something closes if there is an EMFILE error from too many file
|
||||
descriptors.
|
||||
* fixes `lchmod` for Node versions prior to 0.6.2.
|
||||
* implements `fs.lutimes` if possible. Otherwise it becomes a noop.
|
||||
* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or
|
||||
`lchown` if the user isn't root.
|
||||
* makes `lchmod` and `lchown` become noops, if not available.
|
||||
* retries reading a file if `read` results in EAGAIN error.
|
||||
|
||||
On Windows, it retries renaming a file for up to one second if `EACCES`
|
||||
or `EPERM` error occurs, likely because antivirus software has locked
|
||||
the directory.
|
||||
|
||||
## USAGE
|
||||
|
||||
```javascript
|
||||
// use just like fs
|
||||
var fs = require('graceful-fs')
|
||||
|
||||
// now go and do stuff with it...
|
||||
fs.readFile('some-file-or-whatever', (err, data) => {
|
||||
// Do stuff here.
|
||||
})
|
||||
```
|
||||
|
||||
## Sync methods
|
||||
|
||||
This module cannot intercept or handle `EMFILE` or `ENFILE` errors from sync
|
||||
methods. If you use sync methods which open file descriptors then you are
|
||||
responsible for dealing with any errors.
|
||||
|
||||
This is a known limitation, not a bug.
|
||||
|
||||
## Global Patching
|
||||
|
||||
If you want to patch the global fs module (or any other fs-like
|
||||
module) you can do this:
|
||||
|
||||
```javascript
|
||||
// Make sure to read the caveat below.
|
||||
var realFs = require('fs')
|
||||
var gracefulFs = require('graceful-fs')
|
||||
gracefulFs.gracefulify(realFs)
|
||||
```
|
||||
|
||||
This should only ever be done at the top-level application layer, in
|
||||
order to delay on EMFILE errors from any fs-using dependencies. You
|
||||
should **not** do this in a library, because it can cause unexpected
|
||||
delays in other parts of the program.
|
||||
|
||||
## Changes
|
||||
|
||||
This module is fairly stable at this point, and used by a lot of
|
||||
things. That being said, because it implements a subtle behavior
|
||||
change in a core part of the node API, even modest changes can be
|
||||
extremely breaking, and the versioning is thus biased towards
|
||||
bumping the major when in doubt.
|
||||
|
||||
The main change between major versions has been switching between
|
||||
providing a fully-patched `fs` module vs monkey-patching the node core
|
||||
builtin, and the approach by which a non-monkey-patched `fs` was
|
||||
created.
|
||||
|
||||
The goal is to trade `EMFILE` errors for slower fs operations. So, if
|
||||
you try to open a zillion files, rather than crashing, `open`
|
||||
operations will be queued up and wait for something else to `close`.
|
||||
|
||||
There are advantages to each approach. Monkey-patching the fs means
|
||||
that no `EMFILE` errors can possibly occur anywhere in your
|
||||
application, because everything is using the same core `fs` module,
|
||||
which is patched. However, it can also obviously cause undesirable
|
||||
side-effects, especially if the module is loaded multiple times.
|
||||
|
||||
Implementing a separate-but-identical patched `fs` module is more
|
||||
surgical (and doesn't run the risk of patching multiple times), but
|
||||
also imposes the challenge of keeping in sync with the core module.
|
||||
|
||||
The current approach loads the `fs` module, and then creates a
|
||||
lookalike object that has all the same methods, except a few that are
|
||||
patched. It is safe to use in all versions of Node from 0.8 through
|
||||
7.0.
|
||||
|
||||
### v4
|
||||
|
||||
* Do not monkey-patch the fs module. This module may now be used as a
|
||||
drop-in dep, and users can opt into monkey-patching the fs builtin
|
||||
if their app requires it.
|
||||
|
||||
### v3
|
||||
|
||||
* Monkey-patch fs, because the eval approach no longer works on recent
|
||||
node.
|
||||
* fixed possible type-error throw if rename fails on windows
|
||||
* verify that we *never* get EMFILE errors
|
||||
* Ignore ENOSYS from chmod/chown
|
||||
* clarify that graceful-fs must be used as a drop-in
|
||||
|
||||
### v2.1.0
|
||||
|
||||
* Use eval rather than monkey-patching fs.
|
||||
* readdir: Always sort the results
|
||||
* win32: requeue a file if error has an OK status
|
||||
|
||||
### v2.0
|
||||
|
||||
* A return to monkey patching
|
||||
* wrap process.cwd
|
||||
|
||||
### v1.1
|
||||
|
||||
* wrap readFile
|
||||
* Wrap fs.writeFile.
|
||||
* readdir protection
|
||||
* Don't clobber the fs builtin
|
||||
* Handle fs.read EAGAIN errors by trying again
|
||||
* Expose the curOpen counter
|
||||
* No-op lchown/lchmod if not implemented
|
||||
* fs.rename patch only for win32
|
||||
* Patch fs.rename to handle AV software on Windows
|
||||
* Close #4 Chown should not fail on einval or eperm if non-root
|
||||
* Fix isaacs/fstream#1 Only wrap fs one time
|
||||
* Fix #3 Start at 1024 max files, then back off on EMFILE
|
||||
* lutimes that doesn't blow up on Linux
|
||||
* A full on-rewrite using a queue instead of just swallowing the EMFILE error
|
||||
* Wrap Read/Write streams as well
|
||||
|
||||
### 1.0
|
||||
|
||||
* Update engines for node 0.6
|
||||
* Be lstat-graceful on Windows
|
||||
* first
|
||||
23
node_modules/commoner/node_modules/graceful-fs/clone.js
generated
vendored
Normal file
23
node_modules/commoner/node_modules/graceful-fs/clone.js
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
'use strict'
|
||||
|
||||
module.exports = clone
|
||||
|
||||
// Fallback for pre-ES5 environments that lack Object.getPrototypeOf.
var getPrototypeOf = Object.getPrototypeOf || function (obj) {
  return obj.__proto__
}

// Shallow-clones obj, preserving its prototype chain and copying every
// own property descriptor (getters/setters included). Primitives and
// null pass through unchanged.
function clone (obj) {
  if (obj === null || typeof obj !== 'object')
    return obj

  var copy = obj instanceof Object
    ? { __proto__: getPrototypeOf(obj) }
    : Object.create(null)

  Object.getOwnPropertyNames(obj).forEach(function (key) {
    var desc = Object.getOwnPropertyDescriptor(obj, key)
    Object.defineProperty(copy, key, desc)
  })

  return copy
}
|
||||
448
node_modules/commoner/node_modules/graceful-fs/graceful-fs.js
generated
vendored
Normal file
448
node_modules/commoner/node_modules/graceful-fs/graceful-fs.js
generated
vendored
Normal file
@@ -0,0 +1,448 @@
|
||||
var fs = require('fs')
|
||||
var polyfills = require('./polyfills.js')
|
||||
var legacy = require('./legacy-streams.js')
|
||||
var clone = require('./clone.js')
|
||||
|
||||
var util = require('util')
|
||||
|
||||
/* istanbul ignore next - node 0.x polyfill */
var gracefulQueue
var previousSymbol

/* istanbul ignore else - node 0.x polyfill */
if (typeof Symbol !== 'function' || typeof Symbol.for !== 'function') {
  // Node 0.x has no Symbol support; fall back to improbable string keys.
  gracefulQueue = '___graceful-fs.queue'
  previousSymbol = '___graceful-fs.previous'
} else {
  gracefulQueue = Symbol.for('graceful-fs.queue')
  // This is used in testing by future versions
  previousSymbol = Symbol.for('graceful-fs.previous')
}

function noop () {}

// Exposes `queue` on `context` under the shared gracefulQueue key as a
// getter-only property, so every graceful-fs copy loaded into the
// process observes the same retry queue.
function publishQueue (context, queue) {
  Object.defineProperty(context, gracefulQueue, {
    get: function () {
      return queue
    }
  })
}
|
||||
|
||||
// Debug logger: util.debuglog('gfs4') when available (enabled via
// NODE_DEBUG=gfs4); on very old Node without debuglog, emulate it with
// console.error when NODE_DEBUG mentions gfs4; otherwise a no-op.
var debug = noop
if (util.debuglog)
  debug = util.debuglog('gfs4')
else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || ''))
  debug = function() {
    var m = util.format.apply(util, arguments)
    // Prefix every line so multi-line messages stay attributable.
    m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ')
    console.error(m)
  }
||||
|
||||
// Once time initialization
if (!fs[gracefulQueue]) {
  // This queue can be shared by multiple loaded instances
  var queue = global[gracefulQueue] || []
  publishQueue(fs, queue)

  // Patch fs.close/closeSync to shared queue version, because we need
  // to retry() whenever a close happens *anywhere* in the program.
  // This is essential when multiple graceful-fs instances are
  // in play at the same time.
  fs.close = (function (fs$close) {
    function close (fd, cb) {
      return fs$close.call(fs, fd, function (err) {
        // This function uses the graceful-fs shared queue
        if (!err) {
          // A descriptor was freed, so queued EMFILE retries may now
          // succeed.
          resetQueue()
        }

        if (typeof cb === 'function')
          cb.apply(this, arguments)
      })
    }

    // Stash the unpatched close so future graceful-fs versions (and
    // tests) can detect and unwind this patch.
    Object.defineProperty(close, previousSymbol, {
      value: fs$close
    })
    return close
  })(fs.close)

  fs.closeSync = (function (fs$closeSync) {
    function closeSync (fd) {
      // This function uses the graceful-fs shared queue
      fs$closeSync.apply(fs, arguments)
      resetQueue()
    }

    Object.defineProperty(closeSync, previousSymbol, {
      value: fs$closeSync
    })
    return closeSync
  })(fs.closeSync)

  // With NODE_DEBUG=gfs4, dump the queue at exit and assert that every
  // queued retry was eventually drained.
  if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) {
    process.on('exit', function() {
      debug(fs[gracefulQueue])
      require('assert').equal(fs[gracefulQueue].length, 0)
    })
  }
}

// Publish the shared queue on the global object too, so graceful-fs
// copies loaded from different paths can all find it.
if (!global[gracefulQueue]) {
  publishQueue(global, fs[gracefulQueue]);
}
|
||||
|
||||
// Export a patched lookalike of fs by default, leaving the builtin
// untouched. TEST_GRACEFUL_FS_GLOBAL_PATCH opts into monkey-patching
// the real fs module in place, guarded so it only happens once.
module.exports = patch(clone(fs))
if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) {
  module.exports = patch(fs)
  fs.__patched = true;
}
|
||||
|
||||
function patch (fs) {
|
||||
// Everything that references the open() function needs to be in here
|
||||
polyfills(fs)
|
||||
fs.gracefulify = patch
|
||||
|
||||
fs.createReadStream = createReadStream
fs.createWriteStream = createWriteStream

// Each wrapper below retries its operation whenever the underlying fs
// call fails with EMFILE/ENFILE (file-descriptor exhaustion): the call
// is pushed onto the shared retry queue via enqueue(), together with
// its arguments, the error, and timing info, and re-run after some
// descriptor is closed elsewhere in the program.

var fs$readFile = fs.readFile
fs.readFile = readFile
function readFile (path, options, cb) {
  // Support the optional-options call form readFile(path, cb).
  if (typeof options === 'function')
    cb = options, options = null

  return go$readFile(path, options, cb)

  function go$readFile (path, options, cb, startTime) {
    return fs$readFile(path, options, function (err) {
      if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
        enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()])
      else {
        if (typeof cb === 'function')
          cb.apply(this, arguments)
      }
    })
  }
}

var fs$writeFile = fs.writeFile
fs.writeFile = writeFile
function writeFile (path, data, options, cb) {
  if (typeof options === 'function')
    cb = options, options = null

  return go$writeFile(path, data, options, cb)

  function go$writeFile (path, data, options, cb, startTime) {
    return fs$writeFile(path, data, options, function (err) {
      if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
        enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])
      else {
        if (typeof cb === 'function')
          cb.apply(this, arguments)
      }
    })
  }
}

// appendFile may be absent on very old Node; only wrap it when the
// underlying implementation exists.
var fs$appendFile = fs.appendFile
if (fs$appendFile)
  fs.appendFile = appendFile
function appendFile (path, data, options, cb) {
  if (typeof options === 'function')
    cb = options, options = null

  return go$appendFile(path, data, options, cb)

  function go$appendFile (path, data, options, cb, startTime) {
    return fs$appendFile(path, data, options, function (err) {
      if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
        enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])
      else {
        if (typeof cb === 'function')
          cb.apply(this, arguments)
      }
    })
  }
}

// copyFile likewise only exists on newer Node versions.
var fs$copyFile = fs.copyFile
if (fs$copyFile)
  fs.copyFile = copyFile
function copyFile (src, dest, flags, cb) {
  // flags is optional; default to 0 like the native copyFile.
  if (typeof flags === 'function') {
    cb = flags
    flags = 0
  }
  return go$copyFile(src, dest, flags, cb)

  function go$copyFile (src, dest, flags, cb, startTime) {
    return fs$copyFile(src, dest, flags, function (err) {
      if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
        enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()])
      else {
        if (typeof cb === 'function')
          cb.apply(this, arguments)
      }
    })
  }
}
|
||||
|
||||
// readdir wrapper: retries on EMFILE/ENFILE like the wrappers above,
// sorts the results for deterministic ordering, and omits the options
// argument on Node <= 0.5, where fs.readdir did not accept one.
var fs$readdir = fs.readdir
fs.readdir = readdir
var noReaddirOptionVersions = /^v[0-5]\./
function readdir (path, options, cb) {
  if (typeof options === 'function')
    cb = options, options = null

  // Pick the right call shape once, based on the running Node version.
  var go$readdir = noReaddirOptionVersions.test(process.version)
    ? function go$readdir (path, options, cb, startTime) {
      return fs$readdir(path, fs$readdirCallback(
        path, options, cb, startTime
      ))
    }
    : function go$readdir (path, options, cb, startTime) {
      return fs$readdir(path, options, fs$readdirCallback(
        path, options, cb, startTime
      ))
    }

  return go$readdir(path, options, cb)

  function fs$readdirCallback (path, options, cb, startTime) {
    return function (err, files) {
      if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
        enqueue([
          go$readdir,
          [path, options, cb],
          err,
          startTime || Date.now(),
          Date.now()
        ])
      else {
        // Sort when possible (withFileTypes results are not sortable
        // strings, hence the files.sort guard).
        if (files && files.sort)
          files.sort()

        if (typeof cb === 'function')
          cb.call(this, err, files)
      }
    }
  }
}
|
||||
|
||||
// Node 0.8's fs streams predate streams2; substitute the legacy
// re-implementations on that version only.
if (process.version.substr(0, 4) === 'v0.8') {
  var legStreams = legacy(fs)
  ReadStream = legStreams.ReadStream
  WriteStream = legStreams.WriteStream
}

// Graft the graceful open() (see ReadStream$open/WriteStream$open below)
// onto subclasses of the native stream classes, so EMFILE during stream
// creation is retried rather than surfaced immediately.
var fs$ReadStream = fs.ReadStream
if (fs$ReadStream) {
  ReadStream.prototype = Object.create(fs$ReadStream.prototype)
  ReadStream.prototype.open = ReadStream$open
}

var fs$WriteStream = fs.WriteStream
if (fs$WriteStream) {
  WriteStream.prototype = Object.create(fs$WriteStream.prototype)
  WriteStream.prototype.open = WriteStream$open
}

// Expose the patched classes through accessor properties so that later
// assignments to fs.ReadStream / fs.WriteStream by user code are honored.
Object.defineProperty(fs, 'ReadStream', {
  get: function () {
    return ReadStream
  },
  set: function (val) {
    ReadStream = val
  },
  enumerable: true,
  configurable: true
})
Object.defineProperty(fs, 'WriteStream', {
  get: function () {
    return WriteStream
  },
  set: function (val) {
    WriteStream = val
  },
  enumerable: true,
  configurable: true
})

// legacy names
var FileReadStream = ReadStream
Object.defineProperty(fs, 'FileReadStream', {
  get: function () {
    return FileReadStream
  },
  set: function (val) {
    FileReadStream = val
  },
  enumerable: true,
  configurable: true
})
var FileWriteStream = WriteStream
Object.defineProperty(fs, 'FileWriteStream', {
  get: function () {
    return FileWriteStream
  },
  set: function (val) {
    FileWriteStream = val
  },
  enumerable: true,
  configurable: true
})
|
||||
|
||||
// Constructor wrapper around the captured native fs.ReadStream that
// also works when invoked without `new`.
function ReadStream (path, options) {
  if (!(this instanceof ReadStream)) {
    // Called as a plain function: allocate an instance and re-dispatch.
    return ReadStream.apply(Object.create(ReadStream.prototype), arguments)
  }
  fs$ReadStream.apply(this, arguments)
  return this
}
|
||||
|
||||
// Replacement for ReadStream.prototype.open that routes through the
// graceful open() defined below, so EMFILE while opening the file is
// retried instead of immediately emitted as an error.
function ReadStream$open () {
  var that = this
  open(that.path, that.flags, that.mode, function (err, fd) {
    if (err) {
      // Honor the stream's autoClose option before reporting failure.
      if (that.autoClose)
        that.destroy()

      that.emit('error', err)
    } else {
      that.fd = fd
      that.emit('open', fd)
      that.read()  // kick off the first read now that the fd is ready
    }
  })
}
|
||||
|
||||
// Constructor wrapper around the captured native fs.WriteStream that
// also works when invoked without `new`.
function WriteStream (path, options) {
  if (!(this instanceof WriteStream)) {
    // Called as a plain function: allocate an instance and re-dispatch.
    return WriteStream.apply(Object.create(WriteStream.prototype), arguments)
  }
  fs$WriteStream.apply(this, arguments)
  return this
}
|
||||
|
||||
// Replacement for WriteStream.prototype.open using the graceful open().
// Note: unlike ReadStream$open, a failure here always destroys the
// stream (no autoClose check) before emitting the error.
function WriteStream$open () {
  var that = this
  open(that.path, that.flags, that.mode, function (err, fd) {
    if (err) {
      that.destroy()
      that.emit('error', err)
    } else {
      that.fd = fd
      that.emit('open', fd)
    }
  })
}
|
||||
|
||||
// Factory mirroring fs.createReadStream.  fs.ReadStream is read through
// the accessor property so user replacements of the class are honored.
function createReadStream (path, options) {
  var StreamCtor = fs.ReadStream
  return new StreamCtor(path, options)
}
|
||||
|
||||
// Factory mirroring fs.createWriteStream.  fs.WriteStream is read
// through the accessor property so user replacements are honored.
function createWriteStream (path, options) {
  var StreamCtor = fs.WriteStream
  return new StreamCtor(path, options)
}
|
||||
|
||||
var fs$open = fs.open
fs.open = open
// Graceful replacement for fs.open: on EMFILE/ENFILE the request is
// placed on the retry queue (with the original error and timing info)
// and retried with backoff instead of failing immediately.
function open (path, flags, mode, cb) {
  // Support the optional-mode call signature open(path, flags, cb).
  if (typeof mode === 'function')
    cb = mode, mode = null

  return go$open(path, flags, mode, cb)

  function go$open (path, flags, mode, cb, startTime) {
    return fs$open(path, flags, mode, function (err, fd) {
      if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
        // Queue entry shape: [fn, args, err, startTime, lastTime] —
        // consumed by retry() below.
        enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()])
      else {
        if (typeof cb === 'function')
          cb.apply(this, arguments)
      }
    })
  }
}
|
||||
|
||||
return fs
|
||||
}
|
||||
|
||||
// Appends a failed operation to the module-global retry queue and
// schedules a retry pass.  elem is [fn, args, err, startTime, lastTime].
function enqueue (elem) {
  debug('ENQUEUE', elem[0].name, elem[1])
  fs[gracefulQueue].push(elem)
  retry()
}
|
||||
|
||||
// keep track of the timeout between retry() calls
|
||||
var retryTimer
|
||||
|
||||
// reset the startTime and lastTime to now
|
||||
// this resets the start of the 60 second overall timeout as well as the
|
||||
// delay between attempts so that we'll retry these jobs sooner
|
||||
// reset the startTime and lastTime to now
// this resets the start of the 60 second overall timeout as well as the
// delay between attempts so that we'll retry these jobs sooner
function resetQueue () {
  var stamp = Date.now()
  fs[gracefulQueue].forEach(function (item) {
    // entries that are only a length of 2 are from an older version, don't
    // bother modifying those since they'll be retried anyway.
    if (item.length > 2) {
      item[3] = stamp // startTime
      item[4] = stamp // lastTime
    }
  })
  // call retry to make sure we're actively processing the queue
  retry()
}
|
||||
|
||||
// Processes one entry from the retry queue.  Depending on the entry's
// timing metadata it: retries immediately (old-format entries), gives
// up after 60s total and reports the original error, retries after a
// backoff delay, or pushes the entry back for a later pass.
function retry () {
  // clear the timer and remove it to help prevent unintended concurrency
  clearTimeout(retryTimer)
  retryTimer = undefined

  if (fs[gracefulQueue].length === 0)
    return

  var elem = fs[gracefulQueue].shift()
  var fn = elem[0]
  var args = elem[1]
  // these items may be unset if they were added by an older graceful-fs
  var err = elem[2]
  var startTime = elem[3]
  var lastTime = elem[4]

  // if we don't have a startTime we have no way of knowing if we've waited
  // long enough, so go ahead and retry this item now
  if (startTime === undefined) {
    debug('RETRY', fn.name, args)
    fn.apply(null, args)
  } else if (Date.now() - startTime >= 60000) {
    // it's been more than 60 seconds total, bail now
    debug('TIMEOUT', fn.name, args)
    // last element of args is the user callback; surface the original
    // EMFILE/ENFILE error to it
    var cb = args.pop()
    if (typeof cb === 'function')
      cb.call(null, err)
  } else {
    // the amount of time between the last attempt and right now
    var sinceAttempt = Date.now() - lastTime
    // the amount of time between when we first tried, and when we last tried
    // rounded up to at least 1
    var sinceStart = Math.max(lastTime - startTime, 1)
    // backoff. wait longer than the total time we've been retrying, but only
    // up to a maximum of 100ms
    var desiredDelay = Math.min(sinceStart * 1.2, 100)
    // it's been long enough since the last retry, do it again
    if (sinceAttempt >= desiredDelay) {
      debug('RETRY', fn.name, args)
      // pass startTime through so the 60s budget keeps counting
      fn.apply(null, args.concat([startTime]))
    } else {
      // if we can't do this job yet, push it to the end of the queue
      // and let the next iteration check again
      fs[gracefulQueue].push(elem)
    }
  }

  // schedule our next run if one isn't already scheduled
  if (retryTimer === undefined) {
    retryTimer = setTimeout(retry, 0)
  }
}
|
||||
118
node_modules/commoner/node_modules/graceful-fs/legacy-streams.js
generated
vendored
Normal file
118
node_modules/commoner/node_modules/graceful-fs/legacy-streams.js
generated
vendored
Normal file
@@ -0,0 +1,118 @@
|
||||
var Stream = require('stream').Stream
|
||||
|
||||
module.exports = legacy
|
||||
|
||||
// Recreates the pre-streams2 (Node <= 0.8) fs.ReadStream/fs.WriteStream
// constructors on top of the supplied (patched) `fs` object, for use on
// very old Node versions only (see the v0.8 check in graceful-fs.js).
function legacy (fs) {
  return {
    ReadStream: ReadStream,
    WriteStream: WriteStream
  }

  // Old-style readable file stream, mirroring Node 0.8's implementation.
  function ReadStream (path, options) {
    if (!(this instanceof ReadStream)) return new ReadStream(path, options);

    Stream.call(this);

    var self = this;

    this.path = path;
    this.fd = null;
    this.readable = true;
    this.paused = false;

    this.flags = 'r';
    this.mode = 438; /*=0666*/
    this.bufferSize = 64 * 1024;

    options = options || {};

    // Mixin options into this
    var keys = Object.keys(options);
    for (var index = 0, length = keys.length; index < length; index++) {
      var key = keys[index];
      this[key] = options[key];
    }

    if (this.encoding) this.setEncoding(this.encoding);

    // Validate the optional byte range [start, end].
    if (this.start !== undefined) {
      if ('number' !== typeof this.start) {
        throw TypeError('start must be a Number');
      }
      if (this.end === undefined) {
        this.end = Infinity;
      } else if ('number' !== typeof this.end) {
        throw TypeError('end must be a Number');
      }

      if (this.start > this.end) {
        throw new Error('start must be <= end');
      }

      this.pos = this.start;
    }

    // If the caller supplied an fd, skip open() and start reading.
    if (this.fd !== null) {
      process.nextTick(function() {
        self._read();
      });
      return;
    }

    fs.open(this.path, this.flags, this.mode, function (err, fd) {
      if (err) {
        self.emit('error', err);
        self.readable = false;
        return;
      }

      self.fd = fd;
      self.emit('open', fd);
      self._read();
    })
  }

  // Old-style writable file stream, mirroring Node 0.8's implementation.
  function WriteStream (path, options) {
    if (!(this instanceof WriteStream)) return new WriteStream(path, options);

    Stream.call(this);

    this.path = path;
    this.fd = null;
    this.writable = true;

    this.flags = 'w';
    this.encoding = 'binary';
    this.mode = 438; /*=0666*/
    this.bytesWritten = 0;

    options = options || {};

    // Mixin options into this
    var keys = Object.keys(options);
    for (var index = 0, length = keys.length; index < length; index++) {
      var key = keys[index];
      this[key] = options[key];
    }

    // Validate the optional start offset.
    if (this.start !== undefined) {
      if ('number' !== typeof this.start) {
        throw TypeError('start must be a Number');
      }
      if (this.start < 0) {
        throw new Error('start must be >= zero');
      }

      this.pos = this.start;
    }

    this.busy = false;
    this._queue = [];

    // If no fd was supplied, queue an open() and start flushing.
    if (this.fd === null) {
      this._open = fs.open;
      this._queue.push([this._open, this.path, this.flags, this.mode, undefined]);
      this.flush();
    }
  }
}
|
||||
53
node_modules/commoner/node_modules/graceful-fs/package.json
generated
vendored
Normal file
53
node_modules/commoner/node_modules/graceful-fs/package.json
generated
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
{
|
||||
"name": "graceful-fs",
|
||||
"description": "A drop-in replacement for fs, making various improvements.",
|
||||
"version": "4.2.11",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/isaacs/node-graceful-fs"
|
||||
},
|
||||
"main": "graceful-fs.js",
|
||||
"directories": {
|
||||
"test": "test"
|
||||
},
|
||||
"scripts": {
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"postpublish": "git push origin --follow-tags",
|
||||
"test": "nyc --silent node test.js | tap -c -",
|
||||
"posttest": "nyc report"
|
||||
},
|
||||
"keywords": [
|
||||
"fs",
|
||||
"module",
|
||||
"reading",
|
||||
"retry",
|
||||
"retries",
|
||||
"queue",
|
||||
"error",
|
||||
"errors",
|
||||
"handling",
|
||||
"EMFILE",
|
||||
"EAGAIN",
|
||||
"EINVAL",
|
||||
"EPERM",
|
||||
"EACCESS"
|
||||
],
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"import-fresh": "^2.0.0",
|
||||
"mkdirp": "^0.5.0",
|
||||
"rimraf": "^2.2.8",
|
||||
"tap": "^16.3.4"
|
||||
},
|
||||
"files": [
|
||||
"fs.js",
|
||||
"graceful-fs.js",
|
||||
"legacy-streams.js",
|
||||
"polyfills.js",
|
||||
"clone.js"
|
||||
],
|
||||
"tap": {
|
||||
"reporter": "classic"
|
||||
}
|
||||
}
|
||||
355
node_modules/commoner/node_modules/graceful-fs/polyfills.js
generated
vendored
Normal file
355
node_modules/commoner/node_modules/graceful-fs/polyfills.js
generated
vendored
Normal file
@@ -0,0 +1,355 @@
|
||||
var constants = require('constants')

var origCwd = process.cwd
var cwd = null

// Allow tests to force a platform (e.g. exercise win32 rename retries).
var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform

// Memoize process.cwd(): it performs a syscall on every invocation and
// is hot during path resolution.  The cache is invalidated by the
// patched chdir below.
process.cwd = function() {
  if (!cwd)
    cwd = origCwd.call(process)
  return cwd
}
// Prime the cache; ignore failure (cwd may be unreadable/deleted).
try {
  process.cwd()
} catch (er) {}

// This check is needed until node.js 12 is required
if (typeof process.chdir === 'function') {
  var chdir = process.chdir
  process.chdir = function (d) {
    cwd = null  // invalidate the cwd cache
    chdir.call(process, d)
  }
  if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir)
}

module.exports = patch
|
||||
|
||||
function patch (fs) {
|
||||
// (re-)implement some things that are known busted or missing.
|
||||
|
||||
// lchmod, broken prior to 0.6.2
|
||||
// back-port the fix here.
|
||||
if (constants.hasOwnProperty('O_SYMLINK') &&
|
||||
process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
|
||||
patchLchmod(fs)
|
||||
}
|
||||
|
||||
// lutimes implementation, or no-op
|
||||
if (!fs.lutimes) {
|
||||
patchLutimes(fs)
|
||||
}
|
||||
|
||||
// https://github.com/isaacs/node-graceful-fs/issues/4
|
||||
// Chown should not fail on einval or eperm if non-root.
|
||||
// It should not fail on enosys ever, as this just indicates
|
||||
// that a fs doesn't support the intended operation.
|
||||
|
||||
fs.chown = chownFix(fs.chown)
|
||||
fs.fchown = chownFix(fs.fchown)
|
||||
fs.lchown = chownFix(fs.lchown)
|
||||
|
||||
fs.chmod = chmodFix(fs.chmod)
|
||||
fs.fchmod = chmodFix(fs.fchmod)
|
||||
fs.lchmod = chmodFix(fs.lchmod)
|
||||
|
||||
fs.chownSync = chownFixSync(fs.chownSync)
|
||||
fs.fchownSync = chownFixSync(fs.fchownSync)
|
||||
fs.lchownSync = chownFixSync(fs.lchownSync)
|
||||
|
||||
fs.chmodSync = chmodFixSync(fs.chmodSync)
|
||||
fs.fchmodSync = chmodFixSync(fs.fchmodSync)
|
||||
fs.lchmodSync = chmodFixSync(fs.lchmodSync)
|
||||
|
||||
fs.stat = statFix(fs.stat)
|
||||
fs.fstat = statFix(fs.fstat)
|
||||
fs.lstat = statFix(fs.lstat)
|
||||
|
||||
fs.statSync = statFixSync(fs.statSync)
|
||||
fs.fstatSync = statFixSync(fs.fstatSync)
|
||||
fs.lstatSync = statFixSync(fs.lstatSync)
|
||||
|
||||
// if lchmod/lchown do not exist, then make them no-ops
|
||||
if (fs.chmod && !fs.lchmod) {
|
||||
fs.lchmod = function (path, mode, cb) {
|
||||
if (cb) process.nextTick(cb)
|
||||
}
|
||||
fs.lchmodSync = function () {}
|
||||
}
|
||||
if (fs.chown && !fs.lchown) {
|
||||
fs.lchown = function (path, uid, gid, cb) {
|
||||
if (cb) process.nextTick(cb)
|
||||
}
|
||||
fs.lchownSync = function () {}
|
||||
}
|
||||
|
||||
// on Windows, A/V software can lock the directory, causing this
|
||||
// to fail with an EACCES or EPERM if the directory contains newly
|
||||
// created files. Try again on failure, for up to 60 seconds.
|
||||
|
||||
// Set the timeout this long because some Windows Anti-Virus, such as Parity
|
||||
// bit9, may lock files for up to a minute, causing npm package install
|
||||
// failures. Also, take care to yield the scheduler. Windows scheduling gives
|
||||
// CPU to a busy looping process, which can cause the program causing the lock
|
||||
// contention to be starved of CPU by node, so the contention doesn't resolve.
|
||||
if (platform === "win32") {
|
||||
fs.rename = typeof fs.rename !== 'function' ? fs.rename
|
||||
: (function (fs$rename) {
|
||||
function rename (from, to, cb) {
|
||||
var start = Date.now()
|
||||
var backoff = 0;
|
||||
fs$rename(from, to, function CB (er) {
|
||||
if (er
|
||||
&& (er.code === "EACCES" || er.code === "EPERM" || er.code === "EBUSY")
|
||||
&& Date.now() - start < 60000) {
|
||||
setTimeout(function() {
|
||||
fs.stat(to, function (stater, st) {
|
||||
if (stater && stater.code === "ENOENT")
|
||||
fs$rename(from, to, CB);
|
||||
else
|
||||
cb(er)
|
||||
})
|
||||
}, backoff)
|
||||
if (backoff < 100)
|
||||
backoff += 10;
|
||||
return;
|
||||
}
|
||||
if (cb) cb(er)
|
||||
})
|
||||
}
|
||||
if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename)
|
||||
return rename
|
||||
})(fs.rename)
|
||||
}
|
||||
|
||||
// if read() returns EAGAIN, then just try it again.
|
||||
fs.read = typeof fs.read !== 'function' ? fs.read
|
||||
: (function (fs$read) {
|
||||
function read (fd, buffer, offset, length, position, callback_) {
|
||||
var callback
|
||||
if (callback_ && typeof callback_ === 'function') {
|
||||
var eagCounter = 0
|
||||
callback = function (er, _, __) {
|
||||
if (er && er.code === 'EAGAIN' && eagCounter < 10) {
|
||||
eagCounter ++
|
||||
return fs$read.call(fs, fd, buffer, offset, length, position, callback)
|
||||
}
|
||||
callback_.apply(this, arguments)
|
||||
}
|
||||
}
|
||||
return fs$read.call(fs, fd, buffer, offset, length, position, callback)
|
||||
}
|
||||
|
||||
// This ensures `util.promisify` works as it does for native `fs.read`.
|
||||
if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read)
|
||||
return read
|
||||
})(fs.read)
|
||||
|
||||
fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync
|
||||
: (function (fs$readSync) { return function (fd, buffer, offset, length, position) {
|
||||
var eagCounter = 0
|
||||
while (true) {
|
||||
try {
|
||||
return fs$readSync.call(fs, fd, buffer, offset, length, position)
|
||||
} catch (er) {
|
||||
if (er.code === 'EAGAIN' && eagCounter < 10) {
|
||||
eagCounter ++
|
||||
continue
|
||||
}
|
||||
throw er
|
||||
}
|
||||
}
|
||||
}})(fs.readSync)
|
||||
|
||||
// Back-ports working lchmod for Node 0.5 / 0.6.0-0.6.2 by opening the
// symlink itself (O_SYMLINK) and fchmod-ing the resulting fd.
function patchLchmod (fs) {
  fs.lchmod = function (path, mode, callback) {
    fs.open( path
           , constants.O_WRONLY | constants.O_SYMLINK
           , mode
           , function (err, fd) {
      if (err) {
        if (callback) callback(err)
        return
      }
      // prefer to return the chmod error, if one occurs,
      // but still try to close, and report closing errors if they occur.
      fs.fchmod(fd, mode, function (err) {
        fs.close(fd, function(err2) {
          if (callback) callback(err || err2)
        })
      })
    })
  }

  fs.lchmodSync = function (path, mode) {
    var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)

    // prefer to return the chmod error, if one occurs,
    // but still try to close, and report closing errors if they occur.
    var threw = true
    var ret
    try {
      ret = fs.fchmodSync(fd, mode)
      threw = false
    } finally {
      if (threw) {
        // already throwing: swallow close errors so they don't mask it
        try {
          fs.closeSync(fd)
        } catch (er) {}
      } else {
        fs.closeSync(fd)
      }
    }
    return ret
  }
}
|
||||
|
||||
// Provides fs.lutimes/fs.lutimesSync.  Where O_SYMLINK is available the
// link itself is opened and futimes'd; otherwise (when futimes exists at
// all) they are installed as callback-invoking no-ops.
function patchLutimes (fs) {
  if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) {
    fs.lutimes = function (path, at, mt, cb) {
      fs.open(path, constants.O_SYMLINK, function (er, fd) {
        if (er) {
          if (cb) cb(er)
          return
        }
        fs.futimes(fd, at, mt, function (er) {
          fs.close(fd, function (er2) {
            // report the futimes error in preference to the close error
            if (cb) cb(er || er2)
          })
        })
      })
    }

    fs.lutimesSync = function (path, at, mt) {
      var fd = fs.openSync(path, constants.O_SYMLINK)
      var ret
      var threw = true
      try {
        ret = fs.futimesSync(fd, at, mt)
        threw = false
      } finally {
        if (threw) {
          // already throwing: swallow close errors so they don't mask it
          try {
            fs.closeSync(fd)
          } catch (er) {}
        } else {
          fs.closeSync(fd)
        }
      }
      return ret
    }

  } else if (fs.futimes) {
    fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) }
    fs.lutimesSync = function () {}
  }
}
|
||||
|
||||
// Wraps an async chmod-family function so that ignorable ownership
// errors (see chownErOk) are reported to the caller as success.
// Returns falsy inputs unchanged so missing fs methods stay missing.
function chmodFix (orig) {
  if (!orig) return orig
  return function (target, mode, cb) {
    function onDone (er) {
      if (chownErOk(er)) er = null
      if (cb) cb.apply(this, arguments)
    }
    return orig.call(fs, target, mode, onDone)
  }
}
|
||||
|
||||
// Sync counterpart of chmodFix: swallows ignorable ownership errors
// (see chownErOk) and rethrows everything else.
function chmodFixSync (orig) {
  if (!orig) return orig
  return function (target, mode) {
    try {
      return orig.call(fs, target, mode)
    } catch (caught) {
      if (chownErOk(caught)) {
        return undefined
      }
      throw caught
    }
  }
}
|
||||
|
||||
|
||||
// Wraps an async chown-family function so that ignorable ownership
// errors (see chownErOk) are reported to the caller as success.
function chownFix (orig) {
  if (!orig) return orig
  return function (target, uid, gid, cb) {
    function onDone (er) {
      if (chownErOk(er)) er = null
      if (cb) cb.apply(this, arguments)
    }
    return orig.call(fs, target, uid, gid, onDone)
  }
}
|
||||
|
||||
// Sync counterpart of chownFix: swallows ignorable ownership errors
// (see chownErOk) and rethrows everything else.
function chownFixSync (orig) {
  if (!orig) return orig
  return function (target, uid, gid) {
    try {
      return orig.call(fs, target, uid, gid)
    } catch (caught) {
      if (chownErOk(caught)) {
        return undefined
      }
      throw caught
    }
  }
}
|
||||
|
||||
// Wraps an async stat-family function to normalize negative uid/gid
// values and to support the optional `options` argument.
function statFix (orig) {
  if (!orig) return orig
  // Older versions of Node erroneously returned signed integers for
  // uid + gid.
  return function (target, options, cb) {
    // Support the stat(target, cb) call signature.
    if (typeof options === 'function') {
      cb = options
      options = null
    }
    function callback (er, stats) {
      if (stats) {
        // re-interpret negative ids as unsigned 32-bit values
        if (stats.uid < 0) stats.uid += 0x100000000
        if (stats.gid < 0) stats.gid += 0x100000000
      }
      if (cb) cb.apply(this, arguments)
    }
    return options ? orig.call(fs, target, options, callback)
      : orig.call(fs, target, callback)
  }
}
|
||||
|
||||
// Sync counterpart of statFix: normalizes negative uid/gid and supports
// the optional `options` argument.
function statFixSync (orig) {
  if (!orig) return orig
  // Older versions of Node erroneously returned signed integers for
  // uid + gid.
  return function (target, options) {
    var stats = options ? orig.call(fs, target, options)
      : orig.call(fs, target)
    if (stats) {
      // re-interpret negative ids as unsigned 32-bit values
      if (stats.uid < 0) stats.uid += 0x100000000
      if (stats.gid < 0) stats.gid += 0x100000000
    }
    return stats;
  }
}
|
||||
|
||||
// Decides whether a chown/chmod error can be safely ignored.
//
// ENOSYS means the filesystem doesn't support the operation at all —
// always ignorable.  When not running as root, EINVAL/EPERM are also
// ignored: this mirrors the silent-failure behavior of cp, install,
// tar, and most other unix tools that manage permissions.  Running as
// root (or any other error code) is treated strictly.
function chownErOk (er) {
  if (!er) return true
  if (er.code === "ENOSYS") return true
  var runningAsRoot = process.getuid && process.getuid() === 0
  if (!runningAsRoot && (er.code === "EINVAL" || er.code === "EPERM")) {
    return true
  }
  return false
}
|
||||
}
|
||||
53
node_modules/commoner/package.json
generated
vendored
Normal file
53
node_modules/commoner/package.json
generated
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
{
|
||||
"author": {
|
||||
"name": "Ben Newman",
|
||||
"email": "ben@benjamn.com"
|
||||
},
|
||||
"name": "commoner",
|
||||
"description": "Flexible tool for translating any dialect of JavaScript into Node-readable CommonJS modules",
|
||||
"keywords": [
|
||||
"modules",
|
||||
"require",
|
||||
"commonjs",
|
||||
"exports",
|
||||
"commoner",
|
||||
"browserify",
|
||||
"stitch"
|
||||
],
|
||||
"version": "0.10.8",
|
||||
"license": "MIT",
|
||||
"homepage": "http://github.com/benjamn/commoner",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/benjamn/commoner.git"
|
||||
},
|
||||
"main": "main.js",
|
||||
"files": [
|
||||
"bin",
|
||||
"lib",
|
||||
"main.js"
|
||||
],
|
||||
"bin": {
|
||||
"commonize": "./bin/commonize"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "rm -rf test/output ; node ./node_modules/mocha/bin/mocha --reporter spec test/run.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"commander": "^2.5.0",
|
||||
"detective": "^4.3.1",
|
||||
"glob": "^5.0.15",
|
||||
"graceful-fs": "^4.1.2",
|
||||
"iconv-lite": "^0.4.5",
|
||||
"mkdirp": "^0.5.0",
|
||||
"private": "^0.1.6",
|
||||
"q": "^1.1.2",
|
||||
"recast": "^0.11.17"
|
||||
},
|
||||
"devDependencies": {
|
||||
"mocha": "^2.3.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user