lo borro
parent
846d7149ad
commit
b43f7db8bb
@ -1 +0,0 @@
|
||||
../mime/cli.js
|
@ -1 +0,0 @@
|
||||
../rimraf/bin.js
|
@ -1,14 +0,0 @@
|
||||
## Changelog
|
||||
|
||||
**2.1.0** — <small> July 19, 2019 </small> — [Diff](https://github.com/archiverjs/archiver-utils/compare/2.0.0...2.1.0)
|
||||
|
||||
- other: less lodash (#16)
|
||||
- other: update dependencies
|
||||
|
||||
**2.0.0** — <small> August 22, 2018 </small> — [Diff](https://github.com/archiverjs/archiver-utils/compare/1.3.0...2.0.0)
|
||||
|
||||
- breaking: follow node LTS, remove support for versions under 6.
|
||||
- other: remove unused lodash dependence (#13)
|
||||
- test: now targeting node v10
|
||||
|
||||
[Release Archive](https://github.com/archiverjs/archiver-utils/releases)
|
@ -1,22 +0,0 @@
|
||||
Copyright (c) 2015 Chris Talkington.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
@ -1,7 +0,0 @@
|
||||
# Archiver Utils [](https://travis-ci.org/archiverjs/archiver-utils) [](https://ci.appveyor.com/project/ctalkington/archiver-utils/branch/master)
|
||||
|
||||
|
||||
## Things of Interest
|
||||
- [Changelog](https://github.com/archiverjs/archiver-utils/releases)
|
||||
- [Contributing](https://github.com/archiverjs/archiver-utils/blob/master/CONTRIBUTING.md)
|
||||
- [MIT License](https://github.com/archiverjs/archiver-utils/blob/master/LICENSE)
|
@ -1,209 +0,0 @@
|
||||
/**
|
||||
* archiver-utils
|
||||
*
|
||||
* Copyright (c) 2012-2014 Chris Talkington, contributors.
|
||||
* Licensed under the MIT license.
|
||||
* https://github.com/archiverjs/node-archiver/blob/master/LICENSE-MIT
|
||||
*/
|
||||
var fs = require('graceful-fs');
|
||||
var path = require('path');
|
||||
|
||||
var flatten = require('lodash.flatten');
|
||||
var difference = require('lodash.difference');
|
||||
var union = require('lodash.union');
|
||||
var isPlainObject = require('lodash.isplainobject');
|
||||
|
||||
var glob = require('glob');
|
||||
|
||||
var file = module.exports = {};
|
||||
|
||||
var pathSeparatorRe = /[\/\\]/g;
|
||||
|
||||
// Process specified wildcard glob patterns or filenames against a
|
||||
// callback, excluding and uniquing files in the result set.
|
||||
var processPatterns = function(patterns, fn) {
|
||||
// Filepaths to return.
|
||||
var result = [];
|
||||
// Iterate over flattened patterns array.
|
||||
flatten(patterns).forEach(function(pattern) {
|
||||
// If the first character is ! it should be omitted
|
||||
var exclusion = pattern.indexOf('!') === 0;
|
||||
// If the pattern is an exclusion, remove the !
|
||||
if (exclusion) { pattern = pattern.slice(1); }
|
||||
// Find all matching files for this pattern.
|
||||
var matches = fn(pattern);
|
||||
if (exclusion) {
|
||||
// If an exclusion, remove matching files.
|
||||
result = difference(result, matches);
|
||||
} else {
|
||||
// Otherwise add matching files.
|
||||
result = union(result, matches);
|
||||
}
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
// True if the file path exists.
|
||||
file.exists = function() {
|
||||
var filepath = path.join.apply(path, arguments);
|
||||
return fs.existsSync(filepath);
|
||||
};
|
||||
|
||||
// Return an array of all file paths that match the given wildcard patterns.
|
||||
file.expand = function(...args) {
|
||||
// If the first argument is an options object, save those options to pass
|
||||
// into the File.prototype.glob.sync method.
|
||||
var options = isPlainObject(args[0]) ? args.shift() : {};
|
||||
// Use the first argument if it's an Array, otherwise convert the arguments
|
||||
// object to an array and use that.
|
||||
var patterns = Array.isArray(args[0]) ? args[0] : args;
|
||||
// Return empty set if there are no patterns or filepaths.
|
||||
if (patterns.length === 0) { return []; }
|
||||
// Return all matching filepaths.
|
||||
var matches = processPatterns(patterns, function(pattern) {
|
||||
// Find all matching files for this pattern.
|
||||
return glob.sync(pattern, options);
|
||||
});
|
||||
// Filter result set?
|
||||
if (options.filter) {
|
||||
matches = matches.filter(function(filepath) {
|
||||
filepath = path.join(options.cwd || '', filepath);
|
||||
try {
|
||||
if (typeof options.filter === 'function') {
|
||||
return options.filter(filepath);
|
||||
} else {
|
||||
// If the file is of the right type and exists, this should work.
|
||||
return fs.statSync(filepath)[options.filter]();
|
||||
}
|
||||
} catch(e) {
|
||||
// Otherwise, it's probably not the right type.
|
||||
return false;
|
||||
}
|
||||
});
|
||||
}
|
||||
return matches;
|
||||
};
|
||||
|
||||
// Build a multi task "files" object dynamically.
|
||||
file.expandMapping = function(patterns, destBase, options) {
|
||||
options = Object.assign({
|
||||
rename: function(destBase, destPath) {
|
||||
return path.join(destBase || '', destPath);
|
||||
}
|
||||
}, options);
|
||||
var files = [];
|
||||
var fileByDest = {};
|
||||
// Find all files matching pattern, using passed-in options.
|
||||
file.expand(options, patterns).forEach(function(src) {
|
||||
var destPath = src;
|
||||
// Flatten?
|
||||
if (options.flatten) {
|
||||
destPath = path.basename(destPath);
|
||||
}
|
||||
// Change the extension?
|
||||
if (options.ext) {
|
||||
destPath = destPath.replace(/(\.[^\/]*)?$/, options.ext);
|
||||
}
|
||||
// Generate destination filename.
|
||||
var dest = options.rename(destBase, destPath, options);
|
||||
// Prepend cwd to src path if necessary.
|
||||
if (options.cwd) { src = path.join(options.cwd, src); }
|
||||
// Normalize filepaths to be unix-style.
|
||||
dest = dest.replace(pathSeparatorRe, '/');
|
||||
src = src.replace(pathSeparatorRe, '/');
|
||||
// Map correct src path to dest path.
|
||||
if (fileByDest[dest]) {
|
||||
// If dest already exists, push this src onto that dest's src array.
|
||||
fileByDest[dest].src.push(src);
|
||||
} else {
|
||||
// Otherwise create a new src-dest file mapping object.
|
||||
files.push({
|
||||
src: [src],
|
||||
dest: dest,
|
||||
});
|
||||
// And store a reference for later use.
|
||||
fileByDest[dest] = files[files.length - 1];
|
||||
}
|
||||
});
|
||||
return files;
|
||||
};
|
||||
|
||||
// reusing bits of grunt's multi-task source normalization
|
||||
file.normalizeFilesArray = function(data) {
|
||||
var files = [];
|
||||
|
||||
data.forEach(function(obj) {
|
||||
var prop;
|
||||
if ('src' in obj || 'dest' in obj) {
|
||||
files.push(obj);
|
||||
}
|
||||
});
|
||||
|
||||
if (files.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
files = _(files).chain().forEach(function(obj) {
|
||||
if (!('src' in obj) || !obj.src) { return; }
|
||||
// Normalize .src properties to flattened array.
|
||||
if (Array.isArray(obj.src)) {
|
||||
obj.src = flatten(obj.src);
|
||||
} else {
|
||||
obj.src = [obj.src];
|
||||
}
|
||||
}).map(function(obj) {
|
||||
// Build options object, removing unwanted properties.
|
||||
var expandOptions = Object.assign({}, obj);
|
||||
delete expandOptions.src;
|
||||
delete expandOptions.dest;
|
||||
|
||||
// Expand file mappings.
|
||||
if (obj.expand) {
|
||||
return file.expandMapping(obj.src, obj.dest, expandOptions).map(function(mapObj) {
|
||||
// Copy obj properties to result.
|
||||
var result = Object.assign({}, obj);
|
||||
// Make a clone of the orig obj available.
|
||||
result.orig = Object.assign({}, obj);
|
||||
// Set .src and .dest, processing both as templates.
|
||||
result.src = mapObj.src;
|
||||
result.dest = mapObj.dest;
|
||||
// Remove unwanted properties.
|
||||
['expand', 'cwd', 'flatten', 'rename', 'ext'].forEach(function(prop) {
|
||||
delete result[prop];
|
||||
});
|
||||
return result;
|
||||
});
|
||||
}
|
||||
|
||||
// Copy obj properties to result, adding an .orig property.
|
||||
var result = Object.assign({}, obj);
|
||||
// Make a clone of the orig obj available.
|
||||
result.orig = Object.assign({}, obj);
|
||||
|
||||
if ('src' in result) {
|
||||
// Expose an expand-on-demand getter method as .src.
|
||||
Object.defineProperty(result, 'src', {
|
||||
enumerable: true,
|
||||
get: function fn() {
|
||||
var src;
|
||||
if (!('result' in fn)) {
|
||||
src = obj.src;
|
||||
// If src is an array, flatten it. Otherwise, make it into an array.
|
||||
src = Array.isArray(src) ? flatten(src) : [src];
|
||||
// Expand src files, memoizing result.
|
||||
fn.result = file.expand(expandOptions, src);
|
||||
}
|
||||
return fn.result;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if ('dest' in result) {
|
||||
result.dest = obj.dest;
|
||||
}
|
||||
|
||||
return result;
|
||||
}).flatten().value();
|
||||
|
||||
return files;
|
||||
};
|
@ -1,156 +0,0 @@
|
||||
/**
|
||||
* archiver-utils
|
||||
*
|
||||
* Copyright (c) 2015 Chris Talkington.
|
||||
* Licensed under the MIT license.
|
||||
* https://github.com/archiverjs/archiver-utils/blob/master/LICENSE
|
||||
*/
|
||||
var fs = require('graceful-fs');
|
||||
var path = require('path');
|
||||
var nutil = require('util');
|
||||
var lazystream = require('lazystream');
|
||||
var normalizePath = require('normalize-path');
|
||||
var defaults = require('lodash.defaults');
|
||||
|
||||
var Stream = require('stream').Stream;
|
||||
var PassThrough = require('readable-stream').PassThrough;
|
||||
|
||||
var utils = module.exports = {};
|
||||
utils.file = require('./file.js');
|
||||
|
||||
function assertPath(path) {
|
||||
if (typeof path !== 'string') {
|
||||
throw new TypeError('Path must be a string. Received ' + nutils.inspect(path));
|
||||
}
|
||||
}
|
||||
|
||||
utils.collectStream = function(source, callback) {
|
||||
var collection = [];
|
||||
var size = 0;
|
||||
|
||||
source.on('error', callback);
|
||||
|
||||
source.on('data', function(chunk) {
|
||||
collection.push(chunk);
|
||||
size += chunk.length;
|
||||
});
|
||||
|
||||
source.on('end', function() {
|
||||
var buf = new Buffer(size);
|
||||
var offset = 0;
|
||||
|
||||
collection.forEach(function(data) {
|
||||
data.copy(buf, offset);
|
||||
offset += data.length;
|
||||
});
|
||||
|
||||
callback(null, buf);
|
||||
});
|
||||
};
|
||||
|
||||
utils.dateify = function(dateish) {
|
||||
dateish = dateish || new Date();
|
||||
|
||||
if (dateish instanceof Date) {
|
||||
dateish = dateish;
|
||||
} else if (typeof dateish === 'string') {
|
||||
dateish = new Date(dateish);
|
||||
} else {
|
||||
dateish = new Date();
|
||||
}
|
||||
|
||||
return dateish;
|
||||
};
|
||||
|
||||
// this is slightly different from lodash version
|
||||
utils.defaults = function(object, source, guard) {
|
||||
var args = arguments;
|
||||
args[0] = args[0] || {};
|
||||
|
||||
return defaults(...args);
|
||||
};
|
||||
|
||||
utils.isStream = function(source) {
|
||||
return source instanceof Stream;
|
||||
};
|
||||
|
||||
utils.lazyReadStream = function(filepath) {
|
||||
return new lazystream.Readable(function() {
|
||||
return fs.createReadStream(filepath);
|
||||
});
|
||||
};
|
||||
|
||||
utils.normalizeInputSource = function(source) {
|
||||
if (source === null) {
|
||||
return new Buffer(0);
|
||||
} else if (typeof source === 'string') {
|
||||
return new Buffer(source);
|
||||
} else if (utils.isStream(source) && !source._readableState) {
|
||||
var normalized = new PassThrough();
|
||||
source.pipe(normalized);
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
return source;
|
||||
};
|
||||
|
||||
utils.sanitizePath = function(filepath) {
|
||||
return normalizePath(filepath, false).replace(/^\w+:/, '').replace(/^(\.\.\/|\/)+/, '');
|
||||
};
|
||||
|
||||
utils.trailingSlashIt = function(str) {
|
||||
return str.slice(-1) !== '/' ? str + '/' : str;
|
||||
};
|
||||
|
||||
utils.unixifyPath = function(filepath) {
|
||||
return normalizePath(filepath, false).replace(/^\w+:/, '');
|
||||
};
|
||||
|
||||
utils.walkdir = function(dirpath, base, callback) {
|
||||
var results = [];
|
||||
|
||||
if (typeof base === 'function') {
|
||||
callback = base;
|
||||
base = dirpath;
|
||||
}
|
||||
|
||||
fs.readdir(dirpath, function(err, list) {
|
||||
var i = 0;
|
||||
var file;
|
||||
var filepath;
|
||||
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
(function next() {
|
||||
file = list[i++];
|
||||
|
||||
if (!file) {
|
||||
return callback(null, results);
|
||||
}
|
||||
|
||||
filepath = path.join(dirpath, file);
|
||||
|
||||
fs.stat(filepath, function(err, stats) {
|
||||
results.push({
|
||||
path: filepath,
|
||||
relative: path.relative(base, filepath).replace(/\\/g, '/'),
|
||||
stats: stats
|
||||
});
|
||||
|
||||
if (stats && stats.isDirectory()) {
|
||||
utils.walkdir(filepath, base, function(err, res) {
|
||||
res.forEach(function(dirEntry) {
|
||||
results.push(dirEntry);
|
||||
});
|
||||
next();
|
||||
});
|
||||
} else {
|
||||
next();
|
||||
}
|
||||
});
|
||||
})();
|
||||
});
|
||||
};
|
@ -1,34 +0,0 @@
|
||||
sudo: false
|
||||
language: node_js
|
||||
before_install:
|
||||
- (test $NPM_LEGACY && npm install -g npm@2 && npm install -g npm@3) || true
|
||||
notifications:
|
||||
email: false
|
||||
matrix:
|
||||
fast_finish: true
|
||||
include:
|
||||
- node_js: '0.8'
|
||||
env: NPM_LEGACY=true
|
||||
- node_js: '0.10'
|
||||
env: NPM_LEGACY=true
|
||||
- node_js: '0.11'
|
||||
env: NPM_LEGACY=true
|
||||
- node_js: '0.12'
|
||||
env: NPM_LEGACY=true
|
||||
- node_js: 1
|
||||
env: NPM_LEGACY=true
|
||||
- node_js: 2
|
||||
env: NPM_LEGACY=true
|
||||
- node_js: 3
|
||||
env: NPM_LEGACY=true
|
||||
- node_js: 4
|
||||
- node_js: 5
|
||||
- node_js: 6
|
||||
- node_js: 7
|
||||
- node_js: 8
|
||||
- node_js: 9
|
||||
script: "npm run test"
|
||||
env:
|
||||
global:
|
||||
- secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc=
|
||||
- secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI=
|
@ -1,38 +0,0 @@
|
||||
# Developer's Certificate of Origin 1.1
|
||||
|
||||
By making a contribution to this project, I certify that:
|
||||
|
||||
* (a) The contribution was created in whole or in part by me and I
|
||||
have the right to submit it under the open source license
|
||||
indicated in the file; or
|
||||
|
||||
* (b) The contribution is based upon previous work that, to the best
|
||||
of my knowledge, is covered under an appropriate open source
|
||||
license and I have the right under that license to submit that
|
||||
work with modifications, whether created in whole or in part
|
||||
by me, under the same open source license (unless I am
|
||||
permitted to submit under a different license), as indicated
|
||||
in the file; or
|
||||
|
||||
* (c) The contribution was provided directly to me by some other
|
||||
person who certified (a), (b) or (c) and I have not modified
|
||||
it.
|
||||
|
||||
* (d) I understand and agree that this project and the contribution
|
||||
are public and that a record of the contribution (including all
|
||||
personal information I submit with it, including my sign-off) is
|
||||
maintained indefinitely and may be redistributed consistent with
|
||||
this project or the open source license(s) involved.
|
||||
|
||||
## Moderation Policy
|
||||
|
||||
The [Node.js Moderation Policy] applies to this WG.
|
||||
|
||||
## Code of Conduct
|
||||
|
||||
The [Node.js Code of Conduct][] applies to this WG.
|
||||
|
||||
[Node.js Code of Conduct]:
|
||||
https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md
|
||||
[Node.js Moderation Policy]:
|
||||
https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md
|
@ -1,136 +0,0 @@
|
||||
### Streams Working Group
|
||||
|
||||
The Node.js Streams is jointly governed by a Working Group
|
||||
(WG)
|
||||
that is responsible for high-level guidance of the project.
|
||||
|
||||
The WG has final authority over this project including:
|
||||
|
||||
* Technical direction
|
||||
* Project governance and process (including this policy)
|
||||
* Contribution policy
|
||||
* GitHub repository hosting
|
||||
* Conduct guidelines
|
||||
* Maintaining the list of additional Collaborators
|
||||
|
||||
For the current list of WG members, see the project
|
||||
[README.md](./README.md#current-project-team-members).
|
||||
|
||||
### Collaborators
|
||||
|
||||
The readable-stream GitHub repository is
|
||||
maintained by the WG and additional Collaborators who are added by the
|
||||
WG on an ongoing basis.
|
||||
|
||||
Individuals making significant and valuable contributions are made
|
||||
Collaborators and given commit-access to the project. These
|
||||
individuals are identified by the WG and their addition as
|
||||
Collaborators is discussed during the WG meeting.
|
||||
|
||||
_Note:_ If you make a significant contribution and are not considered
|
||||
for commit-access log an issue or contact a WG member directly and it
|
||||
will be brought up in the next WG meeting.
|
||||
|
||||
Modifications of the contents of the readable-stream repository are
|
||||
made on
|
||||
a collaborative basis. Anybody with a GitHub account may propose a
|
||||
modification via pull request and it will be considered by the project
|
||||
Collaborators. All pull requests must be reviewed and accepted by a
|
||||
Collaborator with sufficient expertise who is able to take full
|
||||
responsibility for the change. In the case of pull requests proposed
|
||||
by an existing Collaborator, an additional Collaborator is required
|
||||
for sign-off. Consensus should be sought if additional Collaborators
|
||||
participate and there is disagreement around a particular
|
||||
modification. See _Consensus Seeking Process_ below for further detail
|
||||
on the consensus model used for governance.
|
||||
|
||||
Collaborators may opt to elevate significant or controversial
|
||||
modifications, or modifications that have not found consensus to the
|
||||
WG for discussion by assigning the ***WG-agenda*** tag to a pull
|
||||
request or issue. The WG should serve as the final arbiter where
|
||||
required.
|
||||
|
||||
For the current list of Collaborators, see the project
|
||||
[README.md](./README.md#members).
|
||||
|
||||
### WG Membership
|
||||
|
||||
WG seats are not time-limited. There is no fixed size of the WG.
|
||||
However, the expected target is between 6 and 12, to ensure adequate
|
||||
coverage of important areas of expertise, balanced with the ability to
|
||||
make decisions efficiently.
|
||||
|
||||
There is no specific set of requirements or qualifications for WG
|
||||
membership beyond these rules.
|
||||
|
||||
The WG may add additional members to the WG by unanimous consensus.
|
||||
|
||||
A WG member may be removed from the WG by voluntary resignation, or by
|
||||
unanimous consensus of all other WG members.
|
||||
|
||||
Changes to WG membership should be posted in the agenda, and may be
|
||||
suggested as any other agenda item (see "WG Meetings" below).
|
||||
|
||||
If an addition or removal is proposed during a meeting, and the full
|
||||
WG is not in attendance to participate, then the addition or removal
|
||||
is added to the agenda for the subsequent meeting. This is to ensure
|
||||
that all members are given the opportunity to participate in all
|
||||
membership decisions. If a WG member is unable to attend a meeting
|
||||
where a planned membership decision is being made, then their consent
|
||||
is assumed.
|
||||
|
||||
No more than 1/3 of the WG members may be affiliated with the same
|
||||
employer. If removal or resignation of a WG member, or a change of
|
||||
employment by a WG member, creates a situation where more than 1/3 of
|
||||
the WG membership shares an employer, then the situation must be
|
||||
immediately remedied by the resignation or removal of one or more WG
|
||||
members affiliated with the over-represented employer(s).
|
||||
|
||||
### WG Meetings
|
||||
|
||||
The WG meets occasionally on a Google Hangout On Air. A designated moderator
|
||||
approved by the WG runs the meeting. Each meeting should be
|
||||
published to YouTube.
|
||||
|
||||
Items are added to the WG agenda that are considered contentious or
|
||||
are modifications of governance, contribution policy, WG membership,
|
||||
or release process.
|
||||
|
||||
The intention of the agenda is not to approve or review all patches;
|
||||
that should happen continuously on GitHub and be handled by the larger
|
||||
group of Collaborators.
|
||||
|
||||
Any community member or contributor can ask that something be added to
|
||||
the next meeting's agenda by logging a GitHub Issue. Any Collaborator,
|
||||
WG member or the moderator can add the item to the agenda by adding
|
||||
the ***WG-agenda*** tag to the issue.
|
||||
|
||||
Prior to each WG meeting the moderator will share the Agenda with
|
||||
members of the WG. WG members can add any items they like to the
|
||||
agenda at the beginning of each meeting. The moderator and the WG
|
||||
cannot veto or remove items.
|
||||
|
||||
The WG may invite persons or representatives from certain projects to
|
||||
participate in a non-voting capacity.
|
||||
|
||||
The moderator is responsible for summarizing the discussion of each
|
||||
agenda item and sends it as a pull request after the meeting.
|
||||
|
||||
### Consensus Seeking Process
|
||||
|
||||
The WG follows a
|
||||
[Consensus
|
||||
Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making)
|
||||
decision-making model.
|
||||
|
||||
When an agenda item has appeared to reach a consensus the moderator
|
||||
will ask "Does anyone object?" as a final call for dissent from the
|
||||
consensus.
|
||||
|
||||
If an agenda item cannot reach a consensus a WG member can call for
|
||||
either a closing vote or a vote to table the issue to the next
|
||||
meeting. The call for a vote must be seconded by a majority of the WG
|
||||
or else the discussion will continue. Simple majority wins.
|
||||
|
||||
Note that changes to WG membership require a majority consensus. See
|
||||
"WG Membership" above.
|
@ -1,47 +0,0 @@
|
||||
Node.js is licensed for use as follows:
|
||||
|
||||
"""
|
||||
Copyright Node.js contributors. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
"""
|
||||
|
||||
This license applies to parts of Node.js originating from the
|
||||
https://github.com/joyent/node repository:
|
||||
|
||||
"""
|
||||
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
"""
|
@ -1,58 +0,0 @@
|
||||
# readable-stream
|
||||
|
||||
***Node-core v8.17.0 streams for userland*** [](https://travis-ci.org/nodejs/readable-stream)
|
||||
|
||||
|
||||
[](https://nodei.co/npm/readable-stream/)
|
||||
[](https://nodei.co/npm/readable-stream/)
|
||||
|
||||
|
||||
[](https://saucelabs.com/u/readable-stream)
|
||||
|
||||
```bash
|
||||
npm install --save readable-stream
|
||||
```
|
||||
|
||||
***Node-core streams for userland***
|
||||
|
||||
This package is a mirror of the Streams2 and Streams3 implementations in
|
||||
Node-core.
|
||||
|
||||
Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.17.0/docs/api/stream.html).
|
||||
|
||||
If you want to guarantee a stable streams base, regardless of what version of
|
||||
Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
|
||||
|
||||
As of version 2.0.0 **readable-stream** uses semantic versioning.
|
||||
|
||||
# Streams Working Group
|
||||
|
||||
`readable-stream` is maintained by the Streams Working Group, which
|
||||
oversees the development and maintenance of the Streams API within
|
||||
Node.js. The responsibilities of the Streams Working Group include:
|
||||
|
||||
* Addressing stream issues on the Node.js issue tracker.
|
||||
* Authoring and editing stream documentation within the Node.js project.
|
||||
* Reviewing changes to stream subclasses within the Node.js project.
|
||||
* Redirecting changes to streams from the Node.js project to this
|
||||
project.
|
||||
* Assisting in the implementation of stream providers within Node.js.
|
||||
* Recommending versions of `readable-stream` to be included in Node.js.
|
||||
* Messaging about the future of streams to give the community advance
|
||||
notice of changes.
|
||||
|
||||
<a name="members"></a>
|
||||
## Team Members
|
||||
|
||||
* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com>
|
||||
- Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B
|
||||
* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com>
|
||||
- Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242
|
||||
* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org>
|
||||
- Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D
|
||||
* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com>
|
||||
* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com>
|
||||
* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me>
|
||||
* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com>
|
||||
- Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E
|
||||
* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com>
|
60
node_modules/archiver-utils/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md
generated
vendored
60
node_modules/archiver-utils/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md
generated
vendored
@ -1 +0,0 @@
|
||||
module.exports = require('./lib/_stream_duplex.js');
|
@ -1 +0,0 @@
|
||||
module.exports = require('./readable').Duplex
|
131
node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_duplex.js
generated
vendored
131
node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_duplex.js
generated
vendored
@ -1,131 +0,0 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// a duplex stream is just a stream that is both readable and writable.
|
||||
// Since JS doesn't have multiple prototypal inheritance, this class
|
||||
// prototypally inherits from Readable, and then parasitically from
|
||||
// Writable.
|
||||
|
||||
'use strict';
|
||||
|
||||
/*<replacement>*/
|
||||
|
||||
var pna = require('process-nextick-args');
|
||||
/*</replacement>*/
|
||||
|
||||
/*<replacement>*/
|
||||
var objectKeys = Object.keys || function (obj) {
|
||||
var keys = [];
|
||||
for (var key in obj) {
|
||||
keys.push(key);
|
||||
}return keys;
|
||||
};
|
||||
/*</replacement>*/
|
||||
|
||||
module.exports = Duplex;
|
||||
|
||||
/*<replacement>*/
|
||||
var util = Object.create(require('core-util-is'));
|
||||
util.inherits = require('inherits');
|
||||
/*</replacement>*/
|
||||
|
||||
var Readable = require('./_stream_readable');
|
||||
var Writable = require('./_stream_writable');
|
||||
|
||||
util.inherits(Duplex, Readable);
|
||||
|
||||
{
|
||||
// avoid scope creep, the keys array can then be collected
|
||||
var keys = objectKeys(Writable.prototype);
|
||||
for (var v = 0; v < keys.length; v++) {
|
||||
var method = keys[v];
|
||||
if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
|
||||
}
|
||||
}
|
||||
|
||||
function Duplex(options) {
|
||||
if (!(this instanceof Duplex)) return new Duplex(options);
|
||||
|
||||
Readable.call(this, options);
|
||||
Writable.call(this, options);
|
||||
|
||||
if (options && options.readable === false) this.readable = false;
|
||||
|
||||
if (options && options.writable === false) this.writable = false;
|
||||
|
||||
this.allowHalfOpen = true;
|
||||
if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;
|
||||
|
||||
this.once('end', onend);
|
||||
}
|
||||
|
||||
Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
|
||||
// making it explicit this property is not enumerable
|
||||
// because otherwise some prototype manipulation in
|
||||
// userland will fail
|
||||
enumerable: false,
|
||||
get: function () {
|
||||
return this._writableState.highWaterMark;
|
||||
}
|
||||
});
|
||||
|
||||
// the no-half-open enforcer
|
||||
function onend() {
|
||||
// if we allow half-open state, or if the writable side ended,
|
||||
// then we're ok.
|
||||
if (this.allowHalfOpen || this._writableState.ended) return;
|
||||
|
||||
// no more data can be written.
|
||||
// But allow more writes to happen in this tick.
|
||||
pna.nextTick(onEndNT, this);
|
||||
}
|
||||
|
||||
function onEndNT(self) {
|
||||
self.end();
|
||||
}
|
||||
|
||||
Object.defineProperty(Duplex.prototype, 'destroyed', {
|
||||
get: function () {
|
||||
if (this._readableState === undefined || this._writableState === undefined) {
|
||||
return false;
|
||||
}
|
||||
return this._readableState.destroyed && this._writableState.destroyed;
|
||||
},
|
||||
set: function (value) {
|
||||
// we ignore the value if the stream
|
||||
// has not been initialized yet
|
||||
if (this._readableState === undefined || this._writableState === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
// backward compatibility, the user is explicitly
|
||||
// managing destroyed
|
||||
this._readableState.destroyed = value;
|
||||
this._writableState.destroyed = value;
|
||||
}
|
||||
});
|
||||
|
||||
Duplex.prototype._destroy = function (err, cb) {
|
||||
this.push(null);
|
||||
this.end();
|
||||
|
||||
pna.nextTick(cb, err);
|
||||
};
|
47
node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_passthrough.js
generated
vendored
47
node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_passthrough.js
generated
vendored
@ -1,47 +0,0 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// a passthrough stream.
|
||||
// basically just the most minimal sort of Transform stream.
|
||||
// Every written chunk gets output as-is.
|
||||
|
||||
'use strict';
|
||||
|
||||
module.exports = PassThrough;
|
||||
|
||||
var Transform = require('./_stream_transform');
|
||||
|
||||
/*<replacement>*/
|
||||
var util = Object.create(require('core-util-is'));
|
||||
util.inherits = require('inherits');
|
||||
/*</replacement>*/
|
||||
|
||||
util.inherits(PassThrough, Transform);
|
||||
|
||||
function PassThrough(options) {
|
||||
if (!(this instanceof PassThrough)) return new PassThrough(options);
|
||||
|
||||
Transform.call(this, options);
|
||||
}
|
||||
|
||||
PassThrough.prototype._transform = function (chunk, encoding, cb) {
|
||||
cb(null, chunk);
|
||||
};
|
1019
node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_readable.js
generated
vendored
1019
node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_readable.js
generated
vendored
File diff suppressed because it is too large
Load Diff
214
node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_transform.js
generated
vendored
214
node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_transform.js
generated
vendored
@ -1,214 +0,0 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// a transform stream is a readable/writable stream where you do
|
||||
// something with the data. Sometimes it's called a "filter",
|
||||
// but that's not a great name for it, since that implies a thing where
|
||||
// some bits pass through, and others are simply ignored. (That would
|
||||
// be a valid example of a transform, of course.)
|
||||
//
|
||||
// While the output is causally related to the input, it's not a
|
||||
// necessarily symmetric or synchronous transformation. For example,
|
||||
// a zlib stream might take multiple plain-text writes(), and then
|
||||
// emit a single compressed chunk some time in the future.
|
||||
//
|
||||
// Here's how this works:
|
||||
//
|
||||
// The Transform stream has all the aspects of the readable and writable
|
||||
// stream classes. When you write(chunk), that calls _write(chunk,cb)
|
||||
// internally, and returns false if there's a lot of pending writes
|
||||
// buffered up. When you call read(), that calls _read(n) until
|
||||
// there's enough pending readable data buffered up.
|
||||
//
|
||||
// In a transform stream, the written data is placed in a buffer. When
|
||||
// _read(n) is called, it transforms the queued up data, calling the
|
||||
// buffered _write cb's as it consumes chunks. If consuming a single
|
||||
// written chunk would result in multiple output chunks, then the first
|
||||
// outputted bit calls the readcb, and subsequent chunks just go into
|
||||
// the read buffer, and will cause it to emit 'readable' if necessary.
|
||||
//
|
||||
// This way, back-pressure is actually determined by the reading side,
|
||||
// since _read has to be called to start processing a new chunk. However,
|
||||
// a pathological inflate type of transform can cause excessive buffering
|
||||
// here. For example, imagine a stream where every byte of input is
|
||||
// interpreted as an integer from 0-255, and then results in that many
|
||||
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
|
||||
// 1kb of data being output. In this case, you could write a very small
|
||||
// amount of input, and end up with a very large amount of output. In
|
||||
// such a pathological inflating mechanism, there'd be no way to tell
|
||||
// the system to stop doing the transform. A single 4MB write could
|
||||
// cause the system to run out of memory.
|
||||
//
|
||||
// However, even in such a pathological case, only a single written chunk
|
||||
// would be consumed, and then the rest would wait (un-transformed) until
|
||||
// the results of the previous transformed chunk were consumed.
|
||||
|
||||
'use strict';
|
||||
|
||||
module.exports = Transform;
|
||||
|
||||
var Duplex = require('./_stream_duplex');
|
||||
|
||||
/*<replacement>*/
|
||||
var util = Object.create(require('core-util-is'));
|
||||
util.inherits = require('inherits');
|
||||
/*</replacement>*/
|
||||
|
||||
util.inherits(Transform, Duplex);
|
||||
|
||||
function afterTransform(er, data) {
|
||||
var ts = this._transformState;
|
||||
ts.transforming = false;
|
||||
|
||||
var cb = ts.writecb;
|
||||
|
||||
if (!cb) {
|
||||
return this.emit('error', new Error('write callback called multiple times'));
|
||||
}
|
||||
|
||||
ts.writechunk = null;
|
||||
ts.writecb = null;
|
||||
|
||||
if (data != null) // single equals check for both `null` and `undefined`
|
||||
this.push(data);
|
||||
|
||||
cb(er);
|
||||
|
||||
var rs = this._readableState;
|
||||
rs.reading = false;
|
||||
if (rs.needReadable || rs.length < rs.highWaterMark) {
|
||||
this._read(rs.highWaterMark);
|
||||
}
|
||||
}
|
||||
|
||||
function Transform(options) {
|
||||
if (!(this instanceof Transform)) return new Transform(options);
|
||||
|
||||
Duplex.call(this, options);
|
||||
|
||||
this._transformState = {
|
||||
afterTransform: afterTransform.bind(this),
|
||||
needTransform: false,
|
||||
transforming: false,
|
||||
writecb: null,
|
||||
writechunk: null,
|
||||
writeencoding: null
|
||||
};
|
||||
|
||||
// start out asking for a readable event once data is transformed.
|
||||
this._readableState.needReadable = true;
|
||||
|
||||
// we have implemented the _read method, and done the other things
|
||||
// that Readable wants before the first _read call, so unset the
|
||||
// sync guard flag.
|
||||
this._readableState.sync = false;
|
||||
|
||||
if (options) {
|
||||
if (typeof options.transform === 'function') this._transform = options.transform;
|
||||
|
||||
if (typeof options.flush === 'function') this._flush = options.flush;
|
||||
}
|
||||
|
||||
// When the writable side finishes, then flush out anything remaining.
|
||||
this.on('prefinish', prefinish);
|
||||
}
|
||||
|
||||
function prefinish() {
|
||||
var _this = this;
|
||||
|
||||
if (typeof this._flush === 'function') {
|
||||
this._flush(function (er, data) {
|
||||
done(_this, er, data);
|
||||
});
|
||||
} else {
|
||||
done(this, null, null);
|
||||
}
|
||||
}
|
||||
|
||||
Transform.prototype.push = function (chunk, encoding) {
|
||||
this._transformState.needTransform = false;
|
||||
return Duplex.prototype.push.call(this, chunk, encoding);
|
||||
};
|
||||
|
||||
// This is the part where you do stuff!
|
||||
// override this function in implementation classes.
|
||||
// 'chunk' is an input chunk.
|
||||
//
|
||||
// Call `push(newChunk)` to pass along transformed output
|
||||
// to the readable side. You may call 'push' zero or more times.
|
||||
//
|
||||
// Call `cb(err)` when you are done with this chunk. If you pass
|
||||
// an error, then that'll put the hurt on the whole operation. If you
|
||||
// never call cb(), then you'll never get another chunk.
|
||||
Transform.prototype._transform = function (chunk, encoding, cb) {
|
||||
throw new Error('_transform() is not implemented');
|
||||
};
|
||||
|
||||
Transform.prototype._write = function (chunk, encoding, cb) {
|
||||
var ts = this._transformState;
|
||||
ts.writecb = cb;
|
||||
ts.writechunk = chunk;
|
||||
ts.writeencoding = encoding;
|
||||
if (!ts.transforming) {
|
||||
var rs = this._readableState;
|
||||
if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
|
||||
}
|
||||
};
|
||||
|
||||
// Doesn't matter what the args are here.
|
||||
// _transform does all the work.
|
||||
// That we got here means that the readable side wants more data.
|
||||
Transform.prototype._read = function (n) {
|
||||
var ts = this._transformState;
|
||||
|
||||
if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
|
||||
ts.transforming = true;
|
||||
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
|
||||
} else {
|
||||
// mark that we need a transform, so that any data that comes in
|
||||
// will get processed, now that we've asked for it.
|
||||
ts.needTransform = true;
|
||||
}
|
||||
};
|
||||
|
||||
Transform.prototype._destroy = function (err, cb) {
|
||||
var _this2 = this;
|
||||
|
||||
Duplex.prototype._destroy.call(this, err, function (err2) {
|
||||
cb(err2);
|
||||
_this2.emit('close');
|
||||
});
|
||||
};
|
||||
|
||||
function done(stream, er, data) {
|
||||
if (er) return stream.emit('error', er);
|
||||
|
||||
if (data != null) // single equals check for both `null` and `undefined`
|
||||
stream.push(data);
|
||||
|
||||
// if there's nothing in the write buffer, then that means
|
||||
// that nothing more will ever be provided
|
||||
if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0');
|
||||
|
||||
if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming');
|
||||
|
||||
return stream.push(null);
|
||||
}
|
685
node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_writable.js
generated
vendored
685
node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_writable.js
generated
vendored
@ -1,685 +0,0 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// A bit simpler than readable streams.
|
||||
// Implement an async ._write(chunk, encoding, cb), and it'll handle all
|
||||
// the drain event emission and buffering.
|
||||
|
||||
'use strict';
|
||||
|
||||
/*<replacement>*/
|
||||
|
||||
var pna = require('process-nextick-args');
|
||||
/*</replacement>*/
|
||||
|
||||
module.exports = Writable;
|
||||
|
||||
/* <replacement> */
|
||||
function WriteReq(chunk, encoding, cb) {
|
||||
this.chunk = chunk;
|
||||
this.encoding = encoding;
|
||||
this.callback = cb;
|
||||
this.next = null;
|
||||
}
|
||||
|
||||
// It seems a linked list but it is not
|
||||
// there will be only 2 of these for each stream
|
||||
function CorkedRequest(state) {
|
||||
var _this = this;
|
||||
|
||||
this.next = null;
|
||||
this.entry = null;
|
||||
this.finish = function () {
|
||||
onCorkedFinish(_this, state);
|
||||
};
|
||||
}
|
||||
/* </replacement> */
|
||||
|
||||
/*<replacement>*/
|
||||
var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick;
|
||||
/*</replacement>*/
|
||||
|
||||
/*<replacement>*/
|
||||
var Duplex;
|
||||
/*</replacement>*/
|
||||
|
||||
Writable.WritableState = WritableState;
|
||||
|
||||
/*<replacement>*/
|
||||
var util = Object.create(require('core-util-is'));
|
||||
util.inherits = require('inherits');
|
||||
/*</replacement>*/
|
||||
|
||||
/*<replacement>*/
|
||||
var internalUtil = {
|
||||
deprecate: require('util-deprecate')
|
||||
};
|
||||
/*</replacement>*/
|
||||
|
||||
/*<replacement>*/
|
||||
var Stream = require('./internal/streams/stream');
|
||||
/*</replacement>*/
|
||||
|
||||
/*<replacement>*/
|
||||
|
||||
var Buffer = require('safe-buffer').Buffer;
|
||||
var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {};
|
||||
function _uint8ArrayToBuffer(chunk) {
|
||||
return Buffer.from(chunk);
|
||||
}
|
||||
function _isUint8Array(obj) {
|
||||
return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
|
||||
}
|
||||
|
||||
/*</replacement>*/
|
||||
|
||||
var destroyImpl = require('./internal/streams/destroy');
|
||||
|
||||
util.inherits(Writable, Stream);
|
||||
|
||||
function nop() {}
|
||||
|
||||
function WritableState(options, stream) {
|
||||
Duplex = Duplex || require('./_stream_duplex');
|
||||
|
||||
options = options || {};
|
||||
|
||||
// Duplex streams are both readable and writable, but share
|
||||
// the same options object.
|
||||
// However, some cases require setting options to different
|
||||
// values for the readable and the writable sides of the duplex stream.
|
||||
// These options can be provided separately as readableXXX and writableXXX.
|
||||
var isDuplex = stream instanceof Duplex;
|
||||
|
||||
// object stream flag to indicate whether or not this stream
|
||||
// contains buffers or objects.
|
||||
this.objectMode = !!options.objectMode;
|
||||
|
||||
if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
|
||||
|
||||
// the point at which write() starts returning false
|
||||
// Note: 0 is a valid value, means that we always return false if
|
||||
// the entire buffer is not flushed immediately on write()
|
||||
var hwm = options.highWaterMark;
|
||||
var writableHwm = options.writableHighWaterMark;
|
||||
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
|
||||
|
||||
if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm;
|
||||
|
||||
// cast to ints.
|
||||
this.highWaterMark = Math.floor(this.highWaterMark);
|
||||
|
||||
// if _final has been called
|
||||
this.finalCalled = false;
|
||||
|
||||
// drain event flag.
|
||||
this.needDrain = false;
|
||||
// at the start of calling end()
|
||||
this.ending = false;
|
||||
// when end() has been called, and returned
|
||||
this.ended = false;
|
||||
// when 'finish' is emitted
|
||||
this.finished = false;
|
||||
|
||||
// has it been destroyed
|
||||
this.destroyed = false;
|
||||
|
||||
// should we decode strings into buffers before passing to _write?
|
||||
// this is here so that some node-core streams can optimize string
|
||||
// handling at a lower level.
|
||||
var noDecode = options.decodeStrings === false;
|
||||
this.decodeStrings = !noDecode;
|
||||
|
||||
// Crypto is kind of old and crusty. Historically, its default string
|
||||
// encoding is 'binary' so we have to make this configurable.
|
||||
// Everything else in the universe uses 'utf8', though.
|
||||
this.defaultEncoding = options.defaultEncoding || 'utf8';
|
||||
|
||||
// not an actual buffer we keep track of, but a measurement
|
||||
// of how much we're waiting to get pushed to some underlying
|
||||
// socket or file.
|
||||
this.length = 0;
|
||||
|
||||
// a flag to see when we're in the middle of a write.
|
||||
this.writing = false;
|
||||
|
||||
// when true all writes will be buffered until .uncork() call
|
||||
this.corked = 0;
|
||||
|
||||
// a flag to be able to tell if the onwrite cb is called immediately,
|
||||
// or on a later tick. We set this to true at first, because any
|
||||
// actions that shouldn't happen until "later" should generally also
|
||||
// not happen before the first write call.
|
||||
this.sync = true;
|
||||
|
||||
// a flag to know if we're processing previously buffered items, which
|
||||
// may call the _write() callback in the same tick, so that we don't
|
||||
// end up in an overlapped onwrite situation.
|
||||
this.bufferProcessing = false;
|
||||
|
||||
// the callback that's passed to _write(chunk,cb)
|
||||
this.onwrite = function (er) {
|
||||
onwrite(stream, er);
|
||||
};
|
||||
|
||||
// the callback that the user supplies to write(chunk,encoding,cb)
|
||||
this.writecb = null;
|
||||
|
||||
// the amount that is being written when _write is called.
|
||||
this.writelen = 0;
|
||||
|
||||
this.bufferedRequest = null;
|
||||
this.lastBufferedRequest = null;
|
||||
|
||||
// number of pending user-supplied write callbacks
|
||||
// this must be 0 before 'finish' can be emitted
|
||||
this.pendingcb = 0;
|
||||
|
||||
// emit prefinish if the only thing we're waiting for is _write cbs
|
||||
// This is relevant for synchronous Transform streams
|
||||
this.prefinished = false;
|
||||
|
||||
// True if the error was already emitted and should not be thrown again
|
||||
this.errorEmitted = false;
|
||||
|
||||
// count buffered requests
|
||||
this.bufferedRequestCount = 0;
|
||||
|
||||
// allocate the first CorkedRequest, there is always
|
||||
// one allocated and free to use, and we maintain at most two
|
||||
this.corkedRequestsFree = new CorkedRequest(this);
|
||||
}
|
||||
|
||||
WritableState.prototype.getBuffer = function getBuffer() {
|
||||
var current = this.bufferedRequest;
|
||||
var out = [];
|
||||
while (current) {
|
||||
out.push(current);
|
||||
current = current.next;
|
||||
}
|
||||
return out;
|
||||
};
|
||||
|
||||
(function () {
|
||||
try {
|
||||
Object.defineProperty(WritableState.prototype, 'buffer', {
|
||||
get: internalUtil.deprecate(function () {
|
||||
return this.getBuffer();
|
||||
}, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
|
||||
});
|
||||
} catch (_) {}
|
||||
})();
|
||||
|
||||
// Test _writableState for inheritance to account for Duplex streams,
|
||||
// whose prototype chain only points to Readable.
|
||||
var realHasInstance;
|
||||
if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
|
||||
realHasInstance = Function.prototype[Symbol.hasInstance];
|
||||
Object.defineProperty(Writable, Symbol.hasInstance, {
|
||||
value: function (object) {
|
||||
if (realHasInstance.call(this, object)) return true;
|
||||
if (this !== Writable) return false;
|
||||
|
||||
return object && object._writableState instanceof WritableState;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
realHasInstance = function (object) {
|
||||
return object instanceof this;
|
||||
};
|
||||
}
|
||||
|
||||
function Writable(options) {
|
||||
Duplex = Duplex || require('./_stream_duplex');
|
||||
|
||||
// Writable ctor is applied to Duplexes, too.
|
||||
// `realHasInstance` is necessary because using plain `instanceof`
|
||||
// would return false, as no `_writableState` property is attached.
|
||||
|
||||
// Trying to use the custom `instanceof` for Writable here will also break the
|
||||
// Node.js LazyTransform implementation, which has a non-trivial getter for
|
||||
// `_writableState` that would lead to infinite recursion.
|
||||
if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) {
|
||||
return new Writable(options);
|
||||
}
|
||||
|
||||
this._writableState = new WritableState(options, this);
|
||||
|
||||
// legacy.
|
||||
this.writable = true;
|
||||
|
||||
if (options) {
|
||||
if (typeof options.write === 'function') this._write = options.write;
|
||||
|
||||
if (typeof options.writev === 'function') this._writev = options.writev;
|
||||
|
||||
if (typeof options.destroy === 'function') this._destroy = options.destroy;
|
||||
|
||||
if (typeof options.final === 'function') this._final = options.final;
|
||||
}
|
||||
|
||||
Stream.call(this);
|
||||
}
|
||||
|
||||
// Otherwise people can pipe Writable streams, which is just wrong.
|
||||
Writable.prototype.pipe = function () {
|
||||
this.emit('error', new Error('Cannot pipe, not readable'));
|
||||
};
|
||||
|
||||
function writeAfterEnd(stream, cb) {
|
||||
var er = new Error('write after end');
|
||||
// TODO: defer error events consistently everywhere, not just the cb
|
||||
stream.emit('error', er);
|
||||
pna.nextTick(cb, er);
|
||||
}
|
||||
|
||||
// Checks that a user-supplied chunk is valid, especially for the particular
|
||||
// mode the stream is in. Currently this means that `null` is never accepted
|
||||
// and undefined/non-string values are only allowed in object mode.
|
||||
function validChunk(stream, state, chunk, cb) {
|
||||
var valid = true;
|
||||
var er = false;
|
||||
|
||||
if (chunk === null) {
|
||||
er = new TypeError('May not write null values to stream');
|
||||
} else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
|
||||
er = new TypeError('Invalid non-string/buffer chunk');
|
||||
}
|
||||
if (er) {
|
||||
stream.emit('error', er);
|
||||
pna.nextTick(cb, er);
|
||||
valid = false;
|
||||
}
|
||||
return valid;
|
||||
}
|
||||
|
||||
Writable.prototype.write = function (chunk, encoding, cb) {
|
||||
var state = this._writableState;
|
||||
var ret = false;
|
||||
var isBuf = !state.objectMode && _isUint8Array(chunk);
|
||||
|
||||
if (isBuf && !Buffer.isBuffer(chunk)) {
|
||||
chunk = _uint8ArrayToBuffer(chunk);
|
||||
}
|
||||
|
||||
if (typeof encoding === 'function') {
|
||||
cb = encoding;
|
||||
encoding = null;
|
||||
}
|
||||
|
||||
if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
|
||||
|
||||
if (typeof cb !== 'function') cb = nop;
|
||||
|
||||
if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
|
||||
state.pendingcb++;
|
||||
ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
|
||||
}
|
||||
|
||||
return ret;
|
||||
};
|
||||
|
||||
Writable.prototype.cork = function () {
|
||||
var state = this._writableState;
|
||||
|
||||
state.corked++;
|
||||
};
|
||||
|
||||
Writable.prototype.uncork = function () {
|
||||
var state = this._writableState;
|
||||
|
||||
if (state.corked) {
|
||||
state.corked--;
|
||||
|
||||
if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
|
||||
}
|
||||
};
|
||||
|
||||
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
|
||||
// node::ParseEncoding() requires lower case.
|
||||
if (typeof encoding === 'string') encoding = encoding.toLowerCase();
|
||||
if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding);
|
||||
this._writableState.defaultEncoding = encoding;
|
||||
return this;
|
||||
};
|
||||
|
||||
function decodeChunk(state, chunk, encoding) {
|
||||
if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
|
||||
chunk = Buffer.from(chunk, encoding);
|
||||
}
|
||||
return chunk;
|
||||
}
|
||||
|
||||
Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
|
||||
// making it explicit this property is not enumerable
|
||||
// because otherwise some prototype manipulation in
|
||||
// userland will fail
|
||||
enumerable: false,
|
||||
get: function () {
|
||||
return this._writableState.highWaterMark;
|
||||
}
|
||||
});
|
||||
|
||||
// if we're already writing something, then just put this
|
||||
// in the queue, and wait our turn. Otherwise, call _write
|
||||
// If we return false, then we need a drain event, so set that flag.
|
||||
function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
|
||||
if (!isBuf) {
|
||||
var newChunk = decodeChunk(state, chunk, encoding);
|
||||
if (chunk !== newChunk) {
|
||||
isBuf = true;
|
||||
encoding = 'buffer';
|
||||
chunk = newChunk;
|
||||
}
|
||||
}
|
||||
var len = state.objectMode ? 1 : chunk.length;
|
||||
|
||||
state.length += len;
|
||||
|
||||
var ret = state.length < state.highWaterMark;
|
||||
// we must ensure that previous needDrain will not be reset to false.
|
||||
if (!ret) state.needDrain = true;
|
||||
|
||||
if (state.writing || state.corked) {
|
||||
var last = state.lastBufferedRequest;
|
||||
state.lastBufferedRequest = {
|
||||
chunk: chunk,
|
||||
encoding: encoding,
|
||||
isBuf: isBuf,
|
||||
callback: cb,
|
||||
next: null
|
||||
};
|
||||
if (last) {
|
||||
last.next = state.lastBufferedRequest;
|
||||
} else {
|
||||
state.bufferedRequest = state.lastBufferedRequest;
|
||||
}
|
||||
state.bufferedRequestCount += 1;
|
||||
} else {
|
||||
doWrite(stream, state, false, len, chunk, encoding, cb);
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
|
||||
state.writelen = len;
|
||||
state.writecb = cb;
|
||||
state.writing = true;
|
||||
state.sync = true;
|
||||
if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
|
||||
state.sync = false;
|
||||
}
|
||||
|
||||
function onwriteError(stream, state, sync, er, cb) {
|
||||
--state.pendingcb;
|
||||
|
||||
if (sync) {
|
||||
// defer the callback if we are being called synchronously
|
||||
// to avoid piling up things on the stack
|
||||
pna.nextTick(cb, er);
|
||||
// this can emit finish, and it will always happen
|
||||
// after error
|
||||
pna.nextTick(finishMaybe, stream, state);
|
||||
stream._writableState.errorEmitted = true;
|
||||
stream.emit('error', er);
|
||||
} else {
|
||||
// the caller expect this to happen before if
|
||||
// it is async
|
||||
cb(er);
|
||||
stream._writableState.errorEmitted = true;
|
||||
stream.emit('error', er);
|
||||
// this can emit finish, but finish must
|
||||
// always follow error
|
||||
finishMaybe(stream, state);
|
||||
}
|
||||
}
|
||||
|
||||
function onwriteStateUpdate(state) {
|
||||
state.writing = false;
|
||||
state.writecb = null;
|
||||
state.length -= state.writelen;
|
||||
state.writelen = 0;
|
||||
}
|
||||
|
||||
function onwrite(stream, er) {
|
||||
var state = stream._writableState;
|
||||
var sync = state.sync;
|
||||
var cb = state.writecb;
|
||||
|
||||
onwriteStateUpdate(state);
|
||||
|
||||
if (er) onwriteError(stream, state, sync, er, cb);else {
|
||||
// Check if we're actually ready to finish, but don't emit yet
|
||||
var finished = needFinish(state);
|
||||
|
||||
if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
|
||||
clearBuffer(stream, state);
|
||||
}
|
||||
|
||||
if (sync) {
|
||||
/*<replacement>*/
|
||||
asyncWrite(afterWrite, stream, state, finished, cb);
|
||||
/*</replacement>*/
|
||||
} else {
|
||||
afterWrite(stream, state, finished, cb);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function afterWrite(stream, state, finished, cb) {
|
||||
if (!finished) onwriteDrain(stream, state);
|
||||
state.pendingcb--;
|
||||
cb();
|
||||
finishMaybe(stream, state);
|
||||
}
|
||||
|
||||
// Must force callback to be called on nextTick, so that we don't
|
||||
// emit 'drain' before the write() consumer gets the 'false' return
|
||||
// value, and has a chance to attach a 'drain' listener.
|
||||
function onwriteDrain(stream, state) {
|
||||
if (state.length === 0 && state.needDrain) {
|
||||
state.needDrain = false;
|
||||
stream.emit('drain');
|
||||
}
|
||||
}
|
||||
|
||||
// if there's something in the buffer waiting, then process it
|
||||
function clearBuffer(stream, state) {
|
||||
state.bufferProcessing = true;
|
||||
var entry = state.bufferedRequest;
|
||||
|
||||
if (stream._writev && entry && entry.next) {
|
||||
// Fast case, write everything using _writev()
|
||||
var l = state.bufferedRequestCount;
|
||||
var buffer = new Array(l);
|
||||
var holder = state.corkedRequestsFree;
|
||||
holder.entry = entry;
|
||||
|
||||
var count = 0;
|
||||
var allBuffers = true;
|
||||
while (entry) {
|
||||
buffer[count] = entry;
|
||||
if (!entry.isBuf) allBuffers = false;
|
||||
entry = entry.next;
|
||||
count += 1;
|
||||
}
|
||||
buffer.allBuffers = allBuffers;
|
||||
|
||||
doWrite(stream, state, true, state.length, buffer, '', holder.finish);
|
||||
|
||||
// doWrite is almost always async, defer these to save a bit of time
|
||||
// as the hot path ends with doWrite
|
||||
state.pendingcb++;
|
||||
state.lastBufferedRequest = null;
|
||||
if (holder.next) {
|
||||
state.corkedRequestsFree = holder.next;
|
||||
holder.next = null;
|
||||
} else {
|
||||
state.corkedRequestsFree = new CorkedRequest(state);
|
||||
}
|
||||
state.bufferedRequestCount = 0;
|
||||
} else {
|
||||
// Slow case, write chunks one-by-one
|
||||
while (entry) {
|
||||
var chunk = entry.chunk;
|
||||
var encoding = entry.encoding;
|
||||
var cb = entry.callback;
|
||||
var len = state.objectMode ? 1 : chunk.length;
|
||||
|
||||
doWrite(stream, state, false, len, chunk, encoding, cb);
|
||||
entry = entry.next;
|
||||
state.bufferedRequestCount--;
|
||||
// if we didn't call the onwrite immediately, then
|
||||
// it means that we need to wait until it does.
|
||||
// also, that means that the chunk and cb are currently
|
||||
// being processed, so move the buffer counter past them.
|
||||
if (state.writing) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (entry === null) state.lastBufferedRequest = null;
|
||||
}
|
||||
|
||||
state.bufferedRequest = entry;
|
||||
state.bufferProcessing = false;
|
||||
}
|
||||
|
||||
Writable.prototype._write = function (chunk, encoding, cb) {
|
||||
cb(new Error('_write() is not implemented'));
|
||||
};
|
||||
|
||||
Writable.prototype._writev = null;
|
||||
|
||||
Writable.prototype.end = function (chunk, encoding, cb) {
|
||||
var state = this._writableState;
|
||||
|
||||
if (typeof chunk === 'function') {
|
||||
cb = chunk;
|
||||
chunk = null;
|
||||
encoding = null;
|
||||
} else if (typeof encoding === 'function') {
|
||||
cb = encoding;
|
||||
encoding = null;
|
||||
}
|
||||
|
||||
if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);
|
||||
|
||||
// .end() fully uncorks
|
||||
if (state.corked) {
|
||||
state.corked = 1;
|
||||
this.uncork();
|
||||
}
|
||||
|
||||
// ignore unnecessary end() calls.
|
||||
if (!state.ending) endWritable(this, state, cb);
|
||||
};
|
||||
|
||||
function needFinish(state) {
|
||||
return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
|
||||
}
|
||||
function callFinal(stream, state) {
|
||||
stream._final(function (err) {
|
||||
state.pendingcb--;
|
||||
if (err) {
|
||||
stream.emit('error', err);
|
||||
}
|
||||
state.prefinished = true;
|
||||
stream.emit('prefinish');
|
||||
finishMaybe(stream, state);
|
||||
});
|
||||
}
|
||||
function prefinish(stream, state) {
|
||||
if (!state.prefinished && !state.finalCalled) {
|
||||
if (typeof stream._final === 'function') {
|
||||
state.pendingcb++;
|
||||
state.finalCalled = true;
|
||||
pna.nextTick(callFinal, stream, state);
|
||||
} else {
|
||||
state.prefinished = true;
|
||||
stream.emit('prefinish');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function finishMaybe(stream, state) {
|
||||
var need = needFinish(state);
|
||||
if (need) {
|
||||
prefinish(stream, state);
|
||||
if (state.pendingcb === 0) {
|
||||
state.finished = true;
|
||||
stream.emit('finish');
|
||||
}
|
||||
}
|
||||
return need;
|
||||
}
|
||||
|
||||
function endWritable(stream, state, cb) {
|
||||
state.ending = true;
|
||||
finishMaybe(stream, state);
|
||||
if (cb) {
|
||||
if (state.finished) pna.nextTick(cb);else stream.once('finish', cb);
|
||||
}
|
||||
state.ended = true;
|
||||
stream.writable = false;
|
||||
}
|
||||
|
||||
function onCorkedFinish(corkReq, state, err) {
|
||||
var entry = corkReq.entry;
|
||||
corkReq.entry = null;
|
||||
while (entry) {
|
||||
var cb = entry.callback;
|
||||
state.pendingcb--;
|
||||
cb(err);
|
||||
entry = entry.next;
|
||||
}
|
||||
|
||||
// reuse the free corkReq.
|
||||
state.corkedRequestsFree.next = corkReq;
|
||||
}
|
||||
|
||||
Object.defineProperty(Writable.prototype, 'destroyed', {
|
||||
get: function () {
|
||||
if (this._writableState === undefined) {
|
||||
return false;
|
||||
}
|
||||
return this._writableState.destroyed;
|
||||
},
|
||||
set: function (value) {
|
||||
// we ignore the value if the stream
|
||||
// has not been initialized yet
|
||||
if (!this._writableState) {
|
||||
return;
|
||||
}
|
||||
|
||||
// backward compatibility, the user is explicitly
|
||||
// managing destroyed
|
||||
this._writableState.destroyed = value;
|
||||
}
|
||||
});
|
||||
|
||||
Writable.prototype.destroy = destroyImpl.destroy;
|
||||
Writable.prototype._undestroy = destroyImpl.undestroy;
|
||||
Writable.prototype._destroy = function (err, cb) {
|
||||
this.end();
|
||||
cb(err);
|
||||
};
|
@ -1,78 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
|
||||
|
||||
var Buffer = require('safe-buffer').Buffer;
|
||||
var util = require('util');
|
||||
|
||||
function copyBuffer(src, target, offset) {
|
||||
src.copy(target, offset);
|
||||
}
|
||||
|
||||
module.exports = function () {
|
||||
function BufferList() {
|
||||
_classCallCheck(this, BufferList);
|
||||
|
||||
this.head = null;
|
||||
this.tail = null;
|
||||
this.length = 0;
|
||||
}
|
||||
|
||||
BufferList.prototype.push = function push(v) {
|
||||
var entry = { data: v, next: null };
|
||||
if (this.length > 0) this.tail.next = entry;else this.head = entry;
|
||||
this.tail = entry;
|
||||
++this.length;
|
||||
};
|
||||
|
||||
BufferList.prototype.unshift = function unshift(v) {
|
||||
var entry = { data: v, next: this.head };
|
||||
if (this.length === 0) this.tail = entry;
|
||||
this.head = entry;
|
||||
++this.length;
|
||||
};
|
||||
|
||||
BufferList.prototype.shift = function shift() {
|
||||
if (this.length === 0) return;
|
||||
var ret = this.head.data;
|
||||
if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
|
||||
--this.length;
|
||||
return ret;
|
||||
};
|
||||
|
||||
BufferList.prototype.clear = function clear() {
|
||||
this.head = this.tail = null;
|
||||
this.length = 0;
|
||||
};
|
||||
|
||||
BufferList.prototype.join = function join(s) {
|
||||
if (this.length === 0) return '';
|
||||
var p = this.head;
|
||||
var ret = '' + p.data;
|
||||
while (p = p.next) {
|
||||
ret += s + p.data;
|
||||
}return ret;
|
||||
};
|
||||
|
||||
BufferList.prototype.concat = function concat(n) {
|
||||
if (this.length === 0) return Buffer.alloc(0);
|
||||
var ret = Buffer.allocUnsafe(n >>> 0);
|
||||
var p = this.head;
|
||||
var i = 0;
|
||||
while (p) {
|
||||
copyBuffer(p.data, ret, i);
|
||||
i += p.data.length;
|
||||
p = p.next;
|
||||
}
|
||||
return ret;
|
||||
};
|
||||
|
||||
return BufferList;
|
||||
}();
|
||||
|
||||
if (util && util.inspect && util.inspect.custom) {
|
||||
module.exports.prototype[util.inspect.custom] = function () {
|
||||
var obj = util.inspect({ length: this.length });
|
||||
return this.constructor.name + ' ' + obj;
|
||||
};
|
||||
}
|
84
node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/destroy.js
generated
vendored
84
node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/destroy.js
generated
vendored
@ -1,84 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
/*<replacement>*/
|
||||
|
||||
var pna = require('process-nextick-args');
|
||||
/*</replacement>*/
|
||||
|
||||
// undocumented cb() API, needed for core, not for public API
|
||||
function destroy(err, cb) {
|
||||
var _this = this;
|
||||
|
||||
var readableDestroyed = this._readableState && this._readableState.destroyed;
|
||||
var writableDestroyed = this._writableState && this._writableState.destroyed;
|
||||
|
||||
if (readableDestroyed || writableDestroyed) {
|
||||
if (cb) {
|
||||
cb(err);
|
||||
} else if (err) {
|
||||
if (!this._writableState) {
|
||||
pna.nextTick(emitErrorNT, this, err);
|
||||
} else if (!this._writableState.errorEmitted) {
|
||||
this._writableState.errorEmitted = true;
|
||||
pna.nextTick(emitErrorNT, this, err);
|
||||
}
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
// we set destroyed to true before firing error callbacks in order
|
||||
// to make it re-entrance safe in case destroy() is called within callbacks
|
||||
|
||||
if (this._readableState) {
|
||||
this._readableState.destroyed = true;
|
||||
}
|
||||
|
||||
// if this is a duplex stream mark the writable part as destroyed as well
|
||||
if (this._writableState) {
|
||||
this._writableState.destroyed = true;
|
||||
}
|
||||
|
||||
this._destroy(err || null, function (err) {
|
||||
if (!cb && err) {
|
||||
if (!_this._writableState) {
|
||||
pna.nextTick(emitErrorNT, _this, err);
|
||||
} else if (!_this._writableState.errorEmitted) {
|
||||
_this._writableState.errorEmitted = true;
|
||||
pna.nextTick(emitErrorNT, _this, err);
|
||||
}
|
||||
} else if (cb) {
|
||||
cb(err);
|
||||
}
|
||||
});
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
function undestroy() {
|
||||
if (this._readableState) {
|
||||
this._readableState.destroyed = false;
|
||||
this._readableState.reading = false;
|
||||
this._readableState.ended = false;
|
||||
this._readableState.endEmitted = false;
|
||||
}
|
||||
|
||||
if (this._writableState) {
|
||||
this._writableState.destroyed = false;
|
||||
this._writableState.ended = false;
|
||||
this._writableState.ending = false;
|
||||
this._writableState.finalCalled = false;
|
||||
this._writableState.prefinished = false;
|
||||
this._writableState.finished = false;
|
||||
this._writableState.errorEmitted = false;
|
||||
}
|
||||
}
|
||||
|
||||
function emitErrorNT(self, err) {
|
||||
self.emit('error', err);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
destroy: destroy,
|
||||
undestroy: undestroy
|
||||
};
|
@ -1 +0,0 @@
|
||||
module.exports = require('events').EventEmitter;
|
@ -1 +0,0 @@
|
||||
module.exports = require('stream');
|
@ -1,21 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Feross Aboukhadijeh
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
584
node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/README.md
generated
vendored
584
node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/README.md
generated
vendored
@ -1,584 +0,0 @@
|
||||
# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url]
|
||||
|
||||
[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg
|
||||
[travis-url]: https://travis-ci.org/feross/safe-buffer
|
||||
[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg
|
||||
[npm-url]: https://npmjs.org/package/safe-buffer
|
||||
[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg
|
||||
[downloads-url]: https://npmjs.org/package/safe-buffer
|
||||
[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg
|
||||
[standard-url]: https://standardjs.com
|
||||
|
||||
#### Safer Node.js Buffer API
|
||||
|
||||
**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`,
|
||||
`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.**
|
||||
|
||||
**Uses the built-in implementation when available.**
|
||||
|
||||
## install
|
||||
|
||||
```
|
||||
npm install safe-buffer
|
||||
```
|
||||
|
||||
## usage
|
||||
|
||||
The goal of this package is to provide a safe replacement for the node.js `Buffer`.
|
||||
|
||||
It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to
|
||||
the top of your node.js modules:
|
||||
|
||||
```js
|
||||
var Buffer = require('safe-buffer').Buffer
|
||||
|
||||
// Existing buffer code will continue to work without issues:
|
||||
|
||||
new Buffer('hey', 'utf8')
|
||||
new Buffer([1, 2, 3], 'utf8')
|
||||
new Buffer(obj)
|
||||
new Buffer(16) // create an uninitialized buffer (potentially unsafe)
|
||||
|
||||
// But you can use these new explicit APIs to make clear what you want:
|
||||
|
||||
Buffer.from('hey', 'utf8') // convert from many types to a Buffer
|
||||
Buffer.alloc(16) // create a zero-filled buffer (safe)
|
||||
Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe)
|
||||
```
|
||||
|
||||
## api
|
||||
|
||||
### Class Method: Buffer.from(array)
|
||||
<!-- YAML
|
||||
added: v3.0.0
|
||||
-->
|
||||
|
||||
* `array` {Array}
|
||||
|
||||
Allocates a new `Buffer` using an `array` of octets.
|
||||
|
||||
```js
|
||||
const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]);
|
||||
// creates a new Buffer containing ASCII bytes
|
||||
// ['b','u','f','f','e','r']
|
||||
```
|
||||
|
||||
A `TypeError` will be thrown if `array` is not an `Array`.
|
||||
|
||||
### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]])
|
||||
<!-- YAML
|
||||
added: v5.10.0
|
||||
-->
|
||||
|
||||
* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or
|
||||
a `new ArrayBuffer()`
|
||||
* `byteOffset` {Number} Default: `0`
|
||||
* `length` {Number} Default: `arrayBuffer.length - byteOffset`
|
||||
|
||||
When passed a reference to the `.buffer` property of a `TypedArray` instance,
|
||||
the newly created `Buffer` will share the same allocated memory as the
|
||||
TypedArray.
|
||||
|
||||
```js
|
||||
const arr = new Uint16Array(2);
|
||||
arr[0] = 5000;
|
||||
arr[1] = 4000;
|
||||
|
||||
const buf = Buffer.from(arr.buffer); // shares the memory with arr;
|
||||
|
||||
console.log(buf);
|
||||
// Prints: <Buffer 88 13 a0 0f>
|
||||
|
||||
// changing the TypedArray changes the Buffer also
|
||||
arr[1] = 6000;
|
||||
|
||||
console.log(buf);
|
||||
// Prints: <Buffer 88 13 70 17>
|
||||
```
|
||||
|
||||
The optional `byteOffset` and `length` arguments specify a memory range within
|
||||
the `arrayBuffer` that will be shared by the `Buffer`.
|
||||
|
||||
```js
|
||||
const ab = new ArrayBuffer(10);
|
||||
const buf = Buffer.from(ab, 0, 2);
|
||||
console.log(buf.length);
|
||||
// Prints: 2
|
||||
```
|
||||
|
||||
A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`.
|
||||
|
||||
### Class Method: Buffer.from(buffer)
|
||||
<!-- YAML
|
||||
added: v3.0.0
|
||||
-->
|
||||
|
||||
* `buffer` {Buffer}
|
||||
|
||||
Copies the passed `buffer` data onto a new `Buffer` instance.
|
||||
|
||||
```js
|
||||
const buf1 = Buffer.from('buffer');
|
||||
const buf2 = Buffer.from(buf1);
|
||||
|
||||
buf1[0] = 0x61;
|
||||
console.log(buf1.toString());
|
||||
// 'auffer'
|
||||
console.log(buf2.toString());
|
||||
// 'buffer' (copy is not changed)
|
||||
```
|
||||
|
||||
A `TypeError` will be thrown if `buffer` is not a `Buffer`.
|
||||
|
||||
### Class Method: Buffer.from(str[, encoding])
|
||||
<!-- YAML
|
||||
added: v5.10.0
|
||||
-->
|
||||
|
||||
* `str` {String} String to encode.
|
||||
* `encoding` {String} Encoding to use, Default: `'utf8'`
|
||||
|
||||
Creates a new `Buffer` containing the given JavaScript string `str`. If
|
||||
provided, the `encoding` parameter identifies the character encoding.
|
||||
If not provided, `encoding` defaults to `'utf8'`.
|
||||
|
||||
```js
|
||||
const buf1 = Buffer.from('this is a tést');
|
||||
console.log(buf1.toString());
|
||||
// prints: this is a tést
|
||||
console.log(buf1.toString('ascii'));
|
||||
// prints: this is a tC)st
|
||||
|
||||
const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex');
|
||||
console.log(buf2.toString());
|
||||
// prints: this is a tést
|
||||
```
|
||||
|
||||
A `TypeError` will be thrown if `str` is not a string.
|
||||
|
||||
### Class Method: Buffer.alloc(size[, fill[, encoding]])
|
||||
<!-- YAML
|
||||
added: v5.10.0
|
||||
-->
|
||||
|
||||
* `size` {Number}
|
||||
* `fill` {Value} Default: `undefined`
|
||||
* `encoding` {String} Default: `utf8`
|
||||
|
||||
Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the
|
||||
`Buffer` will be *zero-filled*.
|
||||
|
||||
```js
|
||||
const buf = Buffer.alloc(5);
|
||||
console.log(buf);
|
||||
// <Buffer 00 00 00 00 00>
|
||||
```
|
||||
|
||||
The `size` must be less than or equal to the value of
|
||||
`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is
|
||||
`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will
|
||||
be created if a `size` less than or equal to 0 is specified.
|
||||
|
||||
If `fill` is specified, the allocated `Buffer` will be initialized by calling
|
||||
`buf.fill(fill)`. See [`buf.fill()`][] for more information.
|
||||
|
||||
```js
|
||||
const buf = Buffer.alloc(5, 'a');
|
||||
console.log(buf);
|
||||
// <Buffer 61 61 61 61 61>
|
||||
```
|
||||
|
||||
If both `fill` and `encoding` are specified, the allocated `Buffer` will be
|
||||
initialized by calling `buf.fill(fill, encoding)`. For example:
|
||||
|
||||
```js
|
||||
const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64');
|
||||
console.log(buf);
|
||||
// <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64>
|
||||
```
|
||||
|
||||
Calling `Buffer.alloc(size)` can be significantly slower than the alternative
|
||||
`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance
|
||||
contents will *never contain sensitive data*.
|
||||
|
||||
A `TypeError` will be thrown if `size` is not a number.
|
||||
|
||||
### Class Method: Buffer.allocUnsafe(size)
|
||||
<!-- YAML
|
||||
added: v5.10.0
|
||||
-->
|
||||
|
||||
* `size` {Number}
|
||||
|
||||
Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must
|
||||
be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit
|
||||
architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is
|
||||
thrown. A zero-length Buffer will be created if a `size` less than or equal to
|
||||
0 is specified.
|
||||
|
||||
The underlying memory for `Buffer` instances created in this way is *not
|
||||
initialized*. The contents of the newly created `Buffer` are unknown and
|
||||
*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such
|
||||
`Buffer` instances to zeroes.
|
||||
|
||||
```js
|
||||
const buf = Buffer.allocUnsafe(5);
|
||||
console.log(buf);
|
||||
// <Buffer 78 e0 82 02 01>
|
||||
// (octets will be different, every time)
|
||||
buf.fill(0);
|
||||
console.log(buf);
|
||||
// <Buffer 00 00 00 00 00>
|
||||
```
|
||||
|
||||
A `TypeError` will be thrown if `size` is not a number.
|
||||
|
||||
Note that the `Buffer` module pre-allocates an internal `Buffer` instance of
|
||||
size `Buffer.poolSize` that is used as a pool for the fast allocation of new
|
||||
`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated
|
||||
`new Buffer(size)` constructor) only when `size` is less than or equal to
|
||||
`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default
|
||||
value of `Buffer.poolSize` is `8192` but can be modified.
|
||||
|
||||
Use of this pre-allocated internal memory pool is a key difference between
|
||||
calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`.
|
||||
Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer
|
||||
pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal
|
||||
Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The
|
||||
difference is subtle but can be important when an application requires the
|
||||
additional performance that `Buffer.allocUnsafe(size)` provides.
|
||||
|
||||
### Class Method: Buffer.allocUnsafeSlow(size)
|
||||
<!-- YAML
|
||||
added: v5.10.0
|
||||
-->
|
||||
|
||||
* `size` {Number}
|
||||
|
||||
Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The
|
||||
`size` must be less than or equal to the value of
|
||||
`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is
|
||||
`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will
|
||||
be created if a `size` less than or equal to 0 is specified.
|
||||
|
||||
The underlying memory for `Buffer` instances created in this way is *not
|
||||
initialized*. The contents of the newly created `Buffer` are unknown and
|
||||
*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such
|
||||
`Buffer` instances to zeroes.
|
||||
|
||||
When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances,
|
||||
allocations under 4KB are, by default, sliced from a single pre-allocated
|
||||
`Buffer`. This allows applications to avoid the garbage collection overhead of
|
||||
creating many individually allocated Buffers. This approach improves both
|
||||
performance and memory usage by eliminating the need to track and cleanup as
|
||||
many `Persistent` objects.
|
||||
|
||||
However, in the case where a developer may need to retain a small chunk of
|
||||
memory from a pool for an indeterminate amount of time, it may be appropriate
|
||||
to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then
|
||||
copy out the relevant bits.
|
||||
|
||||
```js
|
||||
// need to keep around a few small chunks of memory
|
||||
const store = [];
|
||||
|
||||
socket.on('readable', () => {
|
||||
const data = socket.read();
|
||||
// allocate for retained data
|
||||
const sb = Buffer.allocUnsafeSlow(10);
|
||||
// copy the data into the new allocation
|
||||
data.copy(sb, 0, 0, 10);
|
||||
store.push(sb);
|
||||
});
|
||||
```
|
||||
|
||||
Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after*
|
||||
a developer has observed undue memory retention in their applications.
|
||||
|
||||
A `TypeError` will be thrown if `size` is not a number.
|
||||
|
||||
### All the Rest
|
||||
|
||||
The rest of the `Buffer` API is exactly the same as in node.js.
|
||||
[See the docs](https://nodejs.org/api/buffer.html).
|
||||
|
||||
|
||||
## Related links
|
||||
|
||||
- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660)
|
||||
- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4)
|
||||
|
||||
## Why is `Buffer` unsafe?
|
||||
|
||||
Today, the node.js `Buffer` constructor is overloaded to handle many different argument
|
||||
types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.),
|
||||
`ArrayBuffer`, and also `Number`.
|
||||
|
||||
The API is optimized for convenience: you can throw any type at it, and it will try to do
|
||||
what you want.
|
||||
|
||||
Because the Buffer constructor is so powerful, you often see code like this:
|
||||
|
||||
```js
|
||||
// Convert UTF-8 strings to hex
|
||||
function toHex (str) {
|
||||
return new Buffer(str).toString('hex')
|
||||
}
|
||||
```
|
||||
|
||||
***But what happens if `toHex` is called with a `Number` argument?***
|
||||
|
||||
### Remote Memory Disclosure
|
||||
|
||||
If an attacker can make your program call the `Buffer` constructor with a `Number`
|
||||
argument, then they can make it allocate uninitialized memory from the node.js process.
|
||||
This could potentially disclose TLS private keys, user data, or database passwords.
|
||||
|
||||
When the `Buffer` constructor is passed a `Number` argument, it returns an
|
||||
**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like
|
||||
this, you **MUST** overwrite the contents before returning it to the user.
|
||||
|
||||
From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size):
|
||||
|
||||
> `new Buffer(size)`
|
||||
>
|
||||
> - `size` Number
|
||||
>
|
||||
> The underlying memory for `Buffer` instances created in this way is not initialized.
|
||||
> **The contents of a newly created `Buffer` are unknown and could contain sensitive
|
||||
> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes.
|
||||
|
||||
(Emphasis our own.)
|
||||
|
||||
Whenever the programmer intended to create an uninitialized `Buffer` you often see code
|
||||
like this:
|
||||
|
||||
```js
|
||||
var buf = new Buffer(16)
|
||||
|
||||
// Immediately overwrite the uninitialized buffer with data from another buffer
|
||||
for (var i = 0; i < buf.length; i++) {
|
||||
buf[i] = otherBuf[i]
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### Would this ever be a problem in real code?
|
||||
|
||||
Yes. It's surprisingly common to forget to check the type of your variables in a
|
||||
dynamically-typed language like JavaScript.
|
||||
|
||||
Usually the consequences of assuming the wrong type is that your program crashes with an
|
||||
uncaught exception. But the failure mode for forgetting to check the type of arguments to
|
||||
the `Buffer` constructor is more catastrophic.
|
||||
|
||||
Here's an example of a vulnerable service that takes a JSON payload and converts it to
|
||||
hex:
|
||||
|
||||
```js
|
||||
// Take a JSON payload {str: "some string"} and convert it to hex
|
||||
var server = http.createServer(function (req, res) {
|
||||
var data = ''
|
||||
req.setEncoding('utf8')
|
||||
req.on('data', function (chunk) {
|
||||
data += chunk
|
||||
})
|
||||
req.on('end', function () {
|
||||
var body = JSON.parse(data)
|
||||
res.end(new Buffer(body.str).toString('hex'))
|
||||
})
|
||||
})
|
||||
|
||||
server.listen(8080)
|
||||
```
|
||||
|
||||
In this example, an http client just has to send:
|
||||
|
||||
```json
|
||||
{
|
||||
"str": 1000
|
||||
}
|
||||
```
|
||||
|
||||
and it will get back 1,000 bytes of uninitialized memory from the server.
|
||||
|
||||
This is a very serious bug. It's similar in severity to the
|
||||
[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process
|
||||
memory by remote attackers.
|
||||
|
||||
|
||||
### Which real-world packages were vulnerable?
|
||||
|
||||
#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht)
|
||||
|
||||
[Mathias Buus](https://github.com/mafintosh) and I
|
||||
([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages,
|
||||
[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow
|
||||
anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get
|
||||
them to reveal 20 bytes at a time of uninitialized memory from the node.js process.
|
||||
|
||||
Here's
|
||||
[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8)
|
||||
that fixed it. We released a new fixed version, created a
|
||||
[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all
|
||||
vulnerable versions on npm so users will get a warning to upgrade to a newer version.
|
||||
|
||||
#### [`ws`](https://www.npmjs.com/package/ws)
|
||||
|
||||
That got us wondering if there were other vulnerable packages. Sure enough, within a short
|
||||
period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the
|
||||
most popular WebSocket implementation in node.js.
|
||||
|
||||
If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as
|
||||
expected, then uninitialized server memory would be disclosed to the remote peer.
|
||||
|
||||
These were the vulnerable methods:
|
||||
|
||||
```js
|
||||
socket.send(number)
|
||||
socket.ping(number)
|
||||
socket.pong(number)
|
||||
```
|
||||
|
||||
Here's a vulnerable socket server with some echo functionality:
|
||||
|
||||
```js
|
||||
server.on('connection', function (socket) {
|
||||
socket.on('message', function (message) {
|
||||
message = JSON.parse(message)
|
||||
if (message.type === 'echo') {
|
||||
socket.send(message.data) // send back the user's message
|
||||
}
|
||||
})
|
||||
})
|
||||
```
|
||||
|
||||
`socket.send(number)` called on the server, will disclose server memory.
|
||||
|
||||
Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue
|
||||
was fixed, with a more detailed explanation. Props to
|
||||
[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the
|
||||
[Node Security Project disclosure](https://nodesecurity.io/advisories/67).
|
||||
|
||||
|
||||
### What's the solution?
|
||||
|
||||
It's important that node.js offers a fast way to get memory otherwise performance-critical
|
||||
applications would needlessly get a lot slower.
|
||||
|
||||
But we need a better way to *signal our intent* as programmers. **When we want
|
||||
uninitialized memory, we should request it explicitly.**
|
||||
|
||||
Sensitive functionality should not be packed into a developer-friendly API that loosely
|
||||
accepts many different types. This type of API encourages the lazy practice of passing
|
||||
variables in without checking the type very carefully.
|
||||
|
||||
#### A new API: `Buffer.allocUnsafe(number)`
|
||||
|
||||
The functionality of creating buffers with uninitialized memory should be part of another
|
||||
API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that
|
||||
frequently gets user input of all sorts of different types passed into it.
|
||||
|
||||
```js
|
||||
var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory!
|
||||
|
||||
// Immediately overwrite the uninitialized buffer with data from another buffer
|
||||
for (var i = 0; i < buf.length; i++) {
|
||||
buf[i] = otherBuf[i]
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### How do we fix node.js core?
|
||||
|
||||
We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as
|
||||
`semver-major`) which defends against one case:
|
||||
|
||||
```js
|
||||
var str = 16
|
||||
new Buffer(str, 'utf8')
|
||||
```
|
||||
|
||||
In this situation, it's implied that the programmer intended the first argument to be a
|
||||
string, since they passed an encoding as a second argument. Today, node.js will allocate
|
||||
uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not
|
||||
what the programmer intended.
|
||||
|
||||
But this is only a partial solution, since if the programmer does `new Buffer(variable)`
|
||||
(without an `encoding` parameter) there's no way to know what they intended. If `variable`
|
||||
is sometimes a number, then uninitialized memory will sometimes be returned.
|
||||
|
||||
### What's the real long-term fix?
|
||||
|
||||
We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when
|
||||
we need uninitialized memory. But that would break 1000s of packages.
|
||||
|
||||
~~We believe the best solution is to:~~
|
||||
|
||||
~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~
|
||||
|
||||
~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~
|
||||
|
||||
#### Update
|
||||
|
||||
We now support adding three new APIs:
|
||||
|
||||
- `Buffer.from(value)` - convert from any type to a buffer
|
||||
- `Buffer.alloc(size)` - create a zero-filled buffer
|
||||
- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size
|
||||
|
||||
This solves the core problem that affected `ws` and `bittorrent-dht` which is
|
||||
`Buffer(variable)` getting tricked into taking a number argument.
|
||||
|
||||
This way, existing code continues working and the impact on the npm ecosystem will be
|
||||
minimal. Over time, npm maintainers can migrate performance-critical code to use
|
||||
`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`.
|
||||
|
||||
|
||||
### Conclusion
|
||||
|
||||
We think there's a serious design issue with the `Buffer` API as it exists today. It
|
||||
promotes insecure software by putting high-risk functionality into a convenient API
|
||||
with friendly "developer ergonomics".
|
||||
|
||||
This wasn't merely a theoretical exercise because we found the issue in some of the
|
||||
most popular npm packages.
|
||||
|
||||
Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of
|
||||
`buffer`.
|
||||
|
||||
```js
|
||||
var Buffer = require('safe-buffer').Buffer
|
||||
```
|
||||
|
||||
Eventually, we hope that node.js core can switch to this new, safer behavior. We believe
|
||||
the impact on the ecosystem would be minimal since it's not a breaking change.
|
||||
Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while
|
||||
older, insecure packages would magically become safe from this attack vector.
|
||||
|
||||
|
||||
## links
|
||||
|
||||
- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514)
|
||||
- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67)
|
||||
- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68)
|
||||
|
||||
|
||||
## credit
|
||||
|
||||
The original issues in `bittorrent-dht`
|
||||
([disclosure](https://nodesecurity.io/advisories/68)) and
|
||||
`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by
|
||||
[Mathias Buus](https://github.com/mafintosh) and
|
||||
[Feross Aboukhadijeh](http://feross.org/).
|
||||
|
||||
Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues
|
||||
and for his work running the [Node Security Project](https://nodesecurity.io/).
|
||||
|
||||
Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and
|
||||
auditing the code.
|
||||
|
||||
|
||||
## license
|
||||
|
||||
MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org)
|
187
node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/index.d.ts
generated
vendored
187
node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/index.d.ts
generated
vendored
@ -1,187 +0,0 @@
|
||||
declare module "safe-buffer" {
|
||||
export class Buffer {
|
||||
length: number
|
||||
write(string: string, offset?: number, length?: number, encoding?: string): number;
|
||||
toString(encoding?: string, start?: number, end?: number): string;
|
||||
toJSON(): { type: 'Buffer', data: any[] };
|
||||
equals(otherBuffer: Buffer): boolean;
|
||||
compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number;
|
||||
copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number;
|
||||
slice(start?: number, end?: number): Buffer;
|
||||
writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
readIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
readIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
readUInt8(offset: number, noAssert?: boolean): number;
|
||||
readUInt16LE(offset: number, noAssert?: boolean): number;
|
||||
readUInt16BE(offset: number, noAssert?: boolean): number;
|
||||
readUInt32LE(offset: number, noAssert?: boolean): number;
|
||||
readUInt32BE(offset: number, noAssert?: boolean): number;
|
||||
readInt8(offset: number, noAssert?: boolean): number;
|
||||
readInt16LE(offset: number, noAssert?: boolean): number;
|
||||
readInt16BE(offset: number, noAssert?: boolean): number;
|
||||
readInt32LE(offset: number, noAssert?: boolean): number;
|
||||
readInt32BE(offset: number, noAssert?: boolean): number;
|
||||
readFloatLE(offset: number, noAssert?: boolean): number;
|
||||
readFloatBE(offset: number, noAssert?: boolean): number;
|
||||
readDoubleLE(offset: number, noAssert?: boolean): number;
|
||||
readDoubleBE(offset: number, noAssert?: boolean): number;
|
||||
swap16(): Buffer;
|
||||
swap32(): Buffer;
|
||||
swap64(): Buffer;
|
||||
writeUInt8(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeUInt16LE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeUInt16BE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeUInt32LE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeUInt32BE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeInt8(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeInt16LE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeInt16BE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeInt32LE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeInt32BE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeFloatLE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeFloatBE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeDoubleLE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeDoubleBE(value: number, offset: number, noAssert?: boolean): number;
|
||||
fill(value: any, offset?: number, end?: number): this;
|
||||
indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
|
||||
lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
|
||||
includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean;
|
||||
|
||||
/**
|
||||
* Allocates a new buffer containing the given {str}.
|
||||
*
|
||||
* @param str String to store in buffer.
|
||||
* @param encoding encoding to use, optional. Default is 'utf8'
|
||||
*/
|
||||
constructor (str: string, encoding?: string);
|
||||
/**
|
||||
* Allocates a new buffer of {size} octets.
|
||||
*
|
||||
* @param size count of octets to allocate.
|
||||
*/
|
||||
constructor (size: number);
|
||||
/**
|
||||
* Allocates a new buffer containing the given {array} of octets.
|
||||
*
|
||||
* @param array The octets to store.
|
||||
*/
|
||||
constructor (array: Uint8Array);
|
||||
/**
|
||||
* Produces a Buffer backed by the same allocated memory as
|
||||
* the given {ArrayBuffer}.
|
||||
*
|
||||
*
|
||||
* @param arrayBuffer The ArrayBuffer with which to share memory.
|
||||
*/
|
||||
constructor (arrayBuffer: ArrayBuffer);
|
||||
/**
|
||||
* Allocates a new buffer containing the given {array} of octets.
|
||||
*
|
||||
* @param array The octets to store.
|
||||
*/
|
||||
constructor (array: any[]);
|
||||
/**
|
||||
* Copies the passed {buffer} data onto a new {Buffer} instance.
|
||||
*
|
||||
* @param buffer The buffer to copy.
|
||||
*/
|
||||
constructor (buffer: Buffer);
|
||||
prototype: Buffer;
|
||||
/**
|
||||
* Allocates a new Buffer using an {array} of octets.
|
||||
*
|
||||
* @param array
|
||||
*/
|
||||
static from(array: any[]): Buffer;
|
||||
/**
|
||||
* When passed a reference to the .buffer property of a TypedArray instance,
|
||||
* the newly created Buffer will share the same allocated memory as the TypedArray.
|
||||
* The optional {byteOffset} and {length} arguments specify a memory range
|
||||
* within the {arrayBuffer} that will be shared by the Buffer.
|
||||
*
|
||||
* @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer()
|
||||
* @param byteOffset
|
||||
* @param length
|
||||
*/
|
||||
static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer;
|
||||
/**
|
||||
* Copies the passed {buffer} data onto a new Buffer instance.
|
||||
*
|
||||
* @param buffer
|
||||
*/
|
||||
static from(buffer: Buffer): Buffer;
|
||||
/**
|
||||
* Creates a new Buffer containing the given JavaScript string {str}.
|
||||
* If provided, the {encoding} parameter identifies the character encoding.
|
||||
* If not provided, {encoding} defaults to 'utf8'.
|
||||
*
|
||||
* @param str
|
||||
*/
|
||||
static from(str: string, encoding?: string): Buffer;
|
||||
/**
|
||||
* Returns true if {obj} is a Buffer
|
||||
*
|
||||
* @param obj object to test.
|
||||
*/
|
||||
static isBuffer(obj: any): obj is Buffer;
|
||||
/**
|
||||
* Returns true if {encoding} is a valid encoding argument.
|
||||
* Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex'
|
||||
*
|
||||
* @param encoding string to test.
|
||||
*/
|
||||
static isEncoding(encoding: string): boolean;
|
||||
/**
|
||||
* Gives the actual byte length of a string. encoding defaults to 'utf8'.
|
||||
* This is not the same as String.prototype.length since that returns the number of characters in a string.
|
||||
*
|
||||
* @param string string to test.
|
||||
* @param encoding encoding used to evaluate (defaults to 'utf8')
|
||||
*/
|
||||
static byteLength(string: string, encoding?: string): number;
|
||||
/**
|
||||
* Returns a buffer which is the result of concatenating all the buffers in the list together.
|
||||
*
|
||||
* If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer.
|
||||
* If the list has exactly one item, then the first item of the list is returned.
|
||||
* If the list has more than one item, then a new Buffer is created.
|
||||
*
|
||||
* @param list An array of Buffer objects to concatenate
|
||||
* @param totalLength Total length of the buffers when concatenated.
|
||||
* If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly.
|
||||
*/
|
||||
static concat(list: Buffer[], totalLength?: number): Buffer;
|
||||
/**
|
||||
* The same as buf1.compare(buf2).
|
||||
*/
|
||||
static compare(buf1: Buffer, buf2: Buffer): number;
|
||||
/**
|
||||
* Allocates a new buffer of {size} octets.
|
||||
*
|
||||
* @param size count of octets to allocate.
|
||||
* @param fill if specified, buffer will be initialized by calling buf.fill(fill).
|
||||
* If parameter is omitted, buffer will be filled with zeros.
|
||||
* @param encoding encoding used for call to buf.fill while initalizing
|
||||
*/
|
||||
static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer;
|
||||
/**
|
||||
* Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents
|
||||
* of the newly created Buffer are unknown and may contain sensitive data.
|
||||
*
|
||||
* @param size count of octets to allocate
|
||||
*/
|
||||
static allocUnsafe(size: number): Buffer;
|
||||
/**
|
||||
* Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents
|
||||
* of the newly created Buffer are unknown and may contain sensitive data.
|
||||
*
|
||||
* @param size count of octets to allocate
|
||||
*/
|
||||
static allocUnsafeSlow(size: number): Buffer;
|
||||
}
|
||||
}
|
@ -1,62 +0,0 @@
|
||||
/* eslint-disable node/no-deprecated-api */
|
||||
var buffer = require('buffer')
|
||||
var Buffer = buffer.Buffer
|
||||
|
||||
// alternative to using Object.keys for old browsers
|
||||
function copyProps (src, dst) {
|
||||
for (var key in src) {
|
||||
dst[key] = src[key]
|
||||
}
|
||||
}
|
||||
if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
|
||||
module.exports = buffer
|
||||
} else {
|
||||
// Copy properties from require('buffer')
|
||||
copyProps(buffer, exports)
|
||||
exports.Buffer = SafeBuffer
|
||||
}
|
||||
|
||||
function SafeBuffer (arg, encodingOrOffset, length) {
|
||||
return Buffer(arg, encodingOrOffset, length)
|
||||
}
|
||||
|
||||
// Copy static methods from Buffer
|
||||
copyProps(Buffer, SafeBuffer)
|
||||
|
||||
SafeBuffer.from = function (arg, encodingOrOffset, length) {
|
||||
if (typeof arg === 'number') {
|
||||
throw new TypeError('Argument must not be a number')
|
||||
}
|
||||
return Buffer(arg, encodingOrOffset, length)
|
||||
}
|
||||
|
||||
SafeBuffer.alloc = function (size, fill, encoding) {
|
||||
if (typeof size !== 'number') {
|
||||
throw new TypeError('Argument must be a number')
|
||||
}
|
||||
var buf = Buffer(size)
|
||||
if (fill !== undefined) {
|
||||
if (typeof encoding === 'string') {
|
||||
buf.fill(fill, encoding)
|
||||
} else {
|
||||
buf.fill(fill)
|
||||
}
|
||||
} else {
|
||||
buf.fill(0)
|
||||
}
|
||||
return buf
|
||||
}
|
||||
|
||||
SafeBuffer.allocUnsafe = function (size) {
|
||||
if (typeof size !== 'number') {
|
||||
throw new TypeError('Argument must be a number')
|
||||
}
|
||||
return Buffer(size)
|
||||
}
|
||||
|
||||
SafeBuffer.allocUnsafeSlow = function (size) {
|
||||
if (typeof size !== 'number') {
|
||||
throw new TypeError('Argument must be a number')
|
||||
}
|
||||
return buffer.SlowBuffer(size)
|
||||
}
|
@ -1,37 +0,0 @@
|
||||
{
|
||||
"name": "safe-buffer",
|
||||
"description": "Safer Node.js Buffer API",
|
||||
"version": "5.1.2",
|
||||
"author": {
|
||||
"name": "Feross Aboukhadijeh",
|
||||
"email": "feross@feross.org",
|
||||
"url": "http://feross.org"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/feross/safe-buffer/issues"
|
||||
},
|
||||
"devDependencies": {
|
||||
"standard": "*",
|
||||
"tape": "^4.0.0"
|
||||
},
|
||||
"homepage": "https://github.com/feross/safe-buffer",
|
||||
"keywords": [
|
||||
"buffer",
|
||||
"buffer allocate",
|
||||
"node security",
|
||||
"safe",
|
||||
"safe-buffer",
|
||||
"security",
|
||||
"uninitialized"
|
||||
],
|
||||
"license": "MIT",
|
||||
"main": "index.js",
|
||||
"types": "index.d.ts",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/feross/safe-buffer.git"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "standard && tape test/*.js"
|
||||
}
|
||||
}
|
@ -1,50 +0,0 @@
|
||||
sudo: false
|
||||
language: node_js
|
||||
before_install:
|
||||
- npm install -g npm@2
|
||||
- test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g
|
||||
notifications:
|
||||
email: false
|
||||
matrix:
|
||||
fast_finish: true
|
||||
include:
|
||||
- node_js: '0.8'
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: '0.10'
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: '0.11'
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: '0.12'
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: 1
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: 2
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: 3
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: 4
|
||||
env: TASK=test
|
||||
- node_js: 5
|
||||
env: TASK=test
|
||||
- node_js: 6
|
||||
env: TASK=test
|
||||
- node_js: 7
|
||||
env: TASK=test
|
||||
- node_js: 8
|
||||
env: TASK=test
|
||||
- node_js: 9
|
||||
env: TASK=test
|
@ -1,48 +0,0 @@
|
||||
Node.js is licensed for use as follows:
|
||||
|
||||
"""
|
||||
Copyright Node.js contributors. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
"""
|
||||
|
||||
This license applies to parts of Node.js originating from the
|
||||
https://github.com/joyent/node repository:
|
||||
|
||||
"""
|
||||
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
"""
|
||||
|
@ -1,47 +0,0 @@
|
||||
# string_decoder
|
||||
|
||||
***Node-core v8.9.4 string_decoder for userland***
|
||||
|
||||
|
||||
[](https://nodei.co/npm/string_decoder/)
|
||||
[](https://nodei.co/npm/string_decoder/)
|
||||
|
||||
|
||||
```bash
|
||||
npm install --save string_decoder
|
||||
```
|
||||
|
||||
***Node-core string_decoder for userland***
|
||||
|
||||
This package is a mirror of the string_decoder implementation in Node-core.
|
||||
|
||||
Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/).
|
||||
|
||||
As of version 1.0.0 **string_decoder** uses semantic versioning.
|
||||
|
||||
## Previous versions
|
||||
|
||||
Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10.
|
||||
|
||||
## Update
|
||||
|
||||
The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version.
|
||||
|
||||
## Streams Working Group
|
||||
|
||||
`string_decoder` is maintained by the Streams Working Group, which
|
||||
oversees the development and maintenance of the Streams API within
|
||||
Node.js. The responsibilities of the Streams Working Group include:
|
||||
|
||||
* Addressing stream issues on the Node.js issue tracker.
|
||||
* Authoring and editing stream documentation within the Node.js project.
|
||||
* Reviewing changes to stream subclasses within the Node.js project.
|
||||
* Redirecting changes to streams from the Node.js project to this
|
||||
project.
|
||||
* Assisting in the implementation of stream providers within Node.js.
|
||||
* Recommending versions of `readable-stream` to be included in Node.js.
|
||||
* Messaging about the future of streams to give the community advance
|
||||
notice of changes.
|
||||
|
||||
See [readable-stream](https://github.com/nodejs/readable-stream) for
|
||||
more details.
|
@ -1,296 +0,0 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
'use strict';
|
||||
|
||||
/*<replacement>*/
|
||||
|
||||
var Buffer = require('safe-buffer').Buffer;
|
||||
/*</replacement>*/
|
||||
|
||||
var isEncoding = Buffer.isEncoding || function (encoding) {
|
||||
encoding = '' + encoding;
|
||||
switch (encoding && encoding.toLowerCase()) {
|
||||
case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
function _normalizeEncoding(enc) {
|
||||
if (!enc) return 'utf8';
|
||||
var retried;
|
||||
while (true) {
|
||||
switch (enc) {
|
||||
case 'utf8':
|
||||
case 'utf-8':
|
||||
return 'utf8';
|
||||
case 'ucs2':
|
||||
case 'ucs-2':
|
||||
case 'utf16le':
|
||||
case 'utf-16le':
|
||||
return 'utf16le';
|
||||
case 'latin1':
|
||||
case 'binary':
|
||||
return 'latin1';
|
||||
case 'base64':
|
||||
case 'ascii':
|
||||
case 'hex':
|
||||
return enc;
|
||||
default:
|
||||
if (retried) return; // undefined
|
||||
enc = ('' + enc).toLowerCase();
|
||||
retried = true;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Do not cache `Buffer.isEncoding` when checking encoding names as some
|
||||
// modules monkey-patch it to support additional encodings
|
||||
function normalizeEncoding(enc) {
|
||||
var nenc = _normalizeEncoding(enc);
|
||||
if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);
|
||||
return nenc || enc;
|
||||
}
|
||||
|
||||
// StringDecoder provides an interface for efficiently splitting a series of
|
||||
// buffers into a series of JS strings without breaking apart multi-byte
|
||||
// characters.
|
||||
exports.StringDecoder = StringDecoder;
|
||||
function StringDecoder(encoding) {
|
||||
this.encoding = normalizeEncoding(encoding);
|
||||
var nb;
|
||||
switch (this.encoding) {
|
||||
case 'utf16le':
|
||||
this.text = utf16Text;
|
||||
this.end = utf16End;
|
||||
nb = 4;
|
||||
break;
|
||||
case 'utf8':
|
||||
this.fillLast = utf8FillLast;
|
||||
nb = 4;
|
||||
break;
|
||||
case 'base64':
|
||||
this.text = base64Text;
|
||||
this.end = base64End;
|
||||
nb = 3;
|
||||
break;
|
||||
default:
|
||||
this.write = simpleWrite;
|
||||
this.end = simpleEnd;
|
||||
return;
|
||||
}
|
||||
this.lastNeed = 0;
|
||||
this.lastTotal = 0;
|
||||
this.lastChar = Buffer.allocUnsafe(nb);
|
||||
}
|
||||
|
||||
StringDecoder.prototype.write = function (buf) {
|
||||
if (buf.length === 0) return '';
|
||||
var r;
|
||||
var i;
|
||||
if (this.lastNeed) {
|
||||
r = this.fillLast(buf);
|
||||
if (r === undefined) return '';
|
||||
i = this.lastNeed;
|
||||
this.lastNeed = 0;
|
||||
} else {
|
||||
i = 0;
|
||||
}
|
||||
if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);
|
||||
return r || '';
|
||||
};
|
||||
|
||||
StringDecoder.prototype.end = utf8End;
|
||||
|
||||
// Returns only complete characters in a Buffer
|
||||
StringDecoder.prototype.text = utf8Text;
|
||||
|
||||
// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
|
||||
StringDecoder.prototype.fillLast = function (buf) {
|
||||
if (this.lastNeed <= buf.length) {
|
||||
buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);
|
||||
return this.lastChar.toString(this.encoding, 0, this.lastTotal);
|
||||
}
|
||||
buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);
|
||||
this.lastNeed -= buf.length;
|
||||
};
|
||||
|
||||
// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a
|
||||
// continuation byte. If an invalid byte is detected, -2 is returned.
|
||||
function utf8CheckByte(byte) {
|
||||
if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;
|
||||
return byte >> 6 === 0x02 ? -1 : -2;
|
||||
}
|
||||
|
||||
// Checks at most 3 bytes at the end of a Buffer in order to detect an
|
||||
// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)
|
||||
// needed to complete the UTF-8 character (if applicable) are returned.
|
||||
function utf8CheckIncomplete(self, buf, i) {
|
||||
var j = buf.length - 1;
|
||||
if (j < i) return 0;
|
||||
var nb = utf8CheckByte(buf[j]);
|
||||
if (nb >= 0) {
|
||||
if (nb > 0) self.lastNeed = nb - 1;
|
||||
return nb;
|
||||
}
|
||||
if (--j < i || nb === -2) return 0;
|
||||
nb = utf8CheckByte(buf[j]);
|
||||
if (nb >= 0) {
|
||||
if (nb > 0) self.lastNeed = nb - 2;
|
||||
return nb;
|
||||
}
|
||||
if (--j < i || nb === -2) return 0;
|
||||
nb = utf8CheckByte(buf[j]);
|
||||
if (nb >= 0) {
|
||||
if (nb > 0) {
|
||||
if (nb === 2) nb = 0;else self.lastNeed = nb - 3;
|
||||
}
|
||||
return nb;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Validates as many continuation bytes for a multi-byte UTF-8 character as
|
||||
// needed or are available. If we see a non-continuation byte where we expect
|
||||
// one, we "replace" the validated continuation bytes we've seen so far with
|
||||
// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding
|
||||
// behavior. The continuation byte check is included three times in the case
|
||||
// where all of the continuation bytes for a character exist in the same buffer.
|
||||
// It is also done this way as a slight performance increase instead of using a
|
||||
// loop.
|
||||
function utf8CheckExtraBytes(self, buf, p) {
|
||||
if ((buf[0] & 0xC0) !== 0x80) {
|
||||
self.lastNeed = 0;
|
||||
return '\ufffd';
|
||||
}
|
||||
if (self.lastNeed > 1 && buf.length > 1) {
|
||||
if ((buf[1] & 0xC0) !== 0x80) {
|
||||
self.lastNeed = 1;
|
||||
return '\ufffd';
|
||||
}
|
||||
if (self.lastNeed > 2 && buf.length > 2) {
|
||||
if ((buf[2] & 0xC0) !== 0x80) {
|
||||
self.lastNeed = 2;
|
||||
return '\ufffd';
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
|
||||
function utf8FillLast(buf) {
|
||||
var p = this.lastTotal - this.lastNeed;
|
||||
var r = utf8CheckExtraBytes(this, buf, p);
|
||||
if (r !== undefined) return r;
|
||||
if (this.lastNeed <= buf.length) {
|
||||
buf.copy(this.lastChar, p, 0, this.lastNeed);
|
||||
return this.lastChar.toString(this.encoding, 0, this.lastTotal);
|
||||
}
|
||||
buf.copy(this.lastChar, p, 0, buf.length);
|
||||
this.lastNeed -= buf.length;
|
||||
}
|
||||
|
||||
// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a
|
||||
// partial character, the character's bytes are buffered until the required
|
||||
// number of bytes are available.
|
||||
function utf8Text(buf, i) {
|
||||
var total = utf8CheckIncomplete(this, buf, i);
|
||||
if (!this.lastNeed) return buf.toString('utf8', i);
|
||||
this.lastTotal = total;
|
||||
var end = buf.length - (total - this.lastNeed);
|
||||
buf.copy(this.lastChar, 0, end);
|
||||
return buf.toString('utf8', i, end);
|
||||
}
|
||||
|
||||
// For UTF-8, a replacement character is added when ending on a partial
|
||||
// character.
|
||||
function utf8End(buf) {
|
||||
var r = buf && buf.length ? this.write(buf) : '';
|
||||
if (this.lastNeed) return r + '\ufffd';
|
||||
return r;
|
||||
}
|
||||
|
||||
// UTF-16LE typically needs two bytes per character, but even if we have an even
|
||||
// number of bytes available, we need to check if we end on a leading/high
|
||||
// surrogate. In that case, we need to wait for the next two bytes in order to
|
||||
// decode the last character properly.
|
||||
function utf16Text(buf, i) {
|
||||
if ((buf.length - i) % 2 === 0) {
|
||||
var r = buf.toString('utf16le', i);
|
||||
if (r) {
|
||||
var c = r.charCodeAt(r.length - 1);
|
||||
if (c >= 0xD800 && c <= 0xDBFF) {
|
||||
this.lastNeed = 2;
|
||||
this.lastTotal = 4;
|
||||
this.lastChar[0] = buf[buf.length - 2];
|
||||
this.lastChar[1] = buf[buf.length - 1];
|
||||
return r.slice(0, -1);
|
||||
}
|
||||
}
|
||||
return r;
|
||||
}
|
||||
this.lastNeed = 1;
|
||||
this.lastTotal = 2;
|
||||
this.lastChar[0] = buf[buf.length - 1];
|
||||
return buf.toString('utf16le', i, buf.length - 1);
|
||||
}
|
||||
|
||||
// For UTF-16LE we do not explicitly append special replacement characters if we
|
||||
// end on a partial character, we simply let v8 handle that.
|
||||
function utf16End(buf) {
|
||||
var r = buf && buf.length ? this.write(buf) : '';
|
||||
if (this.lastNeed) {
|
||||
var end = this.lastTotal - this.lastNeed;
|
||||
return r + this.lastChar.toString('utf16le', 0, end);
|
||||
}
|
||||
return r;
|
||||
}
|
||||
|
||||
function base64Text(buf, i) {
|
||||
var n = (buf.length - i) % 3;
|
||||
if (n === 0) return buf.toString('base64', i);
|
||||
this.lastNeed = 3 - n;
|
||||
this.lastTotal = 3;
|
||||
if (n === 1) {
|
||||
this.lastChar[0] = buf[buf.length - 1];
|
||||
} else {
|
||||
this.lastChar[0] = buf[buf.length - 2];
|
||||
this.lastChar[1] = buf[buf.length - 1];
|
||||
}
|
||||
return buf.toString('base64', i, buf.length - n);
|
||||
}
|
||||
|
||||
function base64End(buf) {
|
||||
var r = buf && buf.length ? this.write(buf) : '';
|
||||
if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);
|
||||
return r;
|
||||
}
|
||||
|
||||
// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)
|
||||
function simpleWrite(buf) {
|
||||
return buf.toString(this.encoding);
|
||||
}
|
||||
|
||||
function simpleEnd(buf) {
|
||||
return buf && buf.length ? this.write(buf) : '';
|
||||
}
|
@ -1,31 +0,0 @@
|
||||
{
|
||||
"name": "string_decoder",
|
||||
"version": "1.1.1",
|
||||
"description": "The string_decoder module from Node core",
|
||||
"main": "lib/string_decoder.js",
|
||||
"dependencies": {
|
||||
"safe-buffer": "~5.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"babel-polyfill": "^6.23.0",
|
||||
"core-util-is": "^1.0.2",
|
||||
"inherits": "^2.0.3",
|
||||
"tap": "~0.4.8"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "tap test/parallel/*.js && node test/verify-dependencies",
|
||||
"ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/nodejs/string_decoder.git"
|
||||
},
|
||||
"homepage": "https://github.com/nodejs/string_decoder",
|
||||
"keywords": [
|
||||
"string",
|
||||
"decoder",
|
||||
"browser",
|
||||
"browserify"
|
||||
],
|
||||
"license": "MIT"
|
||||
}
|
@ -1,52 +0,0 @@
|
||||
{
|
||||
"name": "readable-stream",
|
||||
"version": "2.3.8",
|
||||
"description": "Streams3, a user-land copy of the stream library from Node.js",
|
||||
"main": "readable.js",
|
||||
"dependencies": {
|
||||
"core-util-is": "~1.0.0",
|
||||
"inherits": "~2.0.3",
|
||||
"isarray": "~1.0.0",
|
||||
"process-nextick-args": "~2.0.0",
|
||||
"safe-buffer": "~5.1.1",
|
||||
"string_decoder": "~1.1.1",
|
||||
"util-deprecate": "~1.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"assert": "^1.4.0",
|
||||
"babel-polyfill": "^6.9.1",
|
||||
"buffer": "^4.9.0",
|
||||
"lolex": "^2.3.2",
|
||||
"nyc": "^6.4.0",
|
||||
"tap": "^0.7.0",
|
||||
"tape": "^4.8.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js",
|
||||
"ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js",
|
||||
"cover": "nyc npm test",
|
||||
"report": "nyc report --reporter=lcov"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/nodejs/readable-stream"
|
||||
},
|
||||
"keywords": [
|
||||
"readable",
|
||||
"stream",
|
||||
"pipe"
|
||||
],
|
||||
"browser": {
|
||||
"util": false,
|
||||
"./readable.js": "./readable-browser.js",
|
||||
"./writable.js": "./writable-browser.js",
|
||||
"./duplex.js": "./duplex-browser.js",
|
||||
"./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js"
|
||||
},
|
||||
"nyc": {
|
||||
"include": [
|
||||
"lib/**.js"
|
||||
]
|
||||
},
|
||||
"license": "MIT"
|
||||
}
|
@ -1 +0,0 @@
|
||||
module.exports = require('./readable').PassThrough
|
@ -1,7 +0,0 @@
|
||||
exports = module.exports = require('./lib/_stream_readable.js');
|
||||
exports.Stream = exports;
|
||||
exports.Readable = exports;
|
||||
exports.Writable = require('./lib/_stream_writable.js');
|
||||
exports.Duplex = require('./lib/_stream_duplex.js');
|
||||
exports.Transform = require('./lib/_stream_transform.js');
|
||||
exports.PassThrough = require('./lib/_stream_passthrough.js');
|
@ -1,19 +0,0 @@
|
||||
var Stream = require('stream');
|
||||
if (process.env.READABLE_STREAM === 'disable' && Stream) {
|
||||
module.exports = Stream;
|
||||
exports = module.exports = Stream.Readable;
|
||||
exports.Readable = Stream.Readable;
|
||||
exports.Writable = Stream.Writable;
|
||||
exports.Duplex = Stream.Duplex;
|
||||
exports.Transform = Stream.Transform;
|
||||
exports.PassThrough = Stream.PassThrough;
|
||||
exports.Stream = Stream;
|
||||
} else {
|
||||
exports = module.exports = require('./lib/_stream_readable.js');
|
||||
exports.Stream = Stream || exports;
|
||||
exports.Readable = exports;
|
||||
exports.Writable = require('./lib/_stream_writable.js');
|
||||
exports.Duplex = require('./lib/_stream_duplex.js');
|
||||
exports.Transform = require('./lib/_stream_transform.js');
|
||||
exports.PassThrough = require('./lib/_stream_passthrough.js');
|
||||
}
|
@ -1 +0,0 @@
|
||||
module.exports = require('./readable').Transform
|
@ -1 +0,0 @@
|
||||
module.exports = require('./lib/_stream_writable.js');
|
@ -1,8 +0,0 @@
|
||||
var Stream = require("stream")
|
||||
var Writable = require("./lib/_stream_writable.js")
|
||||
|
||||
if (process.env.READABLE_STREAM === 'disable') {
|
||||
module.exports = Stream && Stream.Writable || Writable
|
||||
} else {
|
||||
module.exports = Writable
|
||||
}
|
@ -1,54 +0,0 @@
|
||||
{
|
||||
"name": "archiver-utils",
|
||||
"version": "2.1.0",
|
||||
"license": "MIT",
|
||||
"description": "utility functions for archiver",
|
||||
"homepage": "https://github.com/archiverjs/archiver-utils#readme",
|
||||
"author": {
|
||||
"name": "Chris Talkington",
|
||||
"url": "http://christalkington.com/"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/archiverjs/archiver-utils.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/archiverjs/archiver-utils/issues"
|
||||
},
|
||||
"keywords": [
|
||||
"archiver",
|
||||
"utils"
|
||||
],
|
||||
"main": "index.js",
|
||||
"files": [
|
||||
"index.js",
|
||||
"file.js"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "mocha --reporter dot"
|
||||
},
|
||||
"dependencies": {
|
||||
"glob": "^7.1.4",
|
||||
"graceful-fs": "^4.2.0",
|
||||
"lazystream": "^1.0.0",
|
||||
"lodash.defaults": "^4.2.0",
|
||||
"lodash.difference": "^4.5.0",
|
||||
"lodash.flatten": "^4.4.0",
|
||||
"lodash.isplainobject": "^4.0.6",
|
||||
"lodash.union": "^4.6.0",
|
||||
"normalize-path": "^3.0.0",
|
||||
"readable-stream": "^2.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"chai": "^4.2.0",
|
||||
"mkdirp": "^0.5.0",
|
||||
"mocha": "^5.0.0",
|
||||
"rimraf": "^2.6.3"
|
||||
},
|
||||
"publishConfig": {
|
||||
"registry": "https://registry.npmjs.org/"
|
||||
}
|
||||
}
|
@ -1,108 +0,0 @@
|
||||
## Changelog
|
||||
|
||||
**3.1.1** - <small>_August 2, 2019_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/3.1.0...3.1.1)
|
||||
|
||||
- update zip-stream to v2.1.2
|
||||
|
||||
**3.1.0** - <small>_August 2, 2019_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/3.0.3...3.1.0)
|
||||
|
||||
- update zip-stream to v2.1.0
|
||||
|
||||
**3.0.3** - <small>_July 19, 2019_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/3.0.2...3.0.3)
|
||||
|
||||
- test: now targeting node v12
|
||||
- other: update zip-stream@2.0.0
|
||||
|
||||
**3.0.2** - <small>_July 19, 2019_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/3.0.1...3.0.2)
|
||||
|
||||
- other: update dependencies
|
||||
|
||||
**3.0.1** - <small>_July 19, 2019_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/3.0.0...3.0.1)
|
||||
|
||||
- other: update dependencies
|
||||
- docs: now deployed using netlify
|
||||
|
||||
**3.0.0** - <small>_August 22, 2018_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/2.1.1...3.0.0)
|
||||
|
||||
- breaking: follow node LTS, remove support for versions under 6. (#339)
|
||||
- bugfix: use stats in tar.js and core.js (#326)
|
||||
- other: update to archiver-utils@2 and zip-stream@2
|
||||
- other: remove lodash npm module usage (#335, #339)
|
||||
- other: Avoid using deprecated Buffer constructor (#312)
|
||||
- other: Remove unnecessary return and fix indentation (#297)
|
||||
- test: now targeting node v10 (#320)
|
||||
|
||||
**2.1.1** — <small>_January 10, 2018_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/2.1.0...2.1.1)
|
||||
|
||||
- bugfix: fix relative symlink paths (#293)
|
||||
- other: coding style fixes (#294)
|
||||
|
||||
**2.1.0** — <small>_October 12, 2017_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/2.0.3...2.1.0)
|
||||
|
||||
- refactor: `directory` now uses glob behind the scenes. should fix some directory recursion issues. (#267, #275)
|
||||
- docs: more info in quick start. (#284)
|
||||
|
||||
**2.0.3** — <small>_August 25, 2017_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/2.0.2...2.0.3)
|
||||
|
||||
- bugfix: revert #261 due to potential issues with editing entryData in special cases.
|
||||
- bugfix: add var to entryData in glob callback (#273)
|
||||
|
||||
**2.0.2** — <small>_August 25, 2017_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/2.0.1...2.0.2)
|
||||
|
||||
- docs: fix changelog date.
|
||||
|
||||
**2.0.1** — <small>_August 25, 2017_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/2.0.0...2.0.1)
|
||||
|
||||
- bugfix: add const to entryData in glob callback (#261)
|
||||
- other: coding style fixes (#263)
|
||||
|
||||
**2.0.0** — <small>_July 5, 2017_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/1.3.0...2.0.0)
|
||||
|
||||
- feature: support for symlinks. (#228)
|
||||
- feature: support for promises on `finalize`. (#248)
|
||||
- feature: addition of `symlink` method for programmatically creating symlinks within an archive.
|
||||
- change: emit `warning` instead of `error` when stat fails and the process can still continue.
|
||||
- change: errors and warnings now contain extended data (where available) and have standardized error codes (#256)
|
||||
- change: removal of deprecated `bulk` functionality. (#249)
|
||||
- change: removal of internal `_entries` property in favor of `progress` event. (#247)
|
||||
- change: support for node v4.0+ only. node v0.10 and v0.12 support has been dropped. (#241)
|
||||
|
||||
**1.3.0** — <small>_December 13, 2016_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/1.2.0...1.3.0)
|
||||
|
||||
- improve `directory` and `glob` methods to use events rather than callbacks. (#203)
|
||||
- fix bulk warning spam (#208)
|
||||
- updated mocha (#205)
|
||||
|
||||
**1.2.0** — <small>_November 2, 2016_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/1.1.0...1.2.0)
|
||||
|
||||
- Add a `process.emitWarning` for `deprecated` (#202)
|
||||
|
||||
**1.1.0** — <small>_August 29, 2016_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/1.0.1...1.1.0)
|
||||
|
||||
- minor doc fixes.
|
||||
- bump deps to ensure latest versions are used.
|
||||
|
||||
**1.0.1** — <small>_July 27, 2016_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/1.0.0...1.0.1)
|
||||
|
||||
- minor doc fixes.
|
||||
- dependencies upgraded.
|
||||
|
||||
**1.0.0** — <small>_April 5, 2016_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/0.21.0...1.0.0)
|
||||
|
||||
- version unification across many archiver packages.
|
||||
- dependencies upgraded and now using semver caret (^).
|
||||
|
||||
**0.21.0** — <small>_December 21, 2015_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/0.20.0...0.21.0)
|
||||
|
||||
- core: add support for `entry.prefix`. update some internals to use it.
|
||||
- core(glob): when setting `options.cwd` get an absolute path to the file and use the relative path for `entry.name`. #173
|
||||
- core(bulk): soft-deprecation of `bulk` feature. will remain for time being with no new features or support.
|
||||
- docs: initial jsdoc for core. http://archiverjs.com/docs
|
||||
- tests: restructure a bit.
|
||||
|
||||
**0.20.0** — <small>_November 30, 2015_</small> — [Diff](https://github.com/archiverjs/node-archiver/compare/0.19.0...0.20.0)
|
||||
|
||||
- simpler path normalization as path.join was a bit restrictive. #162
|
||||
- move utils to separate module to DRY.
|
||||
|
||||
[Release Archive](https://github.com/archiverjs/node-archiver/releases)
|
@ -1,22 +0,0 @@
|
||||
Copyright (c) 2012-2014 Chris Talkington, contributors.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
@ -1,94 +0,0 @@
|
||||
# Archiver
|
||||
|
||||
[](https://travis-ci.org/archiverjs/node-archiver) [](https://ci.appveyor.com/project/ctalkington/node-archiver/branch/master)
|
||||
|
||||
a streaming interface for archive generation
|
||||
|
||||
Visit the [API documentation](https://www.archiverjs.com/) for a list of all methods available.
|
||||
|
||||
## Install
|
||||
|
||||
```bash
|
||||
npm install archiver --save
|
||||
```
|
||||
|
||||
## Quick Start
|
||||
|
||||
```js
|
||||
// require modules
|
||||
var fs = require('fs');
|
||||
var archiver = require('archiver');
|
||||
|
||||
// create a file to stream archive data to.
|
||||
var output = fs.createWriteStream(__dirname + '/example.zip');
|
||||
var archive = archiver('zip', {
|
||||
zlib: { level: 9 } // Sets the compression level.
|
||||
});
|
||||
|
||||
// listen for all archive data to be written
|
||||
// 'close' event is fired only when a file descriptor is involved
|
||||
output.on('close', function() {
|
||||
console.log(archive.pointer() + ' total bytes');
|
||||
console.log('archiver has been finalized and the output file descriptor has closed.');
|
||||
});
|
||||
|
||||
// This event is fired when the data source is drained no matter what was the data source.
|
||||
// It is not part of this library but rather from the NodeJS Stream API.
|
||||
// @see: https://nodejs.org/api/stream.html#stream_event_end
|
||||
output.on('end', function() {
|
||||
console.log('Data has been drained');
|
||||
});
|
||||
|
||||
// good practice to catch warnings (ie stat failures and other non-blocking errors)
|
||||
archive.on('warning', function(err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
// log warning
|
||||
} else {
|
||||
// throw error
|
||||
throw err;
|
||||
}
|
||||
});
|
||||
|
||||
// good practice to catch this error explicitly
|
||||
archive.on('error', function(err) {
|
||||
throw err;
|
||||
});
|
||||
|
||||
// pipe archive data to the file
|
||||
archive.pipe(output);
|
||||
|
||||
// append a file from stream
|
||||
var file1 = __dirname + '/file1.txt';
|
||||
archive.append(fs.createReadStream(file1), { name: 'file1.txt' });
|
||||
|
||||
// append a file from string
|
||||
archive.append('string cheese!', { name: 'file2.txt' });
|
||||
|
||||
// append a file from buffer
|
||||
var buffer3 = Buffer.from('buff it!');
|
||||
archive.append(buffer3, { name: 'file3.txt' });
|
||||
|
||||
// append a file
|
||||
archive.file('file1.txt', { name: 'file4.txt' });
|
||||
|
||||
// append files from a sub-directory and naming it `new-subdir` within the archive
|
||||
archive.directory('subdir/', 'new-subdir');
|
||||
|
||||
// append files from a sub-directory, putting its contents at the root of archive
|
||||
archive.directory('subdir/', false);
|
||||
|
||||
// append files from a glob pattern
|
||||
archive.glob('subdir/*.txt');
|
||||
|
||||
// finalize the archive (ie we are done appending files but streams have to finish yet)
|
||||
// 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand
|
||||
archive.finalize();
|
||||
```
|
||||
|
||||
## Formats
|
||||
|
||||
Archiver ships with out of the box support for TAR and ZIP archives.
|
||||
|
||||
You can register additional formats with `registerFormat`.
|
||||
|
||||
_Formats will be changing in the next few releases to implement a middleware approach._
|
@ -1,70 +0,0 @@
|
||||
/**
|
||||
* Archiver Vending
|
||||
*
|
||||
* @ignore
|
||||
* @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE}
|
||||
* @copyright (c) 2012-2014 Chris Talkington, contributors.
|
||||
*/
|
||||
var Archiver = require('./lib/core');
|
||||
|
||||
var formats = {};
|
||||
|
||||
/**
|
||||
* Dispenses a new Archiver instance.
|
||||
*
|
||||
* @constructor
|
||||
* @param {String} format The archive format to use.
|
||||
* @param {Object} options See [Archiver]{@link Archiver}
|
||||
* @return {Archiver}
|
||||
*/
|
||||
var vending = function(format, options) {
|
||||
return vending.create(format, options);
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a new Archiver instance.
|
||||
*
|
||||
* @param {String} format The archive format to use.
|
||||
* @param {Object} options See [Archiver]{@link Archiver}
|
||||
* @return {Archiver}
|
||||
*/
|
||||
vending.create = function(format, options) {
|
||||
if (formats[format]) {
|
||||
var instance = new Archiver(format, options);
|
||||
instance.setFormat(format);
|
||||
instance.setModule(new formats[format](options));
|
||||
|
||||
return instance;
|
||||
} else {
|
||||
throw new Error('create(' + format + '): format not registered');
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Registers a format for use with archiver.
|
||||
*
|
||||
* @param {String} format The name of the format.
|
||||
* @param {Function} module The function for archiver to interact with.
|
||||
* @return void
|
||||
*/
|
||||
vending.registerFormat = function(format, module) {
|
||||
if (formats[format]) {
|
||||
throw new Error('register(' + format + '): format already registered');
|
||||
}
|
||||
|
||||
if (typeof module !== 'function') {
|
||||
throw new Error('register(' + format + '): format module invalid');
|
||||
}
|
||||
|
||||
if (typeof module.prototype.append !== 'function' || typeof module.prototype.finalize !== 'function') {
|
||||
throw new Error('register(' + format + '): format module missing methods');
|
||||
}
|
||||
|
||||
formats[format] = module;
|
||||
};
|
||||
|
||||
vending.registerFormat('zip', require('./lib/plugins/zip'));
|
||||
vending.registerFormat('tar', require('./lib/plugins/tar'));
|
||||
vending.registerFormat('json', require('./lib/plugins/json'));
|
||||
|
||||
module.exports = vending;
|
@ -1,956 +0,0 @@
|
||||
/**
|
||||
* Archiver Core
|
||||
*
|
||||
* @ignore
|
||||
* @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE}
|
||||
* @copyright (c) 2012-2014 Chris Talkington, contributors.
|
||||
*/
|
||||
var fs = require('fs');
|
||||
var glob = require('glob');
|
||||
var async = require('async');
|
||||
var path = require('path');
|
||||
var util = require('archiver-utils');
|
||||
|
||||
var inherits = require('util').inherits;
|
||||
var ArchiverError = require('./error');
|
||||
var Transform = require('readable-stream').Transform;
|
||||
|
||||
var win32 = process.platform === 'win32';
|
||||
|
||||
/**
|
||||
* @constructor
|
||||
* @param {String} format The archive format to use.
|
||||
* @param {(CoreOptions|TransformOptions)} options See also {@link ZipOptions} and {@link TarOptions}.
|
||||
*/
|
||||
var Archiver = function(format, options) {
|
||||
if (!(this instanceof Archiver)) {
|
||||
return new Archiver(format, options);
|
||||
}
|
||||
|
||||
if (typeof format !== 'string') {
|
||||
options = format;
|
||||
format = 'zip';
|
||||
}
|
||||
|
||||
options = this.options = util.defaults(options, {
|
||||
highWaterMark: 1024 * 1024,
|
||||
statConcurrency: 4
|
||||
});
|
||||
|
||||
Transform.call(this, options);
|
||||
|
||||
this._format = false;
|
||||
this._module = false;
|
||||
this._pending = 0;
|
||||
this._pointer = 0;
|
||||
|
||||
this._entriesCount = 0;
|
||||
this._entriesProcessedCount = 0;
|
||||
this._fsEntriesTotalBytes = 0;
|
||||
this._fsEntriesProcessedBytes = 0;
|
||||
|
||||
this._queue = async.queue(this._onQueueTask.bind(this), 1);
|
||||
this._queue.drain = this._onQueueDrain.bind(this);
|
||||
|
||||
this._statQueue = async.queue(this._onStatQueueTask.bind(this), options.statConcurrency);
|
||||
|
||||
this._state = {
|
||||
aborted: false,
|
||||
finalize: false,
|
||||
finalizing: false,
|
||||
finalized: false,
|
||||
modulePiped: false
|
||||
};
|
||||
|
||||
this._streams = [];
|
||||
};
|
||||
|
||||
inherits(Archiver, Transform);
|
||||
|
||||
/**
|
||||
* Internal logic for `abort`.
|
||||
*
|
||||
* @private
|
||||
* @return void
|
||||
*/
|
||||
Archiver.prototype._abort = function() {
|
||||
this._state.aborted = true;
|
||||
this._queue.kill();
|
||||
this._statQueue.kill();
|
||||
|
||||
if (this._queue.idle()) {
|
||||
this._shutdown();
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Internal helper for appending files.
|
||||
*
|
||||
* @private
|
||||
* @param {String} filepath The source filepath.
|
||||
* @param {EntryData} data The entry data.
|
||||
* @return void
|
||||
*/
|
||||
Archiver.prototype._append = function(filepath, data) {
|
||||
data = data || {};
|
||||
|
||||
var task = {
|
||||
source: null,
|
||||
filepath: filepath
|
||||
};
|
||||
|
||||
if (!data.name) {
|
||||
data.name = filepath;
|
||||
}
|
||||
|
||||
data.sourcePath = filepath;
|
||||
task.data = data;
|
||||
this._entriesCount++;
|
||||
|
||||
if (data.stats && data.stats instanceof fs.Stats) {
|
||||
task = this._updateQueueTaskWithStats(task, data.stats);
|
||||
if (task) {
|
||||
if (data.stats.size) {
|
||||
this._fsEntriesTotalBytes += data.stats.size;
|
||||
}
|
||||
|
||||
this._queue.push(task);
|
||||
}
|
||||
} else {
|
||||
this._statQueue.push(task);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Internal logic for `finalize`.
|
||||
*
|
||||
* @private
|
||||
* @return void
|
||||
*/
|
||||
Archiver.prototype._finalize = function() {
|
||||
if (this._state.finalizing || this._state.finalized || this._state.aborted) {
|
||||
return;
|
||||
}
|
||||
|
||||
this._state.finalizing = true;
|
||||
|
||||
this._moduleFinalize();
|
||||
|
||||
this._state.finalizing = false;
|
||||
this._state.finalized = true;
|
||||
};
|
||||
|
||||
/**
|
||||
* Checks the various state variables to determine if we can `finalize`.
|
||||
*
|
||||
* @private
|
||||
* @return {Boolean}
|
||||
*/
|
||||
Archiver.prototype._maybeFinalize = function() {
|
||||
if (this._state.finalizing || this._state.finalized || this._state.aborted) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) {
|
||||
this._finalize();
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
/**
|
||||
* Appends an entry to the module.
|
||||
*
|
||||
* @private
|
||||
* @fires Archiver#entry
|
||||
* @param {(Buffer|Stream)} source
|
||||
* @param {EntryData} data
|
||||
* @param {Function} callback
|
||||
* @return void
|
||||
*/
|
||||
Archiver.prototype._moduleAppend = function(source, data, callback) {
|
||||
if (this._state.aborted) {
|
||||
callback();
|
||||
return;
|
||||
}
|
||||
|
||||
this._module.append(source, data, function(err) {
|
||||
this._task = null;
|
||||
|
||||
if (this._state.aborted) {
|
||||
this._shutdown();
|
||||
return;
|
||||
}
|
||||
|
||||
if (err) {
|
||||
this.emit('error', err);
|
||||
setImmediate(callback);
|
||||
return;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fires when the entry's input has been processed and appended to the archive.
|
||||
*
|
||||
* @event Archiver#entry
|
||||
* @type {EntryData}
|
||||
*/
|
||||
this.emit('entry', data);
|
||||
this._entriesProcessedCount++;
|
||||
|
||||
if (data.stats && data.stats.size) {
|
||||
this._fsEntriesProcessedBytes += data.stats.size;
|
||||
}
|
||||
|
||||
/**
|
||||
* @event Archiver#progress
|
||||
* @type {ProgressData}
|
||||
*/
|
||||
this.emit('progress', {
|
||||
entries: {
|
||||
total: this._entriesCount,
|
||||
processed: this._entriesProcessedCount
|
||||
},
|
||||
fs: {
|
||||
totalBytes: this._fsEntriesTotalBytes,
|
||||
processedBytes: this._fsEntriesProcessedBytes
|
||||
}
|
||||
});
|
||||
|
||||
setImmediate(callback);
|
||||
}.bind(this));
|
||||
};
|
||||
|
||||
/**
|
||||
* Finalizes the module.
|
||||
*
|
||||
* @private
|
||||
* @return void
|
||||
*/
|
||||
Archiver.prototype._moduleFinalize = function() {
|
||||
if (typeof this._module.finalize === 'function') {
|
||||
this._module.finalize();
|
||||
} else if (typeof this._module.end === 'function') {
|
||||
this._module.end();
|
||||
} else {
|
||||
this.emit('error', new ArchiverError('NOENDMETHOD'));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Pipes the module to our internal stream with error bubbling.
|
||||
*
|
||||
* @private
|
||||
* @return void
|
||||
*/
|
||||
Archiver.prototype._modulePipe = function() {
|
||||
this._module.on('error', this._onModuleError.bind(this));
|
||||
this._module.pipe(this);
|
||||
this._state.modulePiped = true;
|
||||
};
|
||||
|
||||
/**
|
||||
* Determines if the current module supports a defined feature.
|
||||
*
|
||||
* @private
|
||||
* @param {String} key
|
||||
* @return {Boolean}
|
||||
*/
|
||||
Archiver.prototype._moduleSupports = function(key) {
|
||||
if (!this._module.supports || !this._module.supports[key]) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return this._module.supports[key];
|
||||
};
|
||||
|
||||
/**
|
||||
* Unpipes the module from our internal stream.
|
||||
*
|
||||
* @private
|
||||
* @return void
|
||||
*/
|
||||
Archiver.prototype._moduleUnpipe = function() {
|
||||
this._module.unpipe(this);
|
||||
this._state.modulePiped = false;
|
||||
};
|
||||
|
||||
/**
|
||||
* Normalizes entry data with fallbacks for key properties.
|
||||
*
|
||||
* @private
|
||||
* @param {Object} data
|
||||
* @param {fs.Stats} stats
|
||||
* @return {Object}
|
||||
*/
|
||||
Archiver.prototype._normalizeEntryData = function(data, stats) {
|
||||
data = util.defaults(data, {
|
||||
type: 'file',
|
||||
name: null,
|
||||
date: null,
|
||||
mode: null,
|
||||
prefix: null,
|
||||
sourcePath: null,
|
||||
stats: false
|
||||
});
|
||||
|
||||
if (stats && data.stats === false) {
|
||||
data.stats = stats;
|
||||
}
|
||||
|
||||
var isDir = data.type === 'directory';
|
||||
|
||||
if (data.name) {
|
||||
if (typeof data.prefix === 'string' && '' !== data.prefix) {
|
||||
data.name = data.prefix + '/' + data.name;
|
||||
data.prefix = null;
|
||||
}
|
||||
|
||||
data.name = util.sanitizePath(data.name);
|
||||
|
||||
if (data.type !== 'symlink' && data.name.slice(-1) === '/') {
|
||||
isDir = true;
|
||||
data.type = 'directory';
|
||||
} else if (isDir) {
|
||||
data.name += '/';
|
||||
}
|
||||
}
|
||||
|
||||
// 511 === 0777; 493 === 0755; 438 === 0666; 420 === 0644
|
||||
if (typeof data.mode === 'number') {
|
||||
if (win32) {
|
||||
data.mode &= 511;
|
||||
} else {
|
||||
data.mode &= 4095
|
||||
}
|
||||
} else if (data.stats && data.mode === null) {
|
||||
if (win32) {
|
||||
data.mode = data.stats.mode & 511;
|
||||
} else {
|
||||
data.mode = data.stats.mode & 4095;
|
||||
}
|
||||
|
||||
// stat isn't reliable on windows; force 0755 for dir
|
||||
if (win32 && isDir) {
|
||||
data.mode = 493;
|
||||
}
|
||||
} else if (data.mode === null) {
|
||||
data.mode = isDir ? 493 : 420;
|
||||
}
|
||||
|
||||
if (data.stats && data.date === null) {
|
||||
data.date = data.stats.mtime;
|
||||
} else {
|
||||
data.date = util.dateify(data.date);
|
||||
}
|
||||
|
||||
return data;
|
||||
};
|
||||
|
||||
/**
|
||||
* Error listener that re-emits error on to our internal stream.
|
||||
*
|
||||
* @private
|
||||
* @param {Error} err
|
||||
* @return void
|
||||
*/
|
||||
Archiver.prototype._onModuleError = function(err) {
|
||||
/**
|
||||
* @event Archiver#error
|
||||
* @type {ErrorData}
|
||||
*/
|
||||
this.emit('error', err);
|
||||
};
|
||||
|
||||
/**
|
||||
* Checks the various state variables after queue has drained to determine if
|
||||
* we need to `finalize`.
|
||||
*
|
||||
* @private
|
||||
* @return void
|
||||
*/
|
||||
Archiver.prototype._onQueueDrain = function() {
|
||||
if (this._state.finalizing || this._state.finalized || this._state.aborted) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) {
|
||||
this._finalize();
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Appends each queue task to the module.
|
||||
*
|
||||
* @private
|
||||
* @param {Object} task
|
||||
* @param {Function} callback
|
||||
* @return void
|
||||
*/
|
||||
Archiver.prototype._onQueueTask = function(task, callback) {
|
||||
if (this._state.finalizing || this._state.finalized || this._state.aborted) {
|
||||
callback();
|
||||
return;
|
||||
}
|
||||
|
||||
this._task = task;
|
||||
this._moduleAppend(task.source, task.data, callback);
|
||||
};
|
||||
|
||||
/**
|
||||
* Performs a file stat and reinjects the task back into the queue.
|
||||
*
|
||||
* @private
|
||||
* @param {Object} task
|
||||
* @param {Function} callback
|
||||
* @return void
|
||||
*/
|
||||
Archiver.prototype._onStatQueueTask = function(task, callback) {
|
||||
if (this._state.finalizing || this._state.finalized || this._state.aborted) {
|
||||
callback();
|
||||
return;
|
||||
}
|
||||
|
||||
fs.lstat(task.filepath, function(err, stats) {
|
||||
if (this._state.aborted) {
|
||||
setImmediate(callback);
|
||||
return;
|
||||
}
|
||||
|
||||
if (err) {
|
||||
this._entriesCount--;
|
||||
|
||||
/**
|
||||
* @event Archiver#warning
|
||||
* @type {ErrorData}
|
||||
*/
|
||||
this.emit('warning', err);
|
||||
setImmediate(callback);
|
||||
return;
|
||||
}
|
||||
|
||||
task = this._updateQueueTaskWithStats(task, stats);
|
||||
|
||||
if (task) {
|
||||
if (stats.size) {
|
||||
this._fsEntriesTotalBytes += stats.size;
|
||||
}
|
||||
|
||||
this._queue.push(task);
|
||||
}
|
||||
|
||||
setImmediate(callback);
|
||||
}.bind(this));
|
||||
};
|
||||
|
||||
/**
|
||||
* Unpipes the module and ends our internal stream.
|
||||
*
|
||||
* @private
|
||||
* @return void
|
||||
*/
|
||||
Archiver.prototype._shutdown = function() {
|
||||
this._moduleUnpipe();
|
||||
this.end();
|
||||
};
|
||||
|
||||
/**
|
||||
* Tracks the bytes emitted by our internal stream.
|
||||
*
|
||||
* @private
|
||||
* @param {Buffer} chunk
|
||||
* @param {String} encoding
|
||||
* @param {Function} callback
|
||||
* @return void
|
||||
*/
|
||||
Archiver.prototype._transform = function(chunk, encoding, callback) {
|
||||
if (chunk) {
|
||||
this._pointer += chunk.length;
|
||||
}
|
||||
|
||||
callback(null, chunk);
|
||||
};
|
||||
|
||||
/**
 * Updates and normalizes a queue task using stats data.
 *
 * Files become lazy read streams; directories and symlinks (when the module
 * supports them) become empty-buffer entries. Unsupported entry kinds emit a
 * warning and drop the task.
 *
 * @private
 * @param {Object} task
 * @param {fs.Stats} stats
 * @return {Object} The updated task, or null when the entry is unsupported.
 */
Archiver.prototype._updateQueueTaskWithStats = function(task, stats) {
  if (stats.isFile()) {
    task.data.type = 'file';
    task.data.sourceType = 'stream';
    // Lazy stream avoids exhausting open-file limits on large archives.
    task.source = util.lazyReadStream(task.filepath);
  } else if (stats.isDirectory() && this._moduleSupports('directory')) {
    task.data.name = util.trailingSlashIt(task.data.name);
    task.data.type = 'directory';
    task.data.sourcePath = util.trailingSlashIt(task.filepath);
    task.data.sourceType = 'buffer';
    task.source = Buffer.concat([]);
  } else if (stats.isSymbolicLink() && this._moduleSupports('symlink')) {
    var linkPath = fs.readlinkSync(task.filepath);
    var dirName = path.dirname(task.filepath);
    task.data.type = 'symlink';
    task.data.linkname = path.relative(dirName, path.resolve(dirName, linkPath));
    task.data.sourceType = 'buffer';
    task.source = Buffer.concat([]);
  } else {
    // Entry kind exists but the active module cannot represent it.
    var code = stats.isDirectory() ? 'DIRECTORYNOTSUPPORTED'
      : stats.isSymbolicLink() ? 'SYMLINKNOTSUPPORTED'
      : 'ENTRYNOTSUPPORTED';
    this.emit('warning', new ArchiverError(code, task.data));
    return null;
  }

  task.data = this._normalizeEntryData(task.data, stats);

  return task;
};
|
||||
|
||||
/**
 * Aborts the archiving process, taking a best-effort approach, by:
 *
 * - removing any pending queue tasks
 * - allowing any active queue workers to finish
 * - detaching internal module pipes
 * - ending both sides of the Transform stream
 *
 * It will NOT drain any remaining sources.
 *
 * @return {this}
 */
Archiver.prototype.abort = function() {
  // No-op once aborted or finalized; aborting twice would double-teardown.
  if (!this._state.aborted && !this._state.finalized) {
    this._abort();
  }

  return this;
};
|
||||
|
||||
/**
 * Appends an input source (text string, buffer, or stream) to the instance.
 *
 * When the instance has received, processed, and emitted the input, the `entry`
 * event is fired.
 *
 * @fires Archiver#entry
 * @param {(Buffer|Stream|String)} source The input source.
 * @param {EntryData} data See also {@link ZipEntryData} and {@link TarEntryData}.
 * @return {this}
 */
Archiver.prototype.append = function(source, data) {
  // Reject new entries once finalization has begun or the archive was aborted.
  if (this._state.finalize || this._state.aborted) {
    this.emit('error', new ArchiverError('QUEUECLOSED'));
    return this;
  }

  data = this._normalizeEntryData(data);

  if (typeof data.name !== 'string' || data.name.length === 0) {
    this.emit('error', new ArchiverError('ENTRYNAMEREQUIRED'));
    return this;
  }

  if (data.type === 'directory' && !this._moduleSupports('directory')) {
    this.emit('error', new ArchiverError('DIRECTORYNOTSUPPORTED', { name: data.name }));
    return this;
  }

  source = util.normalizeInputSource(source);

  // Tag the source so the queue worker knows how to consume it.
  if (Buffer.isBuffer(source)) {
    data.sourceType = 'buffer';
  } else if (util.isStream(source)) {
    data.sourceType = 'stream';
  } else {
    this.emit('error', new ArchiverError('INPUTSTEAMBUFFERREQUIRED', { name: data.name }));
    return this;
  }

  this._entriesCount++;
  this._queue.push({ data: data, source: source });

  return this;
};
|
||||
|
||||
/**
 * Appends a directory and its files, recursively, given its dirpath.
 *
 * @param {String} dirpath The source directory path.
 * @param {String} destpath The destination path within the archive.
 * @param {(EntryData|Function)} data Per-entry data, or a function that may
 *     transform each entry's data (return false to skip an entry). See also
 *     [ZipEntryData]{@link ZipEntryData} and [TarEntryData]{@link TarEntryData}.
 * @return {this}
 */
Archiver.prototype.directory = function(dirpath, destpath, data) {
  if (this._state.finalize || this._state.aborted) {
    this.emit('error', new ArchiverError('QUEUECLOSED'));
    return this;
  }

  if (typeof dirpath !== 'string' || dirpath.length === 0) {
    this.emit('error', new ArchiverError('DIRECTORYDIRPATHREQUIRED'));
    return this;
  }

  // Glob walking is async; block finalize until the 'end' handler runs.
  this._pending++;

  if (destpath === false) {
    destpath = '';
  } else if (typeof destpath !== 'string') {
    destpath = dirpath;
  }

  var dataFunction = false;
  if (typeof data === 'function') {
    dataFunction = data;
    data = {};
  } else if (typeof data !== 'object') {
    data = {};
  }

  var globOptions = {
    stat: false,
    dot: true,
    cwd: dirpath
  };

  var self = this;
  var globber = glob('**', globOptions);

  globber.on('error', function(err) {
    self.emit('error', err);
  });

  globber.on('match', function(match) {
    var entryData = Object.assign({}, data);
    entryData.name = match;
    entryData.prefix = destpath;
    match = globber._makeAbs(match);

    var skip = false;

    try {
      if (dataFunction) {
        entryData = dataFunction(entryData);

        // A false return means "ignore this entry"; anything else must be an object.
        if (entryData === false) {
          skip = true;
        } else if (typeof entryData !== 'object') {
          throw new ArchiverError('DIRECTORYFUNCTIONINVALIDDATA', { dirpath: dirpath });
        }
      }
    } catch (e) {
      self.emit('error', e);
      return;
    }

    if (skip) {
      return;
    }

    self._append(match, entryData);
  });

  globber.on('end', function() {
    self._pending--;
    self._maybeFinalize();
  });

  return this;
};
|
||||
|
||||
/**
 * Appends a file given its filepath using a
 * [lazystream]{@link https://github.com/jpommerening/node-lazystream} wrapper to
 * prevent issues with open file limits.
 *
 * When the instance has received, processed, and emitted the file, the `entry`
 * event is fired.
 *
 * @param {String} filepath The source filepath.
 * @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and
 * [TarEntryData]{@link TarEntryData}.
 * @return {this}
 */
Archiver.prototype.file = function(filepath, data) {
  if (this._state.finalize || this._state.aborted) {
    this.emit('error', new ArchiverError('QUEUECLOSED'));
    return this;
  }

  if (typeof filepath !== 'string' || filepath.length === 0) {
    this.emit('error', new ArchiverError('FILEFILEPATHREQUIRED'));
    return this;
  }

  this._append(filepath, data);

  return this;
};
|
||||
|
||||
/**
 * Appends multiple files that match a glob pattern.
 *
 * @param {String} pattern The [glob pattern]{@link https://github.com/isaacs/node-glob#glob-primer} to match.
 * @param {Object} options See [node-glob]{@link https://github.com/isaacs/node-glob#options}.
 * @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and
 * [TarEntryData]{@link TarEntryData}.
 * @return {this}
 */
Archiver.prototype.glob = function(pattern, options, data) {
  // Glob walking is async; block finalize until the 'end' handler runs.
  this._pending++;

  options = util.defaults(options, {
    stat: false
  });

  var self = this;
  var globber = glob(pattern, options);

  globber.on('error', function(err) {
    self.emit('error', err);
  });

  globber.on('match', function(match) {
    var entryData = Object.assign({}, data);

    // With a cwd, matches are relative; keep the relative name but append
    // the absolute path.
    if (options.cwd) {
      entryData.name = match;
      match = globber._makeAbs(match);
    }

    self._append(match, entryData);
  });

  globber.on('end', function() {
    self._pending--;
    self._maybeFinalize();
  });

  return this;
};
|
||||
|
||||
/**
 * Finalizes the instance and prevents further appending to the archive
 * structure (queue will continue til drained).
 *
 * The `end`, `close` or `finish` events on the destination stream may fire
 * right after calling this method so you should set listeners beforehand to
 * properly detect stream completion.
 *
 * @return {this} On the error paths; otherwise a Promise that settles when
 *     the module stream ends or errors.
 */
Archiver.prototype.finalize = function() {
  if (this._state.aborted) {
    this.emit('error', new ArchiverError('ABORTED'));
    return this;
  }

  if (this._state.finalize) {
    this.emit('error', new ArchiverError('FINALIZING'));
    return this;
  }

  this._state.finalize = true;

  // Nothing queued and nothing pending: finalize immediately.
  if (this._pending === 0 && this._queue.idle() && this._statQueue.idle()) {
    this._finalize();
  }

  var self = this;

  return new Promise(function(resolve, reject) {
    var errored = false;

    self._module.on('end', function() {
      // Guard so a late 'end' after an error does not resolve the promise.
      if (!errored) {
        resolve();
      }
    });

    self._module.on('error', function(err) {
      errored = true;
      reject(err);
    });
  });
};
|
||||
|
||||
/**
 * Sets the module format name used for archiving.
 *
 * @param {String} format The name of the format.
 * @return {this}
 */
Archiver.prototype.setFormat = function(format) {
  // The format may only be chosen once per instance.
  if (this._format) {
    this.emit('error', new ArchiverError('FORMATSET'));
    return this;
  }

  this._format = format;

  return this;
};
|
||||
|
||||
/**
 * Sets the module used for archiving and pipes it into the internal stream.
 *
 * @param {Function} module The function for archiver to interact with.
 * @return {this}
 */
Archiver.prototype.setModule = function(module) {
  if (this._state.aborted) {
    this.emit('error', new ArchiverError('ABORTED'));
    return this;
  }

  // NOTE(review): this flag is checked but never visibly set in this file —
  // confirm where _state.module is assigned.
  if (this._state.module) {
    this.emit('error', new ArchiverError('MODULESET'));
    return this;
  }

  this._module = module;
  this._modulePipe();

  return this;
};
|
||||
|
||||
/**
 * Appends a symlink to the instance.
 *
 * This does NOT interact with filesystem and is used for programmatically creating symlinks.
 *
 * @param {String} filepath The symlink path (within archive).
 * @param {String} target The target path (within archive).
 * @return {this}
 */
Archiver.prototype.symlink = function(filepath, target) {
  if (this._state.finalize || this._state.aborted) {
    this.emit('error', new ArchiverError('QUEUECLOSED'));
    return this;
  }

  if (typeof filepath !== 'string' || filepath.length === 0) {
    this.emit('error', new ArchiverError('SYMLINKFILEPATHREQUIRED'));
    return this;
  }

  if (typeof target !== 'string' || target.length === 0) {
    this.emit('error', new ArchiverError('SYMLINKTARGETREQUIRED', { filepath: filepath }));
    return this;
  }

  if (!this._moduleSupports('symlink')) {
    this.emit('error', new ArchiverError('SYMLINKNOTSUPPORTED', { filepath: filepath }));
    return this;
  }

  // Normalize both paths to forward slashes for in-archive representation.
  var data = {
    type: 'symlink',
    name: filepath.replace(/\\/g, '/'),
    linkname: target.replace(/\\/g, '/'),
    sourceType: 'buffer'
  };

  this._entriesCount++;
  this._queue.push({
    data: data,
    source: Buffer.concat([])
  });

  return this;
};
|
||||
|
||||
/**
 * Returns the current length (in bytes) that has been emitted.
 *
 * @return {Number}
 */
Archiver.prototype.pointer = function() {
  return this._pointer;
};
|
||||
|
||||
/**
 * Middleware-like helper that has yet to be fully implemented.
 *
 * Currently only records the plugin; the recorded list is not consumed here.
 *
 * @private
 * @param {Function} plugin
 * @return {this}
 */
Archiver.prototype.use = function(plugin) {
  this._streams.push(plugin);

  return this;
};
|
||||
|
||||
module.exports = Archiver;
|
||||
|
||||
/**
|
||||
* @typedef {Object} CoreOptions
|
||||
* @global
|
||||
* @property {Number} [statConcurrency=4] Sets the number of workers used to
|
||||
* process the internal fs stat queue.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} TransformOptions
|
||||
* @property {Boolean} [allowHalfOpen=true] If set to false, then the stream
|
||||
* will automatically end the readable side when the writable side ends and vice
|
||||
* versa.
|
||||
* @property {Boolean} [readableObjectMode=false] Sets objectMode for readable
|
||||
* side of the stream. Has no effect if objectMode is true.
|
||||
* @property {Boolean} [writableObjectMode=false] Sets objectMode for writable
|
||||
* side of the stream. Has no effect if objectMode is true.
|
||||
* @property {Boolean} [decodeStrings=true] Whether or not to decode strings
|
||||
* into Buffers before passing them to _write(). `Writable`
|
||||
* @property {String} [encoding=NULL] If specified, then buffers will be decoded
|
||||
* to strings using the specified encoding. `Readable`
|
||||
* @property {Number} [highWaterMark=16kb] The maximum number of bytes to store
|
||||
* in the internal buffer before ceasing to read from the underlying resource.
|
||||
* `Readable` `Writable`
|
||||
* @property {Boolean} [objectMode=false] Whether this stream should behave as a
|
||||
* stream of objects. Meaning that stream.read(n) returns a single value instead
|
||||
* of a Buffer of size n. `Readable` `Writable`
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} EntryData
|
||||
* @property {String} name Sets the entry name including internal path.
|
||||
* @property {(String|Date)} [date=NOW()] Sets the entry date.
|
||||
* @property {Number} [mode=D:0755/F:0644] Sets the entry permissions.
|
||||
* @property {String} [prefix] Sets a path prefix for the entry name. Useful
|
||||
* when working with methods like `directory` or `glob`.
|
||||
* @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing
|
||||
* for reduction of fs stat calls when stat data is already known.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} ErrorData
|
||||
* @property {String} message The message of the error.
|
||||
* @property {String} code The error code assigned to this error.
|
||||
* @property {String} data Additional data provided for reporting or debugging (where available).
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} ProgressData
|
||||
* @property {Object} entries
|
||||
* @property {Number} entries.total Number of entries that have been appended.
|
||||
* @property {Number} entries.processed Number of entries that have been processed.
|
||||
* @property {Object} fs
|
||||
 * @property {Number} fs.totalBytes Number of bytes that have been appended. Calculated asynchronously and might not be accurate: it grows while entries are added. (based on fs.Stats)
|
||||
* @property {Number} fs.processedBytes Number of bytes that have been processed. (based on fs.Stats)
|
||||
*/
|
@ -1,40 +0,0 @@
|
||||
/**
|
||||
* Archiver Core
|
||||
*
|
||||
* @ignore
|
||||
* @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE}
|
||||
* @copyright (c) 2012-2014 Chris Talkington, contributors.
|
||||
*/
|
||||
|
||||
var util = require('util');
|
||||
|
||||
// Maps internal archiver error codes to human-readable messages. Unknown
// codes fall through to the raw code string in ArchiverError.
const ERROR_CODES = {
  'ABORTED': 'archive was aborted',
  // fixed typo: "diretory" -> "directory"
  'DIRECTORYDIRPATHREQUIRED': 'directory dirpath argument must be a non-empty string value',
  'DIRECTORYFUNCTIONINVALIDDATA': 'invalid data returned by directory custom data function',
  'ENTRYNAMEREQUIRED': 'entry name must be a non-empty string value',
  'FILEFILEPATHREQUIRED': 'file filepath argument must be a non-empty string value',
  'FINALIZING': 'archive already finalizing',
  'QUEUECLOSED': 'queue closed',
  'NOENDMETHOD': 'no suitable finalize/end method defined by module',
  'DIRECTORYNOTSUPPORTED': 'support for directory entries not defined by module',
  'FORMATSET': 'archive format already set',
  // NOTE: the key itself ("STEAM") is a historic misspelling; callers match on
  // the code string, so it must stay as-is for compatibility.
  'INPUTSTEAMBUFFERREQUIRED': 'input source must be valid Stream or Buffer instance',
  'MODULESET': 'module already set',
  'SYMLINKNOTSUPPORTED': 'support for symlink entries not defined by module',
  'SYMLINKFILEPATHREQUIRED': 'symlink filepath argument must be a non-empty string value',
  'SYMLINKTARGETREQUIRED': 'symlink target argument must be a non-empty string value',
  'ENTRYNOTSUPPORTED': 'entry not supported'
};
|
||||
|
||||
/**
 * Error subtype carrying an archiver error code plus optional context data.
 *
 * @param {String} code A key of ERROR_CODES (unknown codes become the message).
 * @param {*} [data] Extra context for reporting or debugging.
 */
function ArchiverError(code, data) {
  Error.captureStackTrace(this, this.constructor);
  this.message = ERROR_CODES[code] || code;
  this.code = code;
  this.data = data;
}

util.inherits(ArchiverError, Error);

exports = module.exports = ArchiverError;
|
@ -1,110 +0,0 @@
|
||||
/**
|
||||
* JSON Format Plugin
|
||||
*
|
||||
* @module plugins/json
|
||||
* @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE}
|
||||
* @copyright (c) 2012-2014 Chris Talkington, contributors.
|
||||
*/
|
||||
var inherits = require('util').inherits;
|
||||
var Transform = require('readable-stream').Transform;
|
||||
|
||||
var crc32 = require('buffer-crc32');
|
||||
var util = require('archiver-utils');
|
||||
|
||||
/**
 * JSON archive format: collects entry metadata and emits it as a JSON array.
 *
 * @constructor
 * @param {(JsonOptions|TransformOptions)} options
 */
var Json = function(options) {
  // Allow invocation without `new`.
  if (!(this instanceof Json)) {
    return new Json(options);
  }

  options = this.options = util.defaults(options, {});

  Transform.call(this, options);

  // This format can represent both directories and symlinks.
  this.supports = {
    directory: true,
    symlink: true
  };

  // Accumulated entry metadata, serialized on finalize().
  this.files = [];
};
|
||||
|
||||
inherits(Json, Transform);
|
||||
|
||||
/**
 * Pass-through transform: chunks flow unchanged; this format only writes
 * explicitly via _writeStringified().
 *
 * @private
 * @param {Buffer} chunk
 * @param {String} encoding
 * @param {Function} callback
 * @return void
 */
Json.prototype._transform = function(chunk, encoding, callback) {
  callback(null, chunk);
};
|
||||
|
||||
/**
 * Serializes the collected entry list and writes it to the stream.
 *
 * @private
 * @return void
 */
Json.prototype._writeStringified = function() {
  this.write(JSON.stringify(this.files));
};
|
||||
|
||||
/**
 * Records an entry's metadata (size + crc32); source bytes are consumed but
 * not written — only the metadata list is emitted, on finalize().
 *
 * @param {(Buffer|Stream)} source
 * @param {EntryData} data
 * @param {Function} callback Invoked with (err) or (null, data).
 * @return void
 */
Json.prototype.append = function(source, data, callback) {
  var self = this;

  data.crc32 = 0;

  function onend(err, sourceBuffer) {
    if (err) {
      callback(err);
      return;
    }

    data.size = sourceBuffer.length || 0;
    data.crc32 = crc32.unsigned(sourceBuffer);

    self.files.push(data);

    callback(null, data);
  }

  if (data.sourceType === 'buffer') {
    onend(null, source);
  } else if (data.sourceType === 'stream') {
    util.collectStream(source, onend);
  } else {
    // Previously an unrecognized sourceType silently skipped the callback,
    // stalling the caller's queue forever; surface it as an error instead.
    callback(new Error('unsupported sourceType: ' + data.sourceType));
  }
};
|
||||
|
||||
/**
 * Writes the serialized entry list and ends the stream.
 *
 * @return void
 */
Json.prototype.finalize = function() {
  this._writeStringified();
  this.end();
};
|
||||
|
||||
module.exports = Json;
|
||||
|
||||
/**
|
||||
* @typedef {Object} JsonOptions
|
||||
* @global
|
||||
*/
|
@ -1,167 +0,0 @@
|
||||
/**
|
||||
* TAR Format Plugin
|
||||
*
|
||||
* @module plugins/tar
|
||||
* @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE}
|
||||
* @copyright (c) 2012-2014 Chris Talkington, contributors.
|
||||
*/
|
||||
var zlib = require('zlib');
|
||||
|
||||
var engine = require('tar-stream');
|
||||
var util = require('archiver-utils');
|
||||
|
||||
/**
 * TAR archive format backed by tar-stream, with optional gzip compression.
 *
 * @constructor
 * @param {TarOptions} options
 */
var Tar = function(options) {
  // Allow invocation without `new`.
  if (!(this instanceof Tar)) {
    return new Tar(options);
  }

  options = this.options = util.defaults(options, {
    gzip: false
  });

  if (typeof options.gzipOptions !== 'object') {
    options.gzipOptions = {};
  }

  // This format can represent both directories and symlinks.
  this.supports = {
    directory: true,
    symlink: true
  };

  this.engine = engine.pack(options);
  this.compressor = false;

  if (options.gzip) {
    this.compressor = zlib.createGzip(options.gzipOptions);
    // Forward compressor failures through the engine's error channel.
    this.compressor.on('error', this._onCompressorError.bind(this));
  }
};
|
||||
|
||||
/**
 * Re-emits gzip compressor errors on the tar engine so consumers only need
 * one error listener.
 *
 * @private
 * @param {Error} err
 * @return void
 */
Tar.prototype._onCompressorError = function(err) {
  this.engine.emit('error', err);
};
|
||||
|
||||
/**
 * Appends one entry to the tar engine. Buffers are written directly; streams
 * with known stats are piped (size must be known up front for tar headers);
 * streams without stats are buffered first.
 *
 * @param {(Buffer|Stream)} source
 * @param {TarEntryData} data
 * @param {Function} callback Invoked with (err, data).
 * @return void
 */
Tar.prototype.append = function(source, data, callback) {
  var self = this;

  // tar-stream expects the entry date under `mtime`.
  data.mtime = data.date;

  function writeBuffered(err, sourceBuffer) {
    if (err) {
      callback(err);
      return;
    }

    self.engine.entry(data, sourceBuffer, function(err) {
      callback(err, data);
    });
  }

  if (data.sourceType === 'buffer') {
    writeBuffered(null, source);
  } else if (data.sourceType === 'stream' && data.stats) {
    // Size is known, so the stream can be piped straight into the entry.
    data.size = data.stats.size;

    var entry = self.engine.entry(data, function(err) {
      callback(err, data);
    });

    source.pipe(entry);
  } else if (data.sourceType === 'stream') {
    // No stats: collect the whole stream to learn its size first.
    util.collectStream(source, writeBuffered);
  }
};
|
||||
|
||||
/**
 * Finalizes the underlying tar engine.
 *
 * @return void
 */
Tar.prototype.finalize = function() {
  this.engine.finalize();
};
|
||||
|
||||
/**
 * Delegates event subscription to the tar engine.
 *
 * @return this.engine
 */
Tar.prototype.on = function(...args) {
  return this.engine.on(...args);
};
|
||||
|
||||
/**
 * Pipes the engine to the destination, routing through the gzip compressor
 * when one is configured.
 *
 * @param {String} destination
 * @param {Object} options
 * @return this.engine
 */
Tar.prototype.pipe = function(destination, options) {
  if (!this.compressor) {
    return this.engine.pipe.apply(this.engine, arguments);
  }

  return this.engine.pipe(this.compressor).pipe(destination, options);
};
|
||||
|
||||
/**
 * Unpipes from the compressor when one is configured, otherwise from the
 * engine directly.
 *
 * @return this.engine
 */
Tar.prototype.unpipe = function(...args) {
  var target = this.compressor ? this.compressor : this.engine;
  return target.unpipe(...args);
};
|
||||
|
||||
module.exports = Tar;
|
||||
|
||||
/**
|
||||
* @typedef {Object} TarOptions
|
||||
* @global
|
||||
* @property {Boolean} [gzip=false] Compress the tar archive using gzip.
|
||||
* @property {Object} [gzipOptions] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options}
|
||||
* to control compression.
|
||||
* @property {*} [*] See [tar-stream]{@link https://github.com/mafintosh/tar-stream} documentation for additional properties.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} TarEntryData
|
||||
* @global
|
||||
* @property {String} name Sets the entry name including internal path.
|
||||
* @property {(String|Date)} [date=NOW()] Sets the entry date.
|
||||
* @property {Number} [mode=D:0755/F:0644] Sets the entry permissions.
|
||||
* @property {String} [prefix] Sets a path prefix for the entry name. Useful
|
||||
* when working with methods like `directory` or `glob`.
|
||||
* @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing
|
||||
* for reduction of fs stat calls when stat data is already known.
|
||||
*/
|
||||
|
||||
/**
|
||||
* TarStream Module
|
||||
* @external TarStream
|
||||
* @see {@link https://github.com/mafintosh/tar-stream}
|
||||
*/
|
@ -1,116 +0,0 @@
|
||||
/**
|
||||
* ZIP Format Plugin
|
||||
*
|
||||
* @module plugins/zip
|
||||
* @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE}
|
||||
* @copyright (c) 2012-2014 Chris Talkington, contributors.
|
||||
*/
|
||||
var engine = require('zip-stream');
|
||||
var util = require('archiver-utils');
|
||||
|
||||
/**
 * ZIP archive format backed by zip-stream.
 *
 * @constructor
 * @param {ZipOptions} [options]
 * @param {String} [options.comment] Sets the zip archive comment.
 * @param {Boolean} [options.forceLocalTime=false] Forces the archive to contain local file times instead of UTC.
 * @param {Boolean} [options.forceZip64=false] Forces the archive to contain ZIP64 headers.
 * @param {Boolean} [options.store=false] Sets the compression method to STORE.
 * @param {Object} [options.zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options}
 */
var Zip = function(options) {
  // Allow invocation without `new`.
  if (!(this instanceof Zip)) {
    return new Zip(options);
  }

  options = this.options = util.defaults(options, {
    comment: '',
    forceUTC: false,
    store: false
  });

  // This format can represent both directories and symlinks.
  this.supports = {
    directory: true,
    symlink: true
  };

  this.engine = new engine(options);
};
|
||||
|
||||
/**
 * Delegates entry creation to the zip-stream engine.
 *
 * @param {(Buffer|Stream)} source
 * @param {ZipEntryData} data
 * @param {String} data.name Sets the entry name including internal path.
 * @param {(String|Date)} [data.date=NOW()] Sets the entry date.
 * @param {Number} [data.mode=D:0755/F:0644] Sets the entry permissions.
 * @param {String} [data.prefix] Sets a path prefix for the entry name.
 * @param {fs.Stats} [data.stats] Pre-computed fs stat data for this entry.
 * @param {Boolean} [data.store=ZipOptions.store] Sets the compression method to STORE.
 * @param {Function} callback
 * @return void
 */
Zip.prototype.append = function(source, data, callback) {
  this.engine.entry(source, data, callback);
};
|
||||
|
||||
/**
 * Finalizes the underlying zip-stream engine.
 *
 * @return void
 */
Zip.prototype.finalize = function() {
  this.engine.finalize();
};
|
||||
|
||||
/**
 * Delegates event subscription to the zip engine.
 *
 * @return this.engine
 */
Zip.prototype.on = function(...args) {
  return this.engine.on(...args);
};
|
||||
|
||||
/**
 * Delegates piping to the zip engine.
 *
 * @return this.engine
 */
Zip.prototype.pipe = function(...args) {
  return this.engine.pipe(...args);
};
|
||||
|
||||
/**
 * Delegates unpiping to the zip engine.
 *
 * @return this.engine
 */
Zip.prototype.unpipe = function(...args) {
  return this.engine.unpipe(...args);
};
|
||||
|
||||
module.exports = Zip;
|
||||
|
||||
/**
|
||||
* @typedef {Object} ZipOptions
|
||||
* @global
|
||||
* @property {String} [comment] Sets the zip archive comment.
|
||||
* @property {Boolean} [forceLocalTime=false] Forces the archive to contain local file times instead of UTC.
|
||||
* @property {Boolean} [forceZip64=false] Forces the archive to contain ZIP64 headers.
|
||||
* @property {Boolean} [store=false] Sets the compression method to STORE.
|
||||
* @property {Object} [zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options}
|
||||
* to control compression.
|
||||
* @property {*} [*] See [zip-stream]{@link https://archiverjs.com/zip-stream/ZipStream.html} documentation for current list of properties.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} ZipEntryData
|
||||
* @global
|
||||
* @property {String} name Sets the entry name including internal path.
|
||||
* @property {(String|Date)} [date=NOW()] Sets the entry date.
|
||||
* @property {Number} [mode=D:0755/F:0644] Sets the entry permissions.
|
||||
* @property {String} [prefix] Sets a path prefix for the entry name. Useful
|
||||
* when working with methods like `directory` or `glob`.
|
||||
* @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing
|
||||
* for reduction of fs stat calls when stat data is already known.
|
||||
* @property {Boolean} [store=ZipOptions.store] Sets the compression method to STORE.
|
||||
*/
|
||||
|
||||
/**
|
||||
* ZipStream Module
|
||||
* @external ZipStream
|
||||
* @see {@link https://www.archiverjs.com/zip-stream/ZipStream.html}
|
||||
*/
|
@ -1,61 +0,0 @@
|
||||
{
|
||||
"name": "archiver",
|
||||
"version": "3.1.1",
|
||||
"description": "a streaming interface for archive generation",
|
||||
"homepage": "https://github.com/archiverjs/node-archiver",
|
||||
"author": {
|
||||
"name": "Chris Talkington",
|
||||
"url": "http://christalkington.com/"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/archiverjs/node-archiver.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/archiverjs/node-archiver/issues"
|
||||
},
|
||||
"license": "MIT",
|
||||
"main": "index.js",
|
||||
"files": [
|
||||
"index.js",
|
||||
"lib"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "mocha --reporter dot",
|
||||
"jsdoc": "jsdoc -c jsdoc.json README.md",
|
||||
"bench": "node benchmark/simple/pack-zip.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"archiver-utils": "^2.1.0",
|
||||
"async": "^2.6.3",
|
||||
"buffer-crc32": "^0.2.1",
|
||||
"glob": "^7.1.4",
|
||||
"readable-stream": "^3.4.0",
|
||||
"tar-stream": "^2.1.0",
|
||||
"zip-stream": "^2.1.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"archiver-jsdoc-theme": "^1.1.1",
|
||||
"chai": "^4.2.0",
|
||||
"jsdoc": "^3.6.3",
|
||||
"mkdirp": "^0.5.0",
|
||||
"mocha": "^6.2.0",
|
||||
"rimraf": "^2.6.3",
|
||||
"stream-bench": "^0.1.2",
|
||||
"tar": "^4.4.10",
|
||||
"yauzl": "^2.9.0"
|
||||
},
|
||||
"keywords": [
|
||||
"archive",
|
||||
"archiver",
|
||||
"stream",
|
||||
"zip",
|
||||
"tar"
|
||||
],
|
||||
"publishConfig": {
|
||||
"registry": "https://registry.npmjs.org/"
|
||||
}
|
||||
}
|
@ -1,278 +0,0 @@
|
||||
# v2.6.4
|
||||
- Fix potential prototype pollution exploit (#1828)
|
||||
|
||||
# v2.6.3
|
||||
- Updated lodash to squelch a security warning (#1675)
|
||||
|
||||
# v2.6.2
|
||||
- Updated lodash to squelch a security warning (#1620)
|
||||
|
||||
# v2.6.1
|
||||
- Updated lodash to prevent `npm audit` warnings. (#1532, #1533)
|
||||
- Made `async-es` more optimized for webpack users (#1517)
|
||||
- Fixed a stack overflow with large collections and a synchronous iterator (#1514)
|
||||
- Various small fixes/chores (#1505, #1511, #1527, #1530)
|
||||
|
||||
# v2.6.0
|
||||
- Added missing aliases for many methods. Previously, you could not (e.g.) `require('async/find')` or use `async.anyLimit`. (#1483)
|
||||
- Improved `queue` performance. (#1448, #1454)
|
||||
- Add missing sourcemap (#1452, #1453)
|
||||
- Various doc updates (#1448, #1471, #1483)
|
||||
|
||||
# v2.5.0
|
||||
- Added `concatLimit`, the `Limit` equivalent of [`concat`](https://caolan.github.io/async/docs.html#concat) ([#1426](https://github.com/caolan/async/issues/1426), [#1430](https://github.com/caolan/async/pull/1430))
|
||||
- `concat` improvements: it now preserves order, handles falsy values and the `iteratee` callback takes a variable number of arguments ([#1437](https://github.com/caolan/async/issues/1437), [#1436](https://github.com/caolan/async/pull/1436))
|
||||
- Fixed an issue in `queue` where there was a size discrepancy between `workersList().length` and `running()` ([#1428](https://github.com/caolan/async/issues/1428), [#1429](https://github.com/caolan/async/pull/1429))
|
||||
- Various doc fixes ([#1422](https://github.com/caolan/async/issues/1422), [#1424](https://github.com/caolan/async/pull/1424))
|
||||
|
||||
# v2.4.1
|
||||
- Fixed a bug preventing functions wrapped with `timeout()` from being re-used. ([#1418](https://github.com/caolan/async/issues/1418), [#1419](https://github.com/caolan/async/issues/1419))
|
||||
|
||||
# v2.4.0
|
||||
- Added `tryEach`, for running async functions in parallel, where you only expect one to succeed. ([#1365](https://github.com/caolan/async/issues/1365), [#687](https://github.com/caolan/async/issues/687))
|
||||
- Improved performance, most notably in `parallel` and `waterfall` ([#1395](https://github.com/caolan/async/issues/1395))
|
||||
- Added `queue.remove()`, for removing items in a `queue` ([#1397](https://github.com/caolan/async/issues/1397), [#1391](https://github.com/caolan/async/issues/1391))
|
||||
- Fixed using `eval`, preventing Async from running in pages with Content Security Policy ([#1404](https://github.com/caolan/async/issues/1404), [#1403](https://github.com/caolan/async/issues/1403))
|
||||
- Fixed errors thrown in an `asyncify`ed function's callback being caught by the underlying Promise ([#1408](https://github.com/caolan/async/issues/1408))
|
||||
- Fixed timing of `queue.empty()` ([#1367](https://github.com/caolan/async/issues/1367))
|
||||
- Various doc fixes ([#1314](https://github.com/caolan/async/issues/1314), [#1394](https://github.com/caolan/async/issues/1394), [#1412](https://github.com/caolan/async/issues/1412))
|
||||
|
||||
# v2.3.0
|
||||
- Added support for ES2017 `async` functions. Wherever you can pass a Node-style/CPS function that uses a callback, you can also pass an `async` function. Previously, you had to wrap `async` functions with `asyncify`. The caveat is that it will only work if `async` functions are supported natively in your environment, transpiled implementations can't be detected. ([#1386](https://github.com/caolan/async/issues/1386), [#1390](https://github.com/caolan/async/issues/1390))
|
||||
- Small doc fix ([#1392](https://github.com/caolan/async/issues/1392))
|
||||
|
||||
# v2.2.0
|
||||
- Added `groupBy`, and the `Series`/`Limit` equivalents, analogous to [`_.groupBy`](http://lodash.com/docs#groupBy) ([#1364](https://github.com/caolan/async/issues/1364))
|
||||
- Fixed `transform` bug when `callback` was not passed ([#1381](https://github.com/caolan/async/issues/1381))
|
||||
- Added note about `reflect` to `parallel` docs ([#1385](https://github.com/caolan/async/issues/1385))
|
||||
|
||||
# v2.1.5
|
||||
- Fix `auto` bug when function names collided with Array.prototype ([#1358](https://github.com/caolan/async/issues/1358))
|
||||
- Improve some error messages ([#1349](https://github.com/caolan/async/issues/1349))
|
||||
- Avoid stack overflow case in queue
|
||||
- Fixed an issue in `some`, `every` and `find` where processing would continue after the result was determined.
|
||||
- Cleanup implementations of `some`, `every` and `find`
|
||||
|
||||
# v2.1.3
|
||||
- Make bundle size smaller
|
||||
- Create optimized hotpath for `filter` in array case.
|
||||
|
||||
# v2.1.2
|
||||
- Fixed a stackoverflow bug with `detect`, `some`, `every` on large inputs ([#1293](https://github.com/caolan/async/issues/1293)).
|
||||
|
||||
# v2.1.0
|
||||
|
||||
- `retry` and `retryable` now support an optional `errorFilter` function that determines if the `task` should retry on the error ([#1256](https://github.com/caolan/async/issues/1256), [#1261](https://github.com/caolan/async/issues/1261))
|
||||
- Optimized array iteration in `race`, `cargo`, `queue`, and `priorityQueue` ([#1253](https://github.com/caolan/async/issues/1253))
|
||||
- Added alias documentation to doc site ([#1251](https://github.com/caolan/async/issues/1251), [#1254](https://github.com/caolan/async/issues/1254))
|
||||
- Added [BootStrap scrollspy](http://getbootstrap.com/javascript/#scrollspy) to docs to highlight in the sidebar the current method being viewed ([#1289](https://github.com/caolan/async/issues/1289), [#1300](https://github.com/caolan/async/issues/1300))
|
||||
- Various minor doc fixes ([#1263](https://github.com/caolan/async/issues/1263), [#1264](https://github.com/caolan/async/issues/1264), [#1271](https://github.com/caolan/async/issues/1271), [#1278](https://github.com/caolan/async/issues/1278), [#1280](https://github.com/caolan/async/issues/1280), [#1282](https://github.com/caolan/async/issues/1282), [#1302](https://github.com/caolan/async/issues/1302))
|
||||
|
||||
# v2.0.1
|
||||
|
||||
- Significantly optimized all iteration based collection methods such as `each`, `map`, `filter`, etc ([#1245](https://github.com/caolan/async/issues/1245), [#1246](https://github.com/caolan/async/issues/1246), [#1247](https://github.com/caolan/async/issues/1247)).
|
||||
|
||||
# v2.0.0
|
||||
|
||||
Lots of changes here!
|
||||
|
||||
First and foremost, we have a slick new [site for docs](https://caolan.github.io/async/). Special thanks to [**@hargasinski**](https://github.com/hargasinski) for his work converting our old docs to `jsdoc` format and implementing the new website. Also huge ups to [**@ivanseidel**](https://github.com/ivanseidel) for designing our new logo. It was a long process for both of these tasks, but I think these changes turned out extraordinary well.
|
||||
|
||||
The biggest feature is modularization. You can now `require("async/series")` to only require the `series` function. Every Async library function is available this way. You still can `require("async")` to require the entire library, like you could do before.
|
||||
|
||||
We also provide Async as a collection of ES2015 modules. You can now `import {each} from 'async-es'` or `import waterfall from 'async-es/waterfall'`. If you are using only a few Async functions, and are using a ES bundler such as Rollup, this can significantly lower your build size.
|
||||
|
||||
Major thanks to [**@Kikobeats**](github.com/Kikobeats), [**@aearly**](github.com/aearly) and [**@megawac**](github.com/megawac) for doing the majority of the modularization work, as well as [**@jdalton**](github.com/jdalton) and [**@Rich-Harris**](github.com/Rich-Harris) for advisory work on the general modularization strategy.
|
||||
|
||||
Another one of the general themes of the 2.0 release is standardization of what an "async" function is. We are now more strictly following the node-style continuation passing style. That is, an async function is a function that:
|
||||
|
||||
1. Takes a variable number of arguments
|
||||
2. The last argument is always a callback
|
||||
3. The callback can accept any number of arguments
|
||||
4. The first argument passed to the callback will be treated as an error result, if the argument is truthy
|
||||
5. Any number of result arguments can be passed after the "error" argument
|
||||
6. The callback is called once and exactly once, either on the same tick or later tick of the JavaScript event loop.
|
||||
|
||||
There were several cases where Async accepted some functions that did not strictly have these properties, most notably `auto`, `every`, `some`, `filter`, `reject` and `detect`.
|
||||
|
||||
Another theme is performance. We have eliminated internal deferrals in all cases where they make sense. For example, in `waterfall` and `auto`, there was a `setImmediate` between each task -- these deferrals have been removed. A `setImmediate` call can add up to 1ms of delay. This might not seem like a lot, but it can add up if you are using many Async functions in the course of processing a HTTP request, for example. Nearly all asynchronous functions that do I/O already have some sort of deferral built in, so the extra deferral is unnecessary. The trade-off of this change is removing our built-in stack-overflow defense. Many synchronous callback calls in series can quickly overflow the JS call stack. If you do have a function that is sometimes synchronous (calling its callback on the same tick), and are running into stack overflows, wrap it with `async.ensureAsync()`.
|
||||
|
||||
Another big performance win has been re-implementing `queue`, `cargo`, and `priorityQueue` with [doubly linked lists](https://en.wikipedia.org/wiki/Doubly_linked_list) instead of arrays. This has lead to queues being an order of [magnitude faster on large sets of tasks](https://github.com/caolan/async/pull/1205).
|
||||
|
||||
## New Features
|
||||
|
||||
- Async is now modularized. Individual functions can be `require()`d from the main package. (`require('async/auto')`) ([#984](https://github.com/caolan/async/issues/984), [#996](https://github.com/caolan/async/issues/996))
|
||||
- Async is also available as a collection of ES2015 modules in the new `async-es` package. (`import {forEachSeries} from 'async-es'`) ([#984](https://github.com/caolan/async/issues/984), [#996](https://github.com/caolan/async/issues/996))
|
||||
- Added `race`, analogous to `Promise.race()`. It will run an array of async tasks in parallel and will call its callback with the result of the first task to respond. ([#568](https://github.com/caolan/async/issues/568), [#1038](https://github.com/caolan/async/issues/1038))
|
||||
- Collection methods now accept ES2015 iterators. Maps, Sets, and anything that implements the iterator spec can now be passed directly to `each`, `map`, `parallel`, etc.. ([#579](https://github.com/caolan/async/issues/579), [#839](https://github.com/caolan/async/issues/839), [#1074](https://github.com/caolan/async/issues/1074))
|
||||
- Added `mapValues`, for mapping over the properties of an object and returning an object with the same keys. ([#1157](https://github.com/caolan/async/issues/1157), [#1177](https://github.com/caolan/async/issues/1177))
|
||||
- Added `timeout`, a wrapper for an async function that will make the task time-out after the specified time. ([#1007](https://github.com/caolan/async/issues/1007), [#1027](https://github.com/caolan/async/issues/1027))
|
||||
- Added `reflect` and `reflectAll`, analagous to [`Promise.reflect()`](http://bluebirdjs.com/docs/api/reflect.html), a wrapper for async tasks that always succeeds, by gathering results and errors into an object. ([#942](https://github.com/caolan/async/issues/942), [#1012](https://github.com/caolan/async/issues/1012), [#1095](https://github.com/caolan/async/issues/1095))
|
||||
- `constant` supports dynamic arguments -- it will now always use its last argument as the callback. ([#1016](https://github.com/caolan/async/issues/1016), [#1052](https://github.com/caolan/async/issues/1052))
|
||||
- `setImmediate` and `nextTick` now support arguments to partially apply to the deferred function, like the node-native versions do. ([#940](https://github.com/caolan/async/issues/940), [#1053](https://github.com/caolan/async/issues/1053))
|
||||
- `auto` now supports resolving cyclic dependencies using [Kahn's algorithm](https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm) ([#1140](https://github.com/caolan/async/issues/1140)).
|
||||
- Added `autoInject`, a relative of `auto` that automatically spreads a task's dependencies as arguments to the task function. ([#608](https://github.com/caolan/async/issues/608), [#1055](https://github.com/caolan/async/issues/1055), [#1099](https://github.com/caolan/async/issues/1099), [#1100](https://github.com/caolan/async/issues/1100))
|
||||
- You can now limit the concurrency of `auto` tasks. ([#635](https://github.com/caolan/async/issues/635), [#637](https://github.com/caolan/async/issues/637))
|
||||
- Added `retryable`, a relative of `retry` that wraps an async function, making it retry when called. ([#1058](https://github.com/caolan/async/issues/1058))
|
||||
- `retry` now supports specifying a function that determines the next time interval, useful for exponential backoff, logging and other retry strategies. ([#1161](https://github.com/caolan/async/issues/1161))
|
||||
- `retry` will now pass all of the arguments the task function was resolved with to the callback ([#1231](https://github.com/caolan/async/issues/1231)).
|
||||
- Added `q.unsaturated` -- callback called when a `queue`'s number of running workers falls below a threshold. ([#868](https://github.com/caolan/async/issues/868), [#1030](https://github.com/caolan/async/issues/1030), [#1033](https://github.com/caolan/async/issues/1033), [#1034](https://github.com/caolan/async/issues/1034))
|
||||
- Added `q.error` -- a callback called whenever a `queue` task calls its callback with an error. ([#1170](https://github.com/caolan/async/issues/1170))
|
||||
- `applyEach` and `applyEachSeries` now pass results to the final callback. ([#1088](https://github.com/caolan/async/issues/1088))
|
||||
|
||||
## Breaking changes
|
||||
|
||||
- Calling a callback more than once is considered an error, and an error will be thrown. This had an explicit breaking change in `waterfall`. If you were relying on this behavior, you should more accurately represent your control flow as an event emitter or stream. ([#814](https://github.com/caolan/async/issues/814), [#815](https://github.com/caolan/async/issues/815), [#1048](https://github.com/caolan/async/issues/1048), [#1050](https://github.com/caolan/async/issues/1050))
|
||||
- `auto` task functions now always take the callback as the last argument. If a task has dependencies, the `results` object will be passed as the first argument. To migrate old task functions, wrap them with [`_.flip`](https://lodash.com/docs#flip) ([#1036](https://github.com/caolan/async/issues/1036), [#1042](https://github.com/caolan/async/issues/1042))
|
||||
- Internal `setImmediate` calls have been refactored away. This may make existing flows vulnerable to stack overflows if you use many synchronous functions in series. Use `ensureAsync` to work around this. ([#696](https://github.com/caolan/async/issues/696), [#704](https://github.com/caolan/async/issues/704), [#1049](https://github.com/caolan/async/issues/1049), [#1050](https://github.com/caolan/async/issues/1050))
|
||||
- `map` used to return an object when iterating over an object. `map` now always returns an array, like in other libraries. The previous object behavior has been split out into `mapValues`. ([#1157](https://github.com/caolan/async/issues/1157), [#1177](https://github.com/caolan/async/issues/1177))
|
||||
- `filter`, `reject`, `some`, `every`, `detect` and their families like `{METHOD}Series` and `{METHOD}Limit` now expect an error as the first callback argument, rather than just a simple boolean. Pass `null` as the first argument, or use `fs.access` instead of `fs.exists`. ([#118](https://github.com/caolan/async/issues/118), [#774](https://github.com/caolan/async/issues/774), [#1028](https://github.com/caolan/async/issues/1028), [#1041](https://github.com/caolan/async/issues/1041))
|
||||
- `{METHOD}` and `{METHOD}Series` are now implemented in terms of `{METHOD}Limit`. This is a major internal simplification, and is not expected to cause many problems, but it does subtly affect how functions execute internally. ([#778](https://github.com/caolan/async/issues/778), [#847](https://github.com/caolan/async/issues/847))
|
||||
- `retry`'s callback is now optional. Previously, omitting the callback would partially apply the function, meaning it could be passed directly as a task to `series` or `auto`. The partially applied "control-flow" behavior has been separated out into `retryable`. ([#1054](https://github.com/caolan/async/issues/1054), [#1058](https://github.com/caolan/async/issues/1058))
|
||||
- The test function for `whilst`, `until`, and `during` used to be passed non-error args from the iteratee function's callback, but this led to weirdness where the first call of the test function would be passed no args. We have made it so the test function is never passed extra arguments, and only the `doWhilst`, `doUntil`, and `doDuring` functions pass iteratee callback arguments to the test function ([#1217](https://github.com/caolan/async/issues/1217), [#1224](https://github.com/caolan/async/issues/1224))
|
||||
- The `q.tasks` array has been renamed `q._tasks` and is now implemented as a doubly linked list (DLL). Any code that used to interact with this array will need to be updated to either use the provided helpers or support DLLs ([#1205](https://github.com/caolan/async/issues/1205)).
|
||||
- The timing of the `q.saturated()` callback in a `queue` has been modified to better reflect when tasks pushed to the queue will start queueing. ([#724](https://github.com/caolan/async/issues/724), [#1078](https://github.com/caolan/async/issues/1078))
|
||||
- Removed `iterator` method in favour of [ES2015 iterator protocol](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators ) which natively supports arrays ([#1237](https://github.com/caolan/async/issues/1237))
|
||||
- Dropped support for Component, Jam, SPM, and Volo ([#1175](https://github.com/caolan/async/issues/1175), #[#176](https://github.com/caolan/async/issues/176))
|
||||
|
||||
## Bug Fixes
|
||||
|
||||
- Improved handling of no dependency cases in `auto` & `autoInject` ([#1147](https://github.com/caolan/async/issues/1147)).
|
||||
- Fixed a bug where the callback generated by `asyncify` with `Promises` could resolve twice ([#1197](https://github.com/caolan/async/issues/1197)).
|
||||
- Fixed several documented optional callbacks not actually being optional ([#1223](https://github.com/caolan/async/issues/1223)).
|
||||
|
||||
## Other
|
||||
|
||||
- Added `someSeries` and `everySeries` for symmetry, as well as a complete set of `any`/`anyLimit`/`anySeries` and `all`/`allLimit`/`allSeries` aliases.
|
||||
- Added `find` as an alias for `detect`. (as well as `findLimit` and `findSeries`).
|
||||
- Various doc fixes ([#1005](https://github.com/caolan/async/issues/1005), [#1008](https://github.com/caolan/async/issues/1008), [#1010](https://github.com/caolan/async/issues/1010), [#1015](https://github.com/caolan/async/issues/1015), [#1021](https://github.com/caolan/async/issues/1021), [#1037](https://github.com/caolan/async/issues/1037), [#1039](https://github.com/caolan/async/issues/1039), [#1051](https://github.com/caolan/async/issues/1051), [#1102](https://github.com/caolan/async/issues/1102), [#1107](https://github.com/caolan/async/issues/1107), [#1121](https://github.com/caolan/async/issues/1121), [#1123](https://github.com/caolan/async/issues/1123), [#1129](https://github.com/caolan/async/issues/1129), [#1135](https://github.com/caolan/async/issues/1135), [#1138](https://github.com/caolan/async/issues/1138), [#1141](https://github.com/caolan/async/issues/1141), [#1153](https://github.com/caolan/async/issues/1153), [#1216](https://github.com/caolan/async/issues/1216), [#1217](https://github.com/caolan/async/issues/1217), [#1232](https://github.com/caolan/async/issues/1232), [#1233](https://github.com/caolan/async/issues/1233), [#1236](https://github.com/caolan/async/issues/1236), [#1238](https://github.com/caolan/async/issues/1238))
|
||||
|
||||
Thank you [**@aearly**](github.com/aearly) and [**@megawac**](github.com/megawac) for taking the lead on version 2 of async.
|
||||
|
||||
------------------------------------------
|
||||
|
||||
# v1.5.2
|
||||
- Allow using `"constructor"` as an argument in `memoize` ([#998](https://github.com/caolan/async/issues/998))
|
||||
- Give a better error message when `auto` dependency checking fails ([#994](https://github.com/caolan/async/issues/994))
|
||||
- Various doc updates ([#936](https://github.com/caolan/async/issues/936), [#956](https://github.com/caolan/async/issues/956), [#979](https://github.com/caolan/async/issues/979), [#1002](https://github.com/caolan/async/issues/1002))
|
||||
|
||||
# v1.5.1
|
||||
- Fix issue with `pause` in `queue` with concurrency enabled ([#946](https://github.com/caolan/async/issues/946))
|
||||
- `while` and `until` now pass the final result to callback ([#963](https://github.com/caolan/async/issues/963))
|
||||
- `auto` will properly handle concurrency when there is no callback ([#966](https://github.com/caolan/async/issues/966))
|
||||
- `auto` will now properly stop execution when an error occurs ([#988](https://github.com/caolan/async/issues/988), [#993](https://github.com/caolan/async/issues/993))
|
||||
- Various doc fixes ([#971](https://github.com/caolan/async/issues/971), [#980](https://github.com/caolan/async/issues/980))
|
||||
|
||||
# v1.5.0
|
||||
|
||||
- Added `transform`, analogous to [`_.transform`](http://lodash.com/docs#transform) ([#892](https://github.com/caolan/async/issues/892))
|
||||
- `map` now returns an object when an object is passed in, rather than array with non-numeric keys. `map` will begin always returning an array with numeric indexes in the next major release. ([#873](https://github.com/caolan/async/issues/873))
|
||||
- `auto` now accepts an optional `concurrency` argument to limit the number of running tasks ([#637](https://github.com/caolan/async/issues/637))
|
||||
- Added `queue#workersList()`, to retrieve the list of currently running tasks. ([#891](https://github.com/caolan/async/issues/891))
|
||||
- Various code simplifications ([#896](https://github.com/caolan/async/issues/896), [#904](https://github.com/caolan/async/issues/904))
|
||||
- Various doc fixes :scroll: ([#890](https://github.com/caolan/async/issues/890), [#894](https://github.com/caolan/async/issues/894), [#903](https://github.com/caolan/async/issues/903), [#905](https://github.com/caolan/async/issues/905), [#912](https://github.com/caolan/async/issues/912))
|
||||
|
||||
# v1.4.2
|
||||
|
||||
- Ensure coverage files don't get published on npm ([#879](https://github.com/caolan/async/issues/879))
|
||||
|
||||
# v1.4.1
|
||||
|
||||
- Add in overlooked `detectLimit` method ([#866](https://github.com/caolan/async/issues/866))
|
||||
- Removed unnecessary files from npm releases ([#861](https://github.com/caolan/async/issues/861))
|
||||
- Removed usage of a reserved word to prevent :boom: in older environments ([#870](https://github.com/caolan/async/issues/870))
|
||||
|
||||
# v1.4.0
|
||||
|
||||
- `asyncify` now supports promises ([#840](https://github.com/caolan/async/issues/840))
|
||||
- Added `Limit` versions of `filter` and `reject` ([#836](https://github.com/caolan/async/issues/836))
|
||||
- Add `Limit` versions of `detect`, `some` and `every` ([#828](https://github.com/caolan/async/issues/828), [#829](https://github.com/caolan/async/issues/829))
|
||||
- `some`, `every` and `detect` now short circuit early ([#828](https://github.com/caolan/async/issues/828), [#829](https://github.com/caolan/async/issues/829))
|
||||
- Improve detection of the global object ([#804](https://github.com/caolan/async/issues/804)), enabling use in WebWorkers
|
||||
- `whilst` now called with arguments from iterator ([#823](https://github.com/caolan/async/issues/823))
|
||||
- `during` now gets called with arguments from iterator ([#824](https://github.com/caolan/async/issues/824))
|
||||
- Code simplifications and optimizations aplenty ([diff](https://github.com/caolan/async/compare/v1.3.0...v1.4.0))
|
||||
|
||||
|
||||
# v1.3.0
|
||||
|
||||
New Features:
|
||||
- Added `constant`
|
||||
- Added `asyncify`/`wrapSync` for making sync functions work with callbacks. ([#671](https://github.com/caolan/async/issues/671), [#806](https://github.com/caolan/async/issues/806))
|
||||
- Added `during` and `doDuring`, which are like `whilst` with an async truth test. ([#800](https://github.com/caolan/async/issues/800))
|
||||
- `retry` now accepts an `interval` parameter to specify a delay between retries. ([#793](https://github.com/caolan/async/issues/793))
|
||||
- `async` should work better in Web Workers due to better `root` detection ([#804](https://github.com/caolan/async/issues/804))
|
||||
- Callbacks are now optional in `whilst`, `doWhilst`, `until`, and `doUntil` ([#642](https://github.com/caolan/async/issues/642))
|
||||
- Various internal updates ([#786](https://github.com/caolan/async/issues/786), [#801](https://github.com/caolan/async/issues/801), [#802](https://github.com/caolan/async/issues/802), [#803](https://github.com/caolan/async/issues/803))
|
||||
- Various doc fixes ([#790](https://github.com/caolan/async/issues/790), [#794](https://github.com/caolan/async/issues/794))
|
||||
|
||||
Bug Fixes:
|
||||
- `cargo` now exposes the `payload` size, and `cargo.payload` can be changed on the fly after the `cargo` is created. ([#740](https://github.com/caolan/async/issues/740), [#744](https://github.com/caolan/async/issues/744), [#783](https://github.com/caolan/async/issues/783))
|
||||
|
||||
|
||||
# v1.2.1
|
||||
|
||||
Bug Fix:
|
||||
|
||||
- Small regression with synchronous iterator behavior in `eachSeries` with a 1-element array. Before 1.1.0, `eachSeries`'s callback was called on the same tick, which this patch restores. In 2.0.0, it will be called on the next tick. ([#782](https://github.com/caolan/async/issues/782))
|
||||
|
||||
|
||||
# v1.2.0
|
||||
|
||||
New Features:
|
||||
|
||||
- Added `timesLimit` ([#743](https://github.com/caolan/async/issues/743))
|
||||
- `concurrency` can be changed after initialization in `queue` by setting `q.concurrency`. The new concurrency will be reflected the next time a task is processed. ([#747](https://github.com/caolan/async/issues/747), [#772](https://github.com/caolan/async/issues/772))
|
||||
|
||||
Bug Fixes:
|
||||
|
||||
- Fixed a regression in `each` and family with empty arrays that have additional properties. ([#775](https://github.com/caolan/async/issues/775), [#777](https://github.com/caolan/async/issues/777))
|
||||
|
||||
|
||||
# v1.1.1
|
||||
|
||||
Bug Fix:
|
||||
|
||||
- Small regression with synchronous iterator behavior in `eachSeries` with a 1-element array. Before 1.1.0, `eachSeries`'s callback was called on the same tick, which this patch restores. In 2.0.0, it will be called on the next tick. ([#782](https://github.com/caolan/async/issues/782))
|
||||
|
||||
|
||||
# v1.1.0
|
||||
|
||||
New Features:
|
||||
|
||||
- `cargo` now supports all of the same methods and event callbacks as `queue`.
|
||||
- Added `ensureAsync` - A wrapper that ensures an async function calls its callback on a later tick. ([#769](https://github.com/caolan/async/issues/769))
|
||||
- Optimized `map`, `eachOf`, and `waterfall` families of functions
|
||||
- Passing a `null` or `undefined` array to `map`, `each`, `parallel` and families will be treated as an empty array ([#667](https://github.com/caolan/async/issues/667)).
|
||||
- The callback is now optional for the composed results of `compose` and `seq`. ([#618](https://github.com/caolan/async/issues/618))
|
||||
- Reduced file size by 4kb, (minified version by 1kb)
|
||||
- Added code coverage through `nyc` and `coveralls` ([#768](https://github.com/caolan/async/issues/768))
|
||||
|
||||
Bug Fixes:
|
||||
|
||||
- `forever` will no longer stack overflow with a synchronous iterator ([#622](https://github.com/caolan/async/issues/622))
|
||||
- `eachLimit` and other limit functions will stop iterating once an error occurs ([#754](https://github.com/caolan/async/issues/754))
|
||||
- Always pass `null` in callbacks when there is no error ([#439](https://github.com/caolan/async/issues/439))
|
||||
- Ensure proper conditions when calling `drain()` after pushing an empty data set to a queue ([#668](https://github.com/caolan/async/issues/668))
|
||||
- `each` and family will properly handle an empty array ([#578](https://github.com/caolan/async/issues/578))
|
||||
- `eachSeries` and family will finish if the underlying array is modified during execution ([#557](https://github.com/caolan/async/issues/557))
|
||||
- `queue` will throw if a non-function is passed to `q.push()` ([#593](https://github.com/caolan/async/issues/593))
|
||||
- Doc fixes ([#629](https://github.com/caolan/async/issues/629), [#766](https://github.com/caolan/async/issues/766))
|
||||
|
||||
|
||||
# v1.0.0
|
||||
|
||||
No known breaking changes, we are simply complying with semver from here on out.
|
||||
|
||||
Changes:
|
||||
|
||||
- Start using a changelog!
|
||||
- Add `forEachOf` for iterating over Objects (or to iterate Arrays with indexes available) ([#168](https://github.com/caolan/async/issues/168) [#704](https://github.com/caolan/async/issues/704) [#321](https://github.com/caolan/async/issues/321))
|
||||
- Detect deadlocks in `auto` ([#663](https://github.com/caolan/async/issues/663))
|
||||
- Better support for require.js ([#527](https://github.com/caolan/async/issues/527))
|
||||
- Throw if queue created with concurrency `0` ([#714](https://github.com/caolan/async/issues/714))
|
||||
- Fix unneeded iteration in `queue.resume()` ([#758](https://github.com/caolan/async/issues/758))
|
||||
- Guard against timer mocking overriding `setImmediate` ([#609](https://github.com/caolan/async/issues/609) [#611](https://github.com/caolan/async/issues/611))
|
||||
- Miscellaneous doc fixes ([#542](https://github.com/caolan/async/issues/542) [#596](https://github.com/caolan/async/issues/596) [#615](https://github.com/caolan/async/issues/615) [#628](https://github.com/caolan/async/issues/628) [#631](https://github.com/caolan/async/issues/631) [#690](https://github.com/caolan/async/issues/690) [#729](https://github.com/caolan/async/issues/729))
|
||||
- Use single noop function internally ([#546](https://github.com/caolan/async/issues/546))
|
||||
- Optimize internal `_each`, `_map` and `_keys` functions.
|
@ -1,19 +0,0 @@
|
||||
Copyright (c) 2010-2018 Caolan McMahon
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
@ -1,56 +0,0 @@
|
||||

|
||||
|
||||
[](https://travis-ci.org/caolan/async)
|
||||
[](https://www.npmjs.com/package/async)
|
||||
[](https://coveralls.io/r/caolan/async?branch=master)
|
||||
[](https://gitter.im/caolan/async?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
[](https://www.libhive.com/providers/npm/packages/async)
|
||||
[](https://www.jsdelivr.com/package/npm/async)
|
||||
|
||||
|
||||
Async is a utility module which provides straight-forward, powerful functions for working with [asynchronous JavaScript](http://caolan.github.io/async/global.html). Although originally designed for use with [Node.js](https://nodejs.org/) and installable via `npm install --save async`, it can also be used directly in the browser.
|
||||
|
||||
This version of the package is optimized for the Node.js environment. If you use Async with webpack, install [`async-es`](https://www.npmjs.com/package/async-es) instead.
|
||||
|
||||
For Documentation, visit <https://caolan.github.io/async/>
|
||||
|
||||
*For Async v1.5.x documentation, go [HERE](https://github.com/caolan/async/blob/v1.5.2/README.md)*
|
||||
|
||||
|
||||
```javascript
|
||||
// for use with Node-style callbacks...
|
||||
var async = require("async");
|
||||
|
||||
var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"};
|
||||
var configs = {};
|
||||
|
||||
async.forEachOf(obj, (value, key, callback) => {
|
||||
fs.readFile(__dirname + value, "utf8", (err, data) => {
|
||||
if (err) return callback(err);
|
||||
try {
|
||||
configs[key] = JSON.parse(data);
|
||||
} catch (e) {
|
||||
return callback(e);
|
||||
}
|
||||
callback();
|
||||
});
|
||||
}, err => {
|
||||
if (err) console.error(err.message);
|
||||
// configs is now a map of JSON data
|
||||
doSomethingWith(configs);
|
||||
});
|
||||
```
|
||||
|
||||
```javascript
|
||||
var async = require("async");
|
||||
|
||||
// ...or ES2017 async functions
|
||||
async.mapLimit(urls, 5, async function(url) {
|
||||
const response = await fetch(url)
|
||||
return response.body
|
||||
}, (err, results) => {
|
||||
if (err) throw err
|
||||
// results is now an array of the response bodies
|
||||
console.log(results)
|
||||
})
|
||||
```
|
@ -1,50 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _createTester = require('./internal/createTester');
|
||||
|
||||
var _createTester2 = _interopRequireDefault(_createTester);
|
||||
|
||||
var _doParallel = require('./internal/doParallel');
|
||||
|
||||
var _doParallel2 = _interopRequireDefault(_doParallel);
|
||||
|
||||
var _notId = require('./internal/notId');
|
||||
|
||||
var _notId2 = _interopRequireDefault(_notId);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* Returns `true` if every element in `coll` satisfies an async test. If any
|
||||
* iteratee call returns `false`, the main `callback` is immediately called.
|
||||
*
|
||||
* @name every
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @alias all
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {AsyncFunction} iteratee - An async truth test to apply to each item
|
||||
* in the collection in parallel.
|
||||
* The iteratee must complete with a boolean result value.
|
||||
* Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called after all the
|
||||
* `iteratee` functions have finished. Result will be either `true` or `false`
|
||||
* depending on the values of the async tests. Invoked with (err, result).
|
||||
* @example
|
||||
*
|
||||
* async.every(['file1','file2','file3'], function(filePath, callback) {
|
||||
* fs.access(filePath, function(err) {
|
||||
* callback(null, !err)
|
||||
* });
|
||||
* }, function(err, result) {
|
||||
* // if result is true then every file exists
|
||||
* });
|
||||
*/
|
||||
exports.default = (0, _doParallel2.default)((0, _createTester2.default)(_notId2.default, _notId2.default));
|
||||
module.exports = exports['default'];
|
@ -1,42 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _createTester = require('./internal/createTester');
|
||||
|
||||
var _createTester2 = _interopRequireDefault(_createTester);
|
||||
|
||||
var _doParallelLimit = require('./internal/doParallelLimit');
|
||||
|
||||
var _doParallelLimit2 = _interopRequireDefault(_doParallelLimit);
|
||||
|
||||
var _notId = require('./internal/notId');
|
||||
|
||||
var _notId2 = _interopRequireDefault(_notId);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time.
|
||||
*
|
||||
* @name everyLimit
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @see [async.every]{@link module:Collections.every}
|
||||
* @alias allLimit
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {number} limit - The maximum number of async operations at a time.
|
||||
* @param {AsyncFunction} iteratee - An async truth test to apply to each item
|
||||
* in the collection in parallel.
|
||||
* The iteratee must complete with a boolean result value.
|
||||
* Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called after all the
|
||||
* `iteratee` functions have finished. Result will be either `true` or `false`
|
||||
* depending on the values of the async tests. Invoked with (err, result).
|
||||
*/
|
||||
exports.default = (0, _doParallelLimit2.default)((0, _createTester2.default)(_notId2.default, _notId2.default));
|
||||
module.exports = exports['default'];
|
@ -1,37 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _everyLimit = require('./everyLimit');
|
||||
|
||||
var _everyLimit2 = _interopRequireDefault(_everyLimit);
|
||||
|
||||
var _doLimit = require('./internal/doLimit');
|
||||
|
||||
var _doLimit2 = _interopRequireDefault(_doLimit);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time.
|
||||
*
|
||||
* @name everySeries
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @see [async.every]{@link module:Collections.every}
|
||||
* @alias allSeries
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {AsyncFunction} iteratee - An async truth test to apply to each item
|
||||
* in the collection in series.
|
||||
* The iteratee must complete with a boolean result value.
|
||||
* Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called after all the
|
||||
* `iteratee` functions have finished. Result will be either `true` or `false`
|
||||
* depending on the values of the async tests. Invoked with (err, result).
|
||||
*/
|
||||
exports.default = (0, _doLimit2.default)(_everyLimit2.default, 1);
|
||||
module.exports = exports['default'];
|
@ -1,52 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _createTester = require('./internal/createTester');
|
||||
|
||||
var _createTester2 = _interopRequireDefault(_createTester);
|
||||
|
||||
var _doParallel = require('./internal/doParallel');
|
||||
|
||||
var _doParallel2 = _interopRequireDefault(_doParallel);
|
||||
|
||||
var _identity = require('lodash/identity');
|
||||
|
||||
var _identity2 = _interopRequireDefault(_identity);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* Returns `true` if at least one element in the `coll` satisfies an async test.
|
||||
* If any iteratee call returns `true`, the main `callback` is immediately
|
||||
* called.
|
||||
*
|
||||
* @name some
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @alias any
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {AsyncFunction} iteratee - An async truth test to apply to each item
|
||||
* in the collections in parallel.
|
||||
* The iteratee should complete with a boolean `result` value.
|
||||
* Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called as soon as any
|
||||
* iteratee returns `true`, or after all the iteratee functions have finished.
|
||||
* Result will be either `true` or `false` depending on the values of the async
|
||||
* tests. Invoked with (err, result).
|
||||
* @example
|
||||
*
|
||||
* async.some(['file1','file2','file3'], function(filePath, callback) {
|
||||
* fs.access(filePath, function(err) {
|
||||
* callback(null, !err)
|
||||
* });
|
||||
* }, function(err, result) {
|
||||
* // if result is true then at least one of the files exists
|
||||
* });
|
||||
*/
|
||||
exports.default = (0, _doParallel2.default)((0, _createTester2.default)(Boolean, _identity2.default));
|
||||
module.exports = exports['default'];
|
@ -1,43 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _createTester = require('./internal/createTester');
|
||||
|
||||
var _createTester2 = _interopRequireDefault(_createTester);
|
||||
|
||||
var _doParallelLimit = require('./internal/doParallelLimit');
|
||||
|
||||
var _doParallelLimit2 = _interopRequireDefault(_doParallelLimit);
|
||||
|
||||
var _identity = require('lodash/identity');
|
||||
|
||||
var _identity2 = _interopRequireDefault(_identity);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time.
|
||||
*
|
||||
* @name someLimit
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @see [async.some]{@link module:Collections.some}
|
||||
* @alias anyLimit
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {number} limit - The maximum number of async operations at a time.
|
||||
* @param {AsyncFunction} iteratee - An async truth test to apply to each item
|
||||
* in the collections in parallel.
|
||||
* The iteratee should complete with a boolean `result` value.
|
||||
* Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called as soon as any
|
||||
* iteratee returns `true`, or after all the iteratee functions have finished.
|
||||
* Result will be either `true` or `false` depending on the values of the async
|
||||
* tests. Invoked with (err, result).
|
||||
*/
|
||||
exports.default = (0, _doParallelLimit2.default)((0, _createTester2.default)(Boolean, _identity2.default));
|
||||
module.exports = exports['default'];
|
@ -1,38 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _someLimit = require('./someLimit');
|
||||
|
||||
var _someLimit2 = _interopRequireDefault(_someLimit);
|
||||
|
||||
var _doLimit = require('./internal/doLimit');
|
||||
|
||||
var _doLimit2 = _interopRequireDefault(_doLimit);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time.
|
||||
*
|
||||
* @name someSeries
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @see [async.some]{@link module:Collections.some}
|
||||
* @alias anySeries
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {AsyncFunction} iteratee - An async truth test to apply to each item
|
||||
* in the collections in series.
|
||||
* The iteratee should complete with a boolean `result` value.
|
||||
* Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called as soon as any
|
||||
* iteratee returns `true`, or after all the iteratee functions have finished.
|
||||
* Result will be either `true` or `false` depending on the values of the async
|
||||
* tests. Invoked with (err, result).
|
||||
*/
|
||||
exports.default = (0, _doLimit2.default)(_someLimit2.default, 1);
|
||||
module.exports = exports['default'];
|
@ -1,68 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
exports.default = function (fn /*, ...args*/) {
|
||||
var args = (0, _slice2.default)(arguments, 1);
|
||||
return function () /*callArgs*/{
|
||||
var callArgs = (0, _slice2.default)(arguments);
|
||||
return fn.apply(null, args.concat(callArgs));
|
||||
};
|
||||
};
|
||||
|
||||
var _slice = require('./internal/slice');
|
||||
|
||||
var _slice2 = _interopRequireDefault(_slice);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
;
|
||||
|
||||
/**
|
||||
* Creates a continuation function with some arguments already applied.
|
||||
*
|
||||
* Useful as a shorthand when combined with other control flow functions. Any
|
||||
* arguments passed to the returned function are added to the arguments
|
||||
* originally passed to apply.
|
||||
*
|
||||
* @name apply
|
||||
* @static
|
||||
* @memberOf module:Utils
|
||||
* @method
|
||||
* @category Util
|
||||
* @param {Function} fn - The function you want to eventually apply all
|
||||
* arguments to. Invokes with (arguments...).
|
||||
* @param {...*} arguments... - Any number of arguments to automatically apply
|
||||
* when the continuation is called.
|
||||
* @returns {Function} the partially-applied function
|
||||
* @example
|
||||
*
|
||||
* // using apply
|
||||
* async.parallel([
|
||||
* async.apply(fs.writeFile, 'testfile1', 'test1'),
|
||||
* async.apply(fs.writeFile, 'testfile2', 'test2')
|
||||
* ]);
|
||||
*
|
||||
*
|
||||
* // the same process without using apply
|
||||
* async.parallel([
|
||||
* function(callback) {
|
||||
* fs.writeFile('testfile1', 'test1', callback);
|
||||
* },
|
||||
* function(callback) {
|
||||
* fs.writeFile('testfile2', 'test2', callback);
|
||||
* }
|
||||
* ]);
|
||||
*
|
||||
* // It's possible to pass any number of additional arguments when calling the
|
||||
* // continuation:
|
||||
*
|
||||
* node> var fn = async.apply(sys.puts, 'one');
|
||||
* node> fn('two', 'three');
|
||||
* one
|
||||
* two
|
||||
* three
|
||||
*/
|
||||
module.exports = exports['default'];
|
@ -1,51 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _applyEach = require('./internal/applyEach');
|
||||
|
||||
var _applyEach2 = _interopRequireDefault(_applyEach);
|
||||
|
||||
var _map = require('./map');
|
||||
|
||||
var _map2 = _interopRequireDefault(_map);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* Applies the provided arguments to each function in the array, calling
|
||||
* `callback` after all functions have completed. If you only provide the first
|
||||
* argument, `fns`, then it will return a function which lets you pass in the
|
||||
* arguments as if it were a single function call. If more arguments are
|
||||
* provided, `callback` is required while `args` is still optional.
|
||||
*
|
||||
* @name applyEach
|
||||
* @static
|
||||
* @memberOf module:ControlFlow
|
||||
* @method
|
||||
* @category Control Flow
|
||||
* @param {Array|Iterable|Object} fns - A collection of {@link AsyncFunction}s
|
||||
* to all call with the same arguments
|
||||
* @param {...*} [args] - any number of separate arguments to pass to the
|
||||
* function.
|
||||
* @param {Function} [callback] - the final argument should be the callback,
|
||||
* called when all functions have completed processing.
|
||||
* @returns {Function} - If only the first argument, `fns`, is provided, it will
|
||||
* return a function which lets you pass in the arguments as if it were a single
|
||||
* function call. The signature is `(..args, callback)`. If invoked with any
|
||||
* arguments, `callback` is required.
|
||||
* @example
|
||||
*
|
||||
* async.applyEach([enableSearch, updateSchema], 'bucket', callback);
|
||||
*
|
||||
* // partial application example:
|
||||
* async.each(
|
||||
* buckets,
|
||||
* async.applyEach([enableSearch, updateSchema]),
|
||||
* callback
|
||||
* );
|
||||
*/
|
||||
exports.default = (0, _applyEach2.default)(_map2.default);
|
||||
module.exports = exports['default'];
|
@ -1,37 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _applyEach = require('./internal/applyEach');
|
||||
|
||||
var _applyEach2 = _interopRequireDefault(_applyEach);
|
||||
|
||||
var _mapSeries = require('./mapSeries');
|
||||
|
||||
var _mapSeries2 = _interopRequireDefault(_mapSeries);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time.
|
||||
*
|
||||
* @name applyEachSeries
|
||||
* @static
|
||||
* @memberOf module:ControlFlow
|
||||
* @method
|
||||
* @see [async.applyEach]{@link module:ControlFlow.applyEach}
|
||||
* @category Control Flow
|
||||
* @param {Array|Iterable|Object} fns - A collection of {@link AsyncFunction}s to all
|
||||
* call with the same arguments
|
||||
* @param {...*} [args] - any number of separate arguments to pass to the
|
||||
* function.
|
||||
* @param {Function} [callback] - the final argument should be the callback,
|
||||
* called when all functions have completed processing.
|
||||
* @returns {Function} - If only the first argument is provided, it will return
|
||||
* a function which lets you pass in the arguments as if it were a single
|
||||
* function call.
|
||||
*/
|
||||
exports.default = (0, _applyEach2.default)(_mapSeries2.default);
|
||||
module.exports = exports['default'];
|
@ -1,110 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = asyncify;
|
||||
|
||||
var _isObject = require('lodash/isObject');
|
||||
|
||||
var _isObject2 = _interopRequireDefault(_isObject);
|
||||
|
||||
var _initialParams = require('./internal/initialParams');
|
||||
|
||||
var _initialParams2 = _interopRequireDefault(_initialParams);
|
||||
|
||||
var _setImmediate = require('./internal/setImmediate');
|
||||
|
||||
var _setImmediate2 = _interopRequireDefault(_setImmediate);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* Take a sync function and make it async, passing its return value to a
|
||||
* callback. This is useful for plugging sync functions into a waterfall,
|
||||
* series, or other async functions. Any arguments passed to the generated
|
||||
* function will be passed to the wrapped function (except for the final
|
||||
* callback argument). Errors thrown will be passed to the callback.
|
||||
*
|
||||
* If the function passed to `asyncify` returns a Promise, that promises's
|
||||
* resolved/rejected state will be used to call the callback, rather than simply
|
||||
* the synchronous return value.
|
||||
*
|
||||
* This also means you can asyncify ES2017 `async` functions.
|
||||
*
|
||||
* @name asyncify
|
||||
* @static
|
||||
* @memberOf module:Utils
|
||||
* @method
|
||||
* @alias wrapSync
|
||||
* @category Util
|
||||
* @param {Function} func - The synchronous function, or Promise-returning
|
||||
* function to convert to an {@link AsyncFunction}.
|
||||
* @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be
|
||||
* invoked with `(args..., callback)`.
|
||||
* @example
|
||||
*
|
||||
* // passing a regular synchronous function
|
||||
* async.waterfall([
|
||||
* async.apply(fs.readFile, filename, "utf8"),
|
||||
* async.asyncify(JSON.parse),
|
||||
* function (data, next) {
|
||||
* // data is the result of parsing the text.
|
||||
* // If there was a parsing error, it would have been caught.
|
||||
* }
|
||||
* ], callback);
|
||||
*
|
||||
* // passing a function returning a promise
|
||||
* async.waterfall([
|
||||
* async.apply(fs.readFile, filename, "utf8"),
|
||||
* async.asyncify(function (contents) {
|
||||
* return db.model.create(contents);
|
||||
* }),
|
||||
* function (model, next) {
|
||||
* // `model` is the instantiated model object.
|
||||
* // If there was an error, this function would be skipped.
|
||||
* }
|
||||
* ], callback);
|
||||
*
|
||||
* // es2017 example, though `asyncify` is not needed if your JS environment
|
||||
* // supports async functions out of the box
|
||||
* var q = async.queue(async.asyncify(async function(file) {
|
||||
* var intermediateStep = await processFile(file);
|
||||
* return await somePromise(intermediateStep)
|
||||
* }));
|
||||
*
|
||||
* q.push(files);
|
||||
*/
|
||||
function asyncify(func) {
|
||||
return (0, _initialParams2.default)(function (args, callback) {
|
||||
var result;
|
||||
try {
|
||||
result = func.apply(this, args);
|
||||
} catch (e) {
|
||||
return callback(e);
|
||||
}
|
||||
// if result is Promise object
|
||||
if ((0, _isObject2.default)(result) && typeof result.then === 'function') {
|
||||
result.then(function (value) {
|
||||
invokeCallback(callback, null, value);
|
||||
}, function (err) {
|
||||
invokeCallback(callback, err.message ? err : new Error(err));
|
||||
});
|
||||
} else {
|
||||
callback(null, result);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function invokeCallback(callback, error, value) {
|
||||
try {
|
||||
callback(error, value);
|
||||
} catch (e) {
|
||||
(0, _setImmediate2.default)(rethrow, e);
|
||||
}
|
||||
}
|
||||
|
||||
function rethrow(error) {
|
||||
throw error;
|
||||
}
|
||||
module.exports = exports['default'];
|
@ -1,289 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
exports.default = function (tasks, concurrency, callback) {
|
||||
if (typeof concurrency === 'function') {
|
||||
// concurrency is optional, shift the args.
|
||||
callback = concurrency;
|
||||
concurrency = null;
|
||||
}
|
||||
callback = (0, _once2.default)(callback || _noop2.default);
|
||||
var keys = (0, _keys2.default)(tasks);
|
||||
var numTasks = keys.length;
|
||||
if (!numTasks) {
|
||||
return callback(null);
|
||||
}
|
||||
if (!concurrency) {
|
||||
concurrency = numTasks;
|
||||
}
|
||||
|
||||
var results = {};
|
||||
var runningTasks = 0;
|
||||
var hasError = false;
|
||||
|
||||
var listeners = Object.create(null);
|
||||
|
||||
var readyTasks = [];
|
||||
|
||||
// for cycle detection:
|
||||
var readyToCheck = []; // tasks that have been identified as reachable
|
||||
// without the possibility of returning to an ancestor task
|
||||
var uncheckedDependencies = {};
|
||||
|
||||
(0, _baseForOwn2.default)(tasks, function (task, key) {
|
||||
if (!(0, _isArray2.default)(task)) {
|
||||
// no dependencies
|
||||
enqueueTask(key, [task]);
|
||||
readyToCheck.push(key);
|
||||
return;
|
||||
}
|
||||
|
||||
var dependencies = task.slice(0, task.length - 1);
|
||||
var remainingDependencies = dependencies.length;
|
||||
if (remainingDependencies === 0) {
|
||||
enqueueTask(key, task);
|
||||
readyToCheck.push(key);
|
||||
return;
|
||||
}
|
||||
uncheckedDependencies[key] = remainingDependencies;
|
||||
|
||||
(0, _arrayEach2.default)(dependencies, function (dependencyName) {
|
||||
if (!tasks[dependencyName]) {
|
||||
throw new Error('async.auto task `' + key + '` has a non-existent dependency `' + dependencyName + '` in ' + dependencies.join(', '));
|
||||
}
|
||||
addListener(dependencyName, function () {
|
||||
remainingDependencies--;
|
||||
if (remainingDependencies === 0) {
|
||||
enqueueTask(key, task);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
checkForDeadlocks();
|
||||
processQueue();
|
||||
|
||||
function enqueueTask(key, task) {
|
||||
readyTasks.push(function () {
|
||||
runTask(key, task);
|
||||
});
|
||||
}
|
||||
|
||||
function processQueue() {
|
||||
if (readyTasks.length === 0 && runningTasks === 0) {
|
||||
return callback(null, results);
|
||||
}
|
||||
while (readyTasks.length && runningTasks < concurrency) {
|
||||
var run = readyTasks.shift();
|
||||
run();
|
||||
}
|
||||
}
|
||||
|
||||
function addListener(taskName, fn) {
|
||||
var taskListeners = listeners[taskName];
|
||||
if (!taskListeners) {
|
||||
taskListeners = listeners[taskName] = [];
|
||||
}
|
||||
|
||||
taskListeners.push(fn);
|
||||
}
|
||||
|
||||
function taskComplete(taskName) {
|
||||
var taskListeners = listeners[taskName] || [];
|
||||
(0, _arrayEach2.default)(taskListeners, function (fn) {
|
||||
fn();
|
||||
});
|
||||
processQueue();
|
||||
}
|
||||
|
||||
function runTask(key, task) {
|
||||
if (hasError) return;
|
||||
|
||||
var taskCallback = (0, _onlyOnce2.default)(function (err, result) {
|
||||
runningTasks--;
|
||||
if (arguments.length > 2) {
|
||||
result = (0, _slice2.default)(arguments, 1);
|
||||
}
|
||||
if (err) {
|
||||
var safeResults = {};
|
||||
(0, _baseForOwn2.default)(results, function (val, rkey) {
|
||||
safeResults[rkey] = val;
|
||||
});
|
||||
safeResults[key] = result;
|
||||
hasError = true;
|
||||
listeners = Object.create(null);
|
||||
|
||||
callback(err, safeResults);
|
||||
} else {
|
||||
results[key] = result;
|
||||
taskComplete(key);
|
||||
}
|
||||
});
|
||||
|
||||
runningTasks++;
|
||||
var taskFn = (0, _wrapAsync2.default)(task[task.length - 1]);
|
||||
if (task.length > 1) {
|
||||
taskFn(results, taskCallback);
|
||||
} else {
|
||||
taskFn(taskCallback);
|
||||
}
|
||||
}
|
||||
|
||||
function checkForDeadlocks() {
|
||||
// Kahn's algorithm
|
||||
// https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm
|
||||
// http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html
|
||||
var currentTask;
|
||||
var counter = 0;
|
||||
while (readyToCheck.length) {
|
||||
currentTask = readyToCheck.pop();
|
||||
counter++;
|
||||
(0, _arrayEach2.default)(getDependents(currentTask), function (dependent) {
|
||||
if (--uncheckedDependencies[dependent] === 0) {
|
||||
readyToCheck.push(dependent);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (counter !== numTasks) {
|
||||
throw new Error('async.auto cannot execute tasks due to a recursive dependency');
|
||||
}
|
||||
}
|
||||
|
||||
function getDependents(taskName) {
|
||||
var result = [];
|
||||
(0, _baseForOwn2.default)(tasks, function (task, key) {
|
||||
if ((0, _isArray2.default)(task) && (0, _baseIndexOf2.default)(task, taskName, 0) >= 0) {
|
||||
result.push(key);
|
||||
}
|
||||
});
|
||||
return result;
|
||||
}
|
||||
};
|
||||
|
||||
var _arrayEach = require('lodash/_arrayEach');
|
||||
|
||||
var _arrayEach2 = _interopRequireDefault(_arrayEach);
|
||||
|
||||
var _baseForOwn = require('lodash/_baseForOwn');
|
||||
|
||||
var _baseForOwn2 = _interopRequireDefault(_baseForOwn);
|
||||
|
||||
var _baseIndexOf = require('lodash/_baseIndexOf');
|
||||
|
||||
var _baseIndexOf2 = _interopRequireDefault(_baseIndexOf);
|
||||
|
||||
var _isArray = require('lodash/isArray');
|
||||
|
||||
var _isArray2 = _interopRequireDefault(_isArray);
|
||||
|
||||
var _keys = require('lodash/keys');
|
||||
|
||||
var _keys2 = _interopRequireDefault(_keys);
|
||||
|
||||
var _noop = require('lodash/noop');
|
||||
|
||||
var _noop2 = _interopRequireDefault(_noop);
|
||||
|
||||
var _slice = require('./internal/slice');
|
||||
|
||||
var _slice2 = _interopRequireDefault(_slice);
|
||||
|
||||
var _once = require('./internal/once');
|
||||
|
||||
var _once2 = _interopRequireDefault(_once);
|
||||
|
||||
var _onlyOnce = require('./internal/onlyOnce');
|
||||
|
||||
var _onlyOnce2 = _interopRequireDefault(_onlyOnce);
|
||||
|
||||
var _wrapAsync = require('./internal/wrapAsync');
|
||||
|
||||
var _wrapAsync2 = _interopRequireDefault(_wrapAsync);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
module.exports = exports['default'];
|
||||
|
||||
/**
|
||||
* Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on
|
||||
* their requirements. Each function can optionally depend on other functions
|
||||
* being completed first, and each function is run as soon as its requirements
|
||||
* are satisfied.
|
||||
*
|
||||
* If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence
|
||||
* will stop. Further tasks will not execute (so any other functions depending
|
||||
* on it will not run), and the main `callback` is immediately called with the
|
||||
* error.
|
||||
*
|
||||
* {@link AsyncFunction}s also receive an object containing the results of functions which
|
||||
* have completed so far as the first argument, if they have dependencies. If a
|
||||
* task function has no dependencies, it will only be passed a callback.
|
||||
*
|
||||
* @name auto
|
||||
* @static
|
||||
* @memberOf module:ControlFlow
|
||||
* @method
|
||||
* @category Control Flow
|
||||
* @param {Object} tasks - An object. Each of its properties is either a
|
||||
* function or an array of requirements, with the {@link AsyncFunction} itself the last item
|
||||
* in the array. The object's key of a property serves as the name of the task
|
||||
* defined by that property, i.e. can be used when specifying requirements for
|
||||
* other tasks. The function receives one or two arguments:
|
||||
* * a `results` object, containing the results of the previously executed
|
||||
* functions, only passed if the task has any dependencies,
|
||||
* * a `callback(err, result)` function, which must be called when finished,
|
||||
* passing an `error` (which can be `null`) and the result of the function's
|
||||
* execution.
|
||||
* @param {number} [concurrency=Infinity] - An optional `integer` for
|
||||
* determining the maximum number of tasks that can be run in parallel. By
|
||||
* default, as many as possible.
|
||||
* @param {Function} [callback] - An optional callback which is called when all
|
||||
* the tasks have been completed. It receives the `err` argument if any `tasks`
|
||||
* pass an error to their callback. Results are always returned; however, if an
|
||||
* error occurs, no further `tasks` will be performed, and the results object
|
||||
* will only contain partial results. Invoked with (err, results).
|
||||
* @returns undefined
|
||||
* @example
|
||||
*
|
||||
* async.auto({
|
||||
* // this function will just be passed a callback
|
||||
* readData: async.apply(fs.readFile, 'data.txt', 'utf-8'),
|
||||
* showData: ['readData', function(results, cb) {
|
||||
* // results.readData is the file's contents
|
||||
* // ...
|
||||
* }]
|
||||
* }, callback);
|
||||
*
|
||||
* async.auto({
|
||||
* get_data: function(callback) {
|
||||
* console.log('in get_data');
|
||||
* // async code to get some data
|
||||
* callback(null, 'data', 'converted to array');
|
||||
* },
|
||||
* make_folder: function(callback) {
|
||||
* console.log('in make_folder');
|
||||
* // async code to create a directory to store a file in
|
||||
* // this is run at the same time as getting the data
|
||||
* callback(null, 'folder');
|
||||
* },
|
||||
* write_file: ['get_data', 'make_folder', function(results, callback) {
|
||||
* console.log('in write_file', JSON.stringify(results));
|
||||
* // once there is some data and the directory exists,
|
||||
* // write the data to a file in the directory
|
||||
* callback(null, 'filename');
|
||||
* }],
|
||||
* email_link: ['write_file', function(results, callback) {
|
||||
* console.log('in email_link', JSON.stringify(results));
|
||||
* // once the file is written let's email a link to it...
|
||||
* // results.write_file contains the filename returned by write_file.
|
||||
* callback(null, {'file':results.write_file, 'email':'user@example.com'});
|
||||
* }]
|
||||
* }, function(err, results) {
|
||||
* console.log('err = ', err);
|
||||
* console.log('results = ', results);
|
||||
* });
|
||||
*/
|
@ -1,170 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = autoInject;
|
||||
|
||||
var _auto = require('./auto');
|
||||
|
||||
var _auto2 = _interopRequireDefault(_auto);
|
||||
|
||||
var _baseForOwn = require('lodash/_baseForOwn');
|
||||
|
||||
var _baseForOwn2 = _interopRequireDefault(_baseForOwn);
|
||||
|
||||
var _arrayMap = require('lodash/_arrayMap');
|
||||
|
||||
var _arrayMap2 = _interopRequireDefault(_arrayMap);
|
||||
|
||||
var _isArray = require('lodash/isArray');
|
||||
|
||||
var _isArray2 = _interopRequireDefault(_isArray);
|
||||
|
||||
var _trim = require('lodash/trim');
|
||||
|
||||
var _trim2 = _interopRequireDefault(_trim);
|
||||
|
||||
var _wrapAsync = require('./internal/wrapAsync');
|
||||
|
||||
var _wrapAsync2 = _interopRequireDefault(_wrapAsync);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
var FN_ARGS = /^(?:async\s+)?(function)?\s*[^\(]*\(\s*([^\)]*)\)/m;
|
||||
var FN_ARG_SPLIT = /,/;
|
||||
var FN_ARG = /(=.+)?(\s*)$/;
|
||||
var STRIP_COMMENTS = /((\/\/.*$)|(\/\*[\s\S]*?\*\/))/mg;
|
||||
|
||||
function parseParams(func) {
|
||||
func = func.toString().replace(STRIP_COMMENTS, '');
|
||||
func = func.match(FN_ARGS)[2].replace(' ', '');
|
||||
func = func ? func.split(FN_ARG_SPLIT) : [];
|
||||
func = func.map(function (arg) {
|
||||
return (0, _trim2.default)(arg.replace(FN_ARG, ''));
|
||||
});
|
||||
return func;
|
||||
}
|
||||
|
||||
/**
|
||||
* A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent
|
||||
* tasks are specified as parameters to the function, after the usual callback
|
||||
* parameter, with the parameter names matching the names of the tasks it
|
||||
* depends on. This can provide even more readable task graphs which can be
|
||||
* easier to maintain.
|
||||
*
|
||||
* If a final callback is specified, the task results are similarly injected,
|
||||
* specified as named parameters after the initial error parameter.
|
||||
*
|
||||
* The autoInject function is purely syntactic sugar and its semantics are
|
||||
* otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}.
|
||||
*
|
||||
* @name autoInject
|
||||
* @static
|
||||
* @memberOf module:ControlFlow
|
||||
* @method
|
||||
* @see [async.auto]{@link module:ControlFlow.auto}
|
||||
* @category Control Flow
|
||||
* @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of
|
||||
* the form 'func([dependencies...], callback). The object's key of a property
|
||||
* serves as the name of the task defined by that property, i.e. can be used
|
||||
* when specifying requirements for other tasks.
|
||||
* * The `callback` parameter is a `callback(err, result)` which must be called
|
||||
* when finished, passing an `error` (which can be `null`) and the result of
|
||||
* the function's execution. The remaining parameters name other tasks on
|
||||
* which the task is dependent, and the results from those tasks are the
|
||||
* arguments of those parameters.
|
||||
* @param {Function} [callback] - An optional callback which is called when all
|
||||
* the tasks have been completed. It receives the `err` argument if any `tasks`
|
||||
* pass an error to their callback, and a `results` object with any completed
|
||||
* task results, similar to `auto`.
|
||||
* @example
|
||||
*
|
||||
* // The example from `auto` can be rewritten as follows:
|
||||
* async.autoInject({
|
||||
* get_data: function(callback) {
|
||||
* // async code to get some data
|
||||
* callback(null, 'data', 'converted to array');
|
||||
* },
|
||||
* make_folder: function(callback) {
|
||||
* // async code to create a directory to store a file in
|
||||
* // this is run at the same time as getting the data
|
||||
* callback(null, 'folder');
|
||||
* },
|
||||
* write_file: function(get_data, make_folder, callback) {
|
||||
* // once there is some data and the directory exists,
|
||||
* // write the data to a file in the directory
|
||||
* callback(null, 'filename');
|
||||
* },
|
||||
* email_link: function(write_file, callback) {
|
||||
* // once the file is written let's email a link to it...
|
||||
* // write_file contains the filename returned by write_file.
|
||||
* callback(null, {'file':write_file, 'email':'user@example.com'});
|
||||
* }
|
||||
* }, function(err, results) {
|
||||
* console.log('err = ', err);
|
||||
* console.log('email_link = ', results.email_link);
|
||||
* });
|
||||
*
|
||||
* // If you are using a JS minifier that mangles parameter names, `autoInject`
|
||||
* // will not work with plain functions, since the parameter names will be
|
||||
* // collapsed to a single letter identifier. To work around this, you can
|
||||
* // explicitly specify the names of the parameters your task function needs
|
||||
* // in an array, similar to Angular.js dependency injection.
|
||||
*
|
||||
* // This still has an advantage over plain `auto`, since the results a task
|
||||
* // depends on are still spread into arguments.
|
||||
* async.autoInject({
|
||||
* //...
|
||||
* write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) {
|
||||
* callback(null, 'filename');
|
||||
* }],
|
||||
* email_link: ['write_file', function(write_file, callback) {
|
||||
* callback(null, {'file':write_file, 'email':'user@example.com'});
|
||||
* }]
|
||||
* //...
|
||||
* }, function(err, results) {
|
||||
* console.log('err = ', err);
|
||||
* console.log('email_link = ', results.email_link);
|
||||
* });
|
||||
*/
|
||||
function autoInject(tasks, callback) {
|
||||
var newTasks = {};
|
||||
|
||||
(0, _baseForOwn2.default)(tasks, function (taskFn, key) {
|
||||
var params;
|
||||
var fnIsAsync = (0, _wrapAsync.isAsync)(taskFn);
|
||||
var hasNoDeps = !fnIsAsync && taskFn.length === 1 || fnIsAsync && taskFn.length === 0;
|
||||
|
||||
if ((0, _isArray2.default)(taskFn)) {
|
||||
params = taskFn.slice(0, -1);
|
||||
taskFn = taskFn[taskFn.length - 1];
|
||||
|
||||
newTasks[key] = params.concat(params.length > 0 ? newTask : taskFn);
|
||||
} else if (hasNoDeps) {
|
||||
// no dependencies, use the function as-is
|
||||
newTasks[key] = taskFn;
|
||||
} else {
|
||||
params = parseParams(taskFn);
|
||||
if (taskFn.length === 0 && !fnIsAsync && params.length === 0) {
|
||||
throw new Error("autoInject task functions require explicit parameters.");
|
||||
}
|
||||
|
||||
// remove callback param
|
||||
if (!fnIsAsync) params.pop();
|
||||
|
||||
newTasks[key] = params.concat(newTask);
|
||||
}
|
||||
|
||||
function newTask(results, taskCb) {
|
||||
var newArgs = (0, _arrayMap2.default)(params, function (name) {
|
||||
return results[name];
|
||||
});
|
||||
newArgs.push(taskCb);
|
||||
(0, _wrapAsync2.default)(taskFn).apply(null, newArgs);
|
||||
}
|
||||
});
|
||||
|
||||
(0, _auto2.default)(newTasks, callback);
|
||||
}
|
||||
module.exports = exports['default'];
|
@ -1,17 +0,0 @@
|
||||
{
|
||||
"name": "async",
|
||||
"main": "dist/async.js",
|
||||
"ignore": [
|
||||
"bower_components",
|
||||
"lib",
|
||||
"mocha_test",
|
||||
"node_modules",
|
||||
"perf",
|
||||
"support",
|
||||
"**/.*",
|
||||
"*.config.js",
|
||||
"*.json",
|
||||
"index.js",
|
||||
"Makefile"
|
||||
]
|
||||
}
|
@ -1,94 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = cargo;
|
||||
|
||||
var _queue = require('./internal/queue');
|
||||
|
||||
var _queue2 = _interopRequireDefault(_queue);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* A cargo of tasks for the worker function to complete. Cargo inherits all of
|
||||
* the same methods and event callbacks as [`queue`]{@link module:ControlFlow.queue}.
|
||||
* @typedef {Object} CargoObject
|
||||
* @memberOf module:ControlFlow
|
||||
* @property {Function} length - A function returning the number of items
|
||||
* waiting to be processed. Invoke like `cargo.length()`.
|
||||
* @property {number} payload - An `integer` for determining how many tasks
|
||||
* should be process per round. This property can be changed after a `cargo` is
|
||||
* created to alter the payload on-the-fly.
|
||||
* @property {Function} push - Adds `task` to the `queue`. The callback is
|
||||
* called once the `worker` has finished processing the task. Instead of a
|
||||
* single task, an array of `tasks` can be submitted. The respective callback is
|
||||
* used for every task in the list. Invoke like `cargo.push(task, [callback])`.
|
||||
* @property {Function} saturated - A callback that is called when the
|
||||
* `queue.length()` hits the concurrency and further tasks will be queued.
|
||||
* @property {Function} empty - A callback that is called when the last item
|
||||
* from the `queue` is given to a `worker`.
|
||||
* @property {Function} drain - A callback that is called when the last item
|
||||
* from the `queue` has returned from the `worker`.
|
||||
* @property {Function} idle - a function returning false if there are items
|
||||
* waiting or being processed, or true if not. Invoke like `cargo.idle()`.
|
||||
* @property {Function} pause - a function that pauses the processing of tasks
|
||||
* until `resume()` is called. Invoke like `cargo.pause()`.
|
||||
* @property {Function} resume - a function that resumes the processing of
|
||||
* queued tasks when the queue is paused. Invoke like `cargo.resume()`.
|
||||
* @property {Function} kill - a function that removes the `drain` callback and
|
||||
* empties remaining tasks from the queue forcing it to go idle. Invoke like `cargo.kill()`.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Creates a `cargo` object with the specified payload. Tasks added to the
|
||||
* cargo will be processed altogether (up to the `payload` limit). If the
|
||||
* `worker` is in progress, the task is queued until it becomes available. Once
|
||||
* the `worker` has completed some tasks, each callback of those tasks is
|
||||
* called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966)
|
||||
* for how `cargo` and `queue` work.
|
||||
*
|
||||
* While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers
|
||||
* at a time, cargo passes an array of tasks to a single worker, repeating
|
||||
* when the worker is finished.
|
||||
*
|
||||
* @name cargo
|
||||
* @static
|
||||
* @memberOf module:ControlFlow
|
||||
* @method
|
||||
* @see [async.queue]{@link module:ControlFlow.queue}
|
||||
* @category Control Flow
|
||||
* @param {AsyncFunction} worker - An asynchronous function for processing an array
|
||||
* of queued tasks. Invoked with `(tasks, callback)`.
|
||||
* @param {number} [payload=Infinity] - An optional `integer` for determining
|
||||
* how many tasks should be processed per round; if omitted, the default is
|
||||
* unlimited.
|
||||
* @returns {module:ControlFlow.CargoObject} A cargo object to manage the tasks. Callbacks can
|
||||
* attached as certain properties to listen for specific events during the
|
||||
* lifecycle of the cargo and inner queue.
|
||||
* @example
|
||||
*
|
||||
* // create a cargo object with payload 2
|
||||
* var cargo = async.cargo(function(tasks, callback) {
|
||||
* for (var i=0; i<tasks.length; i++) {
|
||||
* console.log('hello ' + tasks[i].name);
|
||||
* }
|
||||
* callback();
|
||||
* }, 2);
|
||||
*
|
||||
* // add some items
|
||||
* cargo.push({name: 'foo'}, function(err) {
|
||||
* console.log('finished processing foo');
|
||||
* });
|
||||
* cargo.push({name: 'bar'}, function(err) {
|
||||
* console.log('finished processing bar');
|
||||
* });
|
||||
* cargo.push({name: 'baz'}, function(err) {
|
||||
* console.log('finished processing baz');
|
||||
* });
|
||||
*/
|
||||
function cargo(worker, payload) {
|
||||
return (0, _queue2.default)(worker, 1, payload);
|
||||
}
|
||||
module.exports = exports['default'];
|
@ -1,58 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
exports.default = function () /*...args*/{
|
||||
return _seq2.default.apply(null, (0, _slice2.default)(arguments).reverse());
|
||||
};
|
||||
|
||||
var _seq = require('./seq');
|
||||
|
||||
var _seq2 = _interopRequireDefault(_seq);
|
||||
|
||||
var _slice = require('./internal/slice');
|
||||
|
||||
var _slice2 = _interopRequireDefault(_slice);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
;
|
||||
|
||||
/**
|
||||
* Creates a function which is a composition of the passed asynchronous
|
||||
* functions. Each function consumes the return value of the function that
|
||||
* follows. Composing functions `f()`, `g()`, and `h()` would produce the result
|
||||
* of `f(g(h()))`, only this version uses callbacks to obtain the return values.
|
||||
*
|
||||
* Each function is executed with the `this` binding of the composed function.
|
||||
*
|
||||
* @name compose
|
||||
* @static
|
||||
* @memberOf module:ControlFlow
|
||||
* @method
|
||||
* @category Control Flow
|
||||
* @param {...AsyncFunction} functions - the asynchronous functions to compose
|
||||
* @returns {Function} an asynchronous function that is the composed
|
||||
* asynchronous `functions`
|
||||
* @example
|
||||
*
|
||||
* function add1(n, callback) {
|
||||
* setTimeout(function () {
|
||||
* callback(null, n + 1);
|
||||
* }, 10);
|
||||
* }
|
||||
*
|
||||
* function mul3(n, callback) {
|
||||
* setTimeout(function () {
|
||||
* callback(null, n * 3);
|
||||
* }, 10);
|
||||
* }
|
||||
*
|
||||
* var add1mul3 = async.compose(mul3, add1);
|
||||
* add1mul3(4, function (err, result) {
|
||||
* // result now equals 15
|
||||
* });
|
||||
*/
|
||||
module.exports = exports['default'];
|
@ -1,43 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _doLimit = require('./internal/doLimit');
|
||||
|
||||
var _doLimit2 = _interopRequireDefault(_doLimit);
|
||||
|
||||
var _concatLimit = require('./concatLimit');
|
||||
|
||||
var _concatLimit2 = _interopRequireDefault(_concatLimit);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* Applies `iteratee` to each item in `coll`, concatenating the results. Returns
|
||||
* the concatenated list. The `iteratee`s are called in parallel, and the
|
||||
* results are concatenated as they return. There is no guarantee that the
|
||||
* results array will be returned in the original order of `coll` passed to the
|
||||
* `iteratee` function.
|
||||
*
|
||||
* @name concat
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {AsyncFunction} iteratee - A function to apply to each item in `coll`,
|
||||
* which should use an array as its result. Invoked with (item, callback).
|
||||
* @param {Function} [callback(err)] - A callback which is called after all the
|
||||
* `iteratee` functions have finished, or an error occurs. Results is an array
|
||||
* containing the concatenated results of the `iteratee` function. Invoked with
|
||||
* (err, results).
|
||||
* @example
|
||||
*
|
||||
* async.concat(['dir1','dir2','dir3'], fs.readdir, function(err, files) {
|
||||
* // files is now a list of filenames that exist in the 3 directories
|
||||
* });
|
||||
*/
|
||||
exports.default = (0, _doLimit2.default)(_concatLimit2.default, Infinity);
|
||||
module.exports = exports['default'];
|
@ -1,65 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
exports.default = function (coll, limit, iteratee, callback) {
|
||||
callback = callback || _noop2.default;
|
||||
var _iteratee = (0, _wrapAsync2.default)(iteratee);
|
||||
(0, _mapLimit2.default)(coll, limit, function (val, callback) {
|
||||
_iteratee(val, function (err /*, ...args*/) {
|
||||
if (err) return callback(err);
|
||||
return callback(null, (0, _slice2.default)(arguments, 1));
|
||||
});
|
||||
}, function (err, mapResults) {
|
||||
var result = [];
|
||||
for (var i = 0; i < mapResults.length; i++) {
|
||||
if (mapResults[i]) {
|
||||
result = _concat.apply(result, mapResults[i]);
|
||||
}
|
||||
}
|
||||
|
||||
return callback(err, result);
|
||||
});
|
||||
};
|
||||
|
||||
var _noop = require('lodash/noop');
|
||||
|
||||
var _noop2 = _interopRequireDefault(_noop);
|
||||
|
||||
var _wrapAsync = require('./internal/wrapAsync');
|
||||
|
||||
var _wrapAsync2 = _interopRequireDefault(_wrapAsync);
|
||||
|
||||
var _slice = require('./internal/slice');
|
||||
|
||||
var _slice2 = _interopRequireDefault(_slice);
|
||||
|
||||
var _mapLimit = require('./mapLimit');
|
||||
|
||||
var _mapLimit2 = _interopRequireDefault(_mapLimit);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
var _concat = Array.prototype.concat;
|
||||
|
||||
/**
|
||||
* The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time.
|
||||
*
|
||||
* @name concatLimit
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @see [async.concat]{@link module:Collections.concat}
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {number} limit - The maximum number of async operations at a time.
|
||||
* @param {AsyncFunction} iteratee - A function to apply to each item in `coll`,
|
||||
* which should use an array as its result. Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called after all the
|
||||
* `iteratee` functions have finished, or an error occurs. Results is an array
|
||||
* containing the concatenated results of the `iteratee` function. Invoked with
|
||||
* (err, results).
|
||||
*/
|
||||
module.exports = exports['default'];
|
@ -1,36 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _doLimit = require('./internal/doLimit');
|
||||
|
||||
var _doLimit2 = _interopRequireDefault(_doLimit);
|
||||
|
||||
var _concatLimit = require('./concatLimit');
|
||||
|
||||
var _concatLimit2 = _interopRequireDefault(_concatLimit);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time.
|
||||
*
|
||||
* @name concatSeries
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @see [async.concat]{@link module:Collections.concat}
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {AsyncFunction} iteratee - A function to apply to each item in `coll`.
|
||||
* The iteratee should complete with an array an array of results.
|
||||
* Invoked with (item, callback).
|
||||
* @param {Function} [callback(err)] - A callback which is called after all the
|
||||
* `iteratee` functions have finished, or an error occurs. Results is an array
|
||||
* containing the concatenated results of the `iteratee` function. Invoked with
|
||||
* (err, results).
|
||||
*/
|
||||
exports.default = (0, _doLimit2.default)(_concatLimit2.default, 1);
|
||||
module.exports = exports['default'];
|
@ -1,66 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
exports.default = function () /*...values*/{
|
||||
var values = (0, _slice2.default)(arguments);
|
||||
var args = [null].concat(values);
|
||||
return function () /*...ignoredArgs, callback*/{
|
||||
var callback = arguments[arguments.length - 1];
|
||||
return callback.apply(this, args);
|
||||
};
|
||||
};
|
||||
|
||||
var _slice = require('./internal/slice');
|
||||
|
||||
var _slice2 = _interopRequireDefault(_slice);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
;
|
||||
|
||||
/**
|
||||
* Returns a function that when called, calls-back with the values provided.
|
||||
* Useful as the first function in a [`waterfall`]{@link module:ControlFlow.waterfall}, or for plugging values in to
|
||||
* [`auto`]{@link module:ControlFlow.auto}.
|
||||
*
|
||||
* @name constant
|
||||
* @static
|
||||
* @memberOf module:Utils
|
||||
* @method
|
||||
* @category Util
|
||||
* @param {...*} arguments... - Any number of arguments to automatically invoke
|
||||
* callback with.
|
||||
* @returns {AsyncFunction} Returns a function that when invoked, automatically
|
||||
* invokes the callback with the previous given arguments.
|
||||
* @example
|
||||
*
|
||||
* async.waterfall([
|
||||
* async.constant(42),
|
||||
* function (value, next) {
|
||||
* // value === 42
|
||||
* },
|
||||
* //...
|
||||
* ], callback);
|
||||
*
|
||||
* async.waterfall([
|
||||
* async.constant(filename, "utf8"),
|
||||
* fs.readFile,
|
||||
* function (fileData, next) {
|
||||
* //...
|
||||
* }
|
||||
* //...
|
||||
* ], callback);
|
||||
*
|
||||
* async.auto({
|
||||
* hostname: async.constant("https://server.net/"),
|
||||
* port: findFreePort,
|
||||
* launchServer: ["hostname", "port", function (options, cb) {
|
||||
* startServer(options, cb);
|
||||
* }],
|
||||
* //...
|
||||
* }, callback);
|
||||
*/
|
||||
module.exports = exports['default'];
|
@ -1,61 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _identity = require('lodash/identity');
|
||||
|
||||
var _identity2 = _interopRequireDefault(_identity);
|
||||
|
||||
var _createTester = require('./internal/createTester');
|
||||
|
||||
var _createTester2 = _interopRequireDefault(_createTester);
|
||||
|
||||
var _doParallel = require('./internal/doParallel');
|
||||
|
||||
var _doParallel2 = _interopRequireDefault(_doParallel);
|
||||
|
||||
var _findGetResult = require('./internal/findGetResult');
|
||||
|
||||
var _findGetResult2 = _interopRequireDefault(_findGetResult);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* Returns the first value in `coll` that passes an async truth test. The
|
||||
* `iteratee` is applied in parallel, meaning the first iteratee to return
|
||||
* `true` will fire the detect `callback` with that result. That means the
|
||||
* result might not be the first item in the original `coll` (in terms of order)
|
||||
* that passes the test.
|
||||
|
||||
* If order within the original `coll` is important, then look at
|
||||
* [`detectSeries`]{@link module:Collections.detectSeries}.
|
||||
*
|
||||
* @name detect
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @alias find
|
||||
* @category Collections
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`.
|
||||
* The iteratee must complete with a boolean value as its result.
|
||||
* Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called as soon as any
|
||||
* iteratee returns `true`, or after all the `iteratee` functions have finished.
|
||||
* Result will be the first item in the array that passes the truth test
|
||||
* (iteratee) or the value `undefined` if none passed. Invoked with
|
||||
* (err, result).
|
||||
* @example
|
||||
*
|
||||
* async.detect(['file1','file2','file3'], function(filePath, callback) {
|
||||
* fs.access(filePath, function(err) {
|
||||
* callback(null, !err)
|
||||
* });
|
||||
* }, function(err, result) {
|
||||
* // result now equals the first file in the list that exists
|
||||
* });
|
||||
*/
|
||||
exports.default = (0, _doParallel2.default)((0, _createTester2.default)(_identity2.default, _findGetResult2.default));
|
||||
module.exports = exports['default'];
|
@ -1,48 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _identity = require('lodash/identity');
|
||||
|
||||
var _identity2 = _interopRequireDefault(_identity);
|
||||
|
||||
var _createTester = require('./internal/createTester');
|
||||
|
||||
var _createTester2 = _interopRequireDefault(_createTester);
|
||||
|
||||
var _doParallelLimit = require('./internal/doParallelLimit');
|
||||
|
||||
var _doParallelLimit2 = _interopRequireDefault(_doParallelLimit);
|
||||
|
||||
var _findGetResult = require('./internal/findGetResult');
|
||||
|
||||
var _findGetResult2 = _interopRequireDefault(_findGetResult);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a
|
||||
* time.
|
||||
*
|
||||
* @name detectLimit
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @see [async.detect]{@link module:Collections.detect}
|
||||
* @alias findLimit
|
||||
* @category Collections
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {number} limit - The maximum number of async operations at a time.
|
||||
* @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`.
|
||||
* The iteratee must complete with a boolean value as its result.
|
||||
* Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called as soon as any
|
||||
* iteratee returns `true`, or after all the `iteratee` functions have finished.
|
||||
* Result will be the first item in the array that passes the truth test
|
||||
* (iteratee) or the value `undefined` if none passed. Invoked with
|
||||
* (err, result).
|
||||
*/
|
||||
exports.default = (0, _doParallelLimit2.default)((0, _createTester2.default)(_identity2.default, _findGetResult2.default));
|
||||
module.exports = exports['default'];
|
@ -1,38 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _detectLimit = require('./detectLimit');
|
||||
|
||||
var _detectLimit2 = _interopRequireDefault(_detectLimit);
|
||||
|
||||
var _doLimit = require('./internal/doLimit');
|
||||
|
||||
var _doLimit2 = _interopRequireDefault(_doLimit);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time.
|
||||
*
|
||||
* @name detectSeries
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @see [async.detect]{@link module:Collections.detect}
|
||||
* @alias findSeries
|
||||
* @category Collections
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`.
|
||||
* The iteratee must complete with a boolean value as its result.
|
||||
* Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called as soon as any
|
||||
* iteratee returns `true`, or after all the `iteratee` functions have finished.
|
||||
* Result will be the first item in the array that passes the truth test
|
||||
* (iteratee) or the value `undefined` if none passed. Invoked with
|
||||
* (err, result).
|
||||
*/
|
||||
exports.default = (0, _doLimit2.default)(_detectLimit2.default, 1);
|
||||
module.exports = exports['default'];
|
@ -1,43 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _consoleFunc = require('./internal/consoleFunc');
|
||||
|
||||
var _consoleFunc2 = _interopRequireDefault(_consoleFunc);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* Logs the result of an [`async` function]{@link AsyncFunction} to the
|
||||
* `console` using `console.dir` to display the properties of the resulting object.
|
||||
* Only works in Node.js or in browsers that support `console.dir` and
|
||||
* `console.error` (such as FF and Chrome).
|
||||
* If multiple arguments are returned from the async function,
|
||||
* `console.dir` is called on each argument in order.
|
||||
*
|
||||
* @name dir
|
||||
* @static
|
||||
* @memberOf module:Utils
|
||||
* @method
|
||||
* @category Util
|
||||
* @param {AsyncFunction} function - The function you want to eventually apply
|
||||
* all arguments to.
|
||||
* @param {...*} arguments... - Any number of arguments to apply to the function.
|
||||
* @example
|
||||
*
|
||||
* // in a module
|
||||
* var hello = function(name, callback) {
|
||||
* setTimeout(function() {
|
||||
* callback(null, {hello: name});
|
||||
* }, 1000);
|
||||
* };
|
||||
*
|
||||
* // in the node repl
|
||||
* node> async.dir(hello, 'world');
|
||||
* {hello: 'world'}
|
||||
*/
|
||||
exports.default = (0, _consoleFunc2.default)('dir');
|
||||
module.exports = exports['default'];
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -1,66 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = doDuring;
|
||||
|
||||
var _noop = require('lodash/noop');
|
||||
|
||||
var _noop2 = _interopRequireDefault(_noop);
|
||||
|
||||
var _slice = require('./internal/slice');
|
||||
|
||||
var _slice2 = _interopRequireDefault(_slice);
|
||||
|
||||
var _onlyOnce = require('./internal/onlyOnce');
|
||||
|
||||
var _onlyOnce2 = _interopRequireDefault(_onlyOnce);
|
||||
|
||||
var _wrapAsync = require('./internal/wrapAsync');
|
||||
|
||||
var _wrapAsync2 = _interopRequireDefault(_wrapAsync);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The post-check version of [`during`]{@link module:ControlFlow.during}. To reflect the difference in
|
||||
* the order of operations, the arguments `test` and `fn` are switched.
|
||||
*
|
||||
* Also a version of [`doWhilst`]{@link module:ControlFlow.doWhilst} with asynchronous `test` function.
|
||||
* @name doDuring
|
||||
* @static
|
||||
* @memberOf module:ControlFlow
|
||||
* @method
|
||||
* @see [async.during]{@link module:ControlFlow.during}
|
||||
* @category Control Flow
|
||||
* @param {AsyncFunction} fn - An async function which is called each time
|
||||
* `test` passes. Invoked with (callback).
|
||||
* @param {AsyncFunction} test - asynchronous truth test to perform before each
|
||||
* execution of `fn`. Invoked with (...args, callback), where `...args` are the
|
||||
* non-error args from the previous callback of `fn`.
|
||||
* @param {Function} [callback] - A callback which is called after the test
|
||||
* function has failed and repeated execution of `fn` has stopped. `callback`
|
||||
* will be passed an error if one occurred, otherwise `null`.
|
||||
*/
|
||||
function doDuring(fn, test, callback) {
|
||||
callback = (0, _onlyOnce2.default)(callback || _noop2.default);
|
||||
var _fn = (0, _wrapAsync2.default)(fn);
|
||||
var _test = (0, _wrapAsync2.default)(test);
|
||||
|
||||
function next(err /*, ...args*/) {
|
||||
if (err) return callback(err);
|
||||
var args = (0, _slice2.default)(arguments, 1);
|
||||
args.push(check);
|
||||
_test.apply(this, args);
|
||||
};
|
||||
|
||||
function check(err, truth) {
|
||||
if (err) return callback(err);
|
||||
if (!truth) return callback(null);
|
||||
_fn(next);
|
||||
}
|
||||
|
||||
check(null, true);
|
||||
}
|
||||
module.exports = exports['default'];
|
@ -1,39 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = doUntil;
|
||||
|
||||
var _doWhilst = require('./doWhilst');
|
||||
|
||||
var _doWhilst2 = _interopRequireDefault(_doWhilst);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the
|
||||
* argument ordering differs from `until`.
|
||||
*
|
||||
* @name doUntil
|
||||
* @static
|
||||
* @memberOf module:ControlFlow
|
||||
* @method
|
||||
* @see [async.doWhilst]{@link module:ControlFlow.doWhilst}
|
||||
* @category Control Flow
|
||||
* @param {AsyncFunction} iteratee - An async function which is called each time
|
||||
* `test` fails. Invoked with (callback).
|
||||
* @param {Function} test - synchronous truth test to perform after each
|
||||
* execution of `iteratee`. Invoked with any non-error callback results of
|
||||
* `iteratee`.
|
||||
* @param {Function} [callback] - A callback which is called after the test
|
||||
* function has passed and repeated execution of `iteratee` has stopped. `callback`
|
||||
* will be passed an error and any arguments passed to the final `iteratee`'s
|
||||
* callback. Invoked with (err, [results]);
|
||||
*/
|
||||
function doUntil(iteratee, test, callback) {
|
||||
(0, _doWhilst2.default)(iteratee, function () {
|
||||
return !test.apply(this, arguments);
|
||||
}, callback);
|
||||
}
|
||||
module.exports = exports['default'];
|
@ -1,59 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = doWhilst;
|
||||
|
||||
var _noop = require('lodash/noop');
|
||||
|
||||
var _noop2 = _interopRequireDefault(_noop);
|
||||
|
||||
var _slice = require('./internal/slice');
|
||||
|
||||
var _slice2 = _interopRequireDefault(_slice);
|
||||
|
||||
var _onlyOnce = require('./internal/onlyOnce');
|
||||
|
||||
var _onlyOnce2 = _interopRequireDefault(_onlyOnce);
|
||||
|
||||
var _wrapAsync = require('./internal/wrapAsync');
|
||||
|
||||
var _wrapAsync2 = _interopRequireDefault(_wrapAsync);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in
|
||||
* the order of operations, the arguments `test` and `iteratee` are switched.
|
||||
*
|
||||
* `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript.
|
||||
*
|
||||
* @name doWhilst
|
||||
* @static
|
||||
* @memberOf module:ControlFlow
|
||||
* @method
|
||||
* @see [async.whilst]{@link module:ControlFlow.whilst}
|
||||
* @category Control Flow
|
||||
* @param {AsyncFunction} iteratee - A function which is called each time `test`
|
||||
* passes. Invoked with (callback).
|
||||
* @param {Function} test - synchronous truth test to perform after each
|
||||
* execution of `iteratee`. Invoked with any non-error callback results of
|
||||
* `iteratee`.
|
||||
* @param {Function} [callback] - A callback which is called after the test
|
||||
* function has failed and repeated execution of `iteratee` has stopped.
|
||||
* `callback` will be passed an error and any arguments passed to the final
|
||||
* `iteratee`'s callback. Invoked with (err, [results]);
|
||||
*/
|
||||
function doWhilst(iteratee, test, callback) {
|
||||
callback = (0, _onlyOnce2.default)(callback || _noop2.default);
|
||||
var _iteratee = (0, _wrapAsync2.default)(iteratee);
|
||||
var next = function (err /*, ...args*/) {
|
||||
if (err) return callback(err);
|
||||
var args = (0, _slice2.default)(arguments, 1);
|
||||
if (test.apply(this, args)) return _iteratee(next);
|
||||
callback.apply(null, [null].concat(args));
|
||||
};
|
||||
_iteratee(next);
|
||||
}
|
||||
module.exports = exports['default'];
|
@ -1,76 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = during;
|
||||
|
||||
var _noop = require('lodash/noop');
|
||||
|
||||
var _noop2 = _interopRequireDefault(_noop);
|
||||
|
||||
var _onlyOnce = require('./internal/onlyOnce');
|
||||
|
||||
var _onlyOnce2 = _interopRequireDefault(_onlyOnce);
|
||||
|
||||
var _wrapAsync = require('./internal/wrapAsync');
|
||||
|
||||
var _wrapAsync2 = _interopRequireDefault(_wrapAsync);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* Like [`whilst`]{@link module:ControlFlow.whilst}, except the `test` is an asynchronous function that
|
||||
* is passed a callback in the form of `function (err, truth)`. If error is
|
||||
* passed to `test` or `fn`, the main callback is immediately called with the
|
||||
* value of the error.
|
||||
*
|
||||
* @name during
|
||||
* @static
|
||||
* @memberOf module:ControlFlow
|
||||
* @method
|
||||
* @see [async.whilst]{@link module:ControlFlow.whilst}
|
||||
* @category Control Flow
|
||||
* @param {AsyncFunction} test - asynchronous truth test to perform before each
|
||||
* execution of `fn`. Invoked with (callback).
|
||||
* @param {AsyncFunction} fn - An async function which is called each time
|
||||
* `test` passes. Invoked with (callback).
|
||||
* @param {Function} [callback] - A callback which is called after the test
|
||||
* function has failed and repeated execution of `fn` has stopped. `callback`
|
||||
* will be passed an error, if one occurred, otherwise `null`.
|
||||
* @example
|
||||
*
|
||||
* var count = 0;
|
||||
*
|
||||
* async.during(
|
||||
* function (callback) {
|
||||
* return callback(null, count < 5);
|
||||
* },
|
||||
* function (callback) {
|
||||
* count++;
|
||||
* setTimeout(callback, 1000);
|
||||
* },
|
||||
* function (err) {
|
||||
* // 5 seconds have passed
|
||||
* }
|
||||
* );
|
||||
*/
|
||||
function during(test, fn, callback) {
|
||||
callback = (0, _onlyOnce2.default)(callback || _noop2.default);
|
||||
var _fn = (0, _wrapAsync2.default)(fn);
|
||||
var _test = (0, _wrapAsync2.default)(test);
|
||||
|
||||
function next(err) {
|
||||
if (err) return callback(err);
|
||||
_test(check);
|
||||
}
|
||||
|
||||
function check(err, truth) {
|
||||
if (err) return callback(err);
|
||||
if (!truth) return callback(null);
|
||||
_fn(next);
|
||||
}
|
||||
|
||||
_test(check);
|
||||
}
|
||||
module.exports = exports['default'];
|
@ -1,82 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = eachLimit;
|
||||
|
||||
var _eachOf = require('./eachOf');
|
||||
|
||||
var _eachOf2 = _interopRequireDefault(_eachOf);
|
||||
|
||||
var _withoutIndex = require('./internal/withoutIndex');
|
||||
|
||||
var _withoutIndex2 = _interopRequireDefault(_withoutIndex);
|
||||
|
||||
var _wrapAsync = require('./internal/wrapAsync');
|
||||
|
||||
var _wrapAsync2 = _interopRequireDefault(_wrapAsync);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* Applies the function `iteratee` to each item in `coll`, in parallel.
|
||||
* The `iteratee` is called with an item from the list, and a callback for when
|
||||
* it has finished. If the `iteratee` passes an error to its `callback`, the
|
||||
* main `callback` (for the `each` function) is immediately called with the
|
||||
* error.
|
||||
*
|
||||
* Note, that since this function applies `iteratee` to each item in parallel,
|
||||
* there is no guarantee that the iteratee functions will complete in order.
|
||||
*
|
||||
* @name each
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @alias forEach
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {AsyncFunction} iteratee - An async function to apply to
|
||||
* each item in `coll`. Invoked with (item, callback).
|
||||
* The array index is not passed to the iteratee.
|
||||
* If you need the index, use `eachOf`.
|
||||
* @param {Function} [callback] - A callback which is called when all
|
||||
* `iteratee` functions have finished, or an error occurs. Invoked with (err).
|
||||
* @example
|
||||
*
|
||||
* // assuming openFiles is an array of file names and saveFile is a function
|
||||
* // to save the modified contents of that file:
|
||||
*
|
||||
* async.each(openFiles, saveFile, function(err){
|
||||
* // if any of the saves produced an error, err would equal that error
|
||||
* });
|
||||
*
|
||||
* // assuming openFiles is an array of file names
|
||||
* async.each(openFiles, function(file, callback) {
|
||||
*
|
||||
* // Perform operation on file here.
|
||||
* console.log('Processing file ' + file);
|
||||
*
|
||||
* if( file.length > 32 ) {
|
||||
* console.log('This file name is too long');
|
||||
* callback('File name too long');
|
||||
* } else {
|
||||
* // Do work to process file here
|
||||
* console.log('File processed');
|
||||
* callback();
|
||||
* }
|
||||
* }, function(err) {
|
||||
* // if any of the file processing produced an error, err would equal that error
|
||||
* if( err ) {
|
||||
* // One of the iterations produced an error.
|
||||
* // All processing will now stop.
|
||||
* console.log('A file failed to process');
|
||||
* } else {
|
||||
* console.log('All files have been processed successfully');
|
||||
* }
|
||||
* });
|
||||
*/
|
||||
function eachLimit(coll, iteratee, callback) {
|
||||
(0, _eachOf2.default)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback);
|
||||
}
|
||||
module.exports = exports['default'];
|
@ -1,45 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = eachLimit;
|
||||
|
||||
var _eachOfLimit = require('./internal/eachOfLimit');
|
||||
|
||||
var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit);
|
||||
|
||||
var _withoutIndex = require('./internal/withoutIndex');
|
||||
|
||||
var _withoutIndex2 = _interopRequireDefault(_withoutIndex);
|
||||
|
||||
var _wrapAsync = require('./internal/wrapAsync');
|
||||
|
||||
var _wrapAsync2 = _interopRequireDefault(_wrapAsync);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time.
|
||||
*
|
||||
* @name eachLimit
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @see [async.each]{@link module:Collections.each}
|
||||
* @alias forEachLimit
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {number} limit - The maximum number of async operations at a time.
|
||||
* @param {AsyncFunction} iteratee - An async function to apply to each item in
|
||||
* `coll`.
|
||||
* The array index is not passed to the iteratee.
|
||||
* If you need the index, use `eachOfLimit`.
|
||||
* Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called when all
|
||||
* `iteratee` functions have finished, or an error occurs. Invoked with (err).
|
||||
*/
|
||||
function eachLimit(coll, limit, iteratee, callback) {
|
||||
(0, _eachOfLimit2.default)(limit)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback);
|
||||
}
|
||||
module.exports = exports['default'];
|
@ -1,111 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
exports.default = function (coll, iteratee, callback) {
|
||||
var eachOfImplementation = (0, _isArrayLike2.default)(coll) ? eachOfArrayLike : eachOfGeneric;
|
||||
eachOfImplementation(coll, (0, _wrapAsync2.default)(iteratee), callback);
|
||||
};
|
||||
|
||||
var _isArrayLike = require('lodash/isArrayLike');
|
||||
|
||||
var _isArrayLike2 = _interopRequireDefault(_isArrayLike);
|
||||
|
||||
var _breakLoop = require('./internal/breakLoop');
|
||||
|
||||
var _breakLoop2 = _interopRequireDefault(_breakLoop);
|
||||
|
||||
var _eachOfLimit = require('./eachOfLimit');
|
||||
|
||||
var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit);
|
||||
|
||||
var _doLimit = require('./internal/doLimit');
|
||||
|
||||
var _doLimit2 = _interopRequireDefault(_doLimit);
|
||||
|
||||
var _noop = require('lodash/noop');
|
||||
|
||||
var _noop2 = _interopRequireDefault(_noop);
|
||||
|
||||
var _once = require('./internal/once');
|
||||
|
||||
var _once2 = _interopRequireDefault(_once);
|
||||
|
||||
var _onlyOnce = require('./internal/onlyOnce');
|
||||
|
||||
var _onlyOnce2 = _interopRequireDefault(_onlyOnce);
|
||||
|
||||
var _wrapAsync = require('./internal/wrapAsync');
|
||||
|
||||
var _wrapAsync2 = _interopRequireDefault(_wrapAsync);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
// eachOf implementation optimized for array-likes
|
||||
function eachOfArrayLike(coll, iteratee, callback) {
|
||||
callback = (0, _once2.default)(callback || _noop2.default);
|
||||
var index = 0,
|
||||
completed = 0,
|
||||
length = coll.length;
|
||||
if (length === 0) {
|
||||
callback(null);
|
||||
}
|
||||
|
||||
function iteratorCallback(err, value) {
|
||||
if (err) {
|
||||
callback(err);
|
||||
} else if (++completed === length || value === _breakLoop2.default) {
|
||||
callback(null);
|
||||
}
|
||||
}
|
||||
|
||||
for (; index < length; index++) {
|
||||
iteratee(coll[index], index, (0, _onlyOnce2.default)(iteratorCallback));
|
||||
}
|
||||
}
|
||||
|
||||
// a generic version of eachOf which can handle array, object, and iterator cases.
|
||||
var eachOfGeneric = (0, _doLimit2.default)(_eachOfLimit2.default, Infinity);
|
||||
|
||||
/**
|
||||
* Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument
|
||||
* to the iteratee.
|
||||
*
|
||||
* @name eachOf
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @alias forEachOf
|
||||
* @category Collection
|
||||
* @see [async.each]{@link module:Collections.each}
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {AsyncFunction} iteratee - A function to apply to each
|
||||
* item in `coll`.
|
||||
* The `key` is the item's key, or index in the case of an array.
|
||||
* Invoked with (item, key, callback).
|
||||
* @param {Function} [callback] - A callback which is called when all
|
||||
* `iteratee` functions have finished, or an error occurs. Invoked with (err).
|
||||
* @example
|
||||
*
|
||||
* var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"};
|
||||
* var configs = {};
|
||||
*
|
||||
* async.forEachOf(obj, function (value, key, callback) {
|
||||
* fs.readFile(__dirname + value, "utf8", function (err, data) {
|
||||
* if (err) return callback(err);
|
||||
* try {
|
||||
* configs[key] = JSON.parse(data);
|
||||
* } catch (e) {
|
||||
* return callback(e);
|
||||
* }
|
||||
* callback();
|
||||
* });
|
||||
* }, function (err) {
|
||||
* if (err) console.error(err.message);
|
||||
* // configs is now a map of JSON data
|
||||
* doSomethingWith(configs);
|
||||
* });
|
||||
*/
|
||||
module.exports = exports['default'];
|
@ -1,41 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = eachOfLimit;
|
||||
|
||||
var _eachOfLimit2 = require('./internal/eachOfLimit');
|
||||
|
||||
var _eachOfLimit3 = _interopRequireDefault(_eachOfLimit2);
|
||||
|
||||
var _wrapAsync = require('./internal/wrapAsync');
|
||||
|
||||
var _wrapAsync2 = _interopRequireDefault(_wrapAsync);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a
|
||||
* time.
|
||||
*
|
||||
* @name eachOfLimit
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @see [async.eachOf]{@link module:Collections.eachOf}
|
||||
* @alias forEachOfLimit
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {number} limit - The maximum number of async operations at a time.
|
||||
* @param {AsyncFunction} iteratee - An async function to apply to each
|
||||
* item in `coll`. The `key` is the item's key, or index in the case of an
|
||||
* array.
|
||||
* Invoked with (item, key, callback).
|
||||
* @param {Function} [callback] - A callback which is called when all
|
||||
* `iteratee` functions have finished, or an error occurs. Invoked with (err).
|
||||
*/
|
||||
function eachOfLimit(coll, limit, iteratee, callback) {
|
||||
(0, _eachOfLimit3.default)(limit)(coll, (0, _wrapAsync2.default)(iteratee), callback);
|
||||
}
|
||||
module.exports = exports['default'];
|
@ -1,35 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _eachOfLimit = require('./eachOfLimit');
|
||||
|
||||
var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit);
|
||||
|
||||
var _doLimit = require('./internal/doLimit');
|
||||
|
||||
var _doLimit2 = _interopRequireDefault(_doLimit);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time.
|
||||
*
|
||||
* @name eachOfSeries
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @see [async.eachOf]{@link module:Collections.eachOf}
|
||||
* @alias forEachOfSeries
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {AsyncFunction} iteratee - An async function to apply to each item in
|
||||
* `coll`.
|
||||
* Invoked with (item, key, callback).
|
||||
* @param {Function} [callback] - A callback which is called when all `iteratee`
|
||||
* functions have finished, or an error occurs. Invoked with (err).
|
||||
*/
|
||||
exports.default = (0, _doLimit2.default)(_eachOfLimit2.default, 1);
|
||||
module.exports = exports['default'];
|
@ -1,37 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _eachLimit = require('./eachLimit');
|
||||
|
||||
var _eachLimit2 = _interopRequireDefault(_eachLimit);
|
||||
|
||||
var _doLimit = require('./internal/doLimit');
|
||||
|
||||
var _doLimit2 = _interopRequireDefault(_doLimit);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time.
|
||||
*
|
||||
* @name eachSeries
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @see [async.each]{@link module:Collections.each}
|
||||
* @alias forEachSeries
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {AsyncFunction} iteratee - An async function to apply to each
|
||||
* item in `coll`.
|
||||
* The array index is not passed to the iteratee.
|
||||
* If you need the index, use `eachOfSeries`.
|
||||
* Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called when all
|
||||
* `iteratee` functions have finished, or an error occurs. Invoked with (err).
|
||||
*/
|
||||
exports.default = (0, _doLimit2.default)(_eachLimit2.default, 1);
|
||||
module.exports = exports['default'];
|
@ -1,73 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = ensureAsync;
|
||||
|
||||
var _setImmediate = require('./internal/setImmediate');
|
||||
|
||||
var _setImmediate2 = _interopRequireDefault(_setImmediate);
|
||||
|
||||
var _initialParams = require('./internal/initialParams');
|
||||
|
||||
var _initialParams2 = _interopRequireDefault(_initialParams);
|
||||
|
||||
var _wrapAsync = require('./internal/wrapAsync');
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* Wrap an async function and ensure it calls its callback on a later tick of
|
||||
* the event loop. If the function already calls its callback on a next tick,
|
||||
* no extra deferral is added. This is useful for preventing stack overflows
|
||||
* (`RangeError: Maximum call stack size exceeded`) and generally keeping
|
||||
* [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony)
|
||||
* contained. ES2017 `async` functions are returned as-is -- they are immune
|
||||
* to Zalgo's corrupting influences, as they always resolve on a later tick.
|
||||
*
|
||||
* @name ensureAsync
|
||||
* @static
|
||||
* @memberOf module:Utils
|
||||
* @method
|
||||
* @category Util
|
||||
* @param {AsyncFunction} fn - an async function, one that expects a node-style
|
||||
* callback as its last argument.
|
||||
* @returns {AsyncFunction} Returns a wrapped function with the exact same call
|
||||
* signature as the function passed in.
|
||||
* @example
|
||||
*
|
||||
* function sometimesAsync(arg, callback) {
|
||||
* if (cache[arg]) {
|
||||
* return callback(null, cache[arg]); // this would be synchronous!!
|
||||
* } else {
|
||||
* doSomeIO(arg, callback); // this IO would be asynchronous
|
||||
* }
|
||||
* }
|
||||
*
|
||||
* // this has a risk of stack overflows if many results are cached in a row
|
||||
* async.mapSeries(args, sometimesAsync, done);
|
||||
*
|
||||
* // this will defer sometimesAsync's callback if necessary,
|
||||
* // preventing stack overflows
|
||||
* async.mapSeries(args, async.ensureAsync(sometimesAsync), done);
|
||||
*/
|
||||
function ensureAsync(fn) {
|
||||
if ((0, _wrapAsync.isAsync)(fn)) return fn;
|
||||
return (0, _initialParams2.default)(function (args, callback) {
|
||||
var sync = true;
|
||||
args.push(function () {
|
||||
var innerArgs = arguments;
|
||||
if (sync) {
|
||||
(0, _setImmediate2.default)(function () {
|
||||
callback.apply(null, innerArgs);
|
||||
});
|
||||
} else {
|
||||
callback.apply(null, innerArgs);
|
||||
}
|
||||
});
|
||||
fn.apply(this, args);
|
||||
sync = false;
|
||||
});
|
||||
}
|
||||
module.exports = exports['default'];
|
@ -1,50 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _createTester = require('./internal/createTester');
|
||||
|
||||
var _createTester2 = _interopRequireDefault(_createTester);
|
||||
|
||||
var _doParallel = require('./internal/doParallel');
|
||||
|
||||
var _doParallel2 = _interopRequireDefault(_doParallel);
|
||||
|
||||
var _notId = require('./internal/notId');
|
||||
|
||||
var _notId2 = _interopRequireDefault(_notId);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* Returns `true` if every element in `coll` satisfies an async test. If any
|
||||
* iteratee call returns `false`, the main `callback` is immediately called.
|
||||
*
|
||||
* @name every
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @alias all
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {AsyncFunction} iteratee - An async truth test to apply to each item
|
||||
* in the collection in parallel.
|
||||
* The iteratee must complete with a boolean result value.
|
||||
* Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called after all the
|
||||
* `iteratee` functions have finished. Result will be either `true` or `false`
|
||||
* depending on the values of the async tests. Invoked with (err, result).
|
||||
* @example
|
||||
*
|
||||
* async.every(['file1','file2','file3'], function(filePath, callback) {
|
||||
* fs.access(filePath, function(err) {
|
||||
* callback(null, !err)
|
||||
* });
|
||||
* }, function(err, result) {
|
||||
* // if result is true then every file exists
|
||||
* });
|
||||
*/
|
||||
exports.default = (0, _doParallel2.default)((0, _createTester2.default)(_notId2.default, _notId2.default));
|
||||
module.exports = exports['default'];
|
@ -1,42 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _createTester = require('./internal/createTester');
|
||||
|
||||
var _createTester2 = _interopRequireDefault(_createTester);
|
||||
|
||||
var _doParallelLimit = require('./internal/doParallelLimit');
|
||||
|
||||
var _doParallelLimit2 = _interopRequireDefault(_doParallelLimit);
|
||||
|
||||
var _notId = require('./internal/notId');
|
||||
|
||||
var _notId2 = _interopRequireDefault(_notId);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time.
|
||||
*
|
||||
* @name everyLimit
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @see [async.every]{@link module:Collections.every}
|
||||
* @alias allLimit
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {number} limit - The maximum number of async operations at a time.
|
||||
* @param {AsyncFunction} iteratee - An async truth test to apply to each item
|
||||
* in the collection in parallel.
|
||||
* The iteratee must complete with a boolean result value.
|
||||
* Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called after all the
|
||||
* `iteratee` functions have finished. Result will be either `true` or `false`
|
||||
* depending on the values of the async tests. Invoked with (err, result).
|
||||
*/
|
||||
exports.default = (0, _doParallelLimit2.default)((0, _createTester2.default)(_notId2.default, _notId2.default));
|
||||
module.exports = exports['default'];
|
@ -1,37 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _everyLimit = require('./everyLimit');
|
||||
|
||||
var _everyLimit2 = _interopRequireDefault(_everyLimit);
|
||||
|
||||
var _doLimit = require('./internal/doLimit');
|
||||
|
||||
var _doLimit2 = _interopRequireDefault(_doLimit);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time.
|
||||
*
|
||||
* @name everySeries
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @see [async.every]{@link module:Collections.every}
|
||||
* @alias allSeries
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {AsyncFunction} iteratee - An async truth test to apply to each item
|
||||
* in the collection in series.
|
||||
* The iteratee must complete with a boolean result value.
|
||||
* Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called after all the
|
||||
* `iteratee` functions have finished. Result will be either `true` or `false`
|
||||
* depending on the values of the async tests. Invoked with (err, result).
|
||||
*/
|
||||
exports.default = (0, _doLimit2.default)(_everyLimit2.default, 1);
|
||||
module.exports = exports['default'];
|
@ -1,45 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _filter = require('./internal/filter');
|
||||
|
||||
var _filter2 = _interopRequireDefault(_filter);
|
||||
|
||||
var _doParallel = require('./internal/doParallel');
|
||||
|
||||
var _doParallel2 = _interopRequireDefault(_doParallel);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* Returns a new array of all the values in `coll` which pass an async truth
|
||||
* test. This operation is performed in parallel, but the results array will be
|
||||
* in the same order as the original.
|
||||
*
|
||||
* @name filter
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @alias select
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {Function} iteratee - A truth test to apply to each item in `coll`.
|
||||
* The `iteratee` is passed a `callback(err, truthValue)`, which must be called
|
||||
* with a boolean argument once it has completed. Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called after all the
|
||||
* `iteratee` functions have finished. Invoked with (err, results).
|
||||
* @example
|
||||
*
|
||||
* async.filter(['file1','file2','file3'], function(filePath, callback) {
|
||||
* fs.access(filePath, function(err) {
|
||||
* callback(null, !err)
|
||||
* });
|
||||
* }, function(err, results) {
|
||||
* // results now equals an array of the existing files
|
||||
* });
|
||||
*/
|
||||
exports.default = (0, _doParallel2.default)(_filter2.default);
|
||||
module.exports = exports['default'];
|
@ -1,37 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _filter = require('./internal/filter');
|
||||
|
||||
var _filter2 = _interopRequireDefault(_filter);
|
||||
|
||||
var _doParallelLimit = require('./internal/doParallelLimit');
|
||||
|
||||
var _doParallelLimit2 = _interopRequireDefault(_doParallelLimit);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a
|
||||
* time.
|
||||
*
|
||||
* @name filterLimit
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @see [async.filter]{@link module:Collections.filter}
|
||||
* @alias selectLimit
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {number} limit - The maximum number of async operations at a time.
|
||||
* @param {Function} iteratee - A truth test to apply to each item in `coll`.
|
||||
* The `iteratee` is passed a `callback(err, truthValue)`, which must be called
|
||||
* with a boolean argument once it has completed. Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called after all the
|
||||
* `iteratee` functions have finished. Invoked with (err, results).
|
||||
*/
|
||||
exports.default = (0, _doParallelLimit2.default)(_filter2.default);
|
||||
module.exports = exports['default'];
|
@ -1,35 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _filterLimit = require('./filterLimit');
|
||||
|
||||
var _filterLimit2 = _interopRequireDefault(_filterLimit);
|
||||
|
||||
var _doLimit = require('./internal/doLimit');
|
||||
|
||||
var _doLimit2 = _interopRequireDefault(_doLimit);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time.
|
||||
*
|
||||
* @name filterSeries
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @see [async.filter]{@link module:Collections.filter}
|
||||
* @alias selectSeries
|
||||
* @category Collection
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {Function} iteratee - A truth test to apply to each item in `coll`.
|
||||
* The `iteratee` is passed a `callback(err, truthValue)`, which must be called
|
||||
* with a boolean argument once it has completed. Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called after all the
|
||||
* `iteratee` functions have finished. Invoked with (err, results)
|
||||
*/
|
||||
exports.default = (0, _doLimit2.default)(_filterLimit2.default, 1);
|
||||
module.exports = exports['default'];
|
@ -1,61 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _identity = require('lodash/identity');
|
||||
|
||||
var _identity2 = _interopRequireDefault(_identity);
|
||||
|
||||
var _createTester = require('./internal/createTester');
|
||||
|
||||
var _createTester2 = _interopRequireDefault(_createTester);
|
||||
|
||||
var _doParallel = require('./internal/doParallel');
|
||||
|
||||
var _doParallel2 = _interopRequireDefault(_doParallel);
|
||||
|
||||
var _findGetResult = require('./internal/findGetResult');
|
||||
|
||||
var _findGetResult2 = _interopRequireDefault(_findGetResult);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* Returns the first value in `coll` that passes an async truth test. The
|
||||
* `iteratee` is applied in parallel, meaning the first iteratee to return
|
||||
* `true` will fire the detect `callback` with that result. That means the
|
||||
* result might not be the first item in the original `coll` (in terms of order)
|
||||
* that passes the test.
|
||||
|
||||
* If order within the original `coll` is important, then look at
|
||||
* [`detectSeries`]{@link module:Collections.detectSeries}.
|
||||
*
|
||||
* @name detect
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @alias find
|
||||
* @category Collections
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`.
|
||||
* The iteratee must complete with a boolean value as its result.
|
||||
* Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called as soon as any
|
||||
* iteratee returns `true`, or after all the `iteratee` functions have finished.
|
||||
* Result will be the first item in the array that passes the truth test
|
||||
* (iteratee) or the value `undefined` if none passed. Invoked with
|
||||
* (err, result).
|
||||
* @example
|
||||
*
|
||||
* async.detect(['file1','file2','file3'], function(filePath, callback) {
|
||||
* fs.access(filePath, function(err) {
|
||||
* callback(null, !err)
|
||||
* });
|
||||
* }, function(err, result) {
|
||||
* // result now equals the first file in the list that exists
|
||||
* });
|
||||
*/
|
||||
exports.default = (0, _doParallel2.default)((0, _createTester2.default)(_identity2.default, _findGetResult2.default));
|
||||
module.exports = exports['default'];
|
@ -1,48 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _identity = require('lodash/identity');
|
||||
|
||||
var _identity2 = _interopRequireDefault(_identity);
|
||||
|
||||
var _createTester = require('./internal/createTester');
|
||||
|
||||
var _createTester2 = _interopRequireDefault(_createTester);
|
||||
|
||||
var _doParallelLimit = require('./internal/doParallelLimit');
|
||||
|
||||
var _doParallelLimit2 = _interopRequireDefault(_doParallelLimit);
|
||||
|
||||
var _findGetResult = require('./internal/findGetResult');
|
||||
|
||||
var _findGetResult2 = _interopRequireDefault(_findGetResult);
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a
|
||||
* time.
|
||||
*
|
||||
* @name detectLimit
|
||||
* @static
|
||||
* @memberOf module:Collections
|
||||
* @method
|
||||
* @see [async.detect]{@link module:Collections.detect}
|
||||
* @alias findLimit
|
||||
* @category Collections
|
||||
* @param {Array|Iterable|Object} coll - A collection to iterate over.
|
||||
* @param {number} limit - The maximum number of async operations at a time.
|
||||
* @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`.
|
||||
* The iteratee must complete with a boolean value as its result.
|
||||
* Invoked with (item, callback).
|
||||
* @param {Function} [callback] - A callback which is called as soon as any
|
||||
* iteratee returns `true`, or after all the `iteratee` functions have finished.
|
||||
* Result will be the first item in the array that passes the truth test
|
||||
* (iteratee) or the value `undefined` if none passed. Invoked with
|
||||
* (err, result).
|
||||
*/
|
||||
exports.default = (0, _doParallelLimit2.default)((0, _createTester2.default)(_identity2.default, _findGetResult2.default));
|
||||
module.exports = exports['default'];
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue