diff --git a/node_modules/.bin/mime b/node_modules/.bin/mime deleted file mode 120000 index fbb7ee0..0000000 --- a/node_modules/.bin/mime +++ /dev/null @@ -1 +0,0 @@ -../mime/cli.js \ No newline at end of file diff --git a/node_modules/.bin/rimraf b/node_modules/.bin/rimraf deleted file mode 120000 index 4cd49a4..0000000 --- a/node_modules/.bin/rimraf +++ /dev/null @@ -1 +0,0 @@ -../rimraf/bin.js \ No newline at end of file diff --git a/node_modules/archiver-utils/CHANGELOG.md b/node_modules/archiver-utils/CHANGELOG.md deleted file mode 100644 index b1135d2..0000000 --- a/node_modules/archiver-utils/CHANGELOG.md +++ /dev/null @@ -1,14 +0,0 @@ -## Changelog - -**2.1.0** — July 19, 2019 — [Diff](https://github.com/archiverjs/archiver-utils/compare/2.0.0...2.1.0) - -- other: less lodash (#16) -- other: update dependencies - -**2.0.0** — August 22, 2018 — [Diff](https://github.com/archiverjs/archiver-utils/compare/1.3.0...2.0.0) - -- breaking: follow node LTS, remove support for versions under 6. -- other: remove unused lodash dependence (#13) -- test: now targeting node v10 - -[Release Archive](https://github.com/archiverjs/archiver-utils/releases) \ No newline at end of file diff --git a/node_modules/archiver-utils/LICENSE b/node_modules/archiver-utils/LICENSE deleted file mode 100644 index af5a4f6..0000000 --- a/node_modules/archiver-utils/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -Copyright (c) 2015 Chris Talkington. - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/archiver-utils/README.md b/node_modules/archiver-utils/README.md deleted file mode 100644 index 81aa56c..0000000 --- a/node_modules/archiver-utils/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# Archiver Utils [![Build Status](https://travis-ci.org/archiverjs/archiver-utils.svg?branch=master)](https://travis-ci.org/archiverjs/archiver-utils) [![Build status](https://ci.appveyor.com/api/projects/status/7254ojgmlglhqbed/branch/master?svg=true)](https://ci.appveyor.com/project/ctalkington/archiver-utils/branch/master) - - -## Things of Interest -- [Changelog](https://github.com/archiverjs/archiver-utils/releases) -- [Contributing](https://github.com/archiverjs/archiver-utils/blob/master/CONTRIBUTING.md) -- [MIT License](https://github.com/archiverjs/archiver-utils/blob/master/LICENSE) diff --git a/node_modules/archiver-utils/file.js b/node_modules/archiver-utils/file.js deleted file mode 100644 index 912f25e..0000000 --- a/node_modules/archiver-utils/file.js +++ /dev/null @@ -1,209 +0,0 @@ -/** - * archiver-utils - * - * Copyright (c) 2012-2014 Chris Talkington, contributors. - * Licensed under the MIT license. - * https://github.com/archiverjs/node-archiver/blob/master/LICENSE-MIT - */ -var fs = require('graceful-fs'); -var path = require('path'); - -var flatten = require('lodash.flatten'); -var difference = require('lodash.difference'); -var union = require('lodash.union'); -var isPlainObject = require('lodash.isplainobject'); - -var glob = require('glob'); - -var file = module.exports = {}; - -var pathSeparatorRe = /[\/\\]/g; - -// Process specified wildcard glob patterns or filenames against a -// callback, excluding and uniquing files in the result set. -var processPatterns = function(patterns, fn) { - // Filepaths to return. - var result = []; - // Iterate over flattened patterns array. - flatten(patterns).forEach(function(pattern) { - // If the first character is ! it should be omitted - var exclusion = pattern.indexOf('!') === 0; - // If the pattern is an exclusion, remove the ! - if (exclusion) { pattern = pattern.slice(1); } - // Find all matching files for this pattern. - var matches = fn(pattern); - if (exclusion) { - // If an exclusion, remove matching files. - result = difference(result, matches); - } else { - // Otherwise add matching files. - result = union(result, matches); - } - }); - return result; -}; - -// True if the file path exists. -file.exists = function() { - var filepath = path.join.apply(path, arguments); - return fs.existsSync(filepath); -}; - -// Return an array of all file paths that match the given wildcard patterns. -file.expand = function(...args) { - // If the first argument is an options object, save those options to pass - // into the File.prototype.glob.sync method. - var options = isPlainObject(args[0]) ? args.shift() : {}; - // Use the first argument if it's an Array, otherwise convert the arguments - // object to an array and use that. - var patterns = Array.isArray(args[0]) ? args[0] : args; - // Return empty set if there are no patterns or filepaths. - if (patterns.length === 0) { return []; } - // Return all matching filepaths. - var matches = processPatterns(patterns, function(pattern) { - // Find all matching files for this pattern. - return glob.sync(pattern, options); - }); - // Filter result set? 
- if (options.filter) { - matches = matches.filter(function(filepath) { - filepath = path.join(options.cwd || '', filepath); - try { - if (typeof options.filter === 'function') { - return options.filter(filepath); - } else { - // If the file is of the right type and exists, this should work. - return fs.statSync(filepath)[options.filter](); - } - } catch(e) { - // Otherwise, it's probably not the right type. - return false; - } - }); - } - return matches; -}; - -// Build a multi task "files" object dynamically. -file.expandMapping = function(patterns, destBase, options) { - options = Object.assign({ - rename: function(destBase, destPath) { - return path.join(destBase || '', destPath); - } - }, options); - var files = []; - var fileByDest = {}; - // Find all files matching pattern, using passed-in options. - file.expand(options, patterns).forEach(function(src) { - var destPath = src; - // Flatten? - if (options.flatten) { - destPath = path.basename(destPath); - } - // Change the extension? - if (options.ext) { - destPath = destPath.replace(/(\.[^\/]*)?$/, options.ext); - } - // Generate destination filename. - var dest = options.rename(destBase, destPath, options); - // Prepend cwd to src path if necessary. - if (options.cwd) { src = path.join(options.cwd, src); } - // Normalize filepaths to be unix-style. - dest = dest.replace(pathSeparatorRe, '/'); - src = src.replace(pathSeparatorRe, '/'); - // Map correct src path to dest path. - if (fileByDest[dest]) { - // If dest already exists, push this src onto that dest's src array. - fileByDest[dest].src.push(src); - } else { - // Otherwise create a new src-dest file mapping object. - files.push({ - src: [src], - dest: dest, - }); - // And store a reference for later use. - fileByDest[dest] = files[files.length - 1]; - } - }); - return files; -}; - -// reusing bits of grunt's multi-task source normalization -file.normalizeFilesArray = function(data) { - var files = []; - - data.forEach(function(obj) { - var prop; - if ('src' in obj || 'dest' in obj) { - files.push(obj); - } - }); - - if (files.length === 0) { - return []; - } - - files = _(files).chain().forEach(function(obj) { - if (!('src' in obj) || !obj.src) { return; } - // Normalize .src properties to flattened array. - if (Array.isArray(obj.src)) { - obj.src = flatten(obj.src); - } else { - obj.src = [obj.src]; - } - }).map(function(obj) { - // Build options object, removing unwanted properties. - var expandOptions = Object.assign({}, obj); - delete expandOptions.src; - delete expandOptions.dest; - - // Expand file mappings. - if (obj.expand) { - return file.expandMapping(obj.src, obj.dest, expandOptions).map(function(mapObj) { - // Copy obj properties to result. - var result = Object.assign({}, obj); - // Make a clone of the orig obj available. - result.orig = Object.assign({}, obj); - // Set .src and .dest, processing both as templates. - result.src = mapObj.src; - result.dest = mapObj.dest; - // Remove unwanted properties. - ['expand', 'cwd', 'flatten', 'rename', 'ext'].forEach(function(prop) { - delete result[prop]; - }); - return result; - }); - } - - // Copy obj properties to result, adding an .orig property. - var result = Object.assign({}, obj); - // Make a clone of the orig obj available. - result.orig = Object.assign({}, obj); - - if ('src' in result) { - // Expose an expand-on-demand getter method as .src. 
- Object.defineProperty(result, 'src', { - enumerable: true, - get: function fn() { - var src; - if (!('result' in fn)) { - src = obj.src; - // If src is an array, flatten it. Otherwise, make it into an array. - src = Array.isArray(src) ? flatten(src) : [src]; - // Expand src files, memoizing result. - fn.result = file.expand(expandOptions, src); - } - return fn.result; - } - }); - } - - if ('dest' in result) { - result.dest = obj.dest; - } - - return result; - }).flatten().value(); - - return files; -}; diff --git a/node_modules/archiver-utils/index.js b/node_modules/archiver-utils/index.js deleted file mode 100644 index a825f5b..0000000 --- a/node_modules/archiver-utils/index.js +++ /dev/null @@ -1,156 +0,0 @@ -/** - * archiver-utils - * - * Copyright (c) 2015 Chris Talkington. - * Licensed under the MIT license. - * https://github.com/archiverjs/archiver-utils/blob/master/LICENSE - */ -var fs = require('graceful-fs'); -var path = require('path'); -var nutil = require('util'); -var lazystream = require('lazystream'); -var normalizePath = require('normalize-path'); -var defaults = require('lodash.defaults'); - -var Stream = require('stream').Stream; -var PassThrough = require('readable-stream').PassThrough; - -var utils = module.exports = {}; -utils.file = require('./file.js'); - -function assertPath(path) { - if (typeof path !== 'string') { - throw new TypeError('Path must be a string. Received ' + nutils.inspect(path)); - } -} - -utils.collectStream = function(source, callback) { - var collection = []; - var size = 0; - - source.on('error', callback); - - source.on('data', function(chunk) { - collection.push(chunk); - size += chunk.length; - }); - - source.on('end', function() { - var buf = new Buffer(size); - var offset = 0; - - collection.forEach(function(data) { - data.copy(buf, offset); - offset += data.length; - }); - - callback(null, buf); - }); -}; - -utils.dateify = function(dateish) { - dateish = dateish || new Date(); - - if (dateish instanceof Date) { - dateish = dateish; - } else if (typeof dateish === 'string') { - dateish = new Date(dateish); - } else { - dateish = new Date(); - } - - return dateish; -}; - -// this is slightly different from lodash version -utils.defaults = function(object, source, guard) { - var args = arguments; - args[0] = args[0] || {}; - - return defaults(...args); -}; - -utils.isStream = function(source) { - return source instanceof Stream; -}; - -utils.lazyReadStream = function(filepath) { - return new lazystream.Readable(function() { - return fs.createReadStream(filepath); - }); -}; - -utils.normalizeInputSource = function(source) { - if (source === null) { - return new Buffer(0); - } else if (typeof source === 'string') { - return new Buffer(source); - } else if (utils.isStream(source) && !source._readableState) { - var normalized = new PassThrough(); - source.pipe(normalized); - - return normalized; - } - - return source; -}; - -utils.sanitizePath = function(filepath) { - return normalizePath(filepath, false).replace(/^\w+:/, '').replace(/^(\.\.\/|\/)+/, ''); -}; - -utils.trailingSlashIt = function(str) { - return str.slice(-1) !== '/' ? 
str + '/' : str; -}; - -utils.unixifyPath = function(filepath) { - return normalizePath(filepath, false).replace(/^\w+:/, ''); -}; - -utils.walkdir = function(dirpath, base, callback) { - var results = []; - - if (typeof base === 'function') { - callback = base; - base = dirpath; - } - - fs.readdir(dirpath, function(err, list) { - var i = 0; - var file; - var filepath; - - if (err) { - return callback(err); - } - - (function next() { - file = list[i++]; - - if (!file) { - return callback(null, results); - } - - filepath = path.join(dirpath, file); - - fs.stat(filepath, function(err, stats) { - results.push({ - path: filepath, - relative: path.relative(base, filepath).replace(/\\/g, '/'), - stats: stats - }); - - if (stats && stats.isDirectory()) { - utils.walkdir(filepath, base, function(err, res) { - res.forEach(function(dirEntry) { - results.push(dirEntry); - }); - next(); - }); - } else { - next(); - } - }); - })(); - }); -}; diff --git a/node_modules/archiver-utils/node_modules/readable-stream/.travis.yml b/node_modules/archiver-utils/node_modules/readable-stream/.travis.yml deleted file mode 100644 index f62cdac..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/.travis.yml +++ /dev/null @@ -1,34 +0,0 @@ -sudo: false -language: node_js -before_install: - - (test $NPM_LEGACY && npm install -g npm@2 && npm install -g npm@3) || true -notifications: - email: false -matrix: - fast_finish: true - include: - - node_js: '0.8' - env: NPM_LEGACY=true - - node_js: '0.10' - env: NPM_LEGACY=true - - node_js: '0.11' - env: NPM_LEGACY=true - - node_js: '0.12' - env: NPM_LEGACY=true - - node_js: 1 - env: NPM_LEGACY=true - - node_js: 2 - env: NPM_LEGACY=true - - node_js: 3 - env: NPM_LEGACY=true - - node_js: 4 - - node_js: 5 - - node_js: 6 - - node_js: 7 - - node_js: 8 - - node_js: 9 -script: "npm run test" -env: - global: - - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= - - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/archiver-utils/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/archiver-utils/node_modules/readable-stream/CONTRIBUTING.md deleted file mode 100644 index f478d58..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/CONTRIBUTING.md +++ /dev/null @@ -1,38 +0,0 @@ -# Developer's Certificate of Origin 1.1 - -By making a contribution to this project, I certify that: - -* (a) The contribution was created in whole or in part by me and I - have the right to submit it under the open source license - indicated in the file; or - -* (b) The contribution is based upon previous work that, to the best - of my knowledge, is covered under an appropriate open source - license and I have the right under that license to submit that - work with modifications, whether created in whole or in part - by me, under the same open source license (unless I am - permitted to submit under a different license), as indicated - in the file; or - -* (c) The contribution was provided directly to me by some other - person who certified (a), (b) or (c) and I have not modified - it. 
- -* (d) I understand and agree that this project and the contribution - are public and that a record of the contribution (including all - personal information I submit with it, including my sign-off) is - maintained indefinitely and may be redistributed consistent with - this project or the open source license(s) involved. - -## Moderation Policy - -The [Node.js Moderation Policy] applies to this WG. - -## Code of Conduct - -The [Node.js Code of Conduct][] applies to this WG. - -[Node.js Code of Conduct]: -https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md -[Node.js Moderation Policy]: -https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/archiver-utils/node_modules/readable-stream/GOVERNANCE.md b/node_modules/archiver-utils/node_modules/readable-stream/GOVERNANCE.md deleted file mode 100644 index 16ffb93..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/GOVERNANCE.md +++ /dev/null @@ -1,136 +0,0 @@ -### Streams Working Group - -The Node.js Streams is jointly governed by a Working Group -(WG) -that is responsible for high-level guidance of the project. - -The WG has final authority over this project including: - -* Technical direction -* Project governance and process (including this policy) -* Contribution policy -* GitHub repository hosting -* Conduct guidelines -* Maintaining the list of additional Collaborators - -For the current list of WG members, see the project -[README.md](./README.md#current-project-team-members). - -### Collaborators - -The readable-stream GitHub repository is -maintained by the WG and additional Collaborators who are added by the -WG on an ongoing basis. - -Individuals making significant and valuable contributions are made -Collaborators and given commit-access to the project. These -individuals are identified by the WG and their addition as -Collaborators is discussed during the WG meeting. - -_Note:_ If you make a significant contribution and are not considered -for commit-access log an issue or contact a WG member directly and it -will be brought up in the next WG meeting. - -Modifications of the contents of the readable-stream repository are -made on -a collaborative basis. Anybody with a GitHub account may propose a -modification via pull request and it will be considered by the project -Collaborators. All pull requests must be reviewed and accepted by a -Collaborator with sufficient expertise who is able to take full -responsibility for the change. In the case of pull requests proposed -by an existing Collaborator, an additional Collaborator is required -for sign-off. Consensus should be sought if additional Collaborators -participate and there is disagreement around a particular -modification. See _Consensus Seeking Process_ below for further detail -on the consensus model used for governance. - -Collaborators may opt to elevate significant or controversial -modifications, or modifications that have not found consensus to the -WG for discussion by assigning the ***WG-agenda*** tag to a pull -request or issue. The WG should serve as the final arbiter where -required. - -For the current list of Collaborators, see the project -[README.md](./README.md#members). - -### WG Membership - -WG seats are not time-limited. There is no fixed size of the WG. -However, the expected target is between 6 and 12, to ensure adequate -coverage of important areas of expertise, balanced with the ability to -make decisions efficiently. 
- -There is no specific set of requirements or qualifications for WG -membership beyond these rules. - -The WG may add additional members to the WG by unanimous consensus. - -A WG member may be removed from the WG by voluntary resignation, or by -unanimous consensus of all other WG members. - -Changes to WG membership should be posted in the agenda, and may be -suggested as any other agenda item (see "WG Meetings" below). - -If an addition or removal is proposed during a meeting, and the full -WG is not in attendance to participate, then the addition or removal -is added to the agenda for the subsequent meeting. This is to ensure -that all members are given the opportunity to participate in all -membership decisions. If a WG member is unable to attend a meeting -where a planned membership decision is being made, then their consent -is assumed. - -No more than 1/3 of the WG members may be affiliated with the same -employer. If removal or resignation of a WG member, or a change of -employment by a WG member, creates a situation where more than 1/3 of -the WG membership shares an employer, then the situation must be -immediately remedied by the resignation or removal of one or more WG -members affiliated with the over-represented employer(s). - -### WG Meetings - -The WG meets occasionally on a Google Hangout On Air. A designated moderator -approved by the WG runs the meeting. Each meeting should be -published to YouTube. - -Items are added to the WG agenda that are considered contentious or -are modifications of governance, contribution policy, WG membership, -or release process. - -The intention of the agenda is not to approve or review all patches; -that should happen continuously on GitHub and be handled by the larger -group of Collaborators. - -Any community member or contributor can ask that something be added to -the next meeting's agenda by logging a GitHub Issue. Any Collaborator, -WG member or the moderator can add the item to the agenda by adding -the ***WG-agenda*** tag to the issue. - -Prior to each WG meeting the moderator will share the Agenda with -members of the WG. WG members can add any items they like to the -agenda at the beginning of each meeting. The moderator and the WG -cannot veto or remove items. - -The WG may invite persons or representatives from certain projects to -participate in a non-voting capacity. - -The moderator is responsible for summarizing the discussion of each -agenda item and sends it as a pull request after the meeting. - -### Consensus Seeking Process - -The WG follows a -[Consensus -Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) -decision-making model. - -When an agenda item has appeared to reach a consensus the moderator -will ask "Does anyone object?" as a final call for dissent from the -consensus. - -If an agenda item cannot reach a consensus a WG member can call for -either a closing vote or a vote to table the issue to the next -meeting. The call for a vote must be seconded by a majority of the WG -or else the discussion will continue. Simple majority wins. - -Note that changes to WG membership require a majority consensus. See -"WG Membership" above. 
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/LICENSE b/node_modules/archiver-utils/node_modules/readable-stream/LICENSE deleted file mode 100644 index 2873b3b..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/LICENSE +++ /dev/null @@ -1,47 +0,0 @@ -Node.js is licensed for use as follows: - -""" -Copyright Node.js contributors. All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -""" - -This license applies to parts of Node.js originating from the -https://github.com/joyent/node repository: - -""" -Copyright Joyent, Inc. and other Node contributors. All rights reserved. -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. 
-""" diff --git a/node_modules/archiver-utils/node_modules/readable-stream/README.md b/node_modules/archiver-utils/node_modules/readable-stream/README.md deleted file mode 100644 index f1c5a93..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/README.md +++ /dev/null @@ -1,58 +0,0 @@ -# readable-stream - -***Node-core v8.17.0 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) - - -[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) -[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) - - -[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) - -```bash -npm install --save readable-stream -``` - -***Node-core streams for userland*** - -This package is a mirror of the Streams2 and Streams3 implementations in -Node-core. - -Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.17.0/docs/api/stream.html). - -If you want to guarantee a stable streams base, regardless of what version of -Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). - -As of version 2.0.0 **readable-stream** uses semantic versioning. - -# Streams Working Group - -`readable-stream` is maintained by the Streams Working Group, which -oversees the development and maintenance of the Streams API within -Node.js. The responsibilities of the Streams Working Group include: - -* Addressing stream issues on the Node.js issue tracker. -* Authoring and editing stream documentation within the Node.js project. -* Reviewing changes to stream subclasses within the Node.js project. -* Redirecting changes to streams from the Node.js project to this - project. -* Assisting in the implementation of stream providers within Node.js. -* Recommending versions of `readable-stream` to be included in Node.js. -* Messaging about the future of streams to give the community advance - notice of changes. 
- - -## Team Members - -* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> - - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B -* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> - - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 -* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> - - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D -* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> -* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> -* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> -* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> - - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E -* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> diff --git a/node_modules/archiver-utils/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/archiver-utils/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md deleted file mode 100644 index 83275f1..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md +++ /dev/null @@ -1,60 +0,0 @@ -# streams WG Meeting 2015-01-30 - -## Links - -* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg -* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 -* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ - -## Agenda - -Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. - -* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) -* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) -* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) -* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) - -## Minutes - -### adopt a charter - -* group: +1's all around - -### What versioning scheme should be adopted? -* group: +1’s 3.0.0 -* domenic+group: pulling in patches from other sources where appropriate -* mikeal: version independently, suggesting versions for io.js -* mikeal+domenic: work with TC to notify in advance of changes -simpler stream creation - -### streamline creation of streams -* sam: streamline creation of streams -* domenic: nice simple solution posted - but, we lose the opportunity to change the model - may not be backwards incompatible (double check keys) - - **action item:** domenic will check - -### remove implicit flowing of streams on(‘data’) -* add isFlowing / isPaused -* mikeal: worrying that we’re documenting polyfill methods – confuses users -* domenic: more reflective API is probably good, with warning labels for users -* new section for mad scientists (reflective stream access) -* calvin: name the “third state” -* mikeal: maybe borrow the name from whatwg? -* domenic: we’re missing the “third state” -* consensus: kind of difficult to name the third state -* mikeal: figure out differences in states / compat -* mathias: always flow on data – eliminates third state - * explore what it breaks - -**action items:** -* ask isaac for ability to list packages by what public io.js APIs they use (esp. 
Stream) -* ask rod/build for infrastructure -* **chris**: explore the “flow on data” approach -* add isPaused/isFlowing -* add new docs section -* move isPaused to that section - - diff --git a/node_modules/archiver-utils/node_modules/readable-stream/duplex-browser.js b/node_modules/archiver-utils/node_modules/readable-stream/duplex-browser.js deleted file mode 100644 index f8b2db8..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/duplex-browser.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('./lib/_stream_duplex.js'); diff --git a/node_modules/archiver-utils/node_modules/readable-stream/duplex.js b/node_modules/archiver-utils/node_modules/readable-stream/duplex.js deleted file mode 100644 index 46924cb..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/duplex.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('./readable').Duplex diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_duplex.js deleted file mode 100644 index 57003c3..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_duplex.js +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// a duplex stream is just a stream that is both readable and writable. -// Since JS doesn't have multiple prototypal inheritance, this class -// prototypally inherits from Readable, and then parasitically from -// Writable. 
- -'use strict'; - -/**/ - -var pna = require('process-nextick-args'); -/**/ - -/**/ -var objectKeys = Object.keys || function (obj) { - var keys = []; - for (var key in obj) { - keys.push(key); - }return keys; -}; -/**/ - -module.exports = Duplex; - -/**/ -var util = Object.create(require('core-util-is')); -util.inherits = require('inherits'); -/**/ - -var Readable = require('./_stream_readable'); -var Writable = require('./_stream_writable'); - -util.inherits(Duplex, Readable); - -{ - // avoid scope creep, the keys array can then be collected - var keys = objectKeys(Writable.prototype); - for (var v = 0; v < keys.length; v++) { - var method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; - } -} - -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); - - Readable.call(this, options); - Writable.call(this, options); - - if (options && options.readable === false) this.readable = false; - - if (options && options.writable === false) this.writable = false; - - this.allowHalfOpen = true; - if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; - - this.once('end', onend); -} - -Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function () { - return this._writableState.highWaterMark; - } -}); - -// the no-half-open enforcer -function onend() { - // if we allow half-open state, or if the writable side ended, - // then we're ok. - if (this.allowHalfOpen || this._writableState.ended) return; - - // no more data can be written. - // But allow more writes to happen in this tick. - pna.nextTick(onEndNT, this); -} - -function onEndNT(self) { - self.end(); -} - -Object.defineProperty(Duplex.prototype, 'destroyed', { - get: function () { - if (this._readableState === undefined || this._writableState === undefined) { - return false; - } - return this._readableState.destroyed && this._writableState.destroyed; - }, - set: function (value) { - // we ignore the value if the stream - // has not been initialized yet - if (this._readableState === undefined || this._writableState === undefined) { - return; - } - - // backward compatibility, the user is explicitly - // managing destroyed - this._readableState.destroyed = value; - this._writableState.destroyed = value; - } -}); - -Duplex.prototype._destroy = function (err, cb) { - this.push(null); - this.end(); - - pna.nextTick(cb, err); -}; \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_passthrough.js deleted file mode 100644 index 612edb4..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_passthrough.js +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. - -'use strict'; - -module.exports = PassThrough; - -var Transform = require('./_stream_transform'); - -/**/ -var util = Object.create(require('core-util-is')); -util.inherits = require('inherits'); -/**/ - -util.inherits(PassThrough, Transform); - -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); - - Transform.call(this, options); -} - -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); -}; \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_readable.js deleted file mode 100644 index 3af95cb..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_readable.js +++ /dev/null @@ -1,1019 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -'use strict'; - -/**/ - -var pna = require('process-nextick-args'); -/**/ - -module.exports = Readable; - -/**/ -var isArray = require('isarray'); -/**/ - -/**/ -var Duplex; -/**/ - -Readable.ReadableState = ReadableState; - -/**/ -var EE = require('events').EventEmitter; - -var EElistenerCount = function (emitter, type) { - return emitter.listeners(type).length; -}; -/**/ - -/**/ -var Stream = require('./internal/streams/stream'); -/**/ - -/**/ - -var Buffer = require('safe-buffer').Buffer; -var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} - -/**/ - -/**/ -var util = Object.create(require('core-util-is')); -util.inherits = require('inherits'); -/**/ - -/**/ -var debugUtil = require('util'); -var debug = void 0; -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); -} else { - debug = function () {}; -} -/**/ - -var BufferList = require('./internal/streams/BufferList'); -var destroyImpl = require('./internal/streams/destroy'); -var StringDecoder; - -util.inherits(Readable, Stream); - -var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; - -function prependListener(emitter, event, fn) { - // Sadly this is not cacheable as some libraries bundle their own - // event emitter implementation with them. - if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); - - // This is a hack to make sure that our error handler is attached before any - // userland ones. NEVER DO THIS. This is here only because this code needs - // to continue to work with older versions of Node.js that do not include - // the prependListener() method. The goal is to eventually remove this hack. - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; -} - -function ReadableState(options, stream) { - Duplex = Duplex || require('./_stream_duplex'); - - options = options || {}; - - // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - var isDuplex = stream instanceof Duplex; - - // object stream flag. Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away - this.objectMode = !!options.objectMode; - - if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; - - // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - var hwm = options.highWaterMark; - var readableHwm = options.readableHighWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; - - if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; - - // cast to ints. 
- this.highWaterMark = Math.floor(this.highWaterMark); - - // A linked list is used to store data chunks instead of an array because the - // linked list can remove elements from the beginning faster than - // array.shift() - this.buffer = new BufferList(); - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; - - // a flag to be able to tell if the event 'readable'/'data' is emitted - // immediately, or on a later tick. We set this to true at first, because - // any actions that shouldn't happen until "later" should generally also - // not happen before the first read call. - this.sync = true; - - // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; - - // has it been destroyed - this.destroyed = false; - - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; - - // the number of writers that are awaiting a drain event in .pipe()s - this.awaitDrain = 0; - - // if true, a maybeReadMore has been scheduled - this.readingMore = false; - - this.decoder = null; - this.encoding = null; - if (options.encoding) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; - } -} - -function Readable(options) { - Duplex = Duplex || require('./_stream_duplex'); - - if (!(this instanceof Readable)) return new Readable(options); - - this._readableState = new ReadableState(options, this); - - // legacy - this.readable = true; - - if (options) { - if (typeof options.read === 'function') this._read = options.read; - - if (typeof options.destroy === 'function') this._destroy = options.destroy; - } - - Stream.call(this); -} - -Object.defineProperty(Readable.prototype, 'destroyed', { - get: function () { - if (this._readableState === undefined) { - return false; - } - return this._readableState.destroyed; - }, - set: function (value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._readableState) { - return; - } - - // backward compatibility, the user is explicitly - // managing destroyed - this._readableState.destroyed = value; - } -}); - -Readable.prototype.destroy = destroyImpl.destroy; -Readable.prototype._undestroy = destroyImpl.undestroy; -Readable.prototype._destroy = function (err, cb) { - this.push(null); - cb(err); -}; - -// Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. 
-Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - var skipChunkCheck; - - if (!state.objectMode) { - if (typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; - if (encoding !== state.encoding) { - chunk = Buffer.from(chunk, encoding); - encoding = ''; - } - skipChunkCheck = true; - } - } else { - skipChunkCheck = true; - } - - return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); -}; - -// Unshift should *always* be something directly out of read() -Readable.prototype.unshift = function (chunk) { - return readableAddChunk(this, chunk, null, true, false); -}; - -function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { - var state = stream._readableState; - if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else { - var er; - if (!skipChunkCheck) er = chunkInvalid(state, chunk); - if (er) { - stream.emit('error', er); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { - chunk = _uint8ArrayToBuffer(chunk); - } - - if (addToFront) { - if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); - } else if (state.ended) { - stream.emit('error', new Error('stream.push() after EOF')); - } else { - state.reading = false; - if (state.decoder && !encoding) { - chunk = state.decoder.write(chunk); - if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); - } else { - addChunk(stream, state, chunk, false); - } - } - } else if (!addToFront) { - state.reading = false; - } - } - - return needMoreData(state); -} - -function addChunk(stream, state, chunk, addToFront) { - if (state.flowing && state.length === 0 && !state.sync) { - stream.emit('data', chunk); - stream.read(0); - } else { - // update the buffer info. - state.length += state.objectMode ? 1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); - - if (state.needReadable) emitReadable(stream); - } - maybeReadMore(stream, state); -} - -function chunkInvalid(state, chunk) { - var er; - if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); - } - return er; -} - -// if it's past the high water mark, we can push in some more. -// Also, if we have no data yet, we can stand some -// more bytes. This is to work around cases where hwm=0, -// such as the repl. Also, if the push() triggered a -// readable event, and the user called read(largeNumber) such that -// needReadable was set, then we ought to push more, so that another -// 'readable' event will be triggered. -function needMoreData(state) { - return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); -} - -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; -}; - -// backwards compatibility. 
-Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this._readableState.decoder = new StringDecoder(enc); - this._readableState.encoding = enc; - return this; -}; - -// Don't raise the hwm > 8MB -var MAX_HWM = 0x800000; -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - n = MAX_HWM; - } else { - // Get the next highest power of 2 to prevent increasing hwm excessively in - // tiny amounts - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; - } - return n; -} - -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function howMuchToRead(n, state) { - if (n <= 0 || state.length === 0 && state.ended) return 0; - if (state.objectMode) return 1; - if (n !== n) { - // Only flow one buffer at a time - if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; - } - // If we're asking for more than the current hwm, then raise the hwm. - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - if (n <= state.length) return n; - // Don't have enough - if (!state.ended) { - state.needReadable = true; - return 0; - } - return state.length; -} - -// you can override either this method, or the async _read(n) below. -Readable.prototype.read = function (n) { - debug('read', n); - n = parseInt(n, 10); - var state = this._readableState; - var nOrig = n; - - if (n !== 0) state.emittedReadable = false; - - // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; - } - - n = howMuchToRead(n, state); - - // if we've ended, and we're now clear, then finish it up. - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; - } - - // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. - // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. - - // if we need a readable event, then we need to do some reading. 
- var doRead = state.needReadable; - debug('need readable', doRead); - - // if we currently have less than the highWaterMark, then also read some - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); - } - - // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. - if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } else if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; - // if the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.needReadable = true; - // call internal read method - this._read(state.highWaterMark); - state.sync = false; - // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - if (!state.reading) n = howMuchToRead(nOrig, state); - } - - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; - - if (ret === null) { - state.needReadable = true; - n = 0; - } else { - state.length -= n; - } - - if (state.length === 0) { - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true; - - // If we tried to read() past the EOF, then emit end on the next tick. - if (nOrig !== n && state.ended) endReadable(this); - } - - if (ret !== null) this.emit('data', ret); - - return ret; -}; - -function onEofChunk(stream, state) { - if (state.ended) return; - if (state.decoder) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; - } - } - state.ended = true; - - // emit 'readable' now to make sure it gets picked up. - emitReadable(stream); -} - -// Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. -function emitReadable(stream) { - var state = stream._readableState; - state.needReadable = false; - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); - } -} - -function emitReadable_(stream) { - debug('emit readable'); - stream.emit('readable'); - flow(stream); -} - -// at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. -function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - pna.nextTick(maybeReadMore_, stream, state); - } -} - -function maybeReadMore_(stream, state) { - var len = state.length; - while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) - // didn't get any data, stop spinning. - break;else len = state.length; - } - state.readingMore = false; -} - -// abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. 
-// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. -Readable.prototype._read = function (n) { - this.emit('error', new Error('_read() is not implemented')); -}; - -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; - - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - case 1: - state.pipes = [state.pipes, dest]; - break; - default: - state.pipes.push(dest); - break; - } - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - - var endFn = doEnd ? onend : unpipe; - if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); - - dest.on('unpipe', onunpipe); - function onunpipe(readable, unpipeInfo) { - debug('onunpipe'); - if (readable === src) { - if (unpipeInfo && unpipeInfo.hasUnpiped === false) { - unpipeInfo.hasUnpiped = true; - cleanup(); - } - } - } - - function onend() { - debug('onend'); - dest.end(); - } - - // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); - - var cleanedUp = false; - function cleanup() { - debug('cleanup'); - // cleanup event handlers once the pipe is broken - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', unpipe); - src.removeListener('data', ondata); - - cleanedUp = true; - - // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); - } - - // If the user pushes more data while we're writing to dest then we'll end up - // in ondata again. However, we only want to increase awaitDrain once because - // dest will only emit one 'drain' event for the multiple writes. - // => Introduce a guard on increasing awaitDrain. - var increasedAwaitDrain = false; - src.on('data', ondata); - function ondata(chunk) { - debug('ondata'); - increasedAwaitDrain = false; - var ret = dest.write(chunk); - if (false === ret && !increasedAwaitDrain) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - // => Check whether `dest` is still a piping destination. - if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { - debug('false write response, pause', state.awaitDrain); - state.awaitDrain++; - increasedAwaitDrain = true; - } - src.pause(); - } - } - - // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. 
- function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); - } - - // Make sure our error handler is attached before userland ones. - prependListener(dest, 'error', onerror); - - // Both close and finish should trigger unpipe, but only once. - function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); - } - dest.once('close', onclose); - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); - } - dest.once('finish', onfinish); - - function unpipe() { - debug('unpipe'); - src.unpipe(dest); - } - - // tell the dest that it's being piped to - dest.emit('pipe', src); - - // start the flow if it hasn't been started already. - if (!state.flowing) { - debug('pipe resume'); - src.resume(); - } - - return dest; -}; - -function pipeOnDrain(src) { - return function () { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); - } - }; -} - -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - var unpipeInfo = { hasUnpiped: false }; - - // if we're not piping anywhere, then do nothing. - if (state.pipesCount === 0) return this; - - // just one destination. most common case. - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; - - if (!dest) dest = state.pipes; - - // got a match. - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this, unpipeInfo); - return this; - } - - // slow case. multiple pipe destinations. - - if (!dest) { - // remove all. - var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - - for (var i = 0; i < len; i++) { - dests[i].emit('unpipe', this, { hasUnpiped: false }); - }return this; - } - - // try to find the right one. - var index = indexOf(state.pipes, dest); - if (index === -1) return this; - - state.pipes.splice(index, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; - - dest.emit('unpipe', this, unpipeInfo); - - return this; -}; - -// set up data events if they are asked for -// Ensure readable listeners eventually get something -Readable.prototype.on = function (ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); - - if (ev === 'data') { - // Start flowing on next tick if stream isn't explicitly paused - if (this._readableState.flowing !== false) this.resume(); - } else if (ev === 'readable') { - var state = this._readableState; - if (!state.endEmitted && !state.readableListening) { - state.readableListening = state.needReadable = true; - state.emittedReadable = false; - if (!state.reading) { - pna.nextTick(nReadingNextTick, this); - } else if (state.length) { - emitReadable(this); - } - } - } - - return res; -}; -Readable.prototype.addListener = Readable.prototype.on; - -function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); -} - -// pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. 
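As the comment above notes, attaching a 'data' listener switches the stream into flowing mode, and pause()/resume() toggle that state. A small sketch, again against Node's built-in `stream` module with made-up data:

```js
// Sketch only: 'data' listeners imply flowing mode; pause() stops delivery and
// resume() (scheduled internally on the next tick) starts it again.
const { Readable } = require('stream');

const r = Readable.from(['one', 'two', 'three']); // sample chunks

r.on('data', (chunk) => {
  console.log('got', chunk);
  r.pause();                          // temporarily leave flowing mode
  setTimeout(() => r.resume(), 50);   // re-enter it shortly afterwards
});

r.on('end', () => console.log('end'));
```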
-Readable.prototype.resume = function () { - var state = this._readableState; - if (!state.flowing) { - debug('resume'); - state.flowing = true; - resume(this, state); - } - return this; -}; - -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - pna.nextTick(resume_, stream, state); - } -} - -function resume_(stream, state) { - if (!state.reading) { - debug('resume read 0'); - stream.read(0); - } - - state.resumeScheduled = false; - state.awaitDrain = 0; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); -} - -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); - if (false !== this._readableState.flowing) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); - } - return this; -}; - -function flow(stream) { - var state = stream._readableState; - debug('flow', state.flowing); - while (state.flowing && stream.read() !== null) {} -} - -// wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. -Readable.prototype.wrap = function (stream) { - var _this = this; - - var state = this._readableState; - var paused = false; - - stream.on('end', function () { - debug('wrapped end'); - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) _this.push(chunk); - } - - _this.push(null); - }); - - stream.on('data', function (chunk) { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); - - // don't skip over falsy values in objectMode - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - - var ret = _this.push(chunk); - if (!ret) { - paused = true; - stream.pause(); - } - }); - - // proxy all the other methods. - // important when wrapping filters and duplexes. - for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function (method) { - return function () { - return stream[method].apply(stream, arguments); - }; - }(i); - } - } - - // proxy certain important events. - for (var n = 0; n < kProxyEvents.length; n++) { - stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); - } - - // when we try to consume some more bytes, simply unpause the - // underlying stream. - this._read = function (n) { - debug('wrapped _read', n); - if (paused) { - paused = false; - stream.resume(); - } - }; - - return this; -}; - -Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function () { - return this._readableState.highWaterMark; - } -}); - -// exposed for testing purposes only. -Readable._fromList = fromList; - -// Pluck off n bytes from an array of buffers. -// Length is the combined lengths of all the buffers in the list. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. 
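The fromList/copyFromBuffer helpers that follow are what back the byte-counted read(n) API. A short sketch of that behaviour from the outside, using Node's built-in `stream` module and made-up chunks:

```js
// Sketch only: read(n) slices up to n bytes out of the internal buffer list;
// once the stream has ended, a final short read returns whatever is left.
const { Readable } = require('stream');

const r = new Readable({ read() {} }); // no-op _read; chunks are pushed manually
r.push(Buffer.from('hello '));
r.push(Buffer.from('world'));
r.push(null); // signal end of stream

r.on('readable', () => {
  let chunk;
  while ((chunk = r.read(4)) !== null) {
    console.log(chunk.toString()); // prints 'hell', 'o wo', then the 3-byte 'rld'
  }
});
```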
-function fromList(n, state) { - // nothing buffered - if (state.length === 0) return null; - - var ret; - if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { - // read it all, truncate the list - if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); - state.buffer.clear(); - } else { - // read part of list - ret = fromListPartial(n, state.buffer, state.decoder); - } - - return ret; -} - -// Extracts only enough buffered data to satisfy the amount requested. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function fromListPartial(n, list, hasStrings) { - var ret; - if (n < list.head.data.length) { - // slice is the same for buffers and strings - ret = list.head.data.slice(0, n); - list.head.data = list.head.data.slice(n); - } else if (n === list.head.data.length) { - // first chunk is a perfect match - ret = list.shift(); - } else { - // result spans more than one buffer - ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); - } - return ret; -} - -// Copies a specified amount of characters from the list of buffered data -// chunks. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function copyFromBufferString(n, list) { - var p = list.head; - var c = 1; - var ret = p.data; - n -= ret.length; - while (p = p.next) { - var str = p.data; - var nb = n > str.length ? str.length : n; - if (nb === str.length) ret += str;else ret += str.slice(0, n); - n -= nb; - if (n === 0) { - if (nb === str.length) { - ++c; - if (p.next) list.head = p.next;else list.head = list.tail = null; - } else { - list.head = p; - p.data = str.slice(nb); - } - break; - } - ++c; - } - list.length -= c; - return ret; -} - -// Copies a specified amount of bytes from the list of buffered data chunks. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function copyFromBuffer(n, list) { - var ret = Buffer.allocUnsafe(n); - var p = list.head; - var c = 1; - p.data.copy(ret); - n -= p.data.length; - while (p = p.next) { - var buf = p.data; - var nb = n > buf.length ? buf.length : n; - buf.copy(ret, ret.length - n, 0, nb); - n -= nb; - if (n === 0) { - if (nb === buf.length) { - ++c; - if (p.next) list.head = p.next;else list.head = list.tail = null; - } else { - list.head = p; - p.data = buf.slice(nb); - } - break; - } - ++c; - } - list.length -= c; - return ret; -} - -function endReadable(stream) { - var state = stream._readableState; - - // If we get here before consuming all the bytes, then that is a - // bug in node. Should never happen. - if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); - - if (!state.endEmitted) { - state.ended = true; - pna.nextTick(endReadableNT, state, stream); - } -} - -function endReadableNT(state, stream) { - // Check that we didn't get one last unshift. 
- if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); - } -} - -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; - } - return -1; -} \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_transform.js deleted file mode 100644 index fcfc105..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_transform.js +++ /dev/null @@ -1,214 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. 
For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. - -'use strict'; - -module.exports = Transform; - -var Duplex = require('./_stream_duplex'); - -/**/ -var util = Object.create(require('core-util-is')); -util.inherits = require('inherits'); -/**/ - -util.inherits(Transform, Duplex); - -function afterTransform(er, data) { - var ts = this._transformState; - ts.transforming = false; - - var cb = ts.writecb; - - if (!cb) { - return this.emit('error', new Error('write callback called multiple times')); - } - - ts.writechunk = null; - ts.writecb = null; - - if (data != null) // single equals check for both `null` and `undefined` - this.push(data); - - cb(er); - - var rs = this._readableState; - rs.reading = false; - if (rs.needReadable || rs.length < rs.highWaterMark) { - this._read(rs.highWaterMark); - } -} - -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); - - Duplex.call(this, options); - - this._transformState = { - afterTransform: afterTransform.bind(this), - needTransform: false, - transforming: false, - writecb: null, - writechunk: null, - writeencoding: null - }; - - // start out asking for a readable event once data is transformed. - this._readableState.needReadable = true; - - // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. - this._readableState.sync = false; - - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; - - if (typeof options.flush === 'function') this._flush = options.flush; - } - - // When the writable side finishes, then flush out anything remaining. - this.on('prefinish', prefinish); -} - -function prefinish() { - var _this = this; - - if (typeof this._flush === 'function') { - this._flush(function (er, data) { - done(_this, er, data); - }); - } else { - done(this, null, null); - } -} - -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); -}; - -// This is the part where you do stuff! -// override this function in implementation classes. -// 'chunk' is an input chunk. -// -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. -// -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. If you -// never call cb(), then you'll never get another chunk. 
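A minimal sketch of a transform that follows the contract spelled out above (push zero or more output chunks, then call the callback once per input chunk). It uses Node's built-in `stream` module, and the uppercasing rule is an arbitrary example, not anything this package does:

```js
// Sketch only: each written chunk is pushed back out uppercased, and the
// callback tells the transform machinery that this chunk is fully consumed.
const { Transform } = require('stream');

const upper = new Transform({
  transform(chunk, encoding, callback) {
    this.push(chunk.toString().toUpperCase()); // zero or more push() calls
    callback();                                // exactly one callback per chunk
  }
});

process.stdin.pipe(upper).pipe(process.stdout);
```

Passing an Error to callback() puts the whole operation into an error state, and never calling it stalls the stream, exactly as the comment above warns.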
-Transform.prototype._transform = function (chunk, encoding, cb) { - throw new Error('_transform() is not implemented'); -}; - -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); - } -}; - -// Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. -Transform.prototype._read = function (n) { - var ts = this._transformState; - - if (ts.writechunk !== null && ts.writecb && !ts.transforming) { - ts.transforming = true; - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. - ts.needTransform = true; - } -}; - -Transform.prototype._destroy = function (err, cb) { - var _this2 = this; - - Duplex.prototype._destroy.call(this, err, function (err2) { - cb(err2); - _this2.emit('close'); - }); -}; - -function done(stream, er, data) { - if (er) return stream.emit('error', er); - - if (data != null) // single equals check for both `null` and `undefined` - stream.push(data); - - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided - if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); - - if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); - - return stream.push(null); -} \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_writable.js deleted file mode 100644 index e1e897f..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_writable.js +++ /dev/null @@ -1,685 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. 
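As the header comment above says, a Writable only needs an async _write(chunk, encoding, cb); the rest of this file handles the buffering and 'drain' emission. A minimal consumer-side sketch against Node's built-in `stream` module, with placeholder logging standing in for a real sink:

```js
// Sketch only: the options form of Writable supplies _write; calling the
// callback acknowledges the chunk, and passing it an Error fails the stream.
const { Writable } = require('stream');

const sink = new Writable({
  write(chunk, encoding, callback) {
    console.log('writing %d bytes', chunk.length);
    callback();
  }
});

sink.write('hello');
sink.end('world', () => console.log('all writes flushed'));
```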
- -'use strict'; - -/**/ - -var pna = require('process-nextick-args'); -/**/ - -module.exports = Writable; - -/* */ -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} - -// It seems a linked list but it is not -// there will be only 2 of these for each stream -function CorkedRequest(state) { - var _this = this; - - this.next = null; - this.entry = null; - this.finish = function () { - onCorkedFinish(_this, state); - }; -} -/* */ - -/**/ -var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; -/**/ - -/**/ -var Duplex; -/**/ - -Writable.WritableState = WritableState; - -/**/ -var util = Object.create(require('core-util-is')); -util.inherits = require('inherits'); -/**/ - -/**/ -var internalUtil = { - deprecate: require('util-deprecate') -}; -/**/ - -/**/ -var Stream = require('./internal/streams/stream'); -/**/ - -/**/ - -var Buffer = require('safe-buffer').Buffer; -var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} - -/**/ - -var destroyImpl = require('./internal/streams/destroy'); - -util.inherits(Writable, Stream); - -function nop() {} - -function WritableState(options, stream) { - Duplex = Duplex || require('./_stream_duplex'); - - options = options || {}; - - // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - var isDuplex = stream instanceof Duplex; - - // object stream flag to indicate whether or not this stream - // contains buffers or objects. - this.objectMode = !!options.objectMode; - - if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; - - // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() - var hwm = options.highWaterMark; - var writableHwm = options.writableHighWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; - - if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; - - // cast to ints. - this.highWaterMark = Math.floor(this.highWaterMark); - - // if _final has been called - this.finalCalled = false; - - // drain event flag. - this.needDrain = false; - // at the start of calling end() - this.ending = false; - // when end() has been called, and returned - this.ended = false; - // when 'finish' is emitted - this.finished = false; - - // has it been destroyed - this.destroyed = false; - - // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; - - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. 
- // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; - - // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - this.length = 0; - - // a flag to see when we're in the middle of a write. - this.writing = false; - - // when true all writes will be buffered until .uncork() call - this.corked = 0; - - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; - - // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - this.bufferProcessing = false; - - // the callback that's passed to _write(chunk,cb) - this.onwrite = function (er) { - onwrite(stream, er); - }; - - // the callback that the user supplies to write(chunk,encoding,cb) - this.writecb = null; - - // the amount that is being written when _write is called. - this.writelen = 0; - - this.bufferedRequest = null; - this.lastBufferedRequest = null; - - // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted - this.pendingcb = 0; - - // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams - this.prefinished = false; - - // True if the error was already emitted and should not be thrown again - this.errorEmitted = false; - - // count buffered requests - this.bufferedRequestCount = 0; - - // allocate the first CorkedRequest, there is always - // one allocated and free to use, and we maintain at most two - this.corkedRequestsFree = new CorkedRequest(this); -} - -WritableState.prototype.getBuffer = function getBuffer() { - var current = this.bufferedRequest; - var out = []; - while (current) { - out.push(current); - current = current.next; - } - return out; -}; - -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function () { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') - }); - } catch (_) {} -})(); - -// Test _writableState for inheritance to account for Duplex streams, -// whose prototype chain only points to Readable. -var realHasInstance; -if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { - realHasInstance = Function.prototype[Symbol.hasInstance]; - Object.defineProperty(Writable, Symbol.hasInstance, { - value: function (object) { - if (realHasInstance.call(this, object)) return true; - if (this !== Writable) return false; - - return object && object._writableState instanceof WritableState; - } - }); -} else { - realHasInstance = function (object) { - return object instanceof this; - }; -} - -function Writable(options) { - Duplex = Duplex || require('./_stream_duplex'); - - // Writable ctor is applied to Duplexes, too. - // `realHasInstance` is necessary because using plain `instanceof` - // would return false, as no `_writableState` property is attached. 
- - // Trying to use the custom `instanceof` for Writable here will also break the - // Node.js LazyTransform implementation, which has a non-trivial getter for - // `_writableState` that would lead to infinite recursion. - if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { - return new Writable(options); - } - - this._writableState = new WritableState(options, this); - - // legacy. - this.writable = true; - - if (options) { - if (typeof options.write === 'function') this._write = options.write; - - if (typeof options.writev === 'function') this._writev = options.writev; - - if (typeof options.destroy === 'function') this._destroy = options.destroy; - - if (typeof options.final === 'function') this._final = options.final; - } - - Stream.call(this); -} - -// Otherwise people can pipe Writable streams, which is just wrong. -Writable.prototype.pipe = function () { - this.emit('error', new Error('Cannot pipe, not readable')); -}; - -function writeAfterEnd(stream, cb) { - var er = new Error('write after end'); - // TODO: defer error events consistently everywhere, not just the cb - stream.emit('error', er); - pna.nextTick(cb, er); -} - -// Checks that a user-supplied chunk is valid, especially for the particular -// mode the stream is in. Currently this means that `null` is never accepted -// and undefined/non-string values are only allowed in object mode. -function validChunk(stream, state, chunk, cb) { - var valid = true; - var er = false; - - if (chunk === null) { - er = new TypeError('May not write null values to stream'); - } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); - } - if (er) { - stream.emit('error', er); - pna.nextTick(cb, er); - valid = false; - } - return valid; -} - -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - var isBuf = !state.objectMode && _isUint8Array(chunk); - - if (isBuf && !Buffer.isBuffer(chunk)) { - chunk = _uint8ArrayToBuffer(chunk); - } - - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - - if (typeof cb !== 'function') cb = nop; - - if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); - } - - return ret; -}; - -Writable.prototype.cork = function () { - var state = this._writableState; - - state.corked++; -}; - -Writable.prototype.uncork = function () { - var state = this._writableState; - - if (state.corked) { - state.corked--; - - if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); - } -}; - -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. 
- if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); - this._writableState.defaultEncoding = encoding; - return this; -}; - -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = Buffer.from(chunk, encoding); - } - return chunk; -} - -Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function () { - return this._writableState.highWaterMark; - } -}); - -// if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. -function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { - if (!isBuf) { - var newChunk = decodeChunk(state, chunk, encoding); - if (chunk !== newChunk) { - isBuf = true; - encoding = 'buffer'; - chunk = newChunk; - } - } - var len = state.objectMode ? 1 : chunk.length; - - state.length += len; - - var ret = state.length < state.highWaterMark; - // we must ensure that previous needDrain will not be reset to false. - if (!ret) state.needDrain = true; - - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = { - chunk: chunk, - encoding: encoding, - isBuf: isBuf, - callback: cb, - next: null - }; - if (last) { - last.next = state.lastBufferedRequest; - } else { - state.bufferedRequest = state.lastBufferedRequest; - } - state.bufferedRequestCount += 1; - } else { - doWrite(stream, state, false, len, chunk, encoding, cb); - } - - return ret; -} - -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); - state.sync = false; -} - -function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; - - if (sync) { - // defer the callback if we are being called synchronously - // to avoid piling up things on the stack - pna.nextTick(cb, er); - // this can emit finish, and it will always happen - // after error - pna.nextTick(finishMaybe, stream, state); - stream._writableState.errorEmitted = true; - stream.emit('error', er); - } else { - // the caller expect this to happen before if - // it is async - cb(er); - stream._writableState.errorEmitted = true; - stream.emit('error', er); - // this can emit finish, but finish must - // always follow error - finishMaybe(stream, state); - } -} - -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; -} - -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; - - onwriteStateUpdate(state); - - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state); - - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); - } - - if (sync) { - /**/ - 
asyncWrite(afterWrite, stream, state, finished, cb); - /**/ - } else { - afterWrite(stream, state, finished, cb); - } - } -} - -function afterWrite(stream, state, finished, cb) { - if (!finished) onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); -} - -// Must force callback to be called on nextTick, so that we don't -// emit 'drain' before the write() consumer gets the 'false' return -// value, and has a chance to attach a 'drain' listener. -function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit('drain'); - } -} - -// if there's something in the buffer waiting, then process it -function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; - - if (stream._writev && entry && entry.next) { - // Fast case, write everything using _writev() - var l = state.bufferedRequestCount; - var buffer = new Array(l); - var holder = state.corkedRequestsFree; - holder.entry = entry; - - var count = 0; - var allBuffers = true; - while (entry) { - buffer[count] = entry; - if (!entry.isBuf) allBuffers = false; - entry = entry.next; - count += 1; - } - buffer.allBuffers = allBuffers; - - doWrite(stream, state, true, state.length, buffer, '', holder.finish); - - // doWrite is almost always async, defer these to save a bit of time - // as the hot path ends with doWrite - state.pendingcb++; - state.lastBufferedRequest = null; - if (holder.next) { - state.corkedRequestsFree = holder.next; - holder.next = null; - } else { - state.corkedRequestsFree = new CorkedRequest(state); - } - state.bufferedRequestCount = 0; - } else { - // Slow case, write chunks one-by-one - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; - - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - state.bufferedRequestCount--; - // if we didn't call the onwrite immediately, then - // it means that we need to wait until it does. - // also, that means that the chunk and cb are currently - // being processed, so move the buffer counter past them. - if (state.writing) { - break; - } - } - - if (entry === null) state.lastBufferedRequest = null; - } - - state.bufferedRequest = entry; - state.bufferProcessing = false; -} - -Writable.prototype._write = function (chunk, encoding, cb) { - cb(new Error('_write() is not implemented')); -}; - -Writable.prototype._writev = null; - -Writable.prototype.end = function (chunk, encoding, cb) { - var state = this._writableState; - - if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); - - // .end() fully uncorks - if (state.corked) { - state.corked = 1; - this.uncork(); - } - - // ignore unnecessary end() calls. 
- if (!state.ending) endWritable(this, state, cb); -}; - -function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; -} -function callFinal(stream, state) { - stream._final(function (err) { - state.pendingcb--; - if (err) { - stream.emit('error', err); - } - state.prefinished = true; - stream.emit('prefinish'); - finishMaybe(stream, state); - }); -} -function prefinish(stream, state) { - if (!state.prefinished && !state.finalCalled) { - if (typeof stream._final === 'function') { - state.pendingcb++; - state.finalCalled = true; - pna.nextTick(callFinal, stream, state); - } else { - state.prefinished = true; - stream.emit('prefinish'); - } - } -} - -function finishMaybe(stream, state) { - var need = needFinish(state); - if (need) { - prefinish(stream, state); - if (state.pendingcb === 0) { - state.finished = true; - stream.emit('finish'); - } - } - return need; -} - -function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - if (cb) { - if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); - } - state.ended = true; - stream.writable = false; -} - -function onCorkedFinish(corkReq, state, err) { - var entry = corkReq.entry; - corkReq.entry = null; - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; - } - - // reuse the free corkReq. - state.corkedRequestsFree.next = corkReq; -} - -Object.defineProperty(Writable.prototype, 'destroyed', { - get: function () { - if (this._writableState === undefined) { - return false; - } - return this._writableState.destroyed; - }, - set: function (value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._writableState) { - return; - } - - // backward compatibility, the user is explicitly - // managing destroyed - this._writableState.destroyed = value; - } -}); - -Writable.prototype.destroy = destroyImpl.destroy; -Writable.prototype._undestroy = destroyImpl.undestroy; -Writable.prototype._destroy = function (err, cb) { - this.end(); - cb(err); -}; \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/BufferList.js deleted file mode 100644 index 5e08097..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/BufferList.js +++ /dev/null @@ -1,78 +0,0 @@ -'use strict'; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -var Buffer = require('safe-buffer').Buffer; -var util = require('util'); - -function copyBuffer(src, target, offset) { - src.copy(target, offset); -} - -module.exports = function () { - function BufferList() { - _classCallCheck(this, BufferList); - - this.head = null; - this.tail = null; - this.length = 0; - } - - BufferList.prototype.push = function push(v) { - var entry = { data: v, next: null }; - if (this.length > 0) this.tail.next = entry;else this.head = entry; - this.tail = entry; - ++this.length; - }; - - BufferList.prototype.unshift = function unshift(v) { - var entry = { data: v, next: this.head }; - if (this.length === 0) this.tail = entry; - this.head = entry; - ++this.length; - }; - - BufferList.prototype.shift = function shift() { - if (this.length === 0) return; - var ret = this.head.data; - if (this.length === 1) 
this.head = this.tail = null;else this.head = this.head.next; - --this.length; - return ret; - }; - - BufferList.prototype.clear = function clear() { - this.head = this.tail = null; - this.length = 0; - }; - - BufferList.prototype.join = function join(s) { - if (this.length === 0) return ''; - var p = this.head; - var ret = '' + p.data; - while (p = p.next) { - ret += s + p.data; - }return ret; - }; - - BufferList.prototype.concat = function concat(n) { - if (this.length === 0) return Buffer.alloc(0); - var ret = Buffer.allocUnsafe(n >>> 0); - var p = this.head; - var i = 0; - while (p) { - copyBuffer(p.data, ret, i); - i += p.data.length; - p = p.next; - } - return ret; - }; - - return BufferList; -}(); - -if (util && util.inspect && util.inspect.custom) { - module.exports.prototype[util.inspect.custom] = function () { - var obj = util.inspect({ length: this.length }); - return this.constructor.name + ' ' + obj; - }; -} \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/destroy.js deleted file mode 100644 index 85a8214..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/destroy.js +++ /dev/null @@ -1,84 +0,0 @@ -'use strict'; - -/**/ - -var pna = require('process-nextick-args'); -/**/ - -// undocumented cb() API, needed for core, not for public API -function destroy(err, cb) { - var _this = this; - - var readableDestroyed = this._readableState && this._readableState.destroyed; - var writableDestroyed = this._writableState && this._writableState.destroyed; - - if (readableDestroyed || writableDestroyed) { - if (cb) { - cb(err); - } else if (err) { - if (!this._writableState) { - pna.nextTick(emitErrorNT, this, err); - } else if (!this._writableState.errorEmitted) { - this._writableState.errorEmitted = true; - pna.nextTick(emitErrorNT, this, err); - } - } - - return this; - } - - // we set destroyed to true before firing error callbacks in order - // to make it re-entrance safe in case destroy() is called within callbacks - - if (this._readableState) { - this._readableState.destroyed = true; - } - - // if this is a duplex stream mark the writable part as destroyed as well - if (this._writableState) { - this._writableState.destroyed = true; - } - - this._destroy(err || null, function (err) { - if (!cb && err) { - if (!_this._writableState) { - pna.nextTick(emitErrorNT, _this, err); - } else if (!_this._writableState.errorEmitted) { - _this._writableState.errorEmitted = true; - pna.nextTick(emitErrorNT, _this, err); - } - } else if (cb) { - cb(err); - } - }); - - return this; -} - -function undestroy() { - if (this._readableState) { - this._readableState.destroyed = false; - this._readableState.reading = false; - this._readableState.ended = false; - this._readableState.endEmitted = false; - } - - if (this._writableState) { - this._writableState.destroyed = false; - this._writableState.ended = false; - this._writableState.ending = false; - this._writableState.finalCalled = false; - this._writableState.prefinished = false; - this._writableState.finished = false; - this._writableState.errorEmitted = false; - } -} - -function emitErrorNT(self, err) { - self.emit('error', err); -} - -module.exports = { - destroy: destroy, - undestroy: undestroy -}; \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream-browser.js 
b/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream-browser.js deleted file mode 100644 index 9332a3f..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream-browser.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('events').EventEmitter; diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream.js deleted file mode 100644 index ce2ad5b..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('stream'); diff --git a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/LICENSE b/node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/LICENSE deleted file mode 100644 index 0c068ce..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Feross Aboukhadijeh - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/README.md b/node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/README.md deleted file mode 100644 index e9a81af..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/README.md +++ /dev/null @@ -1,584 +0,0 @@ -# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] - -[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg -[travis-url]: https://travis-ci.org/feross/safe-buffer -[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg -[npm-url]: https://npmjs.org/package/safe-buffer -[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg -[downloads-url]: https://npmjs.org/package/safe-buffer -[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg -[standard-url]: https://standardjs.com - -#### Safer Node.js Buffer API - -**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, -`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** - -**Uses the built-in implementation when available.** - -## install - -``` -npm install safe-buffer -``` - -## usage - -The goal of this package is to provide a safe replacement for the node.js `Buffer`. - -It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to -the top of your node.js modules: - -```js -var Buffer = require('safe-buffer').Buffer - -// Existing buffer code will continue to work without issues: - -new Buffer('hey', 'utf8') -new Buffer([1, 2, 3], 'utf8') -new Buffer(obj) -new Buffer(16) // create an uninitialized buffer (potentially unsafe) - -// But you can use these new explicit APIs to make clear what you want: - -Buffer.from('hey', 'utf8') // convert from many types to a Buffer -Buffer.alloc(16) // create a zero-filled buffer (safe) -Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) -``` - -## api - -### Class Method: Buffer.from(array) - - -* `array` {Array} - -Allocates a new `Buffer` using an `array` of octets. - -```js -const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); - // creates a new Buffer containing ASCII bytes - // ['b','u','f','f','e','r'] -``` - -A `TypeError` will be thrown if `array` is not an `Array`. - -### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) - - -* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or - a `new ArrayBuffer()` -* `byteOffset` {Number} Default: `0` -* `length` {Number} Default: `arrayBuffer.length - byteOffset` - -When passed a reference to the `.buffer` property of a `TypedArray` instance, -the newly created `Buffer` will share the same allocated memory as the -TypedArray. - -```js -const arr = new Uint16Array(2); -arr[0] = 5000; -arr[1] = 4000; - -const buf = Buffer.from(arr.buffer); // shares the memory with arr; - -console.log(buf); - // Prints: - -// changing the TypedArray changes the Buffer also -arr[1] = 6000; - -console.log(buf); - // Prints: -``` - -The optional `byteOffset` and `length` arguments specify a memory range within -the `arrayBuffer` that will be shared by the `Buffer`. - -```js -const ab = new ArrayBuffer(10); -const buf = Buffer.from(ab, 0, 2); -console.log(buf.length); - // Prints: 2 -``` - -A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. 
- -### Class Method: Buffer.from(buffer) - - -* `buffer` {Buffer} - -Copies the passed `buffer` data onto a new `Buffer` instance. - -```js -const buf1 = Buffer.from('buffer'); -const buf2 = Buffer.from(buf1); - -buf1[0] = 0x61; -console.log(buf1.toString()); - // 'auffer' -console.log(buf2.toString()); - // 'buffer' (copy is not changed) -``` - -A `TypeError` will be thrown if `buffer` is not a `Buffer`. - -### Class Method: Buffer.from(str[, encoding]) - - -* `str` {String} String to encode. -* `encoding` {String} Encoding to use, Default: `'utf8'` - -Creates a new `Buffer` containing the given JavaScript string `str`. If -provided, the `encoding` parameter identifies the character encoding. -If not provided, `encoding` defaults to `'utf8'`. - -```js -const buf1 = Buffer.from('this is a tést'); -console.log(buf1.toString()); - // prints: this is a tést -console.log(buf1.toString('ascii')); - // prints: this is a tC)st - -const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); -console.log(buf2.toString()); - // prints: this is a tést -``` - -A `TypeError` will be thrown if `str` is not a string. - -### Class Method: Buffer.alloc(size[, fill[, encoding]]) - - -* `size` {Number} -* `fill` {Value} Default: `undefined` -* `encoding` {String} Default: `utf8` - -Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the -`Buffer` will be *zero-filled*. - -```js -const buf = Buffer.alloc(5); -console.log(buf); - // -``` - -The `size` must be less than or equal to the value of -`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is -`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will -be created if a `size` less than or equal to 0 is specified. - -If `fill` is specified, the allocated `Buffer` will be initialized by calling -`buf.fill(fill)`. See [`buf.fill()`][] for more information. - -```js -const buf = Buffer.alloc(5, 'a'); -console.log(buf); - // -``` - -If both `fill` and `encoding` are specified, the allocated `Buffer` will be -initialized by calling `buf.fill(fill, encoding)`. For example: - -```js -const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); -console.log(buf); - // -``` - -Calling `Buffer.alloc(size)` can be significantly slower than the alternative -`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance -contents will *never contain sensitive data*. - -A `TypeError` will be thrown if `size` is not a number. - -### Class Method: Buffer.allocUnsafe(size) - - -* `size` {Number} - -Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must -be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit -architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is -thrown. A zero-length Buffer will be created if a `size` less than or equal to -0 is specified. - -The underlying memory for `Buffer` instances created in this way is *not -initialized*. The contents of the newly created `Buffer` are unknown and -*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such -`Buffer` instances to zeroes. - -```js -const buf = Buffer.allocUnsafe(5); -console.log(buf); - // - // (octets will be different, every time) -buf.fill(0); -console.log(buf); - // -``` - -A `TypeError` will be thrown if `size` is not a number. 
- -Note that the `Buffer` module pre-allocates an internal `Buffer` instance of -size `Buffer.poolSize` that is used as a pool for the fast allocation of new -`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated -`new Buffer(size)` constructor) only when `size` is less than or equal to -`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default -value of `Buffer.poolSize` is `8192` but can be modified. - -Use of this pre-allocated internal memory pool is a key difference between -calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. -Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer -pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal -Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The -difference is subtle but can be important when an application requires the -additional performance that `Buffer.allocUnsafe(size)` provides. - -### Class Method: Buffer.allocUnsafeSlow(size) - - -* `size` {Number} - -Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The -`size` must be less than or equal to the value of -`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is -`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will -be created if a `size` less than or equal to 0 is specified. - -The underlying memory for `Buffer` instances created in this way is *not -initialized*. The contents of the newly created `Buffer` are unknown and -*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such -`Buffer` instances to zeroes. - -When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, -allocations under 4KB are, by default, sliced from a single pre-allocated -`Buffer`. This allows applications to avoid the garbage collection overhead of -creating many individually allocated Buffers. This approach improves both -performance and memory usage by eliminating the need to track and cleanup as -many `Persistent` objects. - -However, in the case where a developer may need to retain a small chunk of -memory from a pool for an indeterminate amount of time, it may be appropriate -to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then -copy out the relevant bits. - -```js -// need to keep around a few small chunks of memory -const store = []; - -socket.on('readable', () => { - const data = socket.read(); - // allocate for retained data - const sb = Buffer.allocUnsafeSlow(10); - // copy the data into the new allocation - data.copy(sb, 0, 0, 10); - store.push(sb); -}); -``` - -Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* -a developer has observed undue memory retention in their applications. - -A `TypeError` will be thrown if `size` is not a number. - -### All the Rest - -The rest of the `Buffer` API is exactly the same as in node.js. -[See the docs](https://nodejs.org/api/buffer.html). - - -## Related links - -- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) -- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) - -## Why is `Buffer` unsafe? - -Today, the node.js `Buffer` constructor is overloaded to handle many different argument -types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), -`ArrayBuffer`, and also `Number`. 
- -The API is optimized for convenience: you can throw any type at it, and it will try to do -what you want. - -Because the Buffer constructor is so powerful, you often see code like this: - -```js -// Convert UTF-8 strings to hex -function toHex (str) { - return new Buffer(str).toString('hex') -} -``` - -***But what happens if `toHex` is called with a `Number` argument?*** - -### Remote Memory Disclosure - -If an attacker can make your program call the `Buffer` constructor with a `Number` -argument, then they can make it allocate uninitialized memory from the node.js process. -This could potentially disclose TLS private keys, user data, or database passwords. - -When the `Buffer` constructor is passed a `Number` argument, it returns an -**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like -this, you **MUST** overwrite the contents before returning it to the user. - -From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): - -> `new Buffer(size)` -> -> - `size` Number -> -> The underlying memory for `Buffer` instances created in this way is not initialized. -> **The contents of a newly created `Buffer` are unknown and could contain sensitive -> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. - -(Emphasis our own.) - -Whenever the programmer intended to create an uninitialized `Buffer` you often see code -like this: - -```js -var buf = new Buffer(16) - -// Immediately overwrite the uninitialized buffer with data from another buffer -for (var i = 0; i < buf.length; i++) { - buf[i] = otherBuf[i] -} -``` - - -### Would this ever be a problem in real code? - -Yes. It's surprisingly common to forget to check the type of your variables in a -dynamically-typed language like JavaScript. - -Usually the consequences of assuming the wrong type is that your program crashes with an -uncaught exception. But the failure mode for forgetting to check the type of arguments to -the `Buffer` constructor is more catastrophic. - -Here's an example of a vulnerable service that takes a JSON payload and converts it to -hex: - -```js -// Take a JSON payload {str: "some string"} and convert it to hex -var server = http.createServer(function (req, res) { - var data = '' - req.setEncoding('utf8') - req.on('data', function (chunk) { - data += chunk - }) - req.on('end', function () { - var body = JSON.parse(data) - res.end(new Buffer(body.str).toString('hex')) - }) -}) - -server.listen(8080) -``` - -In this example, an http client just has to send: - -```json -{ - "str": 1000 -} -``` - -and it will get back 1,000 bytes of uninitialized memory from the server. - -This is a very serious bug. It's similar in severity to the -[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process -memory by remote attackers. - - -### Which real-world packages were vulnerable? - -#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) - -[Mathias Buus](https://github.com/mafintosh) and I -([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, -[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow -anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get -them to reveal 20 bytes at a time of uninitialized memory from the node.js process. - -Here's -[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) -that fixed it. 
-We then released a new fixed version, created a
-[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all
-vulnerable versions on npm so users will get a warning to upgrade to a newer version.
-
-#### [`ws`](https://www.npmjs.com/package/ws)
-
-That got us wondering if there were other vulnerable packages. Sure enough, within a short
-period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the
-most popular WebSocket implementation in node.js.
-
-If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as
-expected, then uninitialized server memory would be disclosed to the remote peer.
-
-These were the vulnerable methods:
-
-```js
-socket.send(number)
-socket.ping(number)
-socket.pong(number)
-```
-
-Here's a vulnerable socket server with some echo functionality:
-
-```js
-server.on('connection', function (socket) {
-  socket.on('message', function (message) {
-    message = JSON.parse(message)
-    if (message.type === 'echo') {
-      socket.send(message.data) // send back the user's message
-    }
-  })
-})
-```
-
-Calling `socket.send(number)` on the server will disclose server memory.
-
-Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue
-was fixed, with a more detailed explanation. Props to
-[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the
-[Node Security Project disclosure](https://nodesecurity.io/advisories/67).
-
-
-### What's the solution?
-
-It's important that node.js offers a fast way to get memory; otherwise, performance-critical
-applications would needlessly get a lot slower.
-
-But we need a better way to *signal our intent* as programmers. **When we want
-uninitialized memory, we should request it explicitly.**
-
-Sensitive functionality should not be packed into a developer-friendly API that loosely
-accepts many different types. This type of API encourages the lazy practice of passing
-variables in without checking the type very carefully.
-
-#### A new API: `Buffer.allocUnsafe(number)`
-
-The functionality of creating buffers with uninitialized memory should be part of another
-API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that
-frequently gets user input of all sorts of different types passed into it.
-
-```js
-var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory!
-
-// Immediately overwrite the uninitialized buffer with data from another buffer
-for (var i = 0; i < buf.length; i++) {
-  buf[i] = otherBuf[i]
-}
-```
-
-
-### How do we fix node.js core?
-
-We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as
-`semver-major`) which defends against one case:
-
-```js
-var str = 16
-new Buffer(str, 'utf8')
-```
-
-In this situation, it's implied that the programmer intended the first argument to be a
-string, since they passed an encoding as a second argument. Today, node.js will allocate
-uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not
-what the programmer intended.
-
-But this is only a partial solution, since if the programmer does `new Buffer(variable)`
-(without an `encoding` parameter) there's no way to know what they intended. If `variable`
-is sometimes a number, then uninitialized memory will sometimes be returned.
-
-### What's the real long-term fix?
-
-We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when
-we need uninitialized memory.
-But that would break thousands of packages.
-
-~~We believe the best solution is to:~~
-
-~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~
-
-~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~
-
-#### Update
-
-We now propose adding three new APIs:
-
-- `Buffer.from(value)` - convert from any type to a buffer
-- `Buffer.alloc(size)` - create a zero-filled buffer
-- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with the given size
-
-This solves the core problem that affected `ws` and `bittorrent-dht`:
-`Buffer(variable)` being tricked into accepting a number argument.
-
-This way, existing code continues working and the impact on the npm ecosystem will be
-minimal. Over time, npm maintainers can migrate performance-critical code to use
-`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`.
-
-
-### Conclusion
-
-We think there's a serious design issue with the `Buffer` API as it exists today. It
-promotes insecure software by putting high-risk functionality into a convenient API
-with friendly "developer ergonomics".
-
-This wasn't merely a theoretical exercise because we found the issue in some of the
-most popular npm packages.
-
-Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of
-`buffer`.
-
-```js
-var Buffer = require('safe-buffer').Buffer
-```
-
-Eventually, we hope that node.js core can switch to this new, safer behavior. We believe
-the impact on the ecosystem would be minimal since it's not a breaking change.
-Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while
-older, insecure packages would magically become safe from this attack vector.
-
-
-## links
-
-- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514)
-- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67)
-- [Node Security Project disclosure for `bittorrent-dht`](https://nodesecurity.io/advisories/68)
-
-
-## credit
-
-The original issues in `bittorrent-dht`
-([disclosure](https://nodesecurity.io/advisories/68)) and
-`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by
-[Mathias Buus](https://github.com/mafintosh) and
-[Feross Aboukhadijeh](http://feross.org/).
-
-Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues
-and for his work running the [Node Security Project](https://nodesecurity.io/).
-
-Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and
-auditing the code.
-
-
-## license
-
-MIT.
Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/index.d.ts b/node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/index.d.ts deleted file mode 100644 index e9fed80..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/index.d.ts +++ /dev/null @@ -1,187 +0,0 @@ -declare module "safe-buffer" { - export class Buffer { - length: number - write(string: string, offset?: number, length?: number, encoding?: string): number; - toString(encoding?: string, start?: number, end?: number): string; - toJSON(): { type: 'Buffer', data: any[] }; - equals(otherBuffer: Buffer): boolean; - compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; - copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; - slice(start?: number, end?: number): Buffer; - writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; - writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; - writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; - writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; - readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; - readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; - readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; - readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; - readUInt8(offset: number, noAssert?: boolean): number; - readUInt16LE(offset: number, noAssert?: boolean): number; - readUInt16BE(offset: number, noAssert?: boolean): number; - readUInt32LE(offset: number, noAssert?: boolean): number; - readUInt32BE(offset: number, noAssert?: boolean): number; - readInt8(offset: number, noAssert?: boolean): number; - readInt16LE(offset: number, noAssert?: boolean): number; - readInt16BE(offset: number, noAssert?: boolean): number; - readInt32LE(offset: number, noAssert?: boolean): number; - readInt32BE(offset: number, noAssert?: boolean): number; - readFloatLE(offset: number, noAssert?: boolean): number; - readFloatBE(offset: number, noAssert?: boolean): number; - readDoubleLE(offset: number, noAssert?: boolean): number; - readDoubleBE(offset: number, noAssert?: boolean): number; - swap16(): Buffer; - swap32(): Buffer; - swap64(): Buffer; - writeUInt8(value: number, offset: number, noAssert?: boolean): number; - writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; - writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; - writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; - writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; - writeInt8(value: number, offset: number, noAssert?: boolean): number; - writeInt16LE(value: number, offset: number, noAssert?: boolean): number; - writeInt16BE(value: number, offset: number, noAssert?: boolean): number; - writeInt32LE(value: number, offset: number, noAssert?: boolean): number; - writeInt32BE(value: number, offset: number, noAssert?: boolean): number; - writeFloatLE(value: number, offset: number, noAssert?: boolean): number; - writeFloatBE(value: number, offset: number, noAssert?: boolean): number; - writeDoubleLE(value: number, offset: number, noAssert?: 
boolean): number; - writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; - fill(value: any, offset?: number, end?: number): this; - indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; - lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; - includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; - - /** - * Allocates a new buffer containing the given {str}. - * - * @param str String to store in buffer. - * @param encoding encoding to use, optional. Default is 'utf8' - */ - constructor (str: string, encoding?: string); - /** - * Allocates a new buffer of {size} octets. - * - * @param size count of octets to allocate. - */ - constructor (size: number); - /** - * Allocates a new buffer containing the given {array} of octets. - * - * @param array The octets to store. - */ - constructor (array: Uint8Array); - /** - * Produces a Buffer backed by the same allocated memory as - * the given {ArrayBuffer}. - * - * - * @param arrayBuffer The ArrayBuffer with which to share memory. - */ - constructor (arrayBuffer: ArrayBuffer); - /** - * Allocates a new buffer containing the given {array} of octets. - * - * @param array The octets to store. - */ - constructor (array: any[]); - /** - * Copies the passed {buffer} data onto a new {Buffer} instance. - * - * @param buffer The buffer to copy. - */ - constructor (buffer: Buffer); - prototype: Buffer; - /** - * Allocates a new Buffer using an {array} of octets. - * - * @param array - */ - static from(array: any[]): Buffer; - /** - * When passed a reference to the .buffer property of a TypedArray instance, - * the newly created Buffer will share the same allocated memory as the TypedArray. - * The optional {byteOffset} and {length} arguments specify a memory range - * within the {arrayBuffer} that will be shared by the Buffer. - * - * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() - * @param byteOffset - * @param length - */ - static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; - /** - * Copies the passed {buffer} data onto a new Buffer instance. - * - * @param buffer - */ - static from(buffer: Buffer): Buffer; - /** - * Creates a new Buffer containing the given JavaScript string {str}. - * If provided, the {encoding} parameter identifies the character encoding. - * If not provided, {encoding} defaults to 'utf8'. - * - * @param str - */ - static from(str: string, encoding?: string): Buffer; - /** - * Returns true if {obj} is a Buffer - * - * @param obj object to test. - */ - static isBuffer(obj: any): obj is Buffer; - /** - * Returns true if {encoding} is a valid encoding argument. - * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' - * - * @param encoding string to test. - */ - static isEncoding(encoding: string): boolean; - /** - * Gives the actual byte length of a string. encoding defaults to 'utf8'. - * This is not the same as String.prototype.length since that returns the number of characters in a string. - * - * @param string string to test. - * @param encoding encoding used to evaluate (defaults to 'utf8') - */ - static byteLength(string: string, encoding?: string): number; - /** - * Returns a buffer which is the result of concatenating all the buffers in the list together. - * - * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. 
- * If the list has exactly one item, then the first item of the list is returned. - * If the list has more than one item, then a new Buffer is created. - * - * @param list An array of Buffer objects to concatenate - * @param totalLength Total length of the buffers when concatenated. - * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. - */ - static concat(list: Buffer[], totalLength?: number): Buffer; - /** - * The same as buf1.compare(buf2). - */ - static compare(buf1: Buffer, buf2: Buffer): number; - /** - * Allocates a new buffer of {size} octets. - * - * @param size count of octets to allocate. - * @param fill if specified, buffer will be initialized by calling buf.fill(fill). - * If parameter is omitted, buffer will be filled with zeros. - * @param encoding encoding used for call to buf.fill while initalizing - */ - static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; - /** - * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents - * of the newly created Buffer are unknown and may contain sensitive data. - * - * @param size count of octets to allocate - */ - static allocUnsafe(size: number): Buffer; - /** - * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents - * of the newly created Buffer are unknown and may contain sensitive data. - * - * @param size count of octets to allocate - */ - static allocUnsafeSlow(size: number): Buffer; - } -} \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/index.js b/node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/index.js deleted file mode 100644 index 22438da..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/index.js +++ /dev/null @@ -1,62 +0,0 @@ -/* eslint-disable node/no-deprecated-api */ -var buffer = require('buffer') -var Buffer = buffer.Buffer - -// alternative to using Object.keys for old browsers -function copyProps (src, dst) { - for (var key in src) { - dst[key] = src[key] - } -} -if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { - module.exports = buffer -} else { - // Copy properties from require('buffer') - copyProps(buffer, exports) - exports.Buffer = SafeBuffer -} - -function SafeBuffer (arg, encodingOrOffset, length) { - return Buffer(arg, encodingOrOffset, length) -} - -// Copy static methods from Buffer -copyProps(Buffer, SafeBuffer) - -SafeBuffer.from = function (arg, encodingOrOffset, length) { - if (typeof arg === 'number') { - throw new TypeError('Argument must not be a number') - } - return Buffer(arg, encodingOrOffset, length) -} - -SafeBuffer.alloc = function (size, fill, encoding) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - var buf = Buffer(size) - if (fill !== undefined) { - if (typeof encoding === 'string') { - buf.fill(fill, encoding) - } else { - buf.fill(fill) - } - } else { - buf.fill(0) - } - return buf -} - -SafeBuffer.allocUnsafe = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - return Buffer(size) -} - -SafeBuffer.allocUnsafeSlow = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - return buffer.SlowBuffer(size) -} diff 
--git a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/package.json b/node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/package.json deleted file mode 100644 index 623fbc3..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/safe-buffer/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "safe-buffer", - "description": "Safer Node.js Buffer API", - "version": "5.1.2", - "author": { - "name": "Feross Aboukhadijeh", - "email": "feross@feross.org", - "url": "http://feross.org" - }, - "bugs": { - "url": "https://github.com/feross/safe-buffer/issues" - }, - "devDependencies": { - "standard": "*", - "tape": "^4.0.0" - }, - "homepage": "https://github.com/feross/safe-buffer", - "keywords": [ - "buffer", - "buffer allocate", - "node security", - "safe", - "safe-buffer", - "security", - "uninitialized" - ], - "license": "MIT", - "main": "index.js", - "types": "index.d.ts", - "repository": { - "type": "git", - "url": "git://github.com/feross/safe-buffer.git" - }, - "scripts": { - "test": "standard && tape test/*.js" - } -} diff --git a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/string_decoder/.travis.yml b/node_modules/archiver-utils/node_modules/readable-stream/node_modules/string_decoder/.travis.yml deleted file mode 100644 index 3347a72..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/string_decoder/.travis.yml +++ /dev/null @@ -1,50 +0,0 @@ -sudo: false -language: node_js -before_install: - - npm install -g npm@2 - - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g -notifications: - email: false -matrix: - fast_finish: true - include: - - node_js: '0.8' - env: - - TASK=test - - NPM_LEGACY=true - - node_js: '0.10' - env: - - TASK=test - - NPM_LEGACY=true - - node_js: '0.11' - env: - - TASK=test - - NPM_LEGACY=true - - node_js: '0.12' - env: - - TASK=test - - NPM_LEGACY=true - - node_js: 1 - env: - - TASK=test - - NPM_LEGACY=true - - node_js: 2 - env: - - TASK=test - - NPM_LEGACY=true - - node_js: 3 - env: - - TASK=test - - NPM_LEGACY=true - - node_js: 4 - env: TASK=test - - node_js: 5 - env: TASK=test - - node_js: 6 - env: TASK=test - - node_js: 7 - env: TASK=test - - node_js: 8 - env: TASK=test - - node_js: 9 - env: TASK=test diff --git a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/string_decoder/LICENSE b/node_modules/archiver-utils/node_modules/readable-stream/node_modules/string_decoder/LICENSE deleted file mode 100644 index 778edb2..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/string_decoder/LICENSE +++ /dev/null @@ -1,48 +0,0 @@ -Node.js is licensed for use as follows: - -""" -Copyright Node.js contributors. All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -""" - -This license applies to parts of Node.js originating from the -https://github.com/joyent/node repository: - -""" -Copyright Joyent, Inc. and other Node contributors. All rights reserved. -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -""" - diff --git a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/string_decoder/README.md b/node_modules/archiver-utils/node_modules/readable-stream/node_modules/string_decoder/README.md deleted file mode 100644 index 5fd5831..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/string_decoder/README.md +++ /dev/null @@ -1,47 +0,0 @@ -# string_decoder - -***Node-core v8.9.4 string_decoder for userland*** - - -[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) -[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) - - -```bash -npm install --save string_decoder -``` - -***Node-core string_decoder for userland*** - -This package is a mirror of the string_decoder implementation in Node-core. - -Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). - -As of version 1.0.0 **string_decoder** uses semantic versioning. - -## Previous versions - -Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. - -## Update - -The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. - -## Streams Working Group - -`string_decoder` is maintained by the Streams Working Group, which -oversees the development and maintenance of the Streams API within -Node.js. The responsibilities of the Streams Working Group include: - -* Addressing stream issues on the Node.js issue tracker. -* Authoring and editing stream documentation within the Node.js project. -* Reviewing changes to stream subclasses within the Node.js project. 
-* Redirecting changes to streams from the Node.js project to this - project. -* Assisting in the implementation of stream providers within Node.js. -* Recommending versions of `readable-stream` to be included in Node.js. -* Messaging about the future of streams to give the community advance - notice of changes. - -See [readable-stream](https://github.com/nodejs/readable-stream) for -more details. diff --git a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/string_decoder/lib/string_decoder.js b/node_modules/archiver-utils/node_modules/readable-stream/node_modules/string_decoder/lib/string_decoder.js deleted file mode 100644 index 2e89e63..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/string_decoder/lib/string_decoder.js +++ /dev/null @@ -1,296 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -'use strict'; - -/**/ - -var Buffer = require('safe-buffer').Buffer; -/**/ - -var isEncoding = Buffer.isEncoding || function (encoding) { - encoding = '' + encoding; - switch (encoding && encoding.toLowerCase()) { - case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': - return true; - default: - return false; - } -}; - -function _normalizeEncoding(enc) { - if (!enc) return 'utf8'; - var retried; - while (true) { - switch (enc) { - case 'utf8': - case 'utf-8': - return 'utf8'; - case 'ucs2': - case 'ucs-2': - case 'utf16le': - case 'utf-16le': - return 'utf16le'; - case 'latin1': - case 'binary': - return 'latin1'; - case 'base64': - case 'ascii': - case 'hex': - return enc; - default: - if (retried) return; // undefined - enc = ('' + enc).toLowerCase(); - retried = true; - } - } -}; - -// Do not cache `Buffer.isEncoding` when checking encoding names as some -// modules monkey-patch it to support additional encodings -function normalizeEncoding(enc) { - var nenc = _normalizeEncoding(enc); - if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); - return nenc || enc; -} - -// StringDecoder provides an interface for efficiently splitting a series of -// buffers into a series of JS strings without breaking apart multi-byte -// characters. 
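The behaviour described in the header comment above is easiest to see from the consumer side. A small usage sketch of the `string_decoder` API (an editorial illustration, not part of the deleted file):

```js
var StringDecoder = require('string_decoder').StringDecoder
var decoder = new StringDecoder('utf8')

// '€' is 0xE2 0x82 0xAC in UTF-8. Split across two writes, the first call
// buffers the incomplete character and returns '', the second completes it.
console.log(decoder.write(Buffer.from([0xE2, 0x82]))) // ''
console.log(decoder.write(Buffer.from([0xAC])))       // '€'

// If the expected continuation bytes never arrive, the buffered partial
// character is replaced with U+FFFD, matching V8's decoding behaviour.
console.log(decoder.write(Buffer.from([0xE2, 0x82])) + decoder.write(Buffer.from([0x41]))) // '�A'
```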
-exports.StringDecoder = StringDecoder; -function StringDecoder(encoding) { - this.encoding = normalizeEncoding(encoding); - var nb; - switch (this.encoding) { - case 'utf16le': - this.text = utf16Text; - this.end = utf16End; - nb = 4; - break; - case 'utf8': - this.fillLast = utf8FillLast; - nb = 4; - break; - case 'base64': - this.text = base64Text; - this.end = base64End; - nb = 3; - break; - default: - this.write = simpleWrite; - this.end = simpleEnd; - return; - } - this.lastNeed = 0; - this.lastTotal = 0; - this.lastChar = Buffer.allocUnsafe(nb); -} - -StringDecoder.prototype.write = function (buf) { - if (buf.length === 0) return ''; - var r; - var i; - if (this.lastNeed) { - r = this.fillLast(buf); - if (r === undefined) return ''; - i = this.lastNeed; - this.lastNeed = 0; - } else { - i = 0; - } - if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); - return r || ''; -}; - -StringDecoder.prototype.end = utf8End; - -// Returns only complete characters in a Buffer -StringDecoder.prototype.text = utf8Text; - -// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer -StringDecoder.prototype.fillLast = function (buf) { - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); - } - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); - this.lastNeed -= buf.length; -}; - -// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a -// continuation byte. If an invalid byte is detected, -2 is returned. -function utf8CheckByte(byte) { - if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; - return byte >> 6 === 0x02 ? -1 : -2; -} - -// Checks at most 3 bytes at the end of a Buffer in order to detect an -// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) -// needed to complete the UTF-8 character (if applicable) are returned. -function utf8CheckIncomplete(self, buf, i) { - var j = buf.length - 1; - if (j < i) return 0; - var nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 1; - return nb; - } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 2; - return nb; - } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) { - if (nb === 2) nb = 0;else self.lastNeed = nb - 3; - } - return nb; - } - return 0; -} - -// Validates as many continuation bytes for a multi-byte UTF-8 character as -// needed or are available. If we see a non-continuation byte where we expect -// one, we "replace" the validated continuation bytes we've seen so far with -// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding -// behavior. The continuation byte check is included three times in the case -// where all of the continuation bytes for a character exist in the same buffer. -// It is also done this way as a slight performance increase instead of using a -// loop. 
-function utf8CheckExtraBytes(self, buf, p) { - if ((buf[0] & 0xC0) !== 0x80) { - self.lastNeed = 0; - return '\ufffd'; - } - if (self.lastNeed > 1 && buf.length > 1) { - if ((buf[1] & 0xC0) !== 0x80) { - self.lastNeed = 1; - return '\ufffd'; - } - if (self.lastNeed > 2 && buf.length > 2) { - if ((buf[2] & 0xC0) !== 0x80) { - self.lastNeed = 2; - return '\ufffd'; - } - } - } -} - -// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. -function utf8FillLast(buf) { - var p = this.lastTotal - this.lastNeed; - var r = utf8CheckExtraBytes(this, buf, p); - if (r !== undefined) return r; - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, p, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); - } - buf.copy(this.lastChar, p, 0, buf.length); - this.lastNeed -= buf.length; -} - -// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a -// partial character, the character's bytes are buffered until the required -// number of bytes are available. -function utf8Text(buf, i) { - var total = utf8CheckIncomplete(this, buf, i); - if (!this.lastNeed) return buf.toString('utf8', i); - this.lastTotal = total; - var end = buf.length - (total - this.lastNeed); - buf.copy(this.lastChar, 0, end); - return buf.toString('utf8', i, end); -} - -// For UTF-8, a replacement character is added when ending on a partial -// character. -function utf8End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + '\ufffd'; - return r; -} - -// UTF-16LE typically needs two bytes per character, but even if we have an even -// number of bytes available, we need to check if we end on a leading/high -// surrogate. In that case, we need to wait for the next two bytes in order to -// decode the last character properly. -function utf16Text(buf, i) { - if ((buf.length - i) % 2 === 0) { - var r = buf.toString('utf16le', i); - if (r) { - var c = r.charCodeAt(r.length - 1); - if (c >= 0xD800 && c <= 0xDBFF) { - this.lastNeed = 2; - this.lastTotal = 4; - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; - return r.slice(0, -1); - } - } - return r; - } - this.lastNeed = 1; - this.lastTotal = 2; - this.lastChar[0] = buf[buf.length - 1]; - return buf.toString('utf16le', i, buf.length - 1); -} - -// For UTF-16LE we do not explicitly append special replacement characters if we -// end on a partial character, we simply let v8 handle that. -function utf16End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) { - var end = this.lastTotal - this.lastNeed; - return r + this.lastChar.toString('utf16le', 0, end); - } - return r; -} - -function base64Text(buf, i) { - var n = (buf.length - i) % 3; - if (n === 0) return buf.toString('base64', i); - this.lastNeed = 3 - n; - this.lastTotal = 3; - if (n === 1) { - this.lastChar[0] = buf[buf.length - 1]; - } else { - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; - } - return buf.toString('base64', i, buf.length - n); -} - -function base64End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); - return r; -} - -// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) -function simpleWrite(buf) { - return buf.toString(this.encoding); -} - -function simpleEnd(buf) { - return buf && buf.length ? 
this.write(buf) : ''; -} \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/string_decoder/package.json b/node_modules/archiver-utils/node_modules/readable-stream/node_modules/string_decoder/package.json deleted file mode 100644 index 518c3eb..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/node_modules/string_decoder/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "string_decoder", - "version": "1.1.1", - "description": "The string_decoder module from Node core", - "main": "lib/string_decoder.js", - "dependencies": { - "safe-buffer": "~5.1.0" - }, - "devDependencies": { - "babel-polyfill": "^6.23.0", - "core-util-is": "^1.0.2", - "inherits": "^2.0.3", - "tap": "~0.4.8" - }, - "scripts": { - "test": "tap test/parallel/*.js && node test/verify-dependencies", - "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js" - }, - "repository": { - "type": "git", - "url": "git://github.com/nodejs/string_decoder.git" - }, - "homepage": "https://github.com/nodejs/string_decoder", - "keywords": [ - "string", - "decoder", - "browser", - "browserify" - ], - "license": "MIT" -} diff --git a/node_modules/archiver-utils/node_modules/readable-stream/package.json b/node_modules/archiver-utils/node_modules/readable-stream/package.json deleted file mode 100644 index 514c178..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/package.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "name": "readable-stream", - "version": "2.3.8", - "description": "Streams3, a user-land copy of the stream library from Node.js", - "main": "readable.js", - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - }, - "devDependencies": { - "assert": "^1.4.0", - "babel-polyfill": "^6.9.1", - "buffer": "^4.9.0", - "lolex": "^2.3.2", - "nyc": "^6.4.0", - "tap": "^0.7.0", - "tape": "^4.8.0" - }, - "scripts": { - "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js", - "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", - "cover": "nyc npm test", - "report": "nyc report --reporter=lcov" - }, - "repository": { - "type": "git", - "url": "git://github.com/nodejs/readable-stream" - }, - "keywords": [ - "readable", - "stream", - "pipe" - ], - "browser": { - "util": false, - "./readable.js": "./readable-browser.js", - "./writable.js": "./writable-browser.js", - "./duplex.js": "./duplex-browser.js", - "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" - }, - "nyc": { - "include": [ - "lib/**.js" - ] - }, - "license": "MIT" -} diff --git a/node_modules/archiver-utils/node_modules/readable-stream/passthrough.js b/node_modules/archiver-utils/node_modules/readable-stream/passthrough.js deleted file mode 100644 index ffd791d..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/passthrough.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('./readable').PassThrough diff --git a/node_modules/archiver-utils/node_modules/readable-stream/readable-browser.js b/node_modules/archiver-utils/node_modules/readable-stream/readable-browser.js deleted file mode 100644 index e503725..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/readable-browser.js +++ /dev/null @@ -1,7 +0,0 @@ -exports = 
module.exports = require('./lib/_stream_readable.js'); -exports.Stream = exports; -exports.Readable = exports; -exports.Writable = require('./lib/_stream_writable.js'); -exports.Duplex = require('./lib/_stream_duplex.js'); -exports.Transform = require('./lib/_stream_transform.js'); -exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/node_modules/archiver-utils/node_modules/readable-stream/readable.js b/node_modules/archiver-utils/node_modules/readable-stream/readable.js deleted file mode 100644 index ec89ec5..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/readable.js +++ /dev/null @@ -1,19 +0,0 @@ -var Stream = require('stream'); -if (process.env.READABLE_STREAM === 'disable' && Stream) { - module.exports = Stream; - exports = module.exports = Stream.Readable; - exports.Readable = Stream.Readable; - exports.Writable = Stream.Writable; - exports.Duplex = Stream.Duplex; - exports.Transform = Stream.Transform; - exports.PassThrough = Stream.PassThrough; - exports.Stream = Stream; -} else { - exports = module.exports = require('./lib/_stream_readable.js'); - exports.Stream = Stream || exports; - exports.Readable = exports; - exports.Writable = require('./lib/_stream_writable.js'); - exports.Duplex = require('./lib/_stream_duplex.js'); - exports.Transform = require('./lib/_stream_transform.js'); - exports.PassThrough = require('./lib/_stream_passthrough.js'); -} diff --git a/node_modules/archiver-utils/node_modules/readable-stream/transform.js b/node_modules/archiver-utils/node_modules/readable-stream/transform.js deleted file mode 100644 index b1baba2..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/transform.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('./readable').Transform diff --git a/node_modules/archiver-utils/node_modules/readable-stream/writable-browser.js b/node_modules/archiver-utils/node_modules/readable-stream/writable-browser.js deleted file mode 100644 index ebdde6a..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/writable-browser.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('./lib/_stream_writable.js'); diff --git a/node_modules/archiver-utils/node_modules/readable-stream/writable.js b/node_modules/archiver-utils/node_modules/readable-stream/writable.js deleted file mode 100644 index 3211a6f..0000000 --- a/node_modules/archiver-utils/node_modules/readable-stream/writable.js +++ /dev/null @@ -1,8 +0,0 @@ -var Stream = require("stream") -var Writable = require("./lib/_stream_writable.js") - -if (process.env.READABLE_STREAM === 'disable') { - module.exports = Stream && Stream.Writable || Writable -} else { - module.exports = Writable -} diff --git a/node_modules/archiver-utils/package.json b/node_modules/archiver-utils/package.json deleted file mode 100644 index 1582f18..0000000 --- a/node_modules/archiver-utils/package.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "name": "archiver-utils", - "version": "2.1.0", - "license": "MIT", - "description": "utility functions for archiver", - "homepage": "https://github.com/archiverjs/archiver-utils#readme", - "author": { - "name": "Chris Talkington", - "url": "http://christalkington.com/" - }, - "repository": { - "type": "git", - "url": "https://github.com/archiverjs/archiver-utils.git" - }, - "bugs": { - "url": "https://github.com/archiverjs/archiver-utils/issues" - }, - "keywords": [ - "archiver", - "utils" - ], - "main": "index.js", - "files": [ - "index.js", - "file.js" - ], - "engines": { - "node": ">= 6" - }, - 
"scripts": { - "test": "mocha --reporter dot" - }, - "dependencies": { - "glob": "^7.1.4", - "graceful-fs": "^4.2.0", - "lazystream": "^1.0.0", - "lodash.defaults": "^4.2.0", - "lodash.difference": "^4.5.0", - "lodash.flatten": "^4.4.0", - "lodash.isplainobject": "^4.0.6", - "lodash.union": "^4.6.0", - "normalize-path": "^3.0.0", - "readable-stream": "^2.0.0" - }, - "devDependencies": { - "chai": "^4.2.0", - "mkdirp": "^0.5.0", - "mocha": "^5.0.0", - "rimraf": "^2.6.3" - }, - "publishConfig": { - "registry": "https://registry.npmjs.org/" - } -} diff --git a/node_modules/archiver/CHANGELOG.md b/node_modules/archiver/CHANGELOG.md deleted file mode 100644 index 6cc85bb..0000000 --- a/node_modules/archiver/CHANGELOG.md +++ /dev/null @@ -1,108 +0,0 @@ -## Changelog - -**3.1.1** - _August 2, 2019_ — [Diff](https://github.com/archiverjs/node-archiver/compare/3.1.0...3.1.1) - -- update zip-stream to v2.1.2 - -**3.1.0** - _August 2, 2019_ — [Diff](https://github.com/archiverjs/node-archiver/compare/3.0.3...3.1.0) - -- update zip-stream to v2.1.0 - -**3.0.3** - _July 19, 2019_ — [Diff](https://github.com/archiverjs/node-archiver/compare/3.0.2...3.0.3) - -- test: now targeting node v12 -- other: update zip-stream@2.0.0 - -**3.0.2** - _July 19, 2019_ — [Diff](https://github.com/archiverjs/node-archiver/compare/3.0.1...3.0.2) - -- other: update dependencies - -**3.0.1** - _July 19, 2019_ — [Diff](https://github.com/archiverjs/node-archiver/compare/3.0.0...3.0.1) - -- other: update dependencies -- docs: now deployed using netlify - -**3.0.0** - _August 22, 2018_ — [Diff](https://github.com/archiverjs/node-archiver/compare/2.1.1...3.0.0) - -- breaking: follow node LTS, remove support for versions under 6. (#339) -- bugfix: use stats in tar.js and core.js (#326) -- other: update to archiver-utils@2 and zip-stream@2 -- other: remove lodash npm module usage (#335, #339) -- other: Avoid using deprecated Buffer constructor (#312) -- other: Remove unnecessary return and fix indentation (#297) -- test: now targeting node v10 (#320) - -**2.1.1** — _January 10, 2018_ — [Diff](https://github.com/archiverjs/node-archiver/compare/2.1.0...2.1.1) - -- bugfix: fix relative symlink paths (#293) -- other: coding style fixes (#294) - -**2.1.0** — _October 12, 2017_ — [Diff](https://github.com/archiverjs/node-archiver/compare/2.0.3...2.1.0) - -- refactor: `directory` now uses glob behind the scenes. should fix some directory recursion issues. (#267, #275) -- docs: more info in quick start. (#284) - -**2.0.3** — _August 25, 2017_ — [Diff](https://github.com/archiverjs/node-archiver/compare/2.0.2...2.0.3) - -- bugfix: revert #261 due to potential issues with editing entryData in special cases. -- bugfix: add var to entryData in glob callback (#273) - -**2.0.2** — _August 25, 2017_ — [Diff](https://github.com/archiverjs/node-archiver/compare/2.0.1...2.0.2) - -- docs: fix changelog date. - -**2.0.1** — _August 25, 2017_ — [Diff](https://github.com/archiverjs/node-archiver/compare/2.0.0...2.0.1) - -- bugfix: add const to entryData in glob callback (#261) -- other: coding style fixes (#263) - -**2.0.0** — _July 5, 2017_ — [Diff](https://github.com/archiverjs/node-archiver/compare/1.3.0...2.0.0) - -- feature: support for symlinks. (#228) -- feature: support for promises on `finalize`. (#248) -- feature: addition of `symlink` method for programmatically creating symlinks within an archive. -- change: emit `warning` instead of `error` when stat fails and the process can still continue. 
-- change: errors and warnings now contain extended data (where available) and have standardized error codes (#256) -- change: removal of deprecated `bulk` functionality. (#249) -- change: removal of internal `_entries` property in favor of `progress` event. (#247) -- change: support for node v4.0+ only. node v0.10 and v0.12 support has been dropped. (#241) - -**1.3.0** — _December 13, 2016_ — [Diff](https://github.com/archiverjs/node-archiver/compare/1.2.0...1.3.0) - -- improve `directory` and `glob` methods to use events rather than callbacks. (#203) -- fix bulk warning spam (#208) -- updated mocha (#205) - -**1.2.0** — _November 2, 2016_ — [Diff](https://github.com/archiverjs/node-archiver/compare/1.1.0...1.2.0) - -- Add a `process.emitWarning` for `deprecated` (#202) - -**1.1.0** — _August 29, 2016_ — [Diff](https://github.com/archiverjs/node-archiver/compare/1.0.1...1.1.0) - -- minor doc fixes. -- bump deps to ensure latest versions are used. - -**1.0.1** — _July 27, 2016_ — [Diff](https://github.com/archiverjs/node-archiver/compare/1.0.0...1.0.1) - -- minor doc fixes. -- dependencies upgraded. - -**1.0.0** — _April 5, 2016_ — [Diff](https://github.com/archiverjs/node-archiver/compare/0.21.0...1.0.0) - -- version unification across many archiver packages. -- dependencies upgraded and now using semver caret (^). - -**0.21.0** — _December 21, 2015_ — [Diff](https://github.com/archiverjs/node-archiver/compare/0.20.0...0.21.0) - -- core: add support for `entry.prefix`. update some internals to use it. -- core(glob): when setting `options.cwd` get an absolute path to the file and use the relative path for `entry.name`. #173 -- core(bulk): soft-deprecation of `bulk` feature. will remain for time being with no new features or support. -- docs: initial jsdoc for core. http://archiverjs.com/docs -- tests: restructure a bit. - -**0.20.0** — _November 30, 2015_ — [Diff](https://github.com/archiverjs/node-archiver/compare/0.19.0...0.20.0) - -- simpler path normalization as path.join was a bit restrictive. #162 -- move utils to separate module to DRY. - -[Release Archive](https://github.com/archiverjs/node-archiver/releases) diff --git a/node_modules/archiver/LICENSE b/node_modules/archiver/LICENSE deleted file mode 100644 index bc56a8a..0000000 --- a/node_modules/archiver/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -Copyright (c) 2012-2014 Chris Talkington, contributors. - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/archiver/README.md b/node_modules/archiver/README.md deleted file mode 100644 index 0a4de34..0000000 --- a/node_modules/archiver/README.md +++ /dev/null @@ -1,94 +0,0 @@ -# Archiver - -[![Build Status](https://travis-ci.org/archiverjs/node-archiver.svg?branch=master)](https://travis-ci.org/archiverjs/node-archiver) [![Build status](https://ci.appveyor.com/api/projects/status/38kqu3yp159nodxe/branch/master?svg=true)](https://ci.appveyor.com/project/ctalkington/node-archiver/branch/master) - -a streaming interface for archive generation - -Visit the [API documentation](https://www.archiverjs.com/) for a list of all methods available. - -## Install - -```bash -npm install archiver --save -``` - -## Quick Start - -```js -// require modules -var fs = require('fs'); -var archiver = require('archiver'); - -// create a file to stream archive data to. -var output = fs.createWriteStream(__dirname + '/example.zip'); -var archive = archiver('zip', { - zlib: { level: 9 } // Sets the compression level. -}); - -// listen for all archive data to be written -// 'close' event is fired only when a file descriptor is involved -output.on('close', function() { - console.log(archive.pointer() + ' total bytes'); - console.log('archiver has been finalized and the output file descriptor has closed.'); -}); - -// This event is fired when the data source is drained no matter what was the data source. -// It is not part of this library but rather from the NodeJS Stream API. -// @see: https://nodejs.org/api/stream.html#stream_event_end -output.on('end', function() { - console.log('Data has been drained'); -}); - -// good practice to catch warnings (ie stat failures and other non-blocking errors) -archive.on('warning', function(err) { - if (err.code === 'ENOENT') { - // log warning - } else { - // throw error - throw err; - } -}); - -// good practice to catch this error explicitly -archive.on('error', function(err) { - throw err; -}); - -// pipe archive data to the file -archive.pipe(output); - -// append a file from stream -var file1 = __dirname + '/file1.txt'; -archive.append(fs.createReadStream(file1), { name: 'file1.txt' }); - -// append a file from string -archive.append('string cheese!', { name: 'file2.txt' }); - -// append a file from buffer -var buffer3 = Buffer.from('buff it!'); -archive.append(buffer3, { name: 'file3.txt' }); - -// append a file -archive.file('file1.txt', { name: 'file4.txt' }); - -// append files from a sub-directory and naming it `new-subdir` within the archive -archive.directory('subdir/', 'new-subdir'); - -// append files from a sub-directory, putting its contents at the root of archive -archive.directory('subdir/', false); - -// append files from a glob pattern -archive.glob('subdir/*.txt'); - -// finalize the archive (ie we are done appending files but streams have to finish yet) -// 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand -archive.finalize(); -``` - -## Formats - -Archiver ships with out of the box support for TAR and ZIP archives. - -You can register additional formats with `registerFormat`. 
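As a rough illustration of what `registerFormat` expects, the sketch below registers a do-nothing format. It is only a skeleton based on the constraints visible in `index.js` and `lib/core.js` later in this diff (a constructor whose prototype has `append` and `finalize`, and an instance the core can pipe like a stream); a real format module would also have to emit the actual archive bytes:

```js
var archiver = require('archiver');
var Transform = require('stream').Transform;
var inherits = require('util').inherits;

// Minimal format module: a pass-through stream with append() and finalize().
function NullFormat (options) {
  Transform.call(this, options);
}
inherits(NullFormat, Transform);

NullFormat.prototype._transform = function (chunk, enc, cb) { cb(null, chunk); };
NullFormat.prototype.append = function (source, data, callback) { callback(); };
NullFormat.prototype.finalize = function () { this.end(); };

archiver.registerFormat('null', NullFormat);
```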
- -_Formats will be changing in the next few releases to implement a middleware approach._ diff --git a/node_modules/archiver/index.js b/node_modules/archiver/index.js deleted file mode 100644 index bcaebf1..0000000 --- a/node_modules/archiver/index.js +++ /dev/null @@ -1,70 +0,0 @@ -/** - * Archiver Vending - * - * @ignore - * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} - * @copyright (c) 2012-2014 Chris Talkington, contributors. - */ -var Archiver = require('./lib/core'); - -var formats = {}; - -/** - * Dispenses a new Archiver instance. - * - * @constructor - * @param {String} format The archive format to use. - * @param {Object} options See [Archiver]{@link Archiver} - * @return {Archiver} - */ -var vending = function(format, options) { - return vending.create(format, options); -}; - -/** - * Creates a new Archiver instance. - * - * @param {String} format The archive format to use. - * @param {Object} options See [Archiver]{@link Archiver} - * @return {Archiver} - */ -vending.create = function(format, options) { - if (formats[format]) { - var instance = new Archiver(format, options); - instance.setFormat(format); - instance.setModule(new formats[format](options)); - - return instance; - } else { - throw new Error('create(' + format + '): format not registered'); - } -}; - -/** - * Registers a format for use with archiver. - * - * @param {String} format The name of the format. - * @param {Function} module The function for archiver to interact with. - * @return void - */ -vending.registerFormat = function(format, module) { - if (formats[format]) { - throw new Error('register(' + format + '): format already registered'); - } - - if (typeof module !== 'function') { - throw new Error('register(' + format + '): format module invalid'); - } - - if (typeof module.prototype.append !== 'function' || typeof module.prototype.finalize !== 'function') { - throw new Error('register(' + format + '): format module missing methods'); - } - - formats[format] = module; -}; - -vending.registerFormat('zip', require('./lib/plugins/zip')); -vending.registerFormat('tar', require('./lib/plugins/tar')); -vending.registerFormat('json', require('./lib/plugins/json')); - -module.exports = vending; \ No newline at end of file diff --git a/node_modules/archiver/lib/core.js b/node_modules/archiver/lib/core.js deleted file mode 100644 index d5cde49..0000000 --- a/node_modules/archiver/lib/core.js +++ /dev/null @@ -1,956 +0,0 @@ -/** - * Archiver Core - * - * @ignore - * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} - * @copyright (c) 2012-2014 Chris Talkington, contributors. - */ -var fs = require('fs'); -var glob = require('glob'); -var async = require('async'); -var path = require('path'); -var util = require('archiver-utils'); - -var inherits = require('util').inherits; -var ArchiverError = require('./error'); -var Transform = require('readable-stream').Transform; - -var win32 = process.platform === 'win32'; - -/** - * @constructor - * @param {String} format The archive format to use. - * @param {(CoreOptions|TransformOptions)} options See also {@link ZipOptions} and {@link TarOptions}. 
- */ -var Archiver = function(format, options) { - if (!(this instanceof Archiver)) { - return new Archiver(format, options); - } - - if (typeof format !== 'string') { - options = format; - format = 'zip'; - } - - options = this.options = util.defaults(options, { - highWaterMark: 1024 * 1024, - statConcurrency: 4 - }); - - Transform.call(this, options); - - this._format = false; - this._module = false; - this._pending = 0; - this._pointer = 0; - - this._entriesCount = 0; - this._entriesProcessedCount = 0; - this._fsEntriesTotalBytes = 0; - this._fsEntriesProcessedBytes = 0; - - this._queue = async.queue(this._onQueueTask.bind(this), 1); - this._queue.drain = this._onQueueDrain.bind(this); - - this._statQueue = async.queue(this._onStatQueueTask.bind(this), options.statConcurrency); - - this._state = { - aborted: false, - finalize: false, - finalizing: false, - finalized: false, - modulePiped: false - }; - - this._streams = []; -}; - -inherits(Archiver, Transform); - -/** - * Internal logic for `abort`. - * - * @private - * @return void - */ -Archiver.prototype._abort = function() { - this._state.aborted = true; - this._queue.kill(); - this._statQueue.kill(); - - if (this._queue.idle()) { - this._shutdown(); - } -}; - -/** - * Internal helper for appending files. - * - * @private - * @param {String} filepath The source filepath. - * @param {EntryData} data The entry data. - * @return void - */ -Archiver.prototype._append = function(filepath, data) { - data = data || {}; - - var task = { - source: null, - filepath: filepath - }; - - if (!data.name) { - data.name = filepath; - } - - data.sourcePath = filepath; - task.data = data; - this._entriesCount++; - - if (data.stats && data.stats instanceof fs.Stats) { - task = this._updateQueueTaskWithStats(task, data.stats); - if (task) { - if (data.stats.size) { - this._fsEntriesTotalBytes += data.stats.size; - } - - this._queue.push(task); - } - } else { - this._statQueue.push(task); - } -}; - -/** - * Internal logic for `finalize`. - * - * @private - * @return void - */ -Archiver.prototype._finalize = function() { - if (this._state.finalizing || this._state.finalized || this._state.aborted) { - return; - } - - this._state.finalizing = true; - - this._moduleFinalize(); - - this._state.finalizing = false; - this._state.finalized = true; -}; - -/** - * Checks the various state variables to determine if we can `finalize`. - * - * @private - * @return {Boolean} - */ -Archiver.prototype._maybeFinalize = function() { - if (this._state.finalizing || this._state.finalized || this._state.aborted) { - return false; - } - - if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { - this._finalize(); - return true; - } - - return false; -}; - -/** - * Appends an entry to the module. - * - * @private - * @fires Archiver#entry - * @param {(Buffer|Stream)} source - * @param {EntryData} data - * @param {Function} callback - * @return void - */ -Archiver.prototype._moduleAppend = function(source, data, callback) { - if (this._state.aborted) { - callback(); - return; - } - - this._module.append(source, data, function(err) { - this._task = null; - - if (this._state.aborted) { - this._shutdown(); - return; - } - - if (err) { - this.emit('error', err); - setImmediate(callback); - return; - } - - /** - * Fires when the entry's input has been processed and appended to the archive. 
- * - * @event Archiver#entry - * @type {EntryData} - */ - this.emit('entry', data); - this._entriesProcessedCount++; - - if (data.stats && data.stats.size) { - this._fsEntriesProcessedBytes += data.stats.size; - } - - /** - * @event Archiver#progress - * @type {ProgressData} - */ - this.emit('progress', { - entries: { - total: this._entriesCount, - processed: this._entriesProcessedCount - }, - fs: { - totalBytes: this._fsEntriesTotalBytes, - processedBytes: this._fsEntriesProcessedBytes - } - }); - - setImmediate(callback); - }.bind(this)); -}; - -/** - * Finalizes the module. - * - * @private - * @return void - */ -Archiver.prototype._moduleFinalize = function() { - if (typeof this._module.finalize === 'function') { - this._module.finalize(); - } else if (typeof this._module.end === 'function') { - this._module.end(); - } else { - this.emit('error', new ArchiverError('NOENDMETHOD')); - } -}; - -/** - * Pipes the module to our internal stream with error bubbling. - * - * @private - * @return void - */ -Archiver.prototype._modulePipe = function() { - this._module.on('error', this._onModuleError.bind(this)); - this._module.pipe(this); - this._state.modulePiped = true; -}; - -/** - * Determines if the current module supports a defined feature. - * - * @private - * @param {String} key - * @return {Boolean} - */ -Archiver.prototype._moduleSupports = function(key) { - if (!this._module.supports || !this._module.supports[key]) { - return false; - } - - return this._module.supports[key]; -}; - -/** - * Unpipes the module from our internal stream. - * - * @private - * @return void - */ -Archiver.prototype._moduleUnpipe = function() { - this._module.unpipe(this); - this._state.modulePiped = false; -}; - -/** - * Normalizes entry data with fallbacks for key properties. - * - * @private - * @param {Object} data - * @param {fs.Stats} stats - * @return {Object} - */ -Archiver.prototype._normalizeEntryData = function(data, stats) { - data = util.defaults(data, { - type: 'file', - name: null, - date: null, - mode: null, - prefix: null, - sourcePath: null, - stats: false - }); - - if (stats && data.stats === false) { - data.stats = stats; - } - - var isDir = data.type === 'directory'; - - if (data.name) { - if (typeof data.prefix === 'string' && '' !== data.prefix) { - data.name = data.prefix + '/' + data.name; - data.prefix = null; - } - - data.name = util.sanitizePath(data.name); - - if (data.type !== 'symlink' && data.name.slice(-1) === '/') { - isDir = true; - data.type = 'directory'; - } else if (isDir) { - data.name += '/'; - } - } - - // 511 === 0777; 493 === 0755; 438 === 0666; 420 === 0644 - if (typeof data.mode === 'number') { - if (win32) { - data.mode &= 511; - } else { - data.mode &= 4095 - } - } else if (data.stats && data.mode === null) { - if (win32) { - data.mode = data.stats.mode & 511; - } else { - data.mode = data.stats.mode & 4095; - } - - // stat isn't reliable on windows; force 0755 for dir - if (win32 && isDir) { - data.mode = 493; - } - } else if (data.mode === null) { - data.mode = isDir ? 493 : 420; - } - - if (data.stats && data.date === null) { - data.date = data.stats.mtime; - } else { - data.date = util.dateify(data.date); - } - - return data; -}; - -/** - * Error listener that re-emits error on to our internal stream. 
- * - * @private - * @param {Error} err - * @return void - */ -Archiver.prototype._onModuleError = function(err) { - /** - * @event Archiver#error - * @type {ErrorData} - */ - this.emit('error', err); -}; - -/** - * Checks the various state variables after queue has drained to determine if - * we need to `finalize`. - * - * @private - * @return void - */ -Archiver.prototype._onQueueDrain = function() { - if (this._state.finalizing || this._state.finalized || this._state.aborted) { - return; - } - - if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { - this._finalize(); - } -}; - -/** - * Appends each queue task to the module. - * - * @private - * @param {Object} task - * @param {Function} callback - * @return void - */ -Archiver.prototype._onQueueTask = function(task, callback) { - if (this._state.finalizing || this._state.finalized || this._state.aborted) { - callback(); - return; - } - - this._task = task; - this._moduleAppend(task.source, task.data, callback); -}; - -/** - * Performs a file stat and reinjects the task back into the queue. - * - * @private - * @param {Object} task - * @param {Function} callback - * @return void - */ -Archiver.prototype._onStatQueueTask = function(task, callback) { - if (this._state.finalizing || this._state.finalized || this._state.aborted) { - callback(); - return; - } - - fs.lstat(task.filepath, function(err, stats) { - if (this._state.aborted) { - setImmediate(callback); - return; - } - - if (err) { - this._entriesCount--; - - /** - * @event Archiver#warning - * @type {ErrorData} - */ - this.emit('warning', err); - setImmediate(callback); - return; - } - - task = this._updateQueueTaskWithStats(task, stats); - - if (task) { - if (stats.size) { - this._fsEntriesTotalBytes += stats.size; - } - - this._queue.push(task); - } - - setImmediate(callback); - }.bind(this)); -}; - -/** - * Unpipes the module and ends our internal stream. - * - * @private - * @return void - */ -Archiver.prototype._shutdown = function() { - this._moduleUnpipe(); - this.end(); -}; - -/** - * Tracks the bytes emitted by our internal stream. - * - * @private - * @param {Buffer} chunk - * @param {String} encoding - * @param {Function} callback - * @return void - */ -Archiver.prototype._transform = function(chunk, encoding, callback) { - if (chunk) { - this._pointer += chunk.length; - } - - callback(null, chunk); -}; - -/** - * Updates and normalizes a queue task using stats data. 
- * - * @private - * @param {Object} task - * @param {fs.Stats} stats - * @return {Object} - */ -Archiver.prototype._updateQueueTaskWithStats = function(task, stats) { - if (stats.isFile()) { - task.data.type = 'file'; - task.data.sourceType = 'stream'; - task.source = util.lazyReadStream(task.filepath); - } else if (stats.isDirectory() && this._moduleSupports('directory')) { - task.data.name = util.trailingSlashIt(task.data.name); - task.data.type = 'directory'; - task.data.sourcePath = util.trailingSlashIt(task.filepath); - task.data.sourceType = 'buffer'; - task.source = Buffer.concat([]); - } else if (stats.isSymbolicLink() && this._moduleSupports('symlink')) { - var linkPath = fs.readlinkSync(task.filepath); - var dirName = path.dirname(task.filepath); - task.data.type = 'symlink'; - task.data.linkname = path.relative(dirName, path.resolve(dirName, linkPath)); - task.data.sourceType = 'buffer'; - task.source = Buffer.concat([]); - } else { - if (stats.isDirectory()) { - this.emit('warning', new ArchiverError('DIRECTORYNOTSUPPORTED', task.data)); - } else if (stats.isSymbolicLink()) { - this.emit('warning', new ArchiverError('SYMLINKNOTSUPPORTED', task.data)); - } else { - this.emit('warning', new ArchiverError('ENTRYNOTSUPPORTED', task.data)); - } - - return null; - } - - task.data = this._normalizeEntryData(task.data, stats); - - return task; -}; - -/** - * Aborts the archiving process, taking a best-effort approach, by: - * - * - removing any pending queue tasks - * - allowing any active queue workers to finish - * - detaching internal module pipes - * - ending both sides of the Transform stream - * - * It will NOT drain any remaining sources. - * - * @return {this} - */ -Archiver.prototype.abort = function() { - if (this._state.aborted || this._state.finalized) { - return this; - } - - this._abort(); - - return this; -}; - -/** - * Appends an input source (text string, buffer, or stream) to the instance. - * - * When the instance has received, processed, and emitted the input, the `entry` - * event is fired. - * - * @fires Archiver#entry - * @param {(Buffer|Stream|String)} source The input source. - * @param {EntryData} data See also {@link ZipEntryData} and {@link TarEntryData}. - * @return {this} - */ -Archiver.prototype.append = function(source, data) { - if (this._state.finalize || this._state.aborted) { - this.emit('error', new ArchiverError('QUEUECLOSED')); - return this; - } - - data = this._normalizeEntryData(data); - - if (typeof data.name !== 'string' || data.name.length === 0) { - this.emit('error', new ArchiverError('ENTRYNAMEREQUIRED')); - return this; - } - - if (data.type === 'directory' && !this._moduleSupports('directory')) { - this.emit('error', new ArchiverError('DIRECTORYNOTSUPPORTED', { name: data.name })); - return this; - } - - source = util.normalizeInputSource(source); - - if (Buffer.isBuffer(source)) { - data.sourceType = 'buffer'; - } else if (util.isStream(source)) { - data.sourceType = 'stream'; - } else { - this.emit('error', new ArchiverError('INPUTSTEAMBUFFERREQUIRED', { name: data.name })); - return this; - } - - this._entriesCount++; - this._queue.push({ - data: data, - source: source - }); - - return this; -}; - -/** - * Appends a directory and its files, recursively, given its dirpath. - * - * @param {String} dirpath The source directory path. - * @param {String} destpath The destination path within the archive. - * @param {(EntryData|Function)} data See also [ZipEntryData]{@link ZipEntryData} and - * [TarEntryData]{@link TarEntryData}. 
- * @return {this} - */ -Archiver.prototype.directory = function(dirpath, destpath, data) { - if (this._state.finalize || this._state.aborted) { - this.emit('error', new ArchiverError('QUEUECLOSED')); - return this; - } - - if (typeof dirpath !== 'string' || dirpath.length === 0) { - this.emit('error', new ArchiverError('DIRECTORYDIRPATHREQUIRED')); - return this; - } - - this._pending++; - - if (destpath === false) { - destpath = ''; - } else if (typeof destpath !== 'string'){ - destpath = dirpath; - } - - var dataFunction = false; - if (typeof data === 'function') { - dataFunction = data; - data = {}; - } else if (typeof data !== 'object') { - data = {}; - } - - var globOptions = { - stat: false, - dot: true, - cwd: dirpath - }; - - function onGlobEnd() { - this._pending--; - this._maybeFinalize(); - } - - function onGlobError(err) { - this.emit('error', err); - } - - function onGlobMatch(match){ - var ignoreMatch = false; - var entryData = Object.assign({}, data); - entryData.name = match; - entryData.prefix = destpath; - match = globber._makeAbs(match); - - try { - if (dataFunction) { - entryData = dataFunction(entryData); - - if (entryData === false) { - ignoreMatch = true; - } else if (typeof entryData !== 'object') { - throw new ArchiverError('DIRECTORYFUNCTIONINVALIDDATA', { dirpath: dirpath }); - } - } - } catch(e) { - this.emit('error', e); - return; - } - - if (ignoreMatch) { - return; - } - - this._append(match, entryData); - } - - var globber = glob('**', globOptions); - globber.on('error', onGlobError.bind(this)); - globber.on('match', onGlobMatch.bind(this)); - globber.on('end', onGlobEnd.bind(this)); - - return this; -}; - -/** - * Appends a file given its filepath using a - * [lazystream]{@link https://github.com/jpommerening/node-lazystream} wrapper to - * prevent issues with open file limits. - * - * When the instance has received, processed, and emitted the file, the `entry` - * event is fired. - * - * @param {String} filepath The source filepath. - * @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and - * [TarEntryData]{@link TarEntryData}. - * @return {this} - */ -Archiver.prototype.file = function(filepath, data) { - if (this._state.finalize || this._state.aborted) { - this.emit('error', new ArchiverError('QUEUECLOSED')); - return this; - } - - if (typeof filepath !== 'string' || filepath.length === 0) { - this.emit('error', new ArchiverError('FILEFILEPATHREQUIRED')); - return this; - } - - this._append(filepath, data); - - return this; -}; - -/** - * Appends multiple files that match a glob pattern. - * - * @param {String} pattern The [glob pattern]{@link https://github.com/isaacs/node-glob#glob-primer} to match. - * @param {Object} options See [node-glob]{@link https://github.com/isaacs/node-glob#options}. - * @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and - * [TarEntryData]{@link TarEntryData}. 
- * @return {this} - */ -Archiver.prototype.glob = function(pattern, options, data) { - this._pending++; - - options = util.defaults(options, { - stat: false - }); - - function onGlobEnd() { - this._pending--; - this._maybeFinalize(); - } - - function onGlobError(err) { - this.emit('error', err); - } - - function onGlobMatch(match){ - var entryData = Object.assign({}, data); - - if (options.cwd) { - entryData.name = match; - match = globber._makeAbs(match); - } - - this._append(match, entryData); - } - - var globber = glob(pattern, options); - globber.on('error', onGlobError.bind(this)); - globber.on('match', onGlobMatch.bind(this)); - globber.on('end', onGlobEnd.bind(this)); - - return this; -}; - -/** - * Finalizes the instance and prevents further appending to the archive - * structure (queue will continue til drained). - * - * The `end`, `close` or `finish` events on the destination stream may fire - * right after calling this method so you should set listeners beforehand to - * properly detect stream completion. - * - * @return {this} - */ -Archiver.prototype.finalize = function() { - if (this._state.aborted) { - this.emit('error', new ArchiverError('ABORTED')); - return this; - } - - if (this._state.finalize) { - this.emit('error', new ArchiverError('FINALIZING')); - return this; - } - - this._state.finalize = true; - - if (this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { - this._finalize(); - } - - var self = this; - - return new Promise(function(resolve, reject) { - var errored; - - self._module.on('end', function() { - if (!errored) { - resolve(); - } - }) - - self._module.on('error', function(err) { - errored = true; - reject(err); - }) - }) -}; - -/** - * Sets the module format name used for archiving. - * - * @param {String} format The name of the format. - * @return {this} - */ -Archiver.prototype.setFormat = function(format) { - if (this._format) { - this.emit('error', new ArchiverError('FORMATSET')); - return this; - } - - this._format = format; - - return this; -}; - -/** - * Sets the module used for archiving. - * - * @param {Function} module The function for archiver to interact with. - * @return {this} - */ -Archiver.prototype.setModule = function(module) { - if (this._state.aborted) { - this.emit('error', new ArchiverError('ABORTED')); - return this; - } - - if (this._state.module) { - this.emit('error', new ArchiverError('MODULESET')); - return this; - } - - this._module = module; - this._modulePipe(); - - return this; -}; - -/** - * Appends a symlink to the instance. - * - * This does NOT interact with filesystem and is used for programmatically creating symlinks. - * - * @param {String} filepath The symlink path (within archive). - * @param {String} target The target path (within archive). 
- * @return {this} - */ -Archiver.prototype.symlink = function(filepath, target) { - if (this._state.finalize || this._state.aborted) { - this.emit('error', new ArchiverError('QUEUECLOSED')); - return this; - } - - if (typeof filepath !== 'string' || filepath.length === 0) { - this.emit('error', new ArchiverError('SYMLINKFILEPATHREQUIRED')); - return this; - } - - if (typeof target !== 'string' || target.length === 0) { - this.emit('error', new ArchiverError('SYMLINKTARGETREQUIRED', { filepath: filepath })); - return this; - } - - if (!this._moduleSupports('symlink')) { - this.emit('error', new ArchiverError('SYMLINKNOTSUPPORTED', { filepath: filepath })); - return this; - } - - var data = {}; - data.type = 'symlink'; - data.name = filepath.replace(/\\/g, '/'); - data.linkname = target.replace(/\\/g, '/'); - data.sourceType = 'buffer'; - - this._entriesCount++; - this._queue.push({ - data: data, - source: Buffer.concat([]) - }); - - return this; -}; - -/** - * Returns the current length (in bytes) that has been emitted. - * - * @return {Number} - */ -Archiver.prototype.pointer = function() { - return this._pointer; -}; - -/** - * Middleware-like helper that has yet to be fully implemented. - * - * @private - * @param {Function} plugin - * @return {this} - */ -Archiver.prototype.use = function(plugin) { - this._streams.push(plugin); - return this; -}; - -module.exports = Archiver; - -/** - * @typedef {Object} CoreOptions - * @global - * @property {Number} [statConcurrency=4] Sets the number of workers used to - * process the internal fs stat queue. - */ - -/** - * @typedef {Object} TransformOptions - * @property {Boolean} [allowHalfOpen=true] If set to false, then the stream - * will automatically end the readable side when the writable side ends and vice - * versa. - * @property {Boolean} [readableObjectMode=false] Sets objectMode for readable - * side of the stream. Has no effect if objectMode is true. - * @property {Boolean} [writableObjectMode=false] Sets objectMode for writable - * side of the stream. Has no effect if objectMode is true. - * @property {Boolean} [decodeStrings=true] Whether or not to decode strings - * into Buffers before passing them to _write(). `Writable` - * @property {String} [encoding=NULL] If specified, then buffers will be decoded - * to strings using the specified encoding. `Readable` - * @property {Number} [highWaterMark=16kb] The maximum number of bytes to store - * in the internal buffer before ceasing to read from the underlying resource. - * `Readable` `Writable` - * @property {Boolean} [objectMode=false] Whether this stream should behave as a - * stream of objects. Meaning that stream.read(n) returns a single value instead - * of a Buffer of size n. `Readable` `Writable` - */ - -/** - * @typedef {Object} EntryData - * @property {String} name Sets the entry name including internal path. - * @property {(String|Date)} [date=NOW()] Sets the entry date. - * @property {Number} [mode=D:0755/F:0644] Sets the entry permissions. - * @property {String} [prefix] Sets a path prefix for the entry name. Useful - * when working with methods like `directory` or `glob`. - * @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing - * for reduction of fs stat calls when stat data is already known. - */ - -/** - * @typedef {Object} ErrorData - * @property {String} message The message of the error. - * @property {String} code The error code assigned to this error. 
- * @property {String} data Additional data provided for reporting or debugging (where available). - */ - -/** - * @typedef {Object} ProgressData - * @property {Object} entries - * @property {Number} entries.total Number of entries that have been appended. - * @property {Number} entries.processed Number of entries that have been processed. - * @property {Object} fs - * @property {Number} fs.totalBytes Number of bytes that have been appended. Calculated asynchronously and might not be accurate: it growth while entries are added. (based on fs.Stats) - * @property {Number} fs.processedBytes Number of bytes that have been processed. (based on fs.Stats) - */ diff --git a/node_modules/archiver/lib/error.js b/node_modules/archiver/lib/error.js deleted file mode 100644 index 381cf86..0000000 --- a/node_modules/archiver/lib/error.js +++ /dev/null @@ -1,40 +0,0 @@ -/** - * Archiver Core - * - * @ignore - * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} - * @copyright (c) 2012-2014 Chris Talkington, contributors. - */ - -var util = require('util'); - -const ERROR_CODES = { - 'ABORTED': 'archive was aborted', - 'DIRECTORYDIRPATHREQUIRED': 'diretory dirpath argument must be a non-empty string value', - 'DIRECTORYFUNCTIONINVALIDDATA': 'invalid data returned by directory custom data function', - 'ENTRYNAMEREQUIRED': 'entry name must be a non-empty string value', - 'FILEFILEPATHREQUIRED': 'file filepath argument must be a non-empty string value', - 'FINALIZING': 'archive already finalizing', - 'QUEUECLOSED': 'queue closed', - 'NOENDMETHOD': 'no suitable finalize/end method defined by module', - 'DIRECTORYNOTSUPPORTED': 'support for directory entries not defined by module', - 'FORMATSET': 'archive format already set', - 'INPUTSTEAMBUFFERREQUIRED': 'input source must be valid Stream or Buffer instance', - 'MODULESET': 'module already set', - 'SYMLINKNOTSUPPORTED': 'support for symlink entries not defined by module', - 'SYMLINKFILEPATHREQUIRED': 'symlink filepath argument must be a non-empty string value', - 'SYMLINKTARGETREQUIRED': 'symlink target argument must be a non-empty string value', - 'ENTRYNOTSUPPORTED': 'entry not supported' -}; - -function ArchiverError(code, data) { - Error.captureStackTrace(this, this.constructor); - //this.name = this.constructor.name; - this.message = ERROR_CODES[code] || code; - this.code = code; - this.data = data; -} - -util.inherits(ArchiverError, Error); - -exports = module.exports = ArchiverError; \ No newline at end of file diff --git a/node_modules/archiver/lib/plugins/json.js b/node_modules/archiver/lib/plugins/json.js deleted file mode 100644 index c8dce44..0000000 --- a/node_modules/archiver/lib/plugins/json.js +++ /dev/null @@ -1,110 +0,0 @@ -/** - * JSON Format Plugin - * - * @module plugins/json - * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} - * @copyright (c) 2012-2014 Chris Talkington, contributors. 
- */ -var inherits = require('util').inherits; -var Transform = require('readable-stream').Transform; - -var crc32 = require('buffer-crc32'); -var util = require('archiver-utils'); - -/** - * @constructor - * @param {(JsonOptions|TransformOptions)} options - */ -var Json = function(options) { - if (!(this instanceof Json)) { - return new Json(options); - } - - options = this.options = util.defaults(options, {}); - - Transform.call(this, options); - - this.supports = { - directory: true, - symlink: true - }; - - this.files = []; -}; - -inherits(Json, Transform); - -/** - * [_transform description] - * - * @private - * @param {Buffer} chunk - * @param {String} encoding - * @param {Function} callback - * @return void - */ -Json.prototype._transform = function(chunk, encoding, callback) { - callback(null, chunk); -}; - -/** - * [_writeStringified description] - * - * @private - * @return void - */ -Json.prototype._writeStringified = function() { - var fileString = JSON.stringify(this.files); - this.write(fileString); -}; - -/** - * [append description] - * - * @param {(Buffer|Stream)} source - * @param {EntryData} data - * @param {Function} callback - * @return void - */ -Json.prototype.append = function(source, data, callback) { - var self = this; - - data.crc32 = 0; - - function onend(err, sourceBuffer) { - if (err) { - callback(err); - return; - } - - data.size = sourceBuffer.length || 0; - data.crc32 = crc32.unsigned(sourceBuffer); - - self.files.push(data); - - callback(null, data); - } - - if (data.sourceType === 'buffer') { - onend(null, source); - } else if (data.sourceType === 'stream') { - util.collectStream(source, onend); - } -}; - -/** - * [finalize description] - * - * @return void - */ -Json.prototype.finalize = function() { - this._writeStringified(); - this.end(); -}; - -module.exports = Json; - -/** - * @typedef {Object} JsonOptions - * @global - */ diff --git a/node_modules/archiver/lib/plugins/tar.js b/node_modules/archiver/lib/plugins/tar.js deleted file mode 100644 index 01d184f..0000000 --- a/node_modules/archiver/lib/plugins/tar.js +++ /dev/null @@ -1,167 +0,0 @@ -/** - * TAR Format Plugin - * - * @module plugins/tar - * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} - * @copyright (c) 2012-2014 Chris Talkington, contributors. 
- */ -var zlib = require('zlib'); - -var engine = require('tar-stream'); -var util = require('archiver-utils'); - -/** - * @constructor - * @param {TarOptions} options - */ -var Tar = function(options) { - if (!(this instanceof Tar)) { - return new Tar(options); - } - - options = this.options = util.defaults(options, { - gzip: false - }); - - if (typeof options.gzipOptions !== 'object') { - options.gzipOptions = {}; - } - - this.supports = { - directory: true, - symlink: true - }; - - this.engine = engine.pack(options); - this.compressor = false; - - if (options.gzip) { - this.compressor = zlib.createGzip(options.gzipOptions); - this.compressor.on('error', this._onCompressorError.bind(this)); - } -}; - -/** - * [_onCompressorError description] - * - * @private - * @param {Error} err - * @return void - */ -Tar.prototype._onCompressorError = function(err) { - this.engine.emit('error', err); -}; - -/** - * [append description] - * - * @param {(Buffer|Stream)} source - * @param {TarEntryData} data - * @param {Function} callback - * @return void - */ -Tar.prototype.append = function(source, data, callback) { - var self = this; - - data.mtime = data.date; - - function append(err, sourceBuffer) { - if (err) { - callback(err); - return; - } - - self.engine.entry(data, sourceBuffer, function(err) { - callback(err, data); - }); - } - - if (data.sourceType === 'buffer') { - append(null, source); - } else if (data.sourceType === 'stream' && data.stats) { - data.size = data.stats.size; - - var entry = self.engine.entry(data, function(err) { - callback(err, data); - }); - - source.pipe(entry); - } else if (data.sourceType === 'stream') { - util.collectStream(source, append); - } -}; - -/** - * [finalize description] - * - * @return void - */ -Tar.prototype.finalize = function() { - this.engine.finalize(); -}; - -/** - * [on description] - * - * @return this.engine - */ -Tar.prototype.on = function() { - return this.engine.on.apply(this.engine, arguments); -}; - -/** - * [pipe description] - * - * @param {String} destination - * @param {Object} options - * @return this.engine - */ -Tar.prototype.pipe = function(destination, options) { - if (this.compressor) { - return this.engine.pipe.apply(this.engine, [this.compressor]).pipe(destination, options); - } else { - return this.engine.pipe.apply(this.engine, arguments); - } -}; - -/** - * [unpipe description] - * - * @return this.engine - */ -Tar.prototype.unpipe = function() { - if (this.compressor) { - return this.compressor.unpipe.apply(this.compressor, arguments); - } else { - return this.engine.unpipe.apply(this.engine, arguments); - } -}; - -module.exports = Tar; - -/** - * @typedef {Object} TarOptions - * @global - * @property {Boolean} [gzip=false] Compress the tar archive using gzip. - * @property {Object} [gzipOptions] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} - * to control compression. - * @property {*} [*] See [tar-stream]{@link https://github.com/mafintosh/tar-stream} documentation for additional properties. - */ - -/** - * @typedef {Object} TarEntryData - * @global - * @property {String} name Sets the entry name including internal path. - * @property {(String|Date)} [date=NOW()] Sets the entry date. - * @property {Number} [mode=D:0755/F:0644] Sets the entry permissions. - * @property {String} [prefix] Sets a path prefix for the entry name. Useful - * when working with methods like `directory` or `glob`. 
- * @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing - * for reduction of fs stat calls when stat data is already known. - */ - -/** - * TarStream Module - * @external TarStream - * @see {@link https://github.com/mafintosh/tar-stream} - */ diff --git a/node_modules/archiver/lib/plugins/zip.js b/node_modules/archiver/lib/plugins/zip.js deleted file mode 100644 index 0b7b7a2..0000000 --- a/node_modules/archiver/lib/plugins/zip.js +++ /dev/null @@ -1,116 +0,0 @@ -/** - * ZIP Format Plugin - * - * @module plugins/zip - * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} - * @copyright (c) 2012-2014 Chris Talkington, contributors. - */ -var engine = require('zip-stream'); -var util = require('archiver-utils'); - -/** - * @constructor - * @param {ZipOptions} [options] - * @param {String} [options.comment] Sets the zip archive comment. - * @param {Boolean} [options.forceLocalTime=false] Forces the archive to contain local file times instead of UTC. - * @param {Boolean} [options.forceZip64=false] Forces the archive to contain ZIP64 headers. - * @param {Boolean} [options.store=false] Sets the compression method to STORE. - * @param {Object} [options.zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} - */ -var Zip = function(options) { - if (!(this instanceof Zip)) { - return new Zip(options); - } - - options = this.options = util.defaults(options, { - comment: '', - forceUTC: false, - store: false - }); - - this.supports = { - directory: true, - symlink: true - }; - - this.engine = new engine(options); -}; - -/** - * @param {(Buffer|Stream)} source - * @param {ZipEntryData} data - * @param {String} data.name Sets the entry name including internal path. - * @param {(String|Date)} [data.date=NOW()] Sets the entry date. - * @param {Number} [data.mode=D:0755/F:0644] Sets the entry permissions. - * @param {String} [data.prefix] Sets a path prefix for the entry name. Useful - * when working with methods like `directory` or `glob`. - * @param {fs.Stats} [data.stats] Sets the fs stat data for this entry allowing - * for reduction of fs stat calls when stat data is already known. - * @param {Boolean} [data.store=ZipOptions.store] Sets the compression method to STORE. - * @param {Function} callback - * @return void - */ -Zip.prototype.append = function(source, data, callback) { - this.engine.entry(source, data, callback); -}; - -/** - * @return void - */ -Zip.prototype.finalize = function() { - this.engine.finalize(); -}; - -/** - * @return this.engine - */ -Zip.prototype.on = function() { - return this.engine.on.apply(this.engine, arguments); -}; - -/** - * @return this.engine - */ -Zip.prototype.pipe = function() { - return this.engine.pipe.apply(this.engine, arguments); -}; - -/** - * @return this.engine - */ -Zip.prototype.unpipe = function() { - return this.engine.unpipe.apply(this.engine, arguments); -}; - -module.exports = Zip; - -/** - * @typedef {Object} ZipOptions - * @global - * @property {String} [comment] Sets the zip archive comment. - * @property {Boolean} [forceLocalTime=false] Forces the archive to contain local file times instead of UTC. - * @property {Boolean} [forceZip64=false] Forces the archive to contain ZIP64 headers. - * @property {Boolean} [store=false] Sets the compression method to STORE. - * @property {Object} [zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} - * to control compression. 
- * @property {*} [*] See [zip-stream]{@link https://archiverjs.com/zip-stream/ZipStream.html} documentation for current list of properties. - */ - -/** - * @typedef {Object} ZipEntryData - * @global - * @property {String} name Sets the entry name including internal path. - * @property {(String|Date)} [date=NOW()] Sets the entry date. - * @property {Number} [mode=D:0755/F:0644] Sets the entry permissions. - * @property {String} [prefix] Sets a path prefix for the entry name. Useful - * when working with methods like `directory` or `glob`. - * @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing - * for reduction of fs stat calls when stat data is already known. - * @property {Boolean} [store=ZipOptions.store] Sets the compression method to STORE. - */ - -/** - * ZipStream Module - * @external ZipStream - * @see {@link https://www.archiverjs.com/zip-stream/ZipStream.html} - */ diff --git a/node_modules/archiver/package.json b/node_modules/archiver/package.json deleted file mode 100644 index b09d899..0000000 --- a/node_modules/archiver/package.json +++ /dev/null @@ -1,61 +0,0 @@ -{ - "name": "archiver", - "version": "3.1.1", - "description": "a streaming interface for archive generation", - "homepage": "https://github.com/archiverjs/node-archiver", - "author": { - "name": "Chris Talkington", - "url": "http://christalkington.com/" - }, - "repository": { - "type": "git", - "url": "https://github.com/archiverjs/node-archiver.git" - }, - "bugs": { - "url": "https://github.com/archiverjs/node-archiver/issues" - }, - "license": "MIT", - "main": "index.js", - "files": [ - "index.js", - "lib" - ], - "engines": { - "node": ">= 6" - }, - "scripts": { - "test": "mocha --reporter dot", - "jsdoc": "jsdoc -c jsdoc.json README.md", - "bench": "node benchmark/simple/pack-zip.js" - }, - "dependencies": { - "archiver-utils": "^2.1.0", - "async": "^2.6.3", - "buffer-crc32": "^0.2.1", - "glob": "^7.1.4", - "readable-stream": "^3.4.0", - "tar-stream": "^2.1.0", - "zip-stream": "^2.1.2" - }, - "devDependencies": { - "archiver-jsdoc-theme": "^1.1.1", - "chai": "^4.2.0", - "jsdoc": "^3.6.3", - "mkdirp": "^0.5.0", - "mocha": "^6.2.0", - "rimraf": "^2.6.3", - "stream-bench": "^0.1.2", - "tar": "^4.4.10", - "yauzl": "^2.9.0" - }, - "keywords": [ - "archive", - "archiver", - "stream", - "zip", - "tar" - ], - "publishConfig": { - "registry": "https://registry.npmjs.org/" - } -} diff --git a/node_modules/async/CHANGELOG.md b/node_modules/async/CHANGELOG.md deleted file mode 100644 index ec6b871..0000000 --- a/node_modules/async/CHANGELOG.md +++ /dev/null @@ -1,278 +0,0 @@ -# v2.6.4 -- Fix potential prototype pollution exploit (#1828) - -# v2.6.3 -- Updated lodash to squelch a security warning (#1675) - -# v2.6.2 -- Updated lodash to squelch a security warning (#1620) - -# v2.6.1 -- Updated lodash to prevent `npm audit` warnings. (#1532, #1533) -- Made `async-es` more optimized for webpack users (#1517) -- Fixed a stack overflow with large collections and a synchronous iterator (#1514) -- Various small fixes/chores (#1505, #1511, #1527, #1530) - -# v2.6.0 -- Added missing aliases for many methods. Previously, you could not (e.g.) `require('async/find')` or use `async.anyLimit`. (#1483) -- Improved `queue` performance. 
(#1448, #1454) -- Add missing sourcemap (#1452, #1453) -- Various doc updates (#1448, #1471, #1483) - -# v2.5.0 -- Added `concatLimit`, the `Limit` equivalent of [`concat`](https://caolan.github.io/async/docs.html#concat) ([#1426](https://github.com/caolan/async/issues/1426), [#1430](https://github.com/caolan/async/pull/1430)) -- `concat` improvements: it now preserves order, handles falsy values and the `iteratee` callback takes a variable number of arguments ([#1437](https://github.com/caolan/async/issues/1437), [#1436](https://github.com/caolan/async/pull/1436)) -- Fixed an issue in `queue` where there was a size discrepancy between `workersList().length` and `running()` ([#1428](https://github.com/caolan/async/issues/1428), [#1429](https://github.com/caolan/async/pull/1429)) -- Various doc fixes ([#1422](https://github.com/caolan/async/issues/1422), [#1424](https://github.com/caolan/async/pull/1424)) - -# v2.4.1 -- Fixed a bug preventing functions wrapped with `timeout()` from being re-used. ([#1418](https://github.com/caolan/async/issues/1418), [#1419](https://github.com/caolan/async/issues/1419)) - -# v2.4.0 -- Added `tryEach`, for running async functions in parallel, where you only expect one to succeed. ([#1365](https://github.com/caolan/async/issues/1365), [#687](https://github.com/caolan/async/issues/687)) -- Improved performance, most notably in `parallel` and `waterfall` ([#1395](https://github.com/caolan/async/issues/1395)) -- Added `queue.remove()`, for removing items in a `queue` ([#1397](https://github.com/caolan/async/issues/1397), [#1391](https://github.com/caolan/async/issues/1391)) -- Fixed using `eval`, preventing Async from running in pages with Content Security Policy ([#1404](https://github.com/caolan/async/issues/1404), [#1403](https://github.com/caolan/async/issues/1403)) -- Fixed errors thrown in an `asyncify`ed function's callback being caught by the underlying Promise ([#1408](https://github.com/caolan/async/issues/1408)) -- Fixed timing of `queue.empty()` ([#1367](https://github.com/caolan/async/issues/1367)) -- Various doc fixes ([#1314](https://github.com/caolan/async/issues/1314), [#1394](https://github.com/caolan/async/issues/1394), [#1412](https://github.com/caolan/async/issues/1412)) - -# v2.3.0 -- Added support for ES2017 `async` functions. Wherever you can pass a Node-style/CPS function that uses a callback, you can also pass an `async` function. Previously, you had to wrap `async` functions with `asyncify`. The caveat is that it will only work if `async` functions are supported natively in your environment, transpiled implementations can't be detected. 
([#1386](https://github.com/caolan/async/issues/1386), [#1390](https://github.com/caolan/async/issues/1390)) -- Small doc fix ([#1392](https://github.com/caolan/async/issues/1392)) - -# v2.2.0 -- Added `groupBy`, and the `Series`/`Limit` equivalents, analogous to [`_.groupBy`](http://lodash.com/docs#groupBy) ([#1364](https://github.com/caolan/async/issues/1364)) -- Fixed `transform` bug when `callback` was not passed ([#1381](https://github.com/caolan/async/issues/1381)) -- Added note about `reflect` to `parallel` docs ([#1385](https://github.com/caolan/async/issues/1385)) - -# v2.1.5 -- Fix `auto` bug when function names collided with Array.prototype ([#1358](https://github.com/caolan/async/issues/1358)) -- Improve some error messages ([#1349](https://github.com/caolan/async/issues/1349)) -- Avoid stack overflow case in queue -- Fixed an issue in `some`, `every` and `find` where processing would continue after the result was determined. -- Cleanup implementations of `some`, `every` and `find` - -# v2.1.3 -- Make bundle size smaller -- Create optimized hotpath for `filter` in array case. - -# v2.1.2 -- Fixed a stackoverflow bug with `detect`, `some`, `every` on large inputs ([#1293](https://github.com/caolan/async/issues/1293)). - -# v2.1.0 - -- `retry` and `retryable` now support an optional `errorFilter` function that determines if the `task` should retry on the error ([#1256](https://github.com/caolan/async/issues/1256), [#1261](https://github.com/caolan/async/issues/1261)) -- Optimized array iteration in `race`, `cargo`, `queue`, and `priorityQueue` ([#1253](https://github.com/caolan/async/issues/1253)) -- Added alias documentation to doc site ([#1251](https://github.com/caolan/async/issues/1251), [#1254](https://github.com/caolan/async/issues/1254)) -- Added [BootStrap scrollspy](http://getbootstrap.com/javascript/#scrollspy) to docs to highlight in the sidebar the current method being viewed ([#1289](https://github.com/caolan/async/issues/1289), [#1300](https://github.com/caolan/async/issues/1300)) -- Various minor doc fixes ([#1263](https://github.com/caolan/async/issues/1263), [#1264](https://github.com/caolan/async/issues/1264), [#1271](https://github.com/caolan/async/issues/1271), [#1278](https://github.com/caolan/async/issues/1278), [#1280](https://github.com/caolan/async/issues/1280), [#1282](https://github.com/caolan/async/issues/1282), [#1302](https://github.com/caolan/async/issues/1302)) - -# v2.0.1 - -- Significantly optimized all iteration based collection methods such as `each`, `map`, `filter`, etc ([#1245](https://github.com/caolan/async/issues/1245), [#1246](https://github.com/caolan/async/issues/1246), [#1247](https://github.com/caolan/async/issues/1247)). - -# v2.0.0 - -Lots of changes here! - -First and foremost, we have a slick new [site for docs](https://caolan.github.io/async/). Special thanks to [**@hargasinski**](https://github.com/hargasinski) for his work converting our old docs to `jsdoc` format and implementing the new website. Also huge ups to [**@ivanseidel**](https://github.com/ivanseidel) for designing our new logo. It was a long process for both of these tasks, but I think these changes turned out extraordinary well. - -The biggest feature is modularization. You can now `require("async/series")` to only require the `series` function. Every Async library function is available this way. You still can `require("async")` to require the entire library, like you could do before. - -We also provide Async as a collection of ES2015 modules. 
You can now `import {each} from 'async-es'` or `import waterfall from 'async-es/waterfall'`. If you are using only a few Async functions, and are using a ES bundler such as Rollup, this can significantly lower your build size. - -Major thanks to [**@Kikobeats**](github.com/Kikobeats), [**@aearly**](github.com/aearly) and [**@megawac**](github.com/megawac) for doing the majority of the modularization work, as well as [**@jdalton**](github.com/jdalton) and [**@Rich-Harris**](github.com/Rich-Harris) for advisory work on the general modularization strategy. - -Another one of the general themes of the 2.0 release is standardization of what an "async" function is. We are now more strictly following the node-style continuation passing style. That is, an async function is a function that: - -1. Takes a variable number of arguments -2. The last argument is always a callback -3. The callback can accept any number of arguments -4. The first argument passed to the callback will be treated as an error result, if the argument is truthy -5. Any number of result arguments can be passed after the "error" argument -6. The callback is called once and exactly once, either on the same tick or later tick of the JavaScript event loop. - -There were several cases where Async accepted some functions that did not strictly have these properties, most notably `auto`, `every`, `some`, `filter`, `reject` and `detect`. - -Another theme is performance. We have eliminated internal deferrals in all cases where they make sense. For example, in `waterfall` and `auto`, there was a `setImmediate` between each task -- these deferrals have been removed. A `setImmediate` call can add up to 1ms of delay. This might not seem like a lot, but it can add up if you are using many Async functions in the course of processing a HTTP request, for example. Nearly all asynchronous functions that do I/O already have some sort of deferral built in, so the extra deferral is unnecessary. The trade-off of this change is removing our built-in stack-overflow defense. Many synchronous callback calls in series can quickly overflow the JS call stack. If you do have a function that is sometimes synchronous (calling its callback on the same tick), and are running into stack overflows, wrap it with `async.ensureAsync()`. - -Another big performance win has been re-implementing `queue`, `cargo`, and `priorityQueue` with [doubly linked lists](https://en.wikipedia.org/wiki/Doubly_linked_list) instead of arrays. This has lead to queues being an order of [magnitude faster on large sets of tasks](https://github.com/caolan/async/pull/1205). - -## New Features - -- Async is now modularized. Individual functions can be `require()`d from the main package. (`require('async/auto')`) ([#984](https://github.com/caolan/async/issues/984), [#996](https://github.com/caolan/async/issues/996)) -- Async is also available as a collection of ES2015 modules in the new `async-es` package. (`import {forEachSeries} from 'async-es'`) ([#984](https://github.com/caolan/async/issues/984), [#996](https://github.com/caolan/async/issues/996)) -- Added `race`, analogous to `Promise.race()`. It will run an array of async tasks in parallel and will call its callback with the result of the first task to respond. ([#568](https://github.com/caolan/async/issues/568), [#1038](https://github.com/caolan/async/issues/1038)) -- Collection methods now accept ES2015 iterators. Maps, Sets, and anything that implements the iterator spec can now be passed directly to `each`, `map`, `parallel`, etc.. 
([#579](https://github.com/caolan/async/issues/579), [#839](https://github.com/caolan/async/issues/839), [#1074](https://github.com/caolan/async/issues/1074)) -- Added `mapValues`, for mapping over the properties of an object and returning an object with the same keys. ([#1157](https://github.com/caolan/async/issues/1157), [#1177](https://github.com/caolan/async/issues/1177)) -- Added `timeout`, a wrapper for an async function that will make the task time-out after the specified time. ([#1007](https://github.com/caolan/async/issues/1007), [#1027](https://github.com/caolan/async/issues/1027)) -- Added `reflect` and `reflectAll`, analagous to [`Promise.reflect()`](http://bluebirdjs.com/docs/api/reflect.html), a wrapper for async tasks that always succeeds, by gathering results and errors into an object. ([#942](https://github.com/caolan/async/issues/942), [#1012](https://github.com/caolan/async/issues/1012), [#1095](https://github.com/caolan/async/issues/1095)) -- `constant` supports dynamic arguments -- it will now always use its last argument as the callback. ([#1016](https://github.com/caolan/async/issues/1016), [#1052](https://github.com/caolan/async/issues/1052)) -- `setImmediate` and `nextTick` now support arguments to partially apply to the deferred function, like the node-native versions do. ([#940](https://github.com/caolan/async/issues/940), [#1053](https://github.com/caolan/async/issues/1053)) -- `auto` now supports resolving cyclic dependencies using [Kahn's algorithm](https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm) ([#1140](https://github.com/caolan/async/issues/1140)). -- Added `autoInject`, a relative of `auto` that automatically spreads a task's dependencies as arguments to the task function. ([#608](https://github.com/caolan/async/issues/608), [#1055](https://github.com/caolan/async/issues/1055), [#1099](https://github.com/caolan/async/issues/1099), [#1100](https://github.com/caolan/async/issues/1100)) -- You can now limit the concurrency of `auto` tasks. ([#635](https://github.com/caolan/async/issues/635), [#637](https://github.com/caolan/async/issues/637)) -- Added `retryable`, a relative of `retry` that wraps an async function, making it retry when called. ([#1058](https://github.com/caolan/async/issues/1058)) -- `retry` now supports specifying a function that determines the next time interval, useful for exponential backoff, logging and other retry strategies. ([#1161](https://github.com/caolan/async/issues/1161)) -- `retry` will now pass all of the arguments the task function was resolved with to the callback ([#1231](https://github.com/caolan/async/issues/1231)). -- Added `q.unsaturated` -- callback called when a `queue`'s number of running workers falls below a threshold. ([#868](https://github.com/caolan/async/issues/868), [#1030](https://github.com/caolan/async/issues/1030), [#1033](https://github.com/caolan/async/issues/1033), [#1034](https://github.com/caolan/async/issues/1034)) -- Added `q.error` -- a callback called whenever a `queue` task calls its callback with an error. ([#1170](https://github.com/caolan/async/issues/1170)) -- `applyEach` and `applyEachSeries` now pass results to the final callback. ([#1088](https://github.com/caolan/async/issues/1088)) - -## Breaking changes - -- Calling a callback more than once is considered an error, and an error will be thrown. This had an explicit breaking change in `waterfall`. 
If you were relying on this behavior, you should more accurately represent your control flow as an event emitter or stream. ([#814](https://github.com/caolan/async/issues/814), [#815](https://github.com/caolan/async/issues/815), [#1048](https://github.com/caolan/async/issues/1048), [#1050](https://github.com/caolan/async/issues/1050)) -- `auto` task functions now always take the callback as the last argument. If a task has dependencies, the `results` object will be passed as the first argument. To migrate old task functions, wrap them with [`_.flip`](https://lodash.com/docs#flip) ([#1036](https://github.com/caolan/async/issues/1036), [#1042](https://github.com/caolan/async/issues/1042)) -- Internal `setImmediate` calls have been refactored away. This may make existing flows vulnerable to stack overflows if you use many synchronous functions in series. Use `ensureAsync` to work around this. ([#696](https://github.com/caolan/async/issues/696), [#704](https://github.com/caolan/async/issues/704), [#1049](https://github.com/caolan/async/issues/1049), [#1050](https://github.com/caolan/async/issues/1050)) -- `map` used to return an object when iterating over an object. `map` now always returns an array, like in other libraries. The previous object behavior has been split out into `mapValues`. ([#1157](https://github.com/caolan/async/issues/1157), [#1177](https://github.com/caolan/async/issues/1177)) -- `filter`, `reject`, `some`, `every`, `detect` and their families like `{METHOD}Series` and `{METHOD}Limit` now expect an error as the first callback argument, rather than just a simple boolean. Pass `null` as the first argument, or use `fs.access` instead of `fs.exists`. ([#118](https://github.com/caolan/async/issues/118), [#774](https://github.com/caolan/async/issues/774), [#1028](https://github.com/caolan/async/issues/1028), [#1041](https://github.com/caolan/async/issues/1041)) -- `{METHOD}` and `{METHOD}Series` are now implemented in terms of `{METHOD}Limit`. This is a major internal simplification, and is not expected to cause many problems, but it does subtly affect how functions execute internally. ([#778](https://github.com/caolan/async/issues/778), [#847](https://github.com/caolan/async/issues/847)) -- `retry`'s callback is now optional. Previously, omitting the callback would partially apply the function, meaning it could be passed directly as a task to `series` or `auto`. The partially applied "control-flow" behavior has been separated out into `retryable`. ([#1054](https://github.com/caolan/async/issues/1054), [#1058](https://github.com/caolan/async/issues/1058)) -- The test function for `whilst`, `until`, and `during` used to be passed non-error args from the iteratee function's callback, but this led to weirdness where the first call of the test function would be passed no args. We have made it so the test function is never passed extra arguments, and only the `doWhilst`, `doUntil`, and `doDuring` functions pass iteratee callback arguments to the test function ([#1217](https://github.com/caolan/async/issues/1217), [#1224](https://github.com/caolan/async/issues/1224)) -- The `q.tasks` array has been renamed `q._tasks` and is now implemented as a doubly linked list (DLL). Any code that used to interact with this array will need to be updated to either use the provided helpers or support DLLs ([#1205](https://github.com/caolan/async/issues/1205)). 
-- The timing of the `q.saturated()` callback in a `queue` has been modified to better reflect when tasks pushed to the queue will start queueing. ([#724](https://github.com/caolan/async/issues/724), [#1078](https://github.com/caolan/async/issues/1078)) -- Removed `iterator` method in favour of [ES2015 iterator protocol](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators ) which natively supports arrays ([#1237](https://github.com/caolan/async/issues/1237)) -- Dropped support for Component, Jam, SPM, and Volo ([#1175](https://github.com/caolan/async/issues/1175), #[#176](https://github.com/caolan/async/issues/176)) - -## Bug Fixes - -- Improved handling of no dependency cases in `auto` & `autoInject` ([#1147](https://github.com/caolan/async/issues/1147)). -- Fixed a bug where the callback generated by `asyncify` with `Promises` could resolve twice ([#1197](https://github.com/caolan/async/issues/1197)). -- Fixed several documented optional callbacks not actually being optional ([#1223](https://github.com/caolan/async/issues/1223)). - -## Other - -- Added `someSeries` and `everySeries` for symmetry, as well as a complete set of `any`/`anyLimit`/`anySeries` and `all`/`/allLmit`/`allSeries` aliases. -- Added `find` as an alias for `detect. (as well as `findLimit` and `findSeries`). -- Various doc fixes ([#1005](https://github.com/caolan/async/issues/1005), [#1008](https://github.com/caolan/async/issues/1008), [#1010](https://github.com/caolan/async/issues/1010), [#1015](https://github.com/caolan/async/issues/1015), [#1021](https://github.com/caolan/async/issues/1021), [#1037](https://github.com/caolan/async/issues/1037), [#1039](https://github.com/caolan/async/issues/1039), [#1051](https://github.com/caolan/async/issues/1051), [#1102](https://github.com/caolan/async/issues/1102), [#1107](https://github.com/caolan/async/issues/1107), [#1121](https://github.com/caolan/async/issues/1121), [#1123](https://github.com/caolan/async/issues/1123), [#1129](https://github.com/caolan/async/issues/1129), [#1135](https://github.com/caolan/async/issues/1135), [#1138](https://github.com/caolan/async/issues/1138), [#1141](https://github.com/caolan/async/issues/1141), [#1153](https://github.com/caolan/async/issues/1153), [#1216](https://github.com/caolan/async/issues/1216), [#1217](https://github.com/caolan/async/issues/1217), [#1232](https://github.com/caolan/async/issues/1232), [#1233](https://github.com/caolan/async/issues/1233), [#1236](https://github.com/caolan/async/issues/1236), [#1238](https://github.com/caolan/async/issues/1238)) - -Thank you [**@aearly**](github.com/aearly) and [**@megawac**](github.com/megawac) for taking the lead on version 2 of async. 
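To make the node-style contract and the modular requires described above concrete, here is a minimal sketch; it assumes async v2's per-function entry points (e.g. `async/mapLimit`), and the `processFile` helper and file names are hypothetical, not taken from the package itself.

```js
// Minimal sketch: a node-style (error-first, callback-last) task used with a modular require.
var fs = require('fs');
var mapLimit = require('async/mapLimit'); // pulls in only mapLimit, per the modular builds described above

// A node-style async function: variable arguments, callback last, called exactly once.
function processFile(file, callback) {
  fs.readFile(file, 'utf8', function (err, contents) {
    if (err) return callback(err);    // error passed as the first callback argument
    callback(null, contents.length);  // any number of results after the error slot
  });
}

// Run at most two reads concurrently; hypothetical file names.
mapLimit(['a.txt', 'b.txt'], 2, processFile, function (err, lengths) {
  if (err) return console.error(err);
  console.log(lengths); // e.g. [ 120, 48 ]
});
```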
- ------------------------------------------ - -# v1.5.2 -- Allow using `"constructor"` as an argument in `memoize` ([#998](https://github.com/caolan/async/issues/998)) -- Give a better error messsage when `auto` dependency checking fails ([#994](https://github.com/caolan/async/issues/994)) -- Various doc updates ([#936](https://github.com/caolan/async/issues/936), [#956](https://github.com/caolan/async/issues/956), [#979](https://github.com/caolan/async/issues/979), [#1002](https://github.com/caolan/async/issues/1002)) - -# v1.5.1 -- Fix issue with `pause` in `queue` with concurrency enabled ([#946](https://github.com/caolan/async/issues/946)) -- `while` and `until` now pass the final result to callback ([#963](https://github.com/caolan/async/issues/963)) -- `auto` will properly handle concurrency when there is no callback ([#966](https://github.com/caolan/async/issues/966)) -- `auto` will now properly stop execution when an error occurs ([#988](https://github.com/caolan/async/issues/988), [#993](https://github.com/caolan/async/issues/993)) -- Various doc fixes ([#971](https://github.com/caolan/async/issues/971), [#980](https://github.com/caolan/async/issues/980)) - -# v1.5.0 - -- Added `transform`, analogous to [`_.transform`](http://lodash.com/docs#transform) ([#892](https://github.com/caolan/async/issues/892)) -- `map` now returns an object when an object is passed in, rather than array with non-numeric keys. `map` will begin always returning an array with numeric indexes in the next major release. ([#873](https://github.com/caolan/async/issues/873)) -- `auto` now accepts an optional `concurrency` argument to limit the number of running tasks ([#637](https://github.com/caolan/async/issues/637)) -- Added `queue#workersList()`, to retrieve the list of currently running tasks.
([#891](https://github.com/caolan/async/issues/891)) -- Various code simplifications ([#896](https://github.com/caolan/async/issues/896), [#904](https://github.com/caolan/async/issues/904)) -- Various doc fixes :scroll: ([#890](https://github.com/caolan/async/issues/890), [#894](https://github.com/caolan/async/issues/894), [#903](https://github.com/caolan/async/issues/903), [#905](https://github.com/caolan/async/issues/905), [#912](https://github.com/caolan/async/issues/912)) - -# v1.4.2 - -- Ensure coverage files don't get published on npm ([#879](https://github.com/caolan/async/issues/879)) - -# v1.4.1 - -- Add in overlooked `detectLimit` method ([#866](https://github.com/caolan/async/issues/866)) -- Removed unnecessary files from npm releases ([#861](https://github.com/caolan/async/issues/861)) -- Removed usage of a reserved word to prevent :boom: in older environments ([#870](https://github.com/caolan/async/issues/870)) - -# v1.4.0 - -- `asyncify` now supports promises ([#840](https://github.com/caolan/async/issues/840)) -- Added `Limit` versions of `filter` and `reject` ([#836](https://github.com/caolan/async/issues/836)) -- Add `Limit` versions of `detect`, `some` and `every` ([#828](https://github.com/caolan/async/issues/828), [#829](https://github.com/caolan/async/issues/829)) -- `some`, `every` and `detect` now short circuit early ([#828](https://github.com/caolan/async/issues/828), [#829](https://github.com/caolan/async/issues/829)) -- Improve detection of the global object ([#804](https://github.com/caolan/async/issues/804)), enabling use in WebWorkers -- `whilst` now called with arguments from iterator ([#823](https://github.com/caolan/async/issues/823)) -- `during` now gets called with arguments from iterator ([#824](https://github.com/caolan/async/issues/824)) -- Code simplifications and optimizations aplenty ([diff](https://github.com/caolan/async/compare/v1.3.0...v1.4.0)) - - -# v1.3.0 - -New Features: -- Added `constant` -- Added `asyncify`/`wrapSync` for making sync functions work with callbacks. ([#671](https://github.com/caolan/async/issues/671), [#806](https://github.com/caolan/async/issues/806)) -- Added `during` and `doDuring`, which are like `whilst` with an async truth test. ([#800](https://github.com/caolan/async/issues/800)) -- `retry` now accepts an `interval` parameter to specify a delay between retries. ([#793](https://github.com/caolan/async/issues/793)) -- `async` should work better in Web Workers due to better `root` detection ([#804](https://github.com/caolan/async/issues/804)) -- Callbacks are now optional in `whilst`, `doWhilst`, `until`, and `doUntil` ([#642](https://github.com/caolan/async/issues/642)) -- Various internal updates ([#786](https://github.com/caolan/async/issues/786), [#801](https://github.com/caolan/async/issues/801), [#802](https://github.com/caolan/async/issues/802), [#803](https://github.com/caolan/async/issues/803)) -- Various doc fixes ([#790](https://github.com/caolan/async/issues/790), [#794](https://github.com/caolan/async/issues/794)) - -Bug Fixes: -- `cargo` now exposes the `payload` size, and `cargo.payload` can be changed on the fly after the `cargo` is created. ([#740](https://github.com/caolan/async/issues/740), [#744](https://github.com/caolan/async/issues/744), [#783](https://github.com/caolan/async/issues/783)) - - -# v1.2.1 - -Bug Fix: - -- Small regression with synchronous iterator behavior in `eachSeries` with a 1-element array. 
Before 1.1.0, `eachSeries`'s callback was called on the same tick, which this patch restores. In 2.0.0, it will be called on the next tick. ([#782](https://github.com/caolan/async/issues/782)) - - -# v1.2.0 - -New Features: - -- Added `timesLimit` ([#743](https://github.com/caolan/async/issues/743)) -- `concurrency` can be changed after initialization in `queue` by setting `q.concurrency`. The new concurrency will be reflected the next time a task is processed. ([#747](https://github.com/caolan/async/issues/747), [#772](https://github.com/caolan/async/issues/772)) - -Bug Fixes: - -- Fixed a regression in `each` and family with empty arrays that have additional properties. ([#775](https://github.com/caolan/async/issues/775), [#777](https://github.com/caolan/async/issues/777)) - - -# v1.1.1 - -Bug Fix: - -- Small regression with synchronous iterator behavior in `eachSeries` with a 1-element array. Before 1.1.0, `eachSeries`'s callback was called on the same tick, which this patch restores. In 2.0.0, it will be called on the next tick. ([#782](https://github.com/caolan/async/issues/782)) - - -# v1.1.0 - -New Features: - -- `cargo` now supports all of the same methods and event callbacks as `queue`. -- Added `ensureAsync` - A wrapper that ensures an async function calls its callback on a later tick. ([#769](https://github.com/caolan/async/issues/769)) -- Optimized `map`, `eachOf`, and `waterfall` families of functions -- Passing a `null` or `undefined` array to `map`, `each`, `parallel` and families will be treated as an empty array ([#667](https://github.com/caolan/async/issues/667)). -- The callback is now optional for the composed results of `compose` and `seq`. ([#618](https://github.com/caolan/async/issues/618)) -- Reduced file size by 4kb, (minified version by 1kb) -- Added code coverage through `nyc` and `coveralls` ([#768](https://github.com/caolan/async/issues/768)) - -Bug Fixes: - -- `forever` will no longer stack overflow with a synchronous iterator ([#622](https://github.com/caolan/async/issues/622)) -- `eachLimit` and other limit functions will stop iterating once an error occurs ([#754](https://github.com/caolan/async/issues/754)) -- Always pass `null` in callbacks when there is no error ([#439](https://github.com/caolan/async/issues/439)) -- Ensure proper conditions when calling `drain()` after pushing an empty data set to a queue ([#668](https://github.com/caolan/async/issues/668)) -- `each` and family will properly handle an empty array ([#578](https://github.com/caolan/async/issues/578)) -- `eachSeries` and family will finish if the underlying array is modified during execution ([#557](https://github.com/caolan/async/issues/557)) -- `queue` will throw if a non-function is passed to `q.push()` ([#593](https://github.com/caolan/async/issues/593)) -- Doc fixes ([#629](https://github.com/caolan/async/issues/629), [#766](https://github.com/caolan/async/issues/766)) - - -# v1.0.0 - -No known breaking changes, we are simply complying with semver from here on out. - -Changes: - -- Start using a changelog! 
-- Add `forEachOf` for iterating over Objects (or to iterate Arrays with indexes available) ([#168](https://github.com/caolan/async/issues/168) [#704](https://github.com/caolan/async/issues/704) [#321](https://github.com/caolan/async/issues/321)) -- Detect deadlocks in `auto` ([#663](https://github.com/caolan/async/issues/663)) -- Better support for require.js ([#527](https://github.com/caolan/async/issues/527)) -- Throw if queue created with concurrency `0` ([#714](https://github.com/caolan/async/issues/714)) -- Fix unneeded iteration in `queue.resume()` ([#758](https://github.com/caolan/async/issues/758)) -- Guard against timer mocking overriding `setImmediate` ([#609](https://github.com/caolan/async/issues/609) [#611](https://github.com/caolan/async/issues/611)) -- Miscellaneous doc fixes ([#542](https://github.com/caolan/async/issues/542) [#596](https://github.com/caolan/async/issues/596) [#615](https://github.com/caolan/async/issues/615) [#628](https://github.com/caolan/async/issues/628) [#631](https://github.com/caolan/async/issues/631) [#690](https://github.com/caolan/async/issues/690) [#729](https://github.com/caolan/async/issues/729)) -- Use single noop function internally ([#546](https://github.com/caolan/async/issues/546)) -- Optimize internal `_each`, `_map` and `_keys` functions. diff --git a/node_modules/async/LICENSE b/node_modules/async/LICENSE deleted file mode 100644 index b18aed6..0000000 --- a/node_modules/async/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2010-2018 Caolan McMahon - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
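
The v1.5.0 entries above mention the optional `concurrency` argument to `auto` and the new `queue#workersList()` method without showing them in use. The following is a minimal, hypothetical sketch of both, assuming Node.js; it is not part of the packaged files in this diff, and the task names, payloads, and delays are invented purely for illustration.

```javascript
// Minimal sketch of two v1.5.0 additions noted in the changelog above.
// Task names and delays are made up for illustration only.
var async = require('async');

// `auto` with the optional concurrency argument: at most 2 ready tasks run at once.
async.auto({
    fetchUsers: function (cb) { setTimeout(cb, 50, null, ['alice', 'bob']); },
    fetchOrders: function (cb) { setTimeout(cb, 50, null, [1, 2, 3]); },
    report: ['fetchUsers', 'fetchOrders', function (results, cb) {
        // dependent tasks receive the results of their dependencies
        cb(null, results.fetchUsers.length + ' users, ' + results.fetchOrders.length + ' orders');
    }]
}, 2, function (err, results) {
    if (err) throw err;
    console.log(results.report); // "2 users, 3 orders"
});

// `queue#workersList()` returns the tasks the queue is currently processing.
var q = async.queue(function (task, cb) { setTimeout(cb, 100); }, 2);
q.push([{name: 'a'}, {name: 'b'}, {name: 'c'}]);
setTimeout(function () {
    console.log(q.workersList().length); // at most 2, the queue's concurrency
}, 10);
```

Note that older 1.x releases passed the dependent-task arguments in a different order; the `(results, cb)` order used here matches the `auto.js` source included later in this diff.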
diff --git a/node_modules/async/README.md b/node_modules/async/README.md deleted file mode 100644 index 49cf950..0000000 --- a/node_modules/async/README.md +++ /dev/null @@ -1,56 +0,0 @@ -![Async Logo](https://raw.githubusercontent.com/caolan/async/master/logo/async-logo_readme.jpg) - -[![Build Status via Travis CI](https://travis-ci.org/caolan/async.svg?branch=master)](https://travis-ci.org/caolan/async) -[![NPM version](https://img.shields.io/npm/v/async.svg)](https://www.npmjs.com/package/async) -[![Coverage Status](https://coveralls.io/repos/caolan/async/badge.svg?branch=master)](https://coveralls.io/r/caolan/async?branch=master) -[![Join the chat at https://gitter.im/caolan/async](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/caolan/async?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -[![libhive - Open source examples](https://www.libhive.com/providers/npm/packages/async/examples/badge.svg)](https://www.libhive.com/providers/npm/packages/async) -[![jsDelivr Hits](https://data.jsdelivr.com/v1/package/npm/async/badge?style=rounded)](https://www.jsdelivr.com/package/npm/async) - - -Async is a utility module which provides straight-forward, powerful functions for working with [asynchronous JavaScript](http://caolan.github.io/async/global.html). Although originally designed for use with [Node.js](https://nodejs.org/) and installable via `npm install --save async`, it can also be used directly in the browser. - -This version of the package is optimized for the Node.js environment. If you use Async with webpack, install [`async-es`](https://www.npmjs.com/package/async-es) instead. - -For Documentation, visit - -*For Async v1.5.x documentation, go [HERE](https://github.com/caolan/async/blob/v1.5.2/README.md)* - - -```javascript -// for use with Node-style callbacks... -var async = require("async"); - -var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"}; -var configs = {}; - -async.forEachOf(obj, (value, key, callback) => { - fs.readFile(__dirname + value, "utf8", (err, data) => { - if (err) return callback(err); - try { - configs[key] = JSON.parse(data); - } catch (e) { - return callback(e); - } - callback(); - }); -}, err => { - if (err) console.error(err.message); - // configs is now a map of JSON data - doSomethingWith(configs); -}); -``` - -```javascript -var async = require("async"); - -// ...or ES2017 async functions -async.mapLimit(urls, 5, async function(url) { - const response = await fetch(url) - return response.body -}, (err, results) => { - if (err) throw err - // results is now an array of the response bodies - console.log(results) -}) -``` diff --git a/node_modules/async/all.js b/node_modules/async/all.js deleted file mode 100644 index d0565b0..0000000 --- a/node_modules/async/all.js +++ /dev/null @@ -1,50 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _createTester = require('./internal/createTester'); - -var _createTester2 = _interopRequireDefault(_createTester); - -var _doParallel = require('./internal/doParallel'); - -var _doParallel2 = _interopRequireDefault(_doParallel); - -var _notId = require('./internal/notId'); - -var _notId2 = _interopRequireDefault(_notId); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Returns `true` if every element in `coll` satisfies an async test. If any - * iteratee call returns `false`, the main `callback` is immediately called. 
- * - * @name every - * @static - * @memberOf module:Collections - * @method - * @alias all - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in parallel. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result). - * @example - * - * async.every(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, result) { - * // if result is true then every file exists - * }); - */ -exports.default = (0, _doParallel2.default)((0, _createTester2.default)(_notId2.default, _notId2.default)); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/allLimit.js b/node_modules/async/allLimit.js deleted file mode 100644 index a1a759a..0000000 --- a/node_modules/async/allLimit.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _createTester = require('./internal/createTester'); - -var _createTester2 = _interopRequireDefault(_createTester); - -var _doParallelLimit = require('./internal/doParallelLimit'); - -var _doParallelLimit2 = _interopRequireDefault(_doParallelLimit); - -var _notId = require('./internal/notId'); - -var _notId2 = _interopRequireDefault(_notId); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. - * - * @name everyLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.every]{@link module:Collections.every} - * @alias allLimit - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in parallel. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result). - */ -exports.default = (0, _doParallelLimit2.default)((0, _createTester2.default)(_notId2.default, _notId2.default)); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/allSeries.js b/node_modules/async/allSeries.js deleted file mode 100644 index 23bfebb..0000000 --- a/node_modules/async/allSeries.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _everyLimit = require('./everyLimit'); - -var _everyLimit2 = _interopRequireDefault(_everyLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -/** - * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. - * - * @name everySeries - * @static - * @memberOf module:Collections - * @method - * @see [async.every]{@link module:Collections.every} - * @alias allSeries - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in series. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result). - */ -exports.default = (0, _doLimit2.default)(_everyLimit2.default, 1); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/any.js b/node_modules/async/any.js deleted file mode 100644 index a8e70f7..0000000 --- a/node_modules/async/any.js +++ /dev/null @@ -1,52 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _createTester = require('./internal/createTester'); - -var _createTester2 = _interopRequireDefault(_createTester); - -var _doParallel = require('./internal/doParallel'); - -var _doParallel2 = _interopRequireDefault(_doParallel); - -var _identity = require('lodash/identity'); - -var _identity2 = _interopRequireDefault(_identity); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Returns `true` if at least one element in the `coll` satisfies an async test. - * If any iteratee call returns `true`, the main `callback` is immediately - * called. - * - * @name some - * @static - * @memberOf module:Collections - * @method - * @alias any - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in parallel. - * The iteratee should complete with a boolean `result` value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. - * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). 
- * @example - * - * async.some(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, result) { - * // if result is true then at least one of the files exists - * }); - */ -exports.default = (0, _doParallel2.default)((0, _createTester2.default)(Boolean, _identity2.default)); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/anyLimit.js b/node_modules/async/anyLimit.js deleted file mode 100644 index 24ca3f4..0000000 --- a/node_modules/async/anyLimit.js +++ /dev/null @@ -1,43 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _createTester = require('./internal/createTester'); - -var _createTester2 = _interopRequireDefault(_createTester); - -var _doParallelLimit = require('./internal/doParallelLimit'); - -var _doParallelLimit2 = _interopRequireDefault(_doParallelLimit); - -var _identity = require('lodash/identity'); - -var _identity2 = _interopRequireDefault(_identity); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. - * - * @name someLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.some]{@link module:Collections.some} - * @alias anyLimit - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in parallel. - * The iteratee should complete with a boolean `result` value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. - * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). - */ -exports.default = (0, _doParallelLimit2.default)((0, _createTester2.default)(Boolean, _identity2.default)); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/anySeries.js b/node_modules/async/anySeries.js deleted file mode 100644 index dc24ed2..0000000 --- a/node_modules/async/anySeries.js +++ /dev/null @@ -1,38 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _someLimit = require('./someLimit'); - -var _someLimit2 = _interopRequireDefault(_someLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. - * - * @name someSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.some]{@link module:Collections.some} - * @alias anySeries - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in series. - * The iteratee should complete with a boolean `result` value. - * Invoked with (item, callback). 
- * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. - * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). - */ -exports.default = (0, _doLimit2.default)(_someLimit2.default, 1); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/apply.js b/node_modules/async/apply.js deleted file mode 100644 index f590fa5..0000000 --- a/node_modules/async/apply.js +++ /dev/null @@ -1,68 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -exports.default = function (fn /*, ...args*/) { - var args = (0, _slice2.default)(arguments, 1); - return function () /*callArgs*/{ - var callArgs = (0, _slice2.default)(arguments); - return fn.apply(null, args.concat(callArgs)); - }; -}; - -var _slice = require('./internal/slice'); - -var _slice2 = _interopRequireDefault(_slice); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -; - -/** - * Creates a continuation function with some arguments already applied. - * - * Useful as a shorthand when combined with other control flow functions. Any - * arguments passed to the returned function are added to the arguments - * originally passed to apply. - * - * @name apply - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {Function} fn - The function you want to eventually apply all - * arguments to. Invokes with (arguments...). - * @param {...*} arguments... - Any number of arguments to automatically apply - * when the continuation is called. - * @returns {Function} the partially-applied function - * @example - * - * // using apply - * async.parallel([ - * async.apply(fs.writeFile, 'testfile1', 'test1'), - * async.apply(fs.writeFile, 'testfile2', 'test2') - * ]); - * - * - * // the same process without using apply - * async.parallel([ - * function(callback) { - * fs.writeFile('testfile1', 'test1', callback); - * }, - * function(callback) { - * fs.writeFile('testfile2', 'test2', callback); - * } - * ]); - * - * // It's possible to pass any number of additional arguments when calling the - * // continuation: - * - * node> var fn = async.apply(sys.puts, 'one'); - * node> fn('two', 'three'); - * one - * two - * three - */ -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/applyEach.js b/node_modules/async/applyEach.js deleted file mode 100644 index 06c0845..0000000 --- a/node_modules/async/applyEach.js +++ /dev/null @@ -1,51 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _applyEach = require('./internal/applyEach'); - -var _applyEach2 = _interopRequireDefault(_applyEach); - -var _map = require('./map'); - -var _map2 = _interopRequireDefault(_map); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Applies the provided arguments to each function in the array, calling - * `callback` after all functions have completed. If you only provide the first - * argument, `fns`, then it will return a function which lets you pass in the - * arguments as if it were a single function call. If more arguments are - * provided, `callback` is required while `args` is still optional. 
- * - * @name applyEach - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|Object} fns - A collection of {@link AsyncFunction}s - * to all call with the same arguments - * @param {...*} [args] - any number of separate arguments to pass to the - * function. - * @param {Function} [callback] - the final argument should be the callback, - * called when all functions have completed processing. - * @returns {Function} - If only the first argument, `fns`, is provided, it will - * return a function which lets you pass in the arguments as if it were a single - * function call. The signature is `(..args, callback)`. If invoked with any - * arguments, `callback` is required. - * @example - * - * async.applyEach([enableSearch, updateSchema], 'bucket', callback); - * - * // partial application example: - * async.each( - * buckets, - * async.applyEach([enableSearch, updateSchema]), - * callback - * ); - */ -exports.default = (0, _applyEach2.default)(_map2.default); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/applyEachSeries.js b/node_modules/async/applyEachSeries.js deleted file mode 100644 index ad80280..0000000 --- a/node_modules/async/applyEachSeries.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _applyEach = require('./internal/applyEach'); - -var _applyEach2 = _interopRequireDefault(_applyEach); - -var _mapSeries = require('./mapSeries'); - -var _mapSeries2 = _interopRequireDefault(_mapSeries); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time. - * - * @name applyEachSeries - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.applyEach]{@link module:ControlFlow.applyEach} - * @category Control Flow - * @param {Array|Iterable|Object} fns - A collection of {@link AsyncFunction}s to all - * call with the same arguments - * @param {...*} [args] - any number of separate arguments to pass to the - * function. - * @param {Function} [callback] - the final argument should be the callback, - * called when all functions have completed processing. - * @returns {Function} - If only the first argument is provided, it will return - * a function which lets you pass in the arguments as if it were a single - * function call. - */ -exports.default = (0, _applyEach2.default)(_mapSeries2.default); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/asyncify.js b/node_modules/async/asyncify.js deleted file mode 100644 index 5e3fc91..0000000 --- a/node_modules/async/asyncify.js +++ /dev/null @@ -1,110 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = asyncify; - -var _isObject = require('lodash/isObject'); - -var _isObject2 = _interopRequireDefault(_isObject); - -var _initialParams = require('./internal/initialParams'); - -var _initialParams2 = _interopRequireDefault(_initialParams); - -var _setImmediate = require('./internal/setImmediate'); - -var _setImmediate2 = _interopRequireDefault(_setImmediate); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Take a sync function and make it async, passing its return value to a - * callback. 
This is useful for plugging sync functions into a waterfall, - * series, or other async functions. Any arguments passed to the generated - * function will be passed to the wrapped function (except for the final - * callback argument). Errors thrown will be passed to the callback. - * - * If the function passed to `asyncify` returns a Promise, that promises's - * resolved/rejected state will be used to call the callback, rather than simply - * the synchronous return value. - * - * This also means you can asyncify ES2017 `async` functions. - * - * @name asyncify - * @static - * @memberOf module:Utils - * @method - * @alias wrapSync - * @category Util - * @param {Function} func - The synchronous function, or Promise-returning - * function to convert to an {@link AsyncFunction}. - * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be - * invoked with `(args..., callback)`. - * @example - * - * // passing a regular synchronous function - * async.waterfall([ - * async.apply(fs.readFile, filename, "utf8"), - * async.asyncify(JSON.parse), - * function (data, next) { - * // data is the result of parsing the text. - * // If there was a parsing error, it would have been caught. - * } - * ], callback); - * - * // passing a function returning a promise - * async.waterfall([ - * async.apply(fs.readFile, filename, "utf8"), - * async.asyncify(function (contents) { - * return db.model.create(contents); - * }), - * function (model, next) { - * // `model` is the instantiated model object. - * // If there was an error, this function would be skipped. - * } - * ], callback); - * - * // es2017 example, though `asyncify` is not needed if your JS environment - * // supports async functions out of the box - * var q = async.queue(async.asyncify(async function(file) { - * var intermediateStep = await processFile(file); - * return await somePromise(intermediateStep) - * })); - * - * q.push(files); - */ -function asyncify(func) { - return (0, _initialParams2.default)(function (args, callback) { - var result; - try { - result = func.apply(this, args); - } catch (e) { - return callback(e); - } - // if result is Promise object - if ((0, _isObject2.default)(result) && typeof result.then === 'function') { - result.then(function (value) { - invokeCallback(callback, null, value); - }, function (err) { - invokeCallback(callback, err.message ? err : new Error(err)); - }); - } else { - callback(null, result); - } - }); -} - -function invokeCallback(callback, error, value) { - try { - callback(error, value); - } catch (e) { - (0, _setImmediate2.default)(rethrow, e); - } -} - -function rethrow(error) { - throw error; -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/auto.js b/node_modules/async/auto.js deleted file mode 100644 index 26c1d56..0000000 --- a/node_modules/async/auto.js +++ /dev/null @@ -1,289 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -exports.default = function (tasks, concurrency, callback) { - if (typeof concurrency === 'function') { - // concurrency is optional, shift the args. 
- callback = concurrency; - concurrency = null; - } - callback = (0, _once2.default)(callback || _noop2.default); - var keys = (0, _keys2.default)(tasks); - var numTasks = keys.length; - if (!numTasks) { - return callback(null); - } - if (!concurrency) { - concurrency = numTasks; - } - - var results = {}; - var runningTasks = 0; - var hasError = false; - - var listeners = Object.create(null); - - var readyTasks = []; - - // for cycle detection: - var readyToCheck = []; // tasks that have been identified as reachable - // without the possibility of returning to an ancestor task - var uncheckedDependencies = {}; - - (0, _baseForOwn2.default)(tasks, function (task, key) { - if (!(0, _isArray2.default)(task)) { - // no dependencies - enqueueTask(key, [task]); - readyToCheck.push(key); - return; - } - - var dependencies = task.slice(0, task.length - 1); - var remainingDependencies = dependencies.length; - if (remainingDependencies === 0) { - enqueueTask(key, task); - readyToCheck.push(key); - return; - } - uncheckedDependencies[key] = remainingDependencies; - - (0, _arrayEach2.default)(dependencies, function (dependencyName) { - if (!tasks[dependencyName]) { - throw new Error('async.auto task `' + key + '` has a non-existent dependency `' + dependencyName + '` in ' + dependencies.join(', ')); - } - addListener(dependencyName, function () { - remainingDependencies--; - if (remainingDependencies === 0) { - enqueueTask(key, task); - } - }); - }); - }); - - checkForDeadlocks(); - processQueue(); - - function enqueueTask(key, task) { - readyTasks.push(function () { - runTask(key, task); - }); - } - - function processQueue() { - if (readyTasks.length === 0 && runningTasks === 0) { - return callback(null, results); - } - while (readyTasks.length && runningTasks < concurrency) { - var run = readyTasks.shift(); - run(); - } - } - - function addListener(taskName, fn) { - var taskListeners = listeners[taskName]; - if (!taskListeners) { - taskListeners = listeners[taskName] = []; - } - - taskListeners.push(fn); - } - - function taskComplete(taskName) { - var taskListeners = listeners[taskName] || []; - (0, _arrayEach2.default)(taskListeners, function (fn) { - fn(); - }); - processQueue(); - } - - function runTask(key, task) { - if (hasError) return; - - var taskCallback = (0, _onlyOnce2.default)(function (err, result) { - runningTasks--; - if (arguments.length > 2) { - result = (0, _slice2.default)(arguments, 1); - } - if (err) { - var safeResults = {}; - (0, _baseForOwn2.default)(results, function (val, rkey) { - safeResults[rkey] = val; - }); - safeResults[key] = result; - hasError = true; - listeners = Object.create(null); - - callback(err, safeResults); - } else { - results[key] = result; - taskComplete(key); - } - }); - - runningTasks++; - var taskFn = (0, _wrapAsync2.default)(task[task.length - 1]); - if (task.length > 1) { - taskFn(results, taskCallback); - } else { - taskFn(taskCallback); - } - } - - function checkForDeadlocks() { - // Kahn's algorithm - // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm - // http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html - var currentTask; - var counter = 0; - while (readyToCheck.length) { - currentTask = readyToCheck.pop(); - counter++; - (0, _arrayEach2.default)(getDependents(currentTask), function (dependent) { - if (--uncheckedDependencies[dependent] === 0) { - readyToCheck.push(dependent); - } - }); - } - - if (counter !== numTasks) { - throw new Error('async.auto cannot execute tasks due to a recursive 
dependency'); - } - } - - function getDependents(taskName) { - var result = []; - (0, _baseForOwn2.default)(tasks, function (task, key) { - if ((0, _isArray2.default)(task) && (0, _baseIndexOf2.default)(task, taskName, 0) >= 0) { - result.push(key); - } - }); - return result; - } -}; - -var _arrayEach = require('lodash/_arrayEach'); - -var _arrayEach2 = _interopRequireDefault(_arrayEach); - -var _baseForOwn = require('lodash/_baseForOwn'); - -var _baseForOwn2 = _interopRequireDefault(_baseForOwn); - -var _baseIndexOf = require('lodash/_baseIndexOf'); - -var _baseIndexOf2 = _interopRequireDefault(_baseIndexOf); - -var _isArray = require('lodash/isArray'); - -var _isArray2 = _interopRequireDefault(_isArray); - -var _keys = require('lodash/keys'); - -var _keys2 = _interopRequireDefault(_keys); - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _slice = require('./internal/slice'); - -var _slice2 = _interopRequireDefault(_slice); - -var _once = require('./internal/once'); - -var _once2 = _interopRequireDefault(_once); - -var _onlyOnce = require('./internal/onlyOnce'); - -var _onlyOnce2 = _interopRequireDefault(_onlyOnce); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -module.exports = exports['default']; - -/** - * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on - * their requirements. Each function can optionally depend on other functions - * being completed first, and each function is run as soon as its requirements - * are satisfied. - * - * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence - * will stop. Further tasks will not execute (so any other functions depending - * on it will not run), and the main `callback` is immediately called with the - * error. - * - * {@link AsyncFunction}s also receive an object containing the results of functions which - * have completed so far as the first argument, if they have dependencies. If a - * task function has no dependencies, it will only be passed a callback. - * - * @name auto - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Object} tasks - An object. Each of its properties is either a - * function or an array of requirements, with the {@link AsyncFunction} itself the last item - * in the array. The object's key of a property serves as the name of the task - * defined by that property, i.e. can be used when specifying requirements for - * other tasks. The function receives one or two arguments: - * * a `results` object, containing the results of the previously executed - * functions, only passed if the task has any dependencies, - * * a `callback(err, result)` function, which must be called when finished, - * passing an `error` (which can be `null`) and the result of the function's - * execution. - * @param {number} [concurrency=Infinity] - An optional `integer` for - * determining the maximum number of tasks that can be run in parallel. By - * default, as many as possible. - * @param {Function} [callback] - An optional callback which is called when all - * the tasks have been completed. It receives the `err` argument if any `tasks` - * pass an error to their callback. 
Results are always returned; however, if an - * error occurs, no further `tasks` will be performed, and the results object - * will only contain partial results. Invoked with (err, results). - * @returns undefined - * @example - * - * async.auto({ - * // this function will just be passed a callback - * readData: async.apply(fs.readFile, 'data.txt', 'utf-8'), - * showData: ['readData', function(results, cb) { - * // results.readData is the file's contents - * // ... - * }] - * }, callback); - * - * async.auto({ - * get_data: function(callback) { - * console.log('in get_data'); - * // async code to get some data - * callback(null, 'data', 'converted to array'); - * }, - * make_folder: function(callback) { - * console.log('in make_folder'); - * // async code to create a directory to store a file in - * // this is run at the same time as getting the data - * callback(null, 'folder'); - * }, - * write_file: ['get_data', 'make_folder', function(results, callback) { - * console.log('in write_file', JSON.stringify(results)); - * // once there is some data and the directory exists, - * // write the data to a file in the directory - * callback(null, 'filename'); - * }], - * email_link: ['write_file', function(results, callback) { - * console.log('in email_link', JSON.stringify(results)); - * // once the file is written let's email a link to it... - * // results.write_file contains the filename returned by write_file. - * callback(null, {'file':results.write_file, 'email':'user@example.com'}); - * }] - * }, function(err, results) { - * console.log('err = ', err); - * console.log('results = ', results); - * }); - */ \ No newline at end of file diff --git a/node_modules/async/autoInject.js b/node_modules/async/autoInject.js deleted file mode 100644 index bfbe7e8..0000000 --- a/node_modules/async/autoInject.js +++ /dev/null @@ -1,170 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = autoInject; - -var _auto = require('./auto'); - -var _auto2 = _interopRequireDefault(_auto); - -var _baseForOwn = require('lodash/_baseForOwn'); - -var _baseForOwn2 = _interopRequireDefault(_baseForOwn); - -var _arrayMap = require('lodash/_arrayMap'); - -var _arrayMap2 = _interopRequireDefault(_arrayMap); - -var _isArray = require('lodash/isArray'); - -var _isArray2 = _interopRequireDefault(_isArray); - -var _trim = require('lodash/trim'); - -var _trim2 = _interopRequireDefault(_trim); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -var FN_ARGS = /^(?:async\s+)?(function)?\s*[^\(]*\(\s*([^\)]*)\)/m; -var FN_ARG_SPLIT = /,/; -var FN_ARG = /(=.+)?(\s*)$/; -var STRIP_COMMENTS = /((\/\/.*$)|(\/\*[\s\S]*?\*\/))/mg; - -function parseParams(func) { - func = func.toString().replace(STRIP_COMMENTS, ''); - func = func.match(FN_ARGS)[2].replace(' ', ''); - func = func ? func.split(FN_ARG_SPLIT) : []; - func = func.map(function (arg) { - return (0, _trim2.default)(arg.replace(FN_ARG, '')); - }); - return func; -} - -/** - * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent - * tasks are specified as parameters to the function, after the usual callback - * parameter, with the parameter names matching the names of the tasks it - * depends on. This can provide even more readable task graphs which can be - * easier to maintain. 
- * - * If a final callback is specified, the task results are similarly injected, - * specified as named parameters after the initial error parameter. - * - * The autoInject function is purely syntactic sugar and its semantics are - * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. - * - * @name autoInject - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.auto]{@link module:ControlFlow.auto} - * @category Control Flow - * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of - * the form 'func([dependencies...], callback). The object's key of a property - * serves as the name of the task defined by that property, i.e. can be used - * when specifying requirements for other tasks. - * * The `callback` parameter is a `callback(err, result)` which must be called - * when finished, passing an `error` (which can be `null`) and the result of - * the function's execution. The remaining parameters name other tasks on - * which the task is dependent, and the results from those tasks are the - * arguments of those parameters. - * @param {Function} [callback] - An optional callback which is called when all - * the tasks have been completed. It receives the `err` argument if any `tasks` - * pass an error to their callback, and a `results` object with any completed - * task results, similar to `auto`. - * @example - * - * // The example from `auto` can be rewritten as follows: - * async.autoInject({ - * get_data: function(callback) { - * // async code to get some data - * callback(null, 'data', 'converted to array'); - * }, - * make_folder: function(callback) { - * // async code to create a directory to store a file in - * // this is run at the same time as getting the data - * callback(null, 'folder'); - * }, - * write_file: function(get_data, make_folder, callback) { - * // once there is some data and the directory exists, - * // write the data to a file in the directory - * callback(null, 'filename'); - * }, - * email_link: function(write_file, callback) { - * // once the file is written let's email a link to it... - * // write_file contains the filename returned by write_file. - * callback(null, {'file':write_file, 'email':'user@example.com'}); - * } - * }, function(err, results) { - * console.log('err = ', err); - * console.log('email_link = ', results.email_link); - * }); - * - * // If you are using a JS minifier that mangles parameter names, `autoInject` - * // will not work with plain functions, since the parameter names will be - * // collapsed to a single letter identifier. To work around this, you can - * // explicitly specify the names of the parameters your task function needs - * // in an array, similar to Angular.js dependency injection. - * - * // This still has an advantage over plain `auto`, since the results a task - * // depends on are still spread into arguments. - * async.autoInject({ - * //... - * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { - * callback(null, 'filename'); - * }], - * email_link: ['write_file', function(write_file, callback) { - * callback(null, {'file':write_file, 'email':'user@example.com'}); - * }] - * //... 
- * }, function(err, results) { - * console.log('err = ', err); - * console.log('email_link = ', results.email_link); - * }); - */ -function autoInject(tasks, callback) { - var newTasks = {}; - - (0, _baseForOwn2.default)(tasks, function (taskFn, key) { - var params; - var fnIsAsync = (0, _wrapAsync.isAsync)(taskFn); - var hasNoDeps = !fnIsAsync && taskFn.length === 1 || fnIsAsync && taskFn.length === 0; - - if ((0, _isArray2.default)(taskFn)) { - params = taskFn.slice(0, -1); - taskFn = taskFn[taskFn.length - 1]; - - newTasks[key] = params.concat(params.length > 0 ? newTask : taskFn); - } else if (hasNoDeps) { - // no dependencies, use the function as-is - newTasks[key] = taskFn; - } else { - params = parseParams(taskFn); - if (taskFn.length === 0 && !fnIsAsync && params.length === 0) { - throw new Error("autoInject task functions require explicit parameters."); - } - - // remove callback param - if (!fnIsAsync) params.pop(); - - newTasks[key] = params.concat(newTask); - } - - function newTask(results, taskCb) { - var newArgs = (0, _arrayMap2.default)(params, function (name) { - return results[name]; - }); - newArgs.push(taskCb); - (0, _wrapAsync2.default)(taskFn).apply(null, newArgs); - } - }); - - (0, _auto2.default)(newTasks, callback); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/bower.json b/node_modules/async/bower.json deleted file mode 100644 index 7dbeb14..0000000 --- a/node_modules/async/bower.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "name": "async", - "main": "dist/async.js", - "ignore": [ - "bower_components", - "lib", - "mocha_test", - "node_modules", - "perf", - "support", - "**/.*", - "*.config.js", - "*.json", - "index.js", - "Makefile" - ] -} diff --git a/node_modules/async/cargo.js b/node_modules/async/cargo.js deleted file mode 100644 index c7e59c7..0000000 --- a/node_modules/async/cargo.js +++ /dev/null @@ -1,94 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = cargo; - -var _queue = require('./internal/queue'); - -var _queue2 = _interopRequireDefault(_queue); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * A cargo of tasks for the worker function to complete. Cargo inherits all of - * the same methods and event callbacks as [`queue`]{@link module:ControlFlow.queue}. - * @typedef {Object} CargoObject - * @memberOf module:ControlFlow - * @property {Function} length - A function returning the number of items - * waiting to be processed. Invoke like `cargo.length()`. - * @property {number} payload - An `integer` for determining how many tasks - * should be process per round. This property can be changed after a `cargo` is - * created to alter the payload on-the-fly. - * @property {Function} push - Adds `task` to the `queue`. The callback is - * called once the `worker` has finished processing the task. Instead of a - * single task, an array of `tasks` can be submitted. The respective callback is - * used for every task in the list. Invoke like `cargo.push(task, [callback])`. - * @property {Function} saturated - A callback that is called when the - * `queue.length()` hits the concurrency and further tasks will be queued. - * @property {Function} empty - A callback that is called when the last item - * from the `queue` is given to a `worker`. - * @property {Function} drain - A callback that is called when the last item - * from the `queue` has returned from the `worker`. 
- * @property {Function} idle - a function returning false if there are items - * waiting or being processed, or true if not. Invoke like `cargo.idle()`. - * @property {Function} pause - a function that pauses the processing of tasks - * until `resume()` is called. Invoke like `cargo.pause()`. - * @property {Function} resume - a function that resumes the processing of - * queued tasks when the queue is paused. Invoke like `cargo.resume()`. - * @property {Function} kill - a function that removes the `drain` callback and - * empties remaining tasks from the queue forcing it to go idle. Invoke like `cargo.kill()`. - */ - -/** - * Creates a `cargo` object with the specified payload. Tasks added to the - * cargo will be processed altogether (up to the `payload` limit). If the - * `worker` is in progress, the task is queued until it becomes available. Once - * the `worker` has completed some tasks, each callback of those tasks is - * called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) - * for how `cargo` and `queue` work. - * - * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers - * at a time, cargo passes an array of tasks to a single worker, repeating - * when the worker is finished. - * - * @name cargo - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.queue]{@link module:ControlFlow.queue} - * @category Control Flow - * @param {AsyncFunction} worker - An asynchronous function for processing an array - * of queued tasks. Invoked with `(tasks, callback)`. - * @param {number} [payload=Infinity] - An optional `integer` for determining - * how many tasks should be processed per round; if omitted, the default is - * unlimited. - * @returns {module:ControlFlow.CargoObject} A cargo object to manage the tasks. Callbacks can - * attached as certain properties to listen for specific events during the - * lifecycle of the cargo and inner queue. - * @example - * - * // create a cargo object with payload 2 - * var cargo = async.cargo(function(tasks, callback) { - * for (var i=0; i async.dir(hello, 'world'); - * {hello: 'world'} - */ -exports.default = (0, _consoleFunc2.default)('dir'); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/dist/async.js b/node_modules/async/dist/async.js deleted file mode 100644 index 61c7588..0000000 --- a/node_modules/async/dist/async.js +++ /dev/null @@ -1,5612 +0,0 @@ -(function (global, factory) { - typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : - typeof define === 'function' && define.amd ? define(['exports'], factory) : - (factory((global.async = global.async || {}))); -}(this, (function (exports) { 'use strict'; - -function slice(arrayLike, start) { - start = start|0; - var newLen = Math.max(arrayLike.length - start, 0); - var newArr = Array(newLen); - for(var idx = 0; idx < newLen; idx++) { - newArr[idx] = arrayLike[start + idx]; - } - return newArr; -} - -/** - * Creates a continuation function with some arguments already applied. 
- * - * Useful as a shorthand when combined with other control flow functions. Any - * arguments passed to the returned function are added to the arguments - * originally passed to apply. - * - * @name apply - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {Function} fn - The function you want to eventually apply all - * arguments to. Invokes with (arguments...). - * @param {...*} arguments... - Any number of arguments to automatically apply - * when the continuation is called. - * @returns {Function} the partially-applied function - * @example - * - * // using apply - * async.parallel([ - * async.apply(fs.writeFile, 'testfile1', 'test1'), - * async.apply(fs.writeFile, 'testfile2', 'test2') - * ]); - * - * - * // the same process without using apply - * async.parallel([ - * function(callback) { - * fs.writeFile('testfile1', 'test1', callback); - * }, - * function(callback) { - * fs.writeFile('testfile2', 'test2', callback); - * } - * ]); - * - * // It's possible to pass any number of additional arguments when calling the - * // continuation: - * - * node> var fn = async.apply(sys.puts, 'one'); - * node> fn('two', 'three'); - * one - * two - * three - */ -var apply = function(fn/*, ...args*/) { - var args = slice(arguments, 1); - return function(/*callArgs*/) { - var callArgs = slice(arguments); - return fn.apply(null, args.concat(callArgs)); - }; -}; - -var initialParams = function (fn) { - return function (/*...args, callback*/) { - var args = slice(arguments); - var callback = args.pop(); - fn.call(this, args, callback); - }; -}; - -/** - * Checks if `value` is the - * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) - * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an object, else `false`. - * @example - * - * _.isObject({}); - * // => true - * - * _.isObject([1, 2, 3]); - * // => true - * - * _.isObject(_.noop); - * // => true - * - * _.isObject(null); - * // => false - */ -function isObject(value) { - var type = typeof value; - return value != null && (type == 'object' || type == 'function'); -} - -var hasSetImmediate = typeof setImmediate === 'function' && setImmediate; -var hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; - -function fallback(fn) { - setTimeout(fn, 0); -} - -function wrap(defer) { - return function (fn/*, ...args*/) { - var args = slice(arguments, 1); - defer(function () { - fn.apply(null, args); - }); - }; -} - -var _defer; - -if (hasSetImmediate) { - _defer = setImmediate; -} else if (hasNextTick) { - _defer = process.nextTick; -} else { - _defer = fallback; -} - -var setImmediate$1 = wrap(_defer); - -/** - * Take a sync function and make it async, passing its return value to a - * callback. This is useful for plugging sync functions into a waterfall, - * series, or other async functions. Any arguments passed to the generated - * function will be passed to the wrapped function (except for the final - * callback argument). Errors thrown will be passed to the callback. - * - * If the function passed to `asyncify` returns a Promise, that promises's - * resolved/rejected state will be used to call the callback, rather than simply - * the synchronous return value. - * - * This also means you can asyncify ES2017 `async` functions. 
- * - * @name asyncify - * @static - * @memberOf module:Utils - * @method - * @alias wrapSync - * @category Util - * @param {Function} func - The synchronous function, or Promise-returning - * function to convert to an {@link AsyncFunction}. - * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be - * invoked with `(args..., callback)`. - * @example - * - * // passing a regular synchronous function - * async.waterfall([ - * async.apply(fs.readFile, filename, "utf8"), - * async.asyncify(JSON.parse), - * function (data, next) { - * // data is the result of parsing the text. - * // If there was a parsing error, it would have been caught. - * } - * ], callback); - * - * // passing a function returning a promise - * async.waterfall([ - * async.apply(fs.readFile, filename, "utf8"), - * async.asyncify(function (contents) { - * return db.model.create(contents); - * }), - * function (model, next) { - * // `model` is the instantiated model object. - * // If there was an error, this function would be skipped. - * } - * ], callback); - * - * // es2017 example, though `asyncify` is not needed if your JS environment - * // supports async functions out of the box - * var q = async.queue(async.asyncify(async function(file) { - * var intermediateStep = await processFile(file); - * return await somePromise(intermediateStep) - * })); - * - * q.push(files); - */ -function asyncify(func) { - return initialParams(function (args, callback) { - var result; - try { - result = func.apply(this, args); - } catch (e) { - return callback(e); - } - // if result is Promise object - if (isObject(result) && typeof result.then === 'function') { - result.then(function(value) { - invokeCallback(callback, null, value); - }, function(err) { - invokeCallback(callback, err.message ? err : new Error(err)); - }); - } else { - callback(null, result); - } - }); -} - -function invokeCallback(callback, error, value) { - try { - callback(error, value); - } catch (e) { - setImmediate$1(rethrow, e); - } -} - -function rethrow(error) { - throw error; -} - -var supportsSymbol = typeof Symbol === 'function'; - -function isAsync(fn) { - return supportsSymbol && fn[Symbol.toStringTag] === 'AsyncFunction'; -} - -function wrapAsync(asyncFn) { - return isAsync(asyncFn) ? asyncify(asyncFn) : asyncFn; -} - -function applyEach$1(eachfn) { - return function(fns/*, ...args*/) { - var args = slice(arguments, 1); - var go = initialParams(function(args, callback) { - var that = this; - return eachfn(fns, function (fn, cb) { - wrapAsync(fn).apply(that, args.concat(cb)); - }, callback); - }); - if (args.length) { - return go.apply(this, args); - } - else { - return go; - } - }; -} - -/** Detect free variable `global` from Node.js. */ -var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; - -/** Detect free variable `self`. */ -var freeSelf = typeof self == 'object' && self && self.Object === Object && self; - -/** Used as a reference to the global object. */ -var root = freeGlobal || freeSelf || Function('return this')(); - -/** Built-in value references. */ -var Symbol$1 = root.Symbol; - -/** Used for built-in method references. */ -var objectProto = Object.prototype; - -/** Used to check objects for own properties. */ -var hasOwnProperty = objectProto.hasOwnProperty; - -/** - * Used to resolve the - * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) - * of values. - */ -var nativeObjectToString = objectProto.toString; - -/** Built-in value references. 
*/ -var symToStringTag$1 = Symbol$1 ? Symbol$1.toStringTag : undefined; - -/** - * A specialized version of `baseGetTag` which ignores `Symbol.toStringTag` values. - * - * @private - * @param {*} value The value to query. - * @returns {string} Returns the raw `toStringTag`. - */ -function getRawTag(value) { - var isOwn = hasOwnProperty.call(value, symToStringTag$1), - tag = value[symToStringTag$1]; - - try { - value[symToStringTag$1] = undefined; - var unmasked = true; - } catch (e) {} - - var result = nativeObjectToString.call(value); - if (unmasked) { - if (isOwn) { - value[symToStringTag$1] = tag; - } else { - delete value[symToStringTag$1]; - } - } - return result; -} - -/** Used for built-in method references. */ -var objectProto$1 = Object.prototype; - -/** - * Used to resolve the - * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) - * of values. - */ -var nativeObjectToString$1 = objectProto$1.toString; - -/** - * Converts `value` to a string using `Object.prototype.toString`. - * - * @private - * @param {*} value The value to convert. - * @returns {string} Returns the converted string. - */ -function objectToString(value) { - return nativeObjectToString$1.call(value); -} - -/** `Object#toString` result references. */ -var nullTag = '[object Null]'; -var undefinedTag = '[object Undefined]'; - -/** Built-in value references. */ -var symToStringTag = Symbol$1 ? Symbol$1.toStringTag : undefined; - -/** - * The base implementation of `getTag` without fallbacks for buggy environments. - * - * @private - * @param {*} value The value to query. - * @returns {string} Returns the `toStringTag`. - */ -function baseGetTag(value) { - if (value == null) { - return value === undefined ? undefinedTag : nullTag; - } - return (symToStringTag && symToStringTag in Object(value)) - ? getRawTag(value) - : objectToString(value); -} - -/** `Object#toString` result references. */ -var asyncTag = '[object AsyncFunction]'; -var funcTag = '[object Function]'; -var genTag = '[object GeneratorFunction]'; -var proxyTag = '[object Proxy]'; - -/** - * Checks if `value` is classified as a `Function` object. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a function, else `false`. - * @example - * - * _.isFunction(_); - * // => true - * - * _.isFunction(/abc/); - * // => false - */ -function isFunction(value) { - if (!isObject(value)) { - return false; - } - // The use of `Object#toString` avoids issues with the `typeof` operator - // in Safari 9 which returns 'object' for typed arrays and other constructors. - var tag = baseGetTag(value); - return tag == funcTag || tag == genTag || tag == asyncTag || tag == proxyTag; -} - -/** Used as references for various `Number` constants. */ -var MAX_SAFE_INTEGER = 9007199254740991; - -/** - * Checks if `value` is a valid array-like length. - * - * **Note:** This method is loosely based on - * [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength). - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a valid length, else `false`. 
- * @example - * - * _.isLength(3); - * // => true - * - * _.isLength(Number.MIN_VALUE); - * // => false - * - * _.isLength(Infinity); - * // => false - * - * _.isLength('3'); - * // => false - */ -function isLength(value) { - return typeof value == 'number' && - value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER; -} - -/** - * Checks if `value` is array-like. A value is considered array-like if it's - * not a function and has a `value.length` that's an integer greater than or - * equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is array-like, else `false`. - * @example - * - * _.isArrayLike([1, 2, 3]); - * // => true - * - * _.isArrayLike(document.body.children); - * // => true - * - * _.isArrayLike('abc'); - * // => true - * - * _.isArrayLike(_.noop); - * // => false - */ -function isArrayLike(value) { - return value != null && isLength(value.length) && !isFunction(value); -} - -// A temporary value used to identify if the loop should be broken. -// See #1064, #1293 -var breakLoop = {}; - -/** - * This method returns `undefined`. - * - * @static - * @memberOf _ - * @since 2.3.0 - * @category Util - * @example - * - * _.times(2, _.noop); - * // => [undefined, undefined] - */ -function noop() { - // No operation performed. -} - -function once(fn) { - return function () { - if (fn === null) return; - var callFn = fn; - fn = null; - callFn.apply(this, arguments); - }; -} - -var iteratorSymbol = typeof Symbol === 'function' && Symbol.iterator; - -var getIterator = function (coll) { - return iteratorSymbol && coll[iteratorSymbol] && coll[iteratorSymbol](); -}; - -/** - * The base implementation of `_.times` without support for iteratee shorthands - * or max array length checks. - * - * @private - * @param {number} n The number of times to invoke `iteratee`. - * @param {Function} iteratee The function invoked per iteration. - * @returns {Array} Returns the array of results. - */ -function baseTimes(n, iteratee) { - var index = -1, - result = Array(n); - - while (++index < n) { - result[index] = iteratee(index); - } - return result; -} - -/** - * Checks if `value` is object-like. A value is object-like if it's not `null` - * and has a `typeof` result of "object". - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is object-like, else `false`. - * @example - * - * _.isObjectLike({}); - * // => true - * - * _.isObjectLike([1, 2, 3]); - * // => true - * - * _.isObjectLike(_.noop); - * // => false - * - * _.isObjectLike(null); - * // => false - */ -function isObjectLike(value) { - return value != null && typeof value == 'object'; -} - -/** `Object#toString` result references. */ -var argsTag = '[object Arguments]'; - -/** - * The base implementation of `_.isArguments`. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an `arguments` object, - */ -function baseIsArguments(value) { - return isObjectLike(value) && baseGetTag(value) == argsTag; -} - -/** Used for built-in method references. */ -var objectProto$3 = Object.prototype; - -/** Used to check objects for own properties. */ -var hasOwnProperty$2 = objectProto$3.hasOwnProperty; - -/** Built-in value references. 
*/ -var propertyIsEnumerable = objectProto$3.propertyIsEnumerable; - -/** - * Checks if `value` is likely an `arguments` object. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an `arguments` object, - * else `false`. - * @example - * - * _.isArguments(function() { return arguments; }()); - * // => true - * - * _.isArguments([1, 2, 3]); - * // => false - */ -var isArguments = baseIsArguments(function() { return arguments; }()) ? baseIsArguments : function(value) { - return isObjectLike(value) && hasOwnProperty$2.call(value, 'callee') && - !propertyIsEnumerable.call(value, 'callee'); -}; - -/** - * Checks if `value` is classified as an `Array` object. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an array, else `false`. - * @example - * - * _.isArray([1, 2, 3]); - * // => true - * - * _.isArray(document.body.children); - * // => false - * - * _.isArray('abc'); - * // => false - * - * _.isArray(_.noop); - * // => false - */ -var isArray = Array.isArray; - -/** - * This method returns `false`. - * - * @static - * @memberOf _ - * @since 4.13.0 - * @category Util - * @returns {boolean} Returns `false`. - * @example - * - * _.times(2, _.stubFalse); - * // => [false, false] - */ -function stubFalse() { - return false; -} - -/** Detect free variable `exports`. */ -var freeExports = typeof exports == 'object' && exports && !exports.nodeType && exports; - -/** Detect free variable `module`. */ -var freeModule = freeExports && typeof module == 'object' && module && !module.nodeType && module; - -/** Detect the popular CommonJS extension `module.exports`. */ -var moduleExports = freeModule && freeModule.exports === freeExports; - -/** Built-in value references. */ -var Buffer = moduleExports ? root.Buffer : undefined; - -/* Built-in method references for those with the same name as other `lodash` methods. */ -var nativeIsBuffer = Buffer ? Buffer.isBuffer : undefined; - -/** - * Checks if `value` is a buffer. - * - * @static - * @memberOf _ - * @since 4.3.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a buffer, else `false`. - * @example - * - * _.isBuffer(new Buffer(2)); - * // => true - * - * _.isBuffer(new Uint8Array(2)); - * // => false - */ -var isBuffer = nativeIsBuffer || stubFalse; - -/** Used as references for various `Number` constants. */ -var MAX_SAFE_INTEGER$1 = 9007199254740991; - -/** Used to detect unsigned integer values. */ -var reIsUint = /^(?:0|[1-9]\d*)$/; - -/** - * Checks if `value` is a valid array-like index. - * - * @private - * @param {*} value The value to check. - * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index. - * @returns {boolean} Returns `true` if `value` is a valid index, else `false`. - */ -function isIndex(value, length) { - var type = typeof value; - length = length == null ? MAX_SAFE_INTEGER$1 : length; - - return !!length && - (type == 'number' || - (type != 'symbol' && reIsUint.test(value))) && - (value > -1 && value % 1 == 0 && value < length); -} - -/** `Object#toString` result references. 
*/ -var argsTag$1 = '[object Arguments]'; -var arrayTag = '[object Array]'; -var boolTag = '[object Boolean]'; -var dateTag = '[object Date]'; -var errorTag = '[object Error]'; -var funcTag$1 = '[object Function]'; -var mapTag = '[object Map]'; -var numberTag = '[object Number]'; -var objectTag = '[object Object]'; -var regexpTag = '[object RegExp]'; -var setTag = '[object Set]'; -var stringTag = '[object String]'; -var weakMapTag = '[object WeakMap]'; - -var arrayBufferTag = '[object ArrayBuffer]'; -var dataViewTag = '[object DataView]'; -var float32Tag = '[object Float32Array]'; -var float64Tag = '[object Float64Array]'; -var int8Tag = '[object Int8Array]'; -var int16Tag = '[object Int16Array]'; -var int32Tag = '[object Int32Array]'; -var uint8Tag = '[object Uint8Array]'; -var uint8ClampedTag = '[object Uint8ClampedArray]'; -var uint16Tag = '[object Uint16Array]'; -var uint32Tag = '[object Uint32Array]'; - -/** Used to identify `toStringTag` values of typed arrays. */ -var typedArrayTags = {}; -typedArrayTags[float32Tag] = typedArrayTags[float64Tag] = -typedArrayTags[int8Tag] = typedArrayTags[int16Tag] = -typedArrayTags[int32Tag] = typedArrayTags[uint8Tag] = -typedArrayTags[uint8ClampedTag] = typedArrayTags[uint16Tag] = -typedArrayTags[uint32Tag] = true; -typedArrayTags[argsTag$1] = typedArrayTags[arrayTag] = -typedArrayTags[arrayBufferTag] = typedArrayTags[boolTag] = -typedArrayTags[dataViewTag] = typedArrayTags[dateTag] = -typedArrayTags[errorTag] = typedArrayTags[funcTag$1] = -typedArrayTags[mapTag] = typedArrayTags[numberTag] = -typedArrayTags[objectTag] = typedArrayTags[regexpTag] = -typedArrayTags[setTag] = typedArrayTags[stringTag] = -typedArrayTags[weakMapTag] = false; - -/** - * The base implementation of `_.isTypedArray` without Node.js optimizations. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. - */ -function baseIsTypedArray(value) { - return isObjectLike(value) && - isLength(value.length) && !!typedArrayTags[baseGetTag(value)]; -} - -/** - * The base implementation of `_.unary` without support for storing metadata. - * - * @private - * @param {Function} func The function to cap arguments for. - * @returns {Function} Returns the new capped function. - */ -function baseUnary(func) { - return function(value) { - return func(value); - }; -} - -/** Detect free variable `exports`. */ -var freeExports$1 = typeof exports == 'object' && exports && !exports.nodeType && exports; - -/** Detect free variable `module`. */ -var freeModule$1 = freeExports$1 && typeof module == 'object' && module && !module.nodeType && module; - -/** Detect the popular CommonJS extension `module.exports`. */ -var moduleExports$1 = freeModule$1 && freeModule$1.exports === freeExports$1; - -/** Detect free variable `process` from Node.js. */ -var freeProcess = moduleExports$1 && freeGlobal.process; - -/** Used to access faster Node.js helpers. */ -var nodeUtil = (function() { - try { - // Use `util.types` for Node.js 10+. - var types = freeModule$1 && freeModule$1.require && freeModule$1.require('util').types; - - if (types) { - return types; - } - - // Legacy `process.binding('util')` for Node.js < 10. - return freeProcess && freeProcess.binding && freeProcess.binding('util'); - } catch (e) {} -}()); - -/* Node.js helper references. */ -var nodeIsTypedArray = nodeUtil && nodeUtil.isTypedArray; - -/** - * Checks if `value` is classified as a typed array. 
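 *
 * `baseUnary` above caps a function to its first argument; that is how
 * `nodeIsTypedArray` is adapted just below. A rough illustration, with
 * hypothetical values rather than anything from the lodash docs:
 *
 * ['6', '8', '10'].map(baseUnary(parseInt));
 * // => [6, 8, 10]  (the array index is not forwarded as a radix)
 *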
- * - * @static - * @memberOf _ - * @since 3.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. - * @example - * - * _.isTypedArray(new Uint8Array); - * // => true - * - * _.isTypedArray([]); - * // => false - */ -var isTypedArray = nodeIsTypedArray ? baseUnary(nodeIsTypedArray) : baseIsTypedArray; - -/** Used for built-in method references. */ -var objectProto$2 = Object.prototype; - -/** Used to check objects for own properties. */ -var hasOwnProperty$1 = objectProto$2.hasOwnProperty; - -/** - * Creates an array of the enumerable property names of the array-like `value`. - * - * @private - * @param {*} value The value to query. - * @param {boolean} inherited Specify returning inherited property names. - * @returns {Array} Returns the array of property names. - */ -function arrayLikeKeys(value, inherited) { - var isArr = isArray(value), - isArg = !isArr && isArguments(value), - isBuff = !isArr && !isArg && isBuffer(value), - isType = !isArr && !isArg && !isBuff && isTypedArray(value), - skipIndexes = isArr || isArg || isBuff || isType, - result = skipIndexes ? baseTimes(value.length, String) : [], - length = result.length; - - for (var key in value) { - if ((inherited || hasOwnProperty$1.call(value, key)) && - !(skipIndexes && ( - // Safari 9 has enumerable `arguments.length` in strict mode. - key == 'length' || - // Node.js 0.10 has enumerable non-index properties on buffers. - (isBuff && (key == 'offset' || key == 'parent')) || - // PhantomJS 2 has enumerable non-index properties on typed arrays. - (isType && (key == 'buffer' || key == 'byteLength' || key == 'byteOffset')) || - // Skip index properties. - isIndex(key, length) - ))) { - result.push(key); - } - } - return result; -} - -/** Used for built-in method references. */ -var objectProto$5 = Object.prototype; - -/** - * Checks if `value` is likely a prototype object. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a prototype, else `false`. - */ -function isPrototype(value) { - var Ctor = value && value.constructor, - proto = (typeof Ctor == 'function' && Ctor.prototype) || objectProto$5; - - return value === proto; -} - -/** - * Creates a unary function that invokes `func` with its argument transformed. - * - * @private - * @param {Function} func The function to wrap. - * @param {Function} transform The argument transform. - * @returns {Function} Returns the new function. - */ -function overArg(func, transform) { - return function(arg) { - return func(transform(arg)); - }; -} - -/* Built-in method references for those with the same name as other `lodash` methods. */ -var nativeKeys = overArg(Object.keys, Object); - -/** Used for built-in method references. */ -var objectProto$4 = Object.prototype; - -/** Used to check objects for own properties. */ -var hasOwnProperty$3 = objectProto$4.hasOwnProperty; - -/** - * The base implementation of `_.keys` which doesn't treat sparse arrays as dense. - * - * @private - * @param {Object} object The object to query. - * @returns {Array} Returns the array of property names. - */ -function baseKeys(object) { - if (!isPrototype(object)) { - return nativeKeys(object); - } - var result = []; - for (var key in Object(object)) { - if (hasOwnProperty$3.call(object, key) && key != 'constructor') { - result.push(key); - } - } - return result; -} - -/** - * Creates an array of the own enumerable property names of `object`. 
- * - * **Note:** Non-object values are coerced to objects. See the - * [ES spec](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) - * for more details. - * - * @static - * @since 0.1.0 - * @memberOf _ - * @category Object - * @param {Object} object The object to query. - * @returns {Array} Returns the array of property names. - * @example - * - * function Foo() { - * this.a = 1; - * this.b = 2; - * } - * - * Foo.prototype.c = 3; - * - * _.keys(new Foo); - * // => ['a', 'b'] (iteration order is not guaranteed) - * - * _.keys('hi'); - * // => ['0', '1'] - */ -function keys(object) { - return isArrayLike(object) ? arrayLikeKeys(object) : baseKeys(object); -} - -function createArrayIterator(coll) { - var i = -1; - var len = coll.length; - return function next() { - return ++i < len ? {value: coll[i], key: i} : null; - } -} - -function createES2015Iterator(iterator) { - var i = -1; - return function next() { - var item = iterator.next(); - if (item.done) - return null; - i++; - return {value: item.value, key: i}; - } -} - -function createObjectIterator(obj) { - var okeys = keys(obj); - var i = -1; - var len = okeys.length; - return function next() { - var key = okeys[++i]; - if (key === '__proto__') { - return next(); - } - return i < len ? {value: obj[key], key: key} : null; - }; -} - -function iterator(coll) { - if (isArrayLike(coll)) { - return createArrayIterator(coll); - } - - var iterator = getIterator(coll); - return iterator ? createES2015Iterator(iterator) : createObjectIterator(coll); -} - -function onlyOnce(fn) { - return function() { - if (fn === null) throw new Error("Callback was already called."); - var callFn = fn; - fn = null; - callFn.apply(this, arguments); - }; -} - -function _eachOfLimit(limit) { - return function (obj, iteratee, callback) { - callback = once(callback || noop); - if (limit <= 0 || !obj) { - return callback(null); - } - var nextElem = iterator(obj); - var done = false; - var running = 0; - var looping = false; - - function iterateeCallback(err, value) { - running -= 1; - if (err) { - done = true; - callback(err); - } - else if (value === breakLoop || (done && running <= 0)) { - done = true; - return callback(null); - } - else if (!looping) { - replenish(); - } - } - - function replenish () { - looping = true; - while (running < limit && !done) { - var elem = nextElem(); - if (elem === null) { - done = true; - if (running <= 0) { - callback(null); - } - return; - } - running += 1; - iteratee(elem.value, elem.key, onlyOnce(iterateeCallback)); - } - looping = false; - } - - replenish(); - }; -} - -/** - * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a - * time. - * - * @name eachOfLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.eachOf]{@link module:Collections.eachOf} - * @alias forEachOfLimit - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each - * item in `coll`. The `key` is the item's key, or index in the case of an - * array. - * Invoked with (item, key, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
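 * @example
 *
 * // a minimal sketch; the file names are hypothetical, not from the async docs
 * async.eachOfLimit({dev: 'dev.json', test: 'test.json', prod: 'prod.json'}, 2,
 *     function(value, key, callback) {
 *         fs.readFile(value, 'utf8', function(err, data) {
 *             if (err) return callback(err);
 *             console.log(key + ': ' + data.length + ' characters');
 *             callback();
 *         });
 *     },
 *     function(err) {
 *         // at most two reads were in flight at any one time
 *         if (err) console.error(err.message);
 *     });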
- */ -function eachOfLimit(coll, limit, iteratee, callback) { - _eachOfLimit(limit)(coll, wrapAsync(iteratee), callback); -} - -function doLimit(fn, limit) { - return function (iterable, iteratee, callback) { - return fn(iterable, limit, iteratee, callback); - }; -} - -// eachOf implementation optimized for array-likes -function eachOfArrayLike(coll, iteratee, callback) { - callback = once(callback || noop); - var index = 0, - completed = 0, - length = coll.length; - if (length === 0) { - callback(null); - } - - function iteratorCallback(err, value) { - if (err) { - callback(err); - } else if ((++completed === length) || value === breakLoop) { - callback(null); - } - } - - for (; index < length; index++) { - iteratee(coll[index], index, onlyOnce(iteratorCallback)); - } -} - -// a generic version of eachOf which can handle array, object, and iterator cases. -var eachOfGeneric = doLimit(eachOfLimit, Infinity); - -/** - * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument - * to the iteratee. - * - * @name eachOf - * @static - * @memberOf module:Collections - * @method - * @alias forEachOf - * @category Collection - * @see [async.each]{@link module:Collections.each} - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each - * item in `coll`. - * The `key` is the item's key, or index in the case of an array. - * Invoked with (item, key, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - * @example - * - * var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"}; - * var configs = {}; - * - * async.forEachOf(obj, function (value, key, callback) { - * fs.readFile(__dirname + value, "utf8", function (err, data) { - * if (err) return callback(err); - * try { - * configs[key] = JSON.parse(data); - * } catch (e) { - * return callback(e); - * } - * callback(); - * }); - * }, function (err) { - * if (err) console.error(err.message); - * // configs is now a map of JSON data - * doSomethingWith(configs); - * }); - */ -var eachOf = function(coll, iteratee, callback) { - var eachOfImplementation = isArrayLike(coll) ? eachOfArrayLike : eachOfGeneric; - eachOfImplementation(coll, wrapAsync(iteratee), callback); -}; - -function doParallel(fn) { - return function (obj, iteratee, callback) { - return fn(eachOf, obj, wrapAsync(iteratee), callback); - }; -} - -function _asyncMap(eachfn, arr, iteratee, callback) { - callback = callback || noop; - arr = arr || []; - var results = []; - var counter = 0; - var _iteratee = wrapAsync(iteratee); - - eachfn(arr, function (value, _, callback) { - var index = counter++; - _iteratee(value, function (err, v) { - results[index] = v; - callback(err); - }); - }, function (err) { - callback(err, results); - }); -} - -/** - * Produces a new collection of values by mapping each value in `coll` through - * the `iteratee` function. The `iteratee` is called with an item from `coll` - * and a callback for when it has finished processing. Each of these callback - * takes 2 arguments: an `error`, and the transformed item from `coll`. If - * `iteratee` passes an error to its callback, the main `callback` (for the - * `map` function) is immediately called with the error. 
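 *
 * For instance (a rough sketch, not an example from the original docs):
 *
 * async.map([1, 2, 3], function(n, callback) {
 *     if (n === 2) return callback(new Error('boom'));
 *     callback(null, n * 2);
 * }, function(err, results) {
 *     // called immediately with the Error('boom'); `results` should not be
 *     // relied on here
 * });
 *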
- * - * Note, that since this function applies the `iteratee` to each item in - * parallel, there is no guarantee that the `iteratee` functions will complete - * in order. However, the results array will be in the same order as the - * original `coll`. - * - * If `map` is passed an Object, the results will be an Array. The results - * will roughly be in the order of the original Objects' keys (but this can - * vary across JavaScript engines). - * - * @name map - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with the transformed item. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Results is an Array of the - * transformed items from the `coll`. Invoked with (err, results). - * @example - * - * async.map(['file1','file2','file3'], fs.stat, function(err, results) { - * // results is now an array of stats for each file - * }); - */ -var map = doParallel(_asyncMap); - -/** - * Applies the provided arguments to each function in the array, calling - * `callback` after all functions have completed. If you only provide the first - * argument, `fns`, then it will return a function which lets you pass in the - * arguments as if it were a single function call. If more arguments are - * provided, `callback` is required while `args` is still optional. - * - * @name applyEach - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|Object} fns - A collection of {@link AsyncFunction}s - * to all call with the same arguments - * @param {...*} [args] - any number of separate arguments to pass to the - * function. - * @param {Function} [callback] - the final argument should be the callback, - * called when all functions have completed processing. - * @returns {Function} - If only the first argument, `fns`, is provided, it will - * return a function which lets you pass in the arguments as if it were a single - * function call. The signature is `(..args, callback)`. If invoked with any - * arguments, `callback` is required. - * @example - * - * async.applyEach([enableSearch, updateSchema], 'bucket', callback); - * - * // partial application example: - * async.each( - * buckets, - * async.applyEach([enableSearch, updateSchema]), - * callback - * ); - */ -var applyEach = applyEach$1(map); - -function doParallelLimit(fn) { - return function (obj, limit, iteratee, callback) { - return fn(_eachOfLimit(limit), obj, wrapAsync(iteratee), callback); - }; -} - -/** - * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. - * - * @name mapLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.map]{@link module:Collections.map} - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with the transformed item. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. 
Results is an array of the - * transformed items from the `coll`. Invoked with (err, results). - */ -var mapLimit = doParallelLimit(_asyncMap); - -/** - * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time. - * - * @name mapSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.map]{@link module:Collections.map} - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with the transformed item. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Results is an array of the - * transformed items from the `coll`. Invoked with (err, results). - */ -var mapSeries = doLimit(mapLimit, 1); - -/** - * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time. - * - * @name applyEachSeries - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.applyEach]{@link module:ControlFlow.applyEach} - * @category Control Flow - * @param {Array|Iterable|Object} fns - A collection of {@link AsyncFunction}s to all - * call with the same arguments - * @param {...*} [args] - any number of separate arguments to pass to the - * function. - * @param {Function} [callback] - the final argument should be the callback, - * called when all functions have completed processing. - * @returns {Function} - If only the first argument is provided, it will return - * a function which lets you pass in the arguments as if it were a single - * function call. - */ -var applyEachSeries = applyEach$1(mapSeries); - -/** - * A specialized version of `_.forEach` for arrays without support for - * iteratee shorthands. - * - * @private - * @param {Array} [array] The array to iterate over. - * @param {Function} iteratee The function invoked per iteration. - * @returns {Array} Returns `array`. - */ -function arrayEach(array, iteratee) { - var index = -1, - length = array == null ? 0 : array.length; - - while (++index < length) { - if (iteratee(array[index], index, array) === false) { - break; - } - } - return array; -} - -/** - * Creates a base function for methods like `_.forIn` and `_.forOwn`. - * - * @private - * @param {boolean} [fromRight] Specify iterating from right to left. - * @returns {Function} Returns the new base function. - */ -function createBaseFor(fromRight) { - return function(object, iteratee, keysFunc) { - var index = -1, - iterable = Object(object), - props = keysFunc(object), - length = props.length; - - while (length--) { - var key = props[fromRight ? length : ++index]; - if (iteratee(iterable[key], key, iterable) === false) { - break; - } - } - return object; - }; -} - -/** - * The base implementation of `baseForOwn` which iterates over `object` - * properties returned by `keysFunc` and invokes `iteratee` for each property. - * Iteratee functions may exit iteration early by explicitly returning `false`. - * - * @private - * @param {Object} object The object to iterate over. - * @param {Function} iteratee The function invoked per iteration. - * @param {Function} keysFunc The function to get the keys of `object`. - * @returns {Object} Returns `object`. - */ -var baseFor = createBaseFor(); - -/** - * The base implementation of `_.forOwn` without support for iteratee shorthands. 
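 *
 * A rough usage sketch (hypothetical object, not from the lodash docs);
 * returning `false` from the iteratee stops the iteration early:
 *
 * baseForOwn({a: 1, b: 2, c: 3}, function(value, key) {
 *     console.log(key + '=' + value);
 *     return key !== 'b';
 * });
 * // typically logs "a=1" and "b=2", then stops before visiting 'c'
 *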
- * - * @private - * @param {Object} object The object to iterate over. - * @param {Function} iteratee The function invoked per iteration. - * @returns {Object} Returns `object`. - */ -function baseForOwn(object, iteratee) { - return object && baseFor(object, iteratee, keys); -} - -/** - * The base implementation of `_.findIndex` and `_.findLastIndex` without - * support for iteratee shorthands. - * - * @private - * @param {Array} array The array to inspect. - * @param {Function} predicate The function invoked per iteration. - * @param {number} fromIndex The index to search from. - * @param {boolean} [fromRight] Specify iterating from right to left. - * @returns {number} Returns the index of the matched value, else `-1`. - */ -function baseFindIndex(array, predicate, fromIndex, fromRight) { - var length = array.length, - index = fromIndex + (fromRight ? 1 : -1); - - while ((fromRight ? index-- : ++index < length)) { - if (predicate(array[index], index, array)) { - return index; - } - } - return -1; -} - -/** - * The base implementation of `_.isNaN` without support for number objects. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. - */ -function baseIsNaN(value) { - return value !== value; -} - -/** - * A specialized version of `_.indexOf` which performs strict equality - * comparisons of values, i.e. `===`. - * - * @private - * @param {Array} array The array to inspect. - * @param {*} value The value to search for. - * @param {number} fromIndex The index to search from. - * @returns {number} Returns the index of the matched value, else `-1`. - */ -function strictIndexOf(array, value, fromIndex) { - var index = fromIndex - 1, - length = array.length; - - while (++index < length) { - if (array[index] === value) { - return index; - } - } - return -1; -} - -/** - * The base implementation of `_.indexOf` without `fromIndex` bounds checks. - * - * @private - * @param {Array} array The array to inspect. - * @param {*} value The value to search for. - * @param {number} fromIndex The index to search from. - * @returns {number} Returns the index of the matched value, else `-1`. - */ -function baseIndexOf(array, value, fromIndex) { - return value === value - ? strictIndexOf(array, value, fromIndex) - : baseFindIndex(array, baseIsNaN, fromIndex); -} - -/** - * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on - * their requirements. Each function can optionally depend on other functions - * being completed first, and each function is run as soon as its requirements - * are satisfied. - * - * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence - * will stop. Further tasks will not execute (so any other functions depending - * on it will not run), and the main `callback` is immediately called with the - * error. - * - * {@link AsyncFunction}s also receive an object containing the results of functions which - * have completed so far as the first argument, if they have dependencies. If a - * task function has no dependencies, it will only be passed a callback. - * - * @name auto - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Object} tasks - An object. Each of its properties is either a - * function or an array of requirements, with the {@link AsyncFunction} itself the last item - * in the array. The object's key of a property serves as the name of the task - * defined by that property, i.e. 
can be used when specifying requirements for - * other tasks. The function receives one or two arguments: - * * a `results` object, containing the results of the previously executed - * functions, only passed if the task has any dependencies, - * * a `callback(err, result)` function, which must be called when finished, - * passing an `error` (which can be `null`) and the result of the function's - * execution. - * @param {number} [concurrency=Infinity] - An optional `integer` for - * determining the maximum number of tasks that can be run in parallel. By - * default, as many as possible. - * @param {Function} [callback] - An optional callback which is called when all - * the tasks have been completed. It receives the `err` argument if any `tasks` - * pass an error to their callback. Results are always returned; however, if an - * error occurs, no further `tasks` will be performed, and the results object - * will only contain partial results. Invoked with (err, results). - * @returns undefined - * @example - * - * async.auto({ - * // this function will just be passed a callback - * readData: async.apply(fs.readFile, 'data.txt', 'utf-8'), - * showData: ['readData', function(results, cb) { - * // results.readData is the file's contents - * // ... - * }] - * }, callback); - * - * async.auto({ - * get_data: function(callback) { - * console.log('in get_data'); - * // async code to get some data - * callback(null, 'data', 'converted to array'); - * }, - * make_folder: function(callback) { - * console.log('in make_folder'); - * // async code to create a directory to store a file in - * // this is run at the same time as getting the data - * callback(null, 'folder'); - * }, - * write_file: ['get_data', 'make_folder', function(results, callback) { - * console.log('in write_file', JSON.stringify(results)); - * // once there is some data and the directory exists, - * // write the data to a file in the directory - * callback(null, 'filename'); - * }], - * email_link: ['write_file', function(results, callback) { - * console.log('in email_link', JSON.stringify(results)); - * // once the file is written let's email a link to it... - * // results.write_file contains the filename returned by write_file. - * callback(null, {'file':results.write_file, 'email':'user@example.com'}); - * }] - * }, function(err, results) { - * console.log('err = ', err); - * console.log('results = ', results); - * }); - */ -var auto = function (tasks, concurrency, callback) { - if (typeof concurrency === 'function') { - // concurrency is optional, shift the args. 
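        // Both call forms end up here; with a hypothetical tasks object:
        //   async.auto({a: taskA, b: ['a', taskB]}, done);    // concurrency defaults to the task count
        //   async.auto({a: taskA, b: ['a', taskB]}, 2, done); // at most two tasks run at once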
- callback = concurrency; - concurrency = null; - } - callback = once(callback || noop); - var keys$$1 = keys(tasks); - var numTasks = keys$$1.length; - if (!numTasks) { - return callback(null); - } - if (!concurrency) { - concurrency = numTasks; - } - - var results = {}; - var runningTasks = 0; - var hasError = false; - - var listeners = Object.create(null); - - var readyTasks = []; - - // for cycle detection: - var readyToCheck = []; // tasks that have been identified as reachable - // without the possibility of returning to an ancestor task - var uncheckedDependencies = {}; - - baseForOwn(tasks, function (task, key) { - if (!isArray(task)) { - // no dependencies - enqueueTask(key, [task]); - readyToCheck.push(key); - return; - } - - var dependencies = task.slice(0, task.length - 1); - var remainingDependencies = dependencies.length; - if (remainingDependencies === 0) { - enqueueTask(key, task); - readyToCheck.push(key); - return; - } - uncheckedDependencies[key] = remainingDependencies; - - arrayEach(dependencies, function (dependencyName) { - if (!tasks[dependencyName]) { - throw new Error('async.auto task `' + key + - '` has a non-existent dependency `' + - dependencyName + '` in ' + - dependencies.join(', ')); - } - addListener(dependencyName, function () { - remainingDependencies--; - if (remainingDependencies === 0) { - enqueueTask(key, task); - } - }); - }); - }); - - checkForDeadlocks(); - processQueue(); - - function enqueueTask(key, task) { - readyTasks.push(function () { - runTask(key, task); - }); - } - - function processQueue() { - if (readyTasks.length === 0 && runningTasks === 0) { - return callback(null, results); - } - while(readyTasks.length && runningTasks < concurrency) { - var run = readyTasks.shift(); - run(); - } - - } - - function addListener(taskName, fn) { - var taskListeners = listeners[taskName]; - if (!taskListeners) { - taskListeners = listeners[taskName] = []; - } - - taskListeners.push(fn); - } - - function taskComplete(taskName) { - var taskListeners = listeners[taskName] || []; - arrayEach(taskListeners, function (fn) { - fn(); - }); - processQueue(); - } - - - function runTask(key, task) { - if (hasError) return; - - var taskCallback = onlyOnce(function(err, result) { - runningTasks--; - if (arguments.length > 2) { - result = slice(arguments, 1); - } - if (err) { - var safeResults = {}; - baseForOwn(results, function(val, rkey) { - safeResults[rkey] = val; - }); - safeResults[key] = result; - hasError = true; - listeners = Object.create(null); - - callback(err, safeResults); - } else { - results[key] = result; - taskComplete(key); - } - }); - - runningTasks++; - var taskFn = wrapAsync(task[task.length - 1]); - if (task.length > 1) { - taskFn(results, taskCallback); - } else { - taskFn(taskCallback); - } - } - - function checkForDeadlocks() { - // Kahn's algorithm - // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm - // http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html - var currentTask; - var counter = 0; - while (readyToCheck.length) { - currentTask = readyToCheck.pop(); - counter++; - arrayEach(getDependents(currentTask), function (dependent) { - if (--uncheckedDependencies[dependent] === 0) { - readyToCheck.push(dependent); - } - }); - } - - if (counter !== numTasks) { - throw new Error( - 'async.auto cannot execute tasks due to a recursive dependency' - ); - } - } - - function getDependents(taskName) { - var result = []; - baseForOwn(tasks, function (task, key) { - if (isArray(task) && 
baseIndexOf(task, taskName, 0) >= 0) { - result.push(key); - } - }); - return result; - } -}; - -/** - * A specialized version of `_.map` for arrays without support for iteratee - * shorthands. - * - * @private - * @param {Array} [array] The array to iterate over. - * @param {Function} iteratee The function invoked per iteration. - * @returns {Array} Returns the new mapped array. - */ -function arrayMap(array, iteratee) { - var index = -1, - length = array == null ? 0 : array.length, - result = Array(length); - - while (++index < length) { - result[index] = iteratee(array[index], index, array); - } - return result; -} - -/** `Object#toString` result references. */ -var symbolTag = '[object Symbol]'; - -/** - * Checks if `value` is classified as a `Symbol` primitive or object. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. - * @example - * - * _.isSymbol(Symbol.iterator); - * // => true - * - * _.isSymbol('abc'); - * // => false - */ -function isSymbol(value) { - return typeof value == 'symbol' || - (isObjectLike(value) && baseGetTag(value) == symbolTag); -} - -/** Used as references for various `Number` constants. */ -var INFINITY = 1 / 0; - -/** Used to convert symbols to primitives and strings. */ -var symbolProto = Symbol$1 ? Symbol$1.prototype : undefined; -var symbolToString = symbolProto ? symbolProto.toString : undefined; - -/** - * The base implementation of `_.toString` which doesn't convert nullish - * values to empty strings. - * - * @private - * @param {*} value The value to process. - * @returns {string} Returns the string. - */ -function baseToString(value) { - // Exit early for strings to avoid a performance hit in some environments. - if (typeof value == 'string') { - return value; - } - if (isArray(value)) { - // Recursively convert values (susceptible to call stack limits). - return arrayMap(value, baseToString) + ''; - } - if (isSymbol(value)) { - return symbolToString ? symbolToString.call(value) : ''; - } - var result = (value + ''); - return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; -} - -/** - * The base implementation of `_.slice` without an iteratee call guard. - * - * @private - * @param {Array} array The array to slice. - * @param {number} [start=0] The start position. - * @param {number} [end=array.length] The end position. - * @returns {Array} Returns the slice of `array`. - */ -function baseSlice(array, start, end) { - var index = -1, - length = array.length; - - if (start < 0) { - start = -start > length ? 0 : (length + start); - } - end = end > length ? length : end; - if (end < 0) { - end += length; - } - length = start > end ? 0 : ((end - start) >>> 0); - start >>>= 0; - - var result = Array(length); - while (++index < length) { - result[index] = array[index + start]; - } - return result; -} - -/** - * Casts `array` to a slice if it's needed. - * - * @private - * @param {Array} array The array to inspect. - * @param {number} start The start position. - * @param {number} [end=array.length] The end position. - * @returns {Array} Returns the cast slice. - */ -function castSlice(array, start, end) { - var length = array.length; - end = end === undefined ? length : end; - return (!start && end >= length) ? array : baseSlice(array, start, end); -} - -/** - * Used by `_.trim` and `_.trimEnd` to get the index of the last string symbol - * that is not found in the character symbols. 
- * - * @private - * @param {Array} strSymbols The string symbols to inspect. - * @param {Array} chrSymbols The character symbols to find. - * @returns {number} Returns the index of the last unmatched string symbol. - */ -function charsEndIndex(strSymbols, chrSymbols) { - var index = strSymbols.length; - - while (index-- && baseIndexOf(chrSymbols, strSymbols[index], 0) > -1) {} - return index; -} - -/** - * Used by `_.trim` and `_.trimStart` to get the index of the first string symbol - * that is not found in the character symbols. - * - * @private - * @param {Array} strSymbols The string symbols to inspect. - * @param {Array} chrSymbols The character symbols to find. - * @returns {number} Returns the index of the first unmatched string symbol. - */ -function charsStartIndex(strSymbols, chrSymbols) { - var index = -1, - length = strSymbols.length; - - while (++index < length && baseIndexOf(chrSymbols, strSymbols[index], 0) > -1) {} - return index; -} - -/** - * Converts an ASCII `string` to an array. - * - * @private - * @param {string} string The string to convert. - * @returns {Array} Returns the converted array. - */ -function asciiToArray(string) { - return string.split(''); -} - -/** Used to compose unicode character classes. */ -var rsAstralRange = '\\ud800-\\udfff'; -var rsComboMarksRange = '\\u0300-\\u036f'; -var reComboHalfMarksRange = '\\ufe20-\\ufe2f'; -var rsComboSymbolsRange = '\\u20d0-\\u20ff'; -var rsComboRange = rsComboMarksRange + reComboHalfMarksRange + rsComboSymbolsRange; -var rsVarRange = '\\ufe0e\\ufe0f'; - -/** Used to compose unicode capture groups. */ -var rsZWJ = '\\u200d'; - -/** Used to detect strings with [zero-width joiners or code points from the astral planes](http://eev.ee/blog/2015/09/12/dark-corners-of-unicode/). */ -var reHasUnicode = RegExp('[' + rsZWJ + rsAstralRange + rsComboRange + rsVarRange + ']'); - -/** - * Checks if `string` contains Unicode symbols. - * - * @private - * @param {string} string The string to inspect. - * @returns {boolean} Returns `true` if a symbol is found, else `false`. - */ -function hasUnicode(string) { - return reHasUnicode.test(string); -} - -/** Used to compose unicode character classes. */ -var rsAstralRange$1 = '\\ud800-\\udfff'; -var rsComboMarksRange$1 = '\\u0300-\\u036f'; -var reComboHalfMarksRange$1 = '\\ufe20-\\ufe2f'; -var rsComboSymbolsRange$1 = '\\u20d0-\\u20ff'; -var rsComboRange$1 = rsComboMarksRange$1 + reComboHalfMarksRange$1 + rsComboSymbolsRange$1; -var rsVarRange$1 = '\\ufe0e\\ufe0f'; - -/** Used to compose unicode capture groups. */ -var rsAstral = '[' + rsAstralRange$1 + ']'; -var rsCombo = '[' + rsComboRange$1 + ']'; -var rsFitz = '\\ud83c[\\udffb-\\udfff]'; -var rsModifier = '(?:' + rsCombo + '|' + rsFitz + ')'; -var rsNonAstral = '[^' + rsAstralRange$1 + ']'; -var rsRegional = '(?:\\ud83c[\\udde6-\\uddff]){2}'; -var rsSurrPair = '[\\ud800-\\udbff][\\udc00-\\udfff]'; -var rsZWJ$1 = '\\u200d'; - -/** Used to compose unicode regexes. */ -var reOptMod = rsModifier + '?'; -var rsOptVar = '[' + rsVarRange$1 + ']?'; -var rsOptJoin = '(?:' + rsZWJ$1 + '(?:' + [rsNonAstral, rsRegional, rsSurrPair].join('|') + ')' + rsOptVar + reOptMod + ')*'; -var rsSeq = rsOptVar + reOptMod + rsOptJoin; -var rsSymbol = '(?:' + [rsNonAstral + rsCombo + '?', rsCombo, rsRegional, rsSurrPair, rsAstral].join('|') + ')'; - -/** Used to match [string symbols](https://mathiasbynens.be/notes/javascript-unicode). 
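 * `reUnicode` drives `unicodeToArray` below; `stringToArray` only falls back to it
 * when `hasUnicode` spots astral, combining or variation characters. A rough
 * sketch with hypothetical strings, not from the lodash docs:
 *
 * stringToArray('abc');            // => ['a', 'b', 'c']  (plain ASCII path)
 * stringToArray('a\ud83d\ude00b'); // => ['a', '\ud83d\ude00', 'b']  (surrogate pair kept as one symbol)
 *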
*/ -var reUnicode = RegExp(rsFitz + '(?=' + rsFitz + ')|' + rsSymbol + rsSeq, 'g'); - -/** - * Converts a Unicode `string` to an array. - * - * @private - * @param {string} string The string to convert. - * @returns {Array} Returns the converted array. - */ -function unicodeToArray(string) { - return string.match(reUnicode) || []; -} - -/** - * Converts `string` to an array. - * - * @private - * @param {string} string The string to convert. - * @returns {Array} Returns the converted array. - */ -function stringToArray(string) { - return hasUnicode(string) - ? unicodeToArray(string) - : asciiToArray(string); -} - -/** - * Converts `value` to a string. An empty string is returned for `null` - * and `undefined` values. The sign of `-0` is preserved. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to convert. - * @returns {string} Returns the converted string. - * @example - * - * _.toString(null); - * // => '' - * - * _.toString(-0); - * // => '-0' - * - * _.toString([1, 2, 3]); - * // => '1,2,3' - */ -function toString(value) { - return value == null ? '' : baseToString(value); -} - -/** Used to match leading and trailing whitespace. */ -var reTrim = /^\s+|\s+$/g; - -/** - * Removes leading and trailing whitespace or specified characters from `string`. - * - * @static - * @memberOf _ - * @since 3.0.0 - * @category String - * @param {string} [string=''] The string to trim. - * @param {string} [chars=whitespace] The characters to trim. - * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. - * @returns {string} Returns the trimmed string. - * @example - * - * _.trim(' abc '); - * // => 'abc' - * - * _.trim('-_-abc-_-', '_-'); - * // => 'abc' - * - * _.map([' foo ', ' bar '], _.trim); - * // => ['foo', 'bar'] - */ -function trim(string, chars, guard) { - string = toString(string); - if (string && (guard || chars === undefined)) { - return string.replace(reTrim, ''); - } - if (!string || !(chars = baseToString(chars))) { - return string; - } - var strSymbols = stringToArray(string), - chrSymbols = stringToArray(chars), - start = charsStartIndex(strSymbols, chrSymbols), - end = charsEndIndex(strSymbols, chrSymbols) + 1; - - return castSlice(strSymbols, start, end).join(''); -} - -var FN_ARGS = /^(?:async\s+)?(function)?\s*[^\(]*\(\s*([^\)]*)\)/m; -var FN_ARG_SPLIT = /,/; -var FN_ARG = /(=.+)?(\s*)$/; -var STRIP_COMMENTS = /((\/\/.*$)|(\/\*[\s\S]*?\*\/))/mg; - -function parseParams(func) { - func = func.toString().replace(STRIP_COMMENTS, ''); - func = func.match(FN_ARGS)[2].replace(' ', ''); - func = func ? func.split(FN_ARG_SPLIT) : []; - func = func.map(function (arg){ - return trim(arg.replace(FN_ARG, '')); - }); - return func; -} - -/** - * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent - * tasks are specified as parameters to the function, after the usual callback - * parameter, with the parameter names matching the names of the tasks it - * depends on. This can provide even more readable task graphs which can be - * easier to maintain. - * - * If a final callback is specified, the task results are similarly injected, - * specified as named parameters after the initial error parameter. - * - * The autoInject function is purely syntactic sugar and its semantics are - * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. 
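 *
 * Internally the dependency names come from `parseParams` above, which reads the
 * task function's parameter list. A rough sketch of that step (hypothetical
 * function, not from the async docs):
 *
 * parseParams(function (get_data, make_folder, callback) {});
 * // => ['get_data', 'make_folder', 'callback']
 * // (autoInject later pops the trailing callback before wiring up dependencies)
 *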
- * - * @name autoInject - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.auto]{@link module:ControlFlow.auto} - * @category Control Flow - * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of - * the form 'func([dependencies...], callback). The object's key of a property - * serves as the name of the task defined by that property, i.e. can be used - * when specifying requirements for other tasks. - * * The `callback` parameter is a `callback(err, result)` which must be called - * when finished, passing an `error` (which can be `null`) and the result of - * the function's execution. The remaining parameters name other tasks on - * which the task is dependent, and the results from those tasks are the - * arguments of those parameters. - * @param {Function} [callback] - An optional callback which is called when all - * the tasks have been completed. It receives the `err` argument if any `tasks` - * pass an error to their callback, and a `results` object with any completed - * task results, similar to `auto`. - * @example - * - * // The example from `auto` can be rewritten as follows: - * async.autoInject({ - * get_data: function(callback) { - * // async code to get some data - * callback(null, 'data', 'converted to array'); - * }, - * make_folder: function(callback) { - * // async code to create a directory to store a file in - * // this is run at the same time as getting the data - * callback(null, 'folder'); - * }, - * write_file: function(get_data, make_folder, callback) { - * // once there is some data and the directory exists, - * // write the data to a file in the directory - * callback(null, 'filename'); - * }, - * email_link: function(write_file, callback) { - * // once the file is written let's email a link to it... - * // write_file contains the filename returned by write_file. - * callback(null, {'file':write_file, 'email':'user@example.com'}); - * } - * }, function(err, results) { - * console.log('err = ', err); - * console.log('email_link = ', results.email_link); - * }); - * - * // If you are using a JS minifier that mangles parameter names, `autoInject` - * // will not work with plain functions, since the parameter names will be - * // collapsed to a single letter identifier. To work around this, you can - * // explicitly specify the names of the parameters your task function needs - * // in an array, similar to Angular.js dependency injection. - * - * // This still has an advantage over plain `auto`, since the results a task - * // depends on are still spread into arguments. - * async.autoInject({ - * //... - * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { - * callback(null, 'filename'); - * }], - * email_link: ['write_file', function(write_file, callback) { - * callback(null, {'file':write_file, 'email':'user@example.com'}); - * }] - * //... - * }, function(err, results) { - * console.log('err = ', err); - * console.log('email_link = ', results.email_link); - * }); - */ -function autoInject(tasks, callback) { - var newTasks = {}; - - baseForOwn(tasks, function (taskFn, key) { - var params; - var fnIsAsync = isAsync(taskFn); - var hasNoDeps = - (!fnIsAsync && taskFn.length === 1) || - (fnIsAsync && taskFn.length === 0); - - if (isArray(taskFn)) { - params = taskFn.slice(0, -1); - taskFn = taskFn[taskFn.length - 1]; - - newTasks[key] = params.concat(params.length > 0 ? 
newTask : taskFn); - } else if (hasNoDeps) { - // no dependencies, use the function as-is - newTasks[key] = taskFn; - } else { - params = parseParams(taskFn); - if (taskFn.length === 0 && !fnIsAsync && params.length === 0) { - throw new Error("autoInject task functions require explicit parameters."); - } - - // remove callback param - if (!fnIsAsync) params.pop(); - - newTasks[key] = params.concat(newTask); - } - - function newTask(results, taskCb) { - var newArgs = arrayMap(params, function (name) { - return results[name]; - }); - newArgs.push(taskCb); - wrapAsync(taskFn).apply(null, newArgs); - } - }); - - auto(newTasks, callback); -} - -// Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation -// used for queues. This implementation assumes that the node provided by the user can be modified -// to adjust the next and last properties. We implement only the minimal functionality -// for queue support. -function DLL() { - this.head = this.tail = null; - this.length = 0; -} - -function setInitial(dll, node) { - dll.length = 1; - dll.head = dll.tail = node; -} - -DLL.prototype.removeLink = function(node) { - if (node.prev) node.prev.next = node.next; - else this.head = node.next; - if (node.next) node.next.prev = node.prev; - else this.tail = node.prev; - - node.prev = node.next = null; - this.length -= 1; - return node; -}; - -DLL.prototype.empty = function () { - while(this.head) this.shift(); - return this; -}; - -DLL.prototype.insertAfter = function(node, newNode) { - newNode.prev = node; - newNode.next = node.next; - if (node.next) node.next.prev = newNode; - else this.tail = newNode; - node.next = newNode; - this.length += 1; -}; - -DLL.prototype.insertBefore = function(node, newNode) { - newNode.prev = node.prev; - newNode.next = node; - if (node.prev) node.prev.next = newNode; - else this.head = newNode; - node.prev = newNode; - this.length += 1; -}; - -DLL.prototype.unshift = function(node) { - if (this.head) this.insertBefore(this.head, node); - else setInitial(this, node); -}; - -DLL.prototype.push = function(node) { - if (this.tail) this.insertAfter(this.tail, node); - else setInitial(this, node); -}; - -DLL.prototype.shift = function() { - return this.head && this.removeLink(this.head); -}; - -DLL.prototype.pop = function() { - return this.tail && this.removeLink(this.tail); -}; - -DLL.prototype.toArray = function () { - var arr = Array(this.length); - var curr = this.head; - for(var idx = 0; idx < this.length; idx++) { - arr[idx] = curr.data; - curr = curr.next; - } - return arr; -}; - -DLL.prototype.remove = function (testFn) { - var curr = this.head; - while(!!curr) { - var next = curr.next; - if (testFn(curr)) { - this.removeLink(curr); - } - curr = next; - } - return this; -}; - -function queue(worker, concurrency, payload) { - if (concurrency == null) { - concurrency = 1; - } - else if(concurrency === 0) { - throw new Error('Concurrency must not be zero'); - } - - var _worker = wrapAsync(worker); - var numRunning = 0; - var workersList = []; - - var processingScheduled = false; - function _insert(data, insertAtFront, callback) { - if (callback != null && typeof callback !== 'function') { - throw new Error('task callback must be a function'); - } - q.started = true; - if (!isArray(data)) { - data = [data]; - } - if (data.length === 0 && q.idle()) { - // call drain immediately if there are no tasks - return setImmediate$1(function() { - q.drain(); - }); - } - - for (var i = 0, l = data.length; i < l; i++) { - var item = { - data: 
data[i], - callback: callback || noop - }; - - if (insertAtFront) { - q._tasks.unshift(item); - } else { - q._tasks.push(item); - } - } - - if (!processingScheduled) { - processingScheduled = true; - setImmediate$1(function() { - processingScheduled = false; - q.process(); - }); - } - } - - function _next(tasks) { - return function(err){ - numRunning -= 1; - - for (var i = 0, l = tasks.length; i < l; i++) { - var task = tasks[i]; - - var index = baseIndexOf(workersList, task, 0); - if (index === 0) { - workersList.shift(); - } else if (index > 0) { - workersList.splice(index, 1); - } - - task.callback.apply(task, arguments); - - if (err != null) { - q.error(err, task.data); - } - } - - if (numRunning <= (q.concurrency - q.buffer) ) { - q.unsaturated(); - } - - if (q.idle()) { - q.drain(); - } - q.process(); - }; - } - - var isProcessing = false; - var q = { - _tasks: new DLL(), - concurrency: concurrency, - payload: payload, - saturated: noop, - unsaturated:noop, - buffer: concurrency / 4, - empty: noop, - drain: noop, - error: noop, - started: false, - paused: false, - push: function (data, callback) { - _insert(data, false, callback); - }, - kill: function () { - q.drain = noop; - q._tasks.empty(); - }, - unshift: function (data, callback) { - _insert(data, true, callback); - }, - remove: function (testFn) { - q._tasks.remove(testFn); - }, - process: function () { - // Avoid trying to start too many processing operations. This can occur - // when callbacks resolve synchronously (#1267). - if (isProcessing) { - return; - } - isProcessing = true; - while(!q.paused && numRunning < q.concurrency && q._tasks.length){ - var tasks = [], data = []; - var l = q._tasks.length; - if (q.payload) l = Math.min(l, q.payload); - for (var i = 0; i < l; i++) { - var node = q._tasks.shift(); - tasks.push(node); - workersList.push(node); - data.push(node.data); - } - - numRunning += 1; - - if (q._tasks.length === 0) { - q.empty(); - } - - if (numRunning === q.concurrency) { - q.saturated(); - } - - var cb = onlyOnce(_next(tasks)); - _worker(data, cb); - } - isProcessing = false; - }, - length: function () { - return q._tasks.length; - }, - running: function () { - return numRunning; - }, - workersList: function () { - return workersList; - }, - idle: function() { - return q._tasks.length + numRunning === 0; - }, - pause: function () { - q.paused = true; - }, - resume: function () { - if (q.paused === false) { return; } - q.paused = false; - setImmediate$1(q.process); - } - }; - return q; -} - -/** - * A cargo of tasks for the worker function to complete. Cargo inherits all of - * the same methods and event callbacks as [`queue`]{@link module:ControlFlow.queue}. - * @typedef {Object} CargoObject - * @memberOf module:ControlFlow - * @property {Function} length - A function returning the number of items - * waiting to be processed. Invoke like `cargo.length()`. - * @property {number} payload - An `integer` for determining how many tasks - * should be process per round. This property can be changed after a `cargo` is - * created to alter the payload on-the-fly. - * @property {Function} push - Adds `task` to the `queue`. The callback is - * called once the `worker` has finished processing the task. Instead of a - * single task, an array of `tasks` can be submitted. The respective callback is - * used for every task in the list. Invoke like `cargo.push(task, [callback])`. 
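 *
 * A rough push/worker sketch (the task names are hypothetical, not from the
 * async docs):
 *
 * var cargo = async.cargo(function(tasks, callback) {
 *     // receives up to `payload` queued tasks per round
 *     tasks.forEach(function(task) { console.log('processing ' + task.name); });
 *     callback();
 * }, 2);
 *
 * cargo.push({name: 'foo'}, function(err) { console.log('done with foo'); });
 * cargo.push({name: 'bar'});
 * cargo.push({name: 'baz'});
 *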
- * @property {Function} saturated - A callback that is called when the - * `queue.length()` hits the concurrency and further tasks will be queued. - * @property {Function} empty - A callback that is called when the last item - * from the `queue` is given to a `worker`. - * @property {Function} drain - A callback that is called when the last item - * from the `queue` has returned from the `worker`. - * @property {Function} idle - a function returning false if there are items - * waiting or being processed, or true if not. Invoke like `cargo.idle()`. - * @property {Function} pause - a function that pauses the processing of tasks - * until `resume()` is called. Invoke like `cargo.pause()`. - * @property {Function} resume - a function that resumes the processing of - * queued tasks when the queue is paused. Invoke like `cargo.resume()`. - * @property {Function} kill - a function that removes the `drain` callback and - * empties remaining tasks from the queue forcing it to go idle. Invoke like `cargo.kill()`. - */ - -/** - * Creates a `cargo` object with the specified payload. Tasks added to the - * cargo will be processed altogether (up to the `payload` limit). If the - * `worker` is in progress, the task is queued until it becomes available. Once - * the `worker` has completed some tasks, each callback of those tasks is - * called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) - * for how `cargo` and `queue` work. - * - * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers - * at a time, cargo passes an array of tasks to a single worker, repeating - * when the worker is finished. - * - * @name cargo - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.queue]{@link module:ControlFlow.queue} - * @category Control Flow - * @param {AsyncFunction} worker - An asynchronous function for processing an array - * of queued tasks. Invoked with `(tasks, callback)`. - * @param {number} [payload=Infinity] - An optional `integer` for determining - * how many tasks should be processed per round; if omitted, the default is - * unlimited. - * @returns {module:ControlFlow.CargoObject} A cargo object to manage the tasks. Callbacks can - * attached as certain properties to listen for specific events during the - * lifecycle of the cargo and inner queue. - * @example - * - * // create a cargo object with payload 2 - * var cargo = async.cargo(function(tasks, callback) { - * for (var i=0; i true - */ -function identity(value) { - return value; -} - -function _createTester(check, getResult) { - return function(eachfn, arr, iteratee, cb) { - cb = cb || noop; - var testPassed = false; - var testResult; - eachfn(arr, function(value, _, callback) { - iteratee(value, function(err, result) { - if (err) { - callback(err); - } else if (check(result) && !testResult) { - testPassed = true; - testResult = getResult(true, value); - callback(null, breakLoop); - } else { - callback(); - } - }); - }, function(err) { - if (err) { - cb(err); - } else { - cb(null, testPassed ? 
testResult : getResult(false)); - } - }); - }; -} - -function _findGetResult(v, x) { - return x; -} - -/** - * Returns the first value in `coll` that passes an async truth test. The - * `iteratee` is applied in parallel, meaning the first iteratee to return - * `true` will fire the detect `callback` with that result. That means the - * result might not be the first item in the original `coll` (in terms of order) - * that passes the test. - - * If order within the original `coll` is important, then look at - * [`detectSeries`]{@link module:Collections.detectSeries}. - * - * @name detect - * @static - * @memberOf module:Collections - * @method - * @alias find - * @category Collections - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. - * The iteratee must complete with a boolean value as its result. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the `iteratee` functions have finished. - * Result will be the first item in the array that passes the truth test - * (iteratee) or the value `undefined` if none passed. Invoked with - * (err, result). - * @example - * - * async.detect(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, result) { - * // result now equals the first file in the list that exists - * }); - */ -var detect = doParallel(_createTester(identity, _findGetResult)); - -/** - * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a - * time. - * - * @name detectLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.detect]{@link module:Collections.detect} - * @alias findLimit - * @category Collections - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. - * The iteratee must complete with a boolean value as its result. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the `iteratee` functions have finished. - * Result will be the first item in the array that passes the truth test - * (iteratee) or the value `undefined` if none passed. Invoked with - * (err, result). - */ -var detectLimit = doParallelLimit(_createTester(identity, _findGetResult)); - -/** - * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. - * - * @name detectSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.detect]{@link module:Collections.detect} - * @alias findSeries - * @category Collections - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. - * The iteratee must complete with a boolean value as its result. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the `iteratee` functions have finished. - * Result will be the first item in the array that passes the truth test - * (iteratee) or the value `undefined` if none passed. Invoked with - * (err, result). 
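A minimal usage sketch of `detectSeries`, assuming the standard `async` and `fs` modules: because items are tested one at a time, the result is the first passing item in collection order, unlike the parallel `detect` above.

var async = require('async');
var fs = require('fs');

// detectSeries checks one item at a time, so `result` is the first
// existing file in the order given, or undefined if none exists
async.detectSeries(['file1', 'file2', 'file3'], function(filePath, callback) {
    fs.access(filePath, function(err) {
        callback(null, !err);
    });
}, function(err, result) {
    // result is the first file, in collection order, that exists
});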
- */ -var detectSeries = doLimit(detectLimit, 1); - -function consoleFunc(name) { - return function (fn/*, ...args*/) { - var args = slice(arguments, 1); - args.push(function (err/*, ...args*/) { - var args = slice(arguments, 1); - if (typeof console === 'object') { - if (err) { - if (console.error) { - console.error(err); - } - } else if (console[name]) { - arrayEach(args, function (x) { - console[name](x); - }); - } - } - }); - wrapAsync(fn).apply(null, args); - }; -} - -/** - * Logs the result of an [`async` function]{@link AsyncFunction} to the - * `console` using `console.dir` to display the properties of the resulting object. - * Only works in Node.js or in browsers that support `console.dir` and - * `console.error` (such as FF and Chrome). - * If multiple arguments are returned from the async function, - * `console.dir` is called on each argument in order. - * - * @name dir - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} function - The function you want to eventually apply - * all arguments to. - * @param {...*} arguments... - Any number of arguments to apply to the function. - * @example - * - * // in a module - * var hello = function(name, callback) { - * setTimeout(function() { - * callback(null, {hello: name}); - * }, 1000); - * }; - * - * // in the node repl - * node> async.dir(hello, 'world'); - * {hello: 'world'} - */ -var dir = consoleFunc('dir'); - -/** - * The post-check version of [`during`]{@link module:ControlFlow.during}. To reflect the difference in - * the order of operations, the arguments `test` and `fn` are switched. - * - * Also a version of [`doWhilst`]{@link module:ControlFlow.doWhilst} with asynchronous `test` function. - * @name doDuring - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.during]{@link module:ControlFlow.during} - * @category Control Flow - * @param {AsyncFunction} fn - An async function which is called each time - * `test` passes. Invoked with (callback). - * @param {AsyncFunction} test - asynchronous truth test to perform before each - * execution of `fn`. Invoked with (...args, callback), where `...args` are the - * non-error args from the previous callback of `fn`. - * @param {Function} [callback] - A callback which is called after the test - * function has failed and repeated execution of `fn` has stopped. `callback` - * will be passed an error if one occurred, otherwise `null`. - */ -function doDuring(fn, test, callback) { - callback = onlyOnce(callback || noop); - var _fn = wrapAsync(fn); - var _test = wrapAsync(test); - - function next(err/*, ...args*/) { - if (err) return callback(err); - var args = slice(arguments, 1); - args.push(check); - _test.apply(this, args); - } - - function check(err, truth) { - if (err) return callback(err); - if (!truth) return callback(null); - _fn(next); - } - - check(null, true); - -} - -/** - * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in - * the order of operations, the arguments `test` and `iteratee` are switched. - * - * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. - * - * @name doWhilst - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.whilst]{@link module:ControlFlow.whilst} - * @category Control Flow - * @param {AsyncFunction} iteratee - A function which is called each time `test` - * passes. Invoked with (callback). - * @param {Function} test - synchronous truth test to perform after each - * execution of `iteratee`. 
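A minimal usage sketch of `doWhilst`, assuming the standard `async` module: the iteratee always runs at least once, and the synchronous test receives the iteratee's non-error callback results, as the surrounding parameter description explains.

var async = require('async');

var count = 0;

// run the iteratee once, then repeat while the synchronous test returns true
async.doWhilst(function(callback) {
    count++;
    callback(null, count);
}, function(n) {
    // n is the result the iteratee just passed to its callback
    return n < 5;
}, function(err, n) {
    // the loop has stopped; n equals 5 here
});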
Invoked with any non-error callback results of - * `iteratee`. - * @param {Function} [callback] - A callback which is called after the test - * function has failed and repeated execution of `iteratee` has stopped. - * `callback` will be passed an error and any arguments passed to the final - * `iteratee`'s callback. Invoked with (err, [results]); - */ -function doWhilst(iteratee, test, callback) { - callback = onlyOnce(callback || noop); - var _iteratee = wrapAsync(iteratee); - var next = function(err/*, ...args*/) { - if (err) return callback(err); - var args = slice(arguments, 1); - if (test.apply(this, args)) return _iteratee(next); - callback.apply(null, [null].concat(args)); - }; - _iteratee(next); -} - -/** - * Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the - * argument ordering differs from `until`. - * - * @name doUntil - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.doWhilst]{@link module:ControlFlow.doWhilst} - * @category Control Flow - * @param {AsyncFunction} iteratee - An async function which is called each time - * `test` fails. Invoked with (callback). - * @param {Function} test - synchronous truth test to perform after each - * execution of `iteratee`. Invoked with any non-error callback results of - * `iteratee`. - * @param {Function} [callback] - A callback which is called after the test - * function has passed and repeated execution of `iteratee` has stopped. `callback` - * will be passed an error and any arguments passed to the final `iteratee`'s - * callback. Invoked with (err, [results]); - */ -function doUntil(iteratee, test, callback) { - doWhilst(iteratee, function() { - return !test.apply(this, arguments); - }, callback); -} - -/** - * Like [`whilst`]{@link module:ControlFlow.whilst}, except the `test` is an asynchronous function that - * is passed a callback in the form of `function (err, truth)`. If error is - * passed to `test` or `fn`, the main callback is immediately called with the - * value of the error. - * - * @name during - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.whilst]{@link module:ControlFlow.whilst} - * @category Control Flow - * @param {AsyncFunction} test - asynchronous truth test to perform before each - * execution of `fn`. Invoked with (callback). - * @param {AsyncFunction} fn - An async function which is called each time - * `test` passes. Invoked with (callback). - * @param {Function} [callback] - A callback which is called after the test - * function has failed and repeated execution of `fn` has stopped. `callback` - * will be passed an error, if one occurred, otherwise `null`. - * @example - * - * var count = 0; - * - * async.during( - * function (callback) { - * return callback(null, count < 5); - * }, - * function (callback) { - * count++; - * setTimeout(callback, 1000); - * }, - * function (err) { - * // 5 seconds have passed - * } - * ); - */ -function during(test, fn, callback) { - callback = onlyOnce(callback || noop); - var _fn = wrapAsync(fn); - var _test = wrapAsync(test); - - function next(err) { - if (err) return callback(err); - _test(check); - } - - function check(err, truth) { - if (err) return callback(err); - if (!truth) return callback(null); - _fn(next); - } - - _test(check); -} - -function _withoutIndex(iteratee) { - return function (value, index, callback) { - return iteratee(value, callback); - }; -} - -/** - * Applies the function `iteratee` to each item in `coll`, in parallel. 
- * The `iteratee` is called with an item from the list, and a callback for when - * it has finished. If the `iteratee` passes an error to its `callback`, the - * main `callback` (for the `each` function) is immediately called with the - * error. - * - * Note, that since this function applies `iteratee` to each item in parallel, - * there is no guarantee that the iteratee functions will complete in order. - * - * @name each - * @static - * @memberOf module:Collections - * @method - * @alias forEach - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to - * each item in `coll`. Invoked with (item, callback). - * The array index is not passed to the iteratee. - * If you need the index, use `eachOf`. - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - * @example - * - * // assuming openFiles is an array of file names and saveFile is a function - * // to save the modified contents of that file: - * - * async.each(openFiles, saveFile, function(err){ - * // if any of the saves produced an error, err would equal that error - * }); - * - * // assuming openFiles is an array of file names - * async.each(openFiles, function(file, callback) { - * - * // Perform operation on file here. - * console.log('Processing file ' + file); - * - * if( file.length > 32 ) { - * console.log('This file name is too long'); - * callback('File name too long'); - * } else { - * // Do work to process file here - * console.log('File processed'); - * callback(); - * } - * }, function(err) { - * // if any of the file processing produced an error, err would equal that error - * if( err ) { - * // One of the iterations produced an error. - * // All processing will now stop. - * console.log('A file failed to process'); - * } else { - * console.log('All files have been processed successfully'); - * } - * }); - */ -function eachLimit(coll, iteratee, callback) { - eachOf(coll, _withoutIndex(wrapAsync(iteratee)), callback); -} - -/** - * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. - * - * @name eachLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.each]{@link module:Collections.each} - * @alias forEachLimit - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The array index is not passed to the iteratee. - * If you need the index, use `eachOfLimit`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - */ -function eachLimit$1(coll, limit, iteratee, callback) { - _eachOfLimit(limit)(coll, _withoutIndex(wrapAsync(iteratee)), callback); -} - -/** - * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. - * - * @name eachSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.each]{@link module:Collections.each} - * @alias forEachSeries - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. 
- * @param {AsyncFunction} iteratee - An async function to apply to each - * item in `coll`. - * The array index is not passed to the iteratee. - * If you need the index, use `eachOfSeries`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - */ -var eachSeries = doLimit(eachLimit$1, 1); - -/** - * Wrap an async function and ensure it calls its callback on a later tick of - * the event loop. If the function already calls its callback on a next tick, - * no extra deferral is added. This is useful for preventing stack overflows - * (`RangeError: Maximum call stack size exceeded`) and generally keeping - * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) - * contained. ES2017 `async` functions are returned as-is -- they are immune - * to Zalgo's corrupting influences, as they always resolve on a later tick. - * - * @name ensureAsync - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} fn - an async function, one that expects a node-style - * callback as its last argument. - * @returns {AsyncFunction} Returns a wrapped function with the exact same call - * signature as the function passed in. - * @example - * - * function sometimesAsync(arg, callback) { - * if (cache[arg]) { - * return callback(null, cache[arg]); // this would be synchronous!! - * } else { - * doSomeIO(arg, callback); // this IO would be asynchronous - * } - * } - * - * // this has a risk of stack overflows if many results are cached in a row - * async.mapSeries(args, sometimesAsync, done); - * - * // this will defer sometimesAsync's callback if necessary, - * // preventing stack overflows - * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); - */ -function ensureAsync(fn) { - if (isAsync(fn)) return fn; - return initialParams(function (args, callback) { - var sync = true; - args.push(function () { - var innerArgs = arguments; - if (sync) { - setImmediate$1(function () { - callback.apply(null, innerArgs); - }); - } else { - callback.apply(null, innerArgs); - } - }); - fn.apply(this, args); - sync = false; - }); -} - -function notId(v) { - return !v; -} - -/** - * Returns `true` if every element in `coll` satisfies an async test. If any - * iteratee call returns `false`, the main `callback` is immediately called. - * - * @name every - * @static - * @memberOf module:Collections - * @method - * @alias all - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in parallel. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result). - * @example - * - * async.every(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, result) { - * // if result is true then every file exists - * }); - */ -var every = doParallel(_createTester(notId, notId)); - -/** - * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. 
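A minimal usage sketch of `everyLimit`, assuming the standard `async` and `fs` modules and reusing the truth test from the `every` example above, with at most two checks in flight at any time.

var async = require('async');
var fs = require('fs');

// same truth test as the `every` example, but never more than
// two fs.access calls running at once
async.everyLimit(['file1', 'file2', 'file3'], 2, function(filePath, callback) {
    fs.access(filePath, function(err) {
        callback(null, !err);
    });
}, function(err, result) {
    // result is true only if every file exists
});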
- * - * @name everyLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.every]{@link module:Collections.every} - * @alias allLimit - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in parallel. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result). - */ -var everyLimit = doParallelLimit(_createTester(notId, notId)); - -/** - * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. - * - * @name everySeries - * @static - * @memberOf module:Collections - * @method - * @see [async.every]{@link module:Collections.every} - * @alias allSeries - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in series. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result). - */ -var everySeries = doLimit(everyLimit, 1); - -/** - * The base implementation of `_.property` without support for deep paths. - * - * @private - * @param {string} key The key of the property to get. - * @returns {Function} Returns the new accessor function. - */ -function baseProperty(key) { - return function(object) { - return object == null ? undefined : object[key]; - }; -} - -function filterArray(eachfn, arr, iteratee, callback) { - var truthValues = new Array(arr.length); - eachfn(arr, function (x, index, callback) { - iteratee(x, function (err, v) { - truthValues[index] = !!v; - callback(err); - }); - }, function (err) { - if (err) return callback(err); - var results = []; - for (var i = 0; i < arr.length; i++) { - if (truthValues[i]) results.push(arr[i]); - } - callback(null, results); - }); -} - -function filterGeneric(eachfn, coll, iteratee, callback) { - var results = []; - eachfn(coll, function (x, index, callback) { - iteratee(x, function (err, v) { - if (err) { - callback(err); - } else { - if (v) { - results.push({index: index, value: x}); - } - callback(); - } - }); - }, function (err) { - if (err) { - callback(err); - } else { - callback(null, arrayMap(results.sort(function (a, b) { - return a.index - b.index; - }), baseProperty('value'))); - } - }); -} - -function _filter(eachfn, coll, iteratee, callback) { - var filter = isArrayLike(coll) ? filterArray : filterGeneric; - filter(eachfn, coll, wrapAsync(iteratee), callback || noop); -} - -/** - * Returns a new array of all the values in `coll` which pass an async truth - * test. This operation is performed in parallel, but the results array will be - * in the same order as the original. - * - * @name filter - * @static - * @memberOf module:Collections - * @method - * @alias select - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. 
- * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - * @example - * - * async.filter(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, results) { - * // results now equals an array of the existing files - * }); - */ -var filter = doParallel(_filter); - -/** - * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a - * time. - * - * @name filterLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.filter]{@link module:Collections.filter} - * @alias selectLimit - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - */ -var filterLimit = doParallelLimit(_filter); - -/** - * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. - * - * @name filterSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.filter]{@link module:Collections.filter} - * @alias selectSeries - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results) - */ -var filterSeries = doLimit(filterLimit, 1); - -/** - * Calls the asynchronous function `fn` with a callback parameter that allows it - * to call itself again, in series, indefinitely. - - * If an error is passed to the callback then `errback` is called with the - * error, and execution stops, otherwise it will never be called. - * - * @name forever - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {AsyncFunction} fn - an async function to call repeatedly. - * Invoked with (next). - * @param {Function} [errback] - when `fn` passes an error to it's callback, - * this function will be called, and execution stops. Invoked with (err). - * @example - * - * async.forever( - * function(next) { - * // next is suitable for passing to things that need a callback(err [, whatever]); - * // it will result in this function being called again. - * }, - * function(err) { - * // if next is called with a value in its first parameter, it will appear - * // in here as 'err', and execution will stop. 
- * } - * ); - */ -function forever(fn, errback) { - var done = onlyOnce(errback || noop); - var task = wrapAsync(ensureAsync(fn)); - - function next(err) { - if (err) return done(err); - task(next); - } - next(); -} - -/** - * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time. - * - * @name groupByLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.groupBy]{@link module:Collections.groupBy} - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a `key` to group the value under. - * Invoked with (value, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Result is an `Object` whoses - * properties are arrays of values which returned the corresponding key. - */ -var groupByLimit = function(coll, limit, iteratee, callback) { - callback = callback || noop; - var _iteratee = wrapAsync(iteratee); - mapLimit(coll, limit, function(val, callback) { - _iteratee(val, function(err, key) { - if (err) return callback(err); - return callback(null, {key: key, val: val}); - }); - }, function(err, mapResults) { - var result = {}; - // from MDN, handle object having an `hasOwnProperty` prop - var hasOwnProperty = Object.prototype.hasOwnProperty; - - for (var i = 0; i < mapResults.length; i++) { - if (mapResults[i]) { - var key = mapResults[i].key; - var val = mapResults[i].val; - - if (hasOwnProperty.call(result, key)) { - result[key].push(val); - } else { - result[key] = [val]; - } - } - } - - return callback(err, result); - }); -}; - -/** - * Returns a new object, where each value corresponds to an array of items, from - * `coll`, that returned the corresponding key. That is, the keys of the object - * correspond to the values passed to the `iteratee` callback. - * - * Note: Since this function applies the `iteratee` to each item in parallel, - * there is no guarantee that the `iteratee` functions will complete in order. - * However, the values for each key in the `result` will be in the same order as - * the original `coll`. For Objects, the values will roughly be in the order of - * the original Objects' keys (but this can vary across JavaScript engines). - * - * @name groupBy - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a `key` to group the value under. - * Invoked with (value, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Result is an `Object` whoses - * properties are arrays of values which returned the corresponding key. - * @example - * - * async.groupBy(['userId1', 'userId2', 'userId3'], function(userId, callback) { - * db.findById(userId, function(err, user) { - * if (err) return callback(err); - * return callback(null, user.age); - * }); - * }, function(err, result) { - * // result is object containing the userIds grouped by age - * // e.g. 
{ 30: ['userId1', 'userId3'], 42: ['userId2']}; - * }); - */ -var groupBy = doLimit(groupByLimit, Infinity); - -/** - * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time. - * - * @name groupBySeries - * @static - * @memberOf module:Collections - * @method - * @see [async.groupBy]{@link module:Collections.groupBy} - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a `key` to group the value under. - * Invoked with (value, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Result is an `Object` whoses - * properties are arrays of values which returned the corresponding key. - */ -var groupBySeries = doLimit(groupByLimit, 1); - -/** - * Logs the result of an `async` function to the `console`. Only works in - * Node.js or in browsers that support `console.log` and `console.error` (such - * as FF and Chrome). If multiple arguments are returned from the async - * function, `console.log` is called on each argument in order. - * - * @name log - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} function - The function you want to eventually apply - * all arguments to. - * @param {...*} arguments... - Any number of arguments to apply to the function. - * @example - * - * // in a module - * var hello = function(name, callback) { - * setTimeout(function() { - * callback(null, 'hello ' + name); - * }, 1000); - * }; - * - * // in the node repl - * node> async.log(hello, 'world'); - * 'hello world' - */ -var log = consoleFunc('log'); - -/** - * The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a - * time. - * - * @name mapValuesLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.mapValues]{@link module:Collections.mapValues} - * @category Collection - * @param {Object} obj - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - A function to apply to each value and key - * in `coll`. - * The iteratee should complete with the transformed value as its result. - * Invoked with (value, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. `result` is a new object consisting - * of each key from `obj`, with each transformed value on the right-hand side. - * Invoked with (err, result). - */ -function mapValuesLimit(obj, limit, iteratee, callback) { - callback = once(callback || noop); - var newObj = {}; - var _iteratee = wrapAsync(iteratee); - eachOfLimit(obj, limit, function(val, key, next) { - _iteratee(val, key, function (err, result) { - if (err) return next(err); - newObj[key] = result; - next(); - }); - }, function (err) { - callback(err, newObj); - }); -} - -/** - * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. - * - * Produces a new Object by mapping each value of `obj` through the `iteratee` - * function. The `iteratee` is called each `value` and `key` from `obj` and a - * callback for when it has finished processing. 
Each of these callbacks takes - * two arguments: an `error`, and the transformed item from `obj`. If `iteratee` - * passes an error to its callback, the main `callback` (for the `mapValues` - * function) is immediately called with the error. - * - * Note, the order of the keys in the result is not guaranteed. The keys will - * be roughly in the order they complete, (but this is very engine-specific) - * - * @name mapValues - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Object} obj - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each value and key - * in `coll`. - * The iteratee should complete with the transformed value as its result. - * Invoked with (value, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. `result` is a new object consisting - * of each key from `obj`, with each transformed value on the right-hand side. - * Invoked with (err, result). - * @example - * - * async.mapValues({ - * f1: 'file1', - * f2: 'file2', - * f3: 'file3' - * }, function (file, key, callback) { - * fs.stat(file, callback); - * }, function(err, result) { - * // result is now a map of stats for each file, e.g. - * // { - * // f1: [stats for file1], - * // f2: [stats for file2], - * // f3: [stats for file3] - * // } - * }); - */ - -var mapValues = doLimit(mapValuesLimit, Infinity); - -/** - * The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time. - * - * @name mapValuesSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.mapValues]{@link module:Collections.mapValues} - * @category Collection - * @param {Object} obj - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each value and key - * in `coll`. - * The iteratee should complete with the transformed value as its result. - * Invoked with (value, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. `result` is a new object consisting - * of each key from `obj`, with each transformed value on the right-hand side. - * Invoked with (err, result). - */ -var mapValuesSeries = doLimit(mapValuesLimit, 1); - -function has(obj, key) { - return key in obj; -} - -/** - * Caches the results of an async function. When creating a hash to store - * function results against, the callback is omitted from the hash and an - * optional hash function can be used. - * - * If no hash function is specified, the first argument is used as a hash key, - * which may work reasonably if it is a string or a data type that converts to a - * distinct string. Note that objects and arrays will not behave reasonably. - * Neither will cases where the other arguments are significant. In such cases, - * specify your own hash function. - * - * The cache of results is exposed as the `memo` property of the function - * returned by `memoize`. - * - * @name memoize - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} fn - The async function to proxy and cache results from. - * @param {Function} hasher - An optional function for generating a custom hash - * for storing results. It has all the arguments applied to it apart from the - * callback, and must be synchronous. 
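The example below only shows the default hashing behaviour, so here is a minimal sketch of a custom hasher, assuming a hypothetical two-argument `lookup` function: the hasher receives every argument except the callback and must be synchronous.

var async = require('async');

// hypothetical async function whose result depends on both arguments
function lookup(id, region, callback) {
    // ... some I/O here ...
    callback(null, {id: id, region: region});
}

// hash on both arguments so ('a', 'eu') and ('a', 'us') are cached separately
var cachedLookup = async.memoize(lookup, function(id, region) {
    return id + ':' + region;
});

cachedLookup('a', 'eu', function(err, result) {
    // the first call runs lookup; later ('a', 'eu') calls are served
    // from cachedLookup.memo
});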
- * @returns {AsyncFunction} a memoized version of `fn` - * @example - * - * var slow_fn = function(name, callback) { - * // do something - * callback(null, result); - * }; - * var fn = async.memoize(slow_fn); - * - * // fn can now be used as if it were slow_fn - * fn('some name', function() { - * // callback - * }); - */ -function memoize(fn, hasher) { - var memo = Object.create(null); - var queues = Object.create(null); - hasher = hasher || identity; - var _fn = wrapAsync(fn); - var memoized = initialParams(function memoized(args, callback) { - var key = hasher.apply(null, args); - if (has(memo, key)) { - setImmediate$1(function() { - callback.apply(null, memo[key]); - }); - } else if (has(queues, key)) { - queues[key].push(callback); - } else { - queues[key] = [callback]; - _fn.apply(null, args.concat(function(/*args*/) { - var args = slice(arguments); - memo[key] = args; - var q = queues[key]; - delete queues[key]; - for (var i = 0, l = q.length; i < l; i++) { - q[i].apply(null, args); - } - })); - } - }); - memoized.memo = memo; - memoized.unmemoized = fn; - return memoized; -} - -/** - * Calls `callback` on a later loop around the event loop. In Node.js this just - * calls `process.nextTick`. In the browser it will use `setImmediate` if - * available, otherwise `setTimeout(callback, 0)`, which means other higher - * priority events may precede the execution of `callback`. - * - * This is used internally for browser-compatibility purposes. - * - * @name nextTick - * @static - * @memberOf module:Utils - * @method - * @see [async.setImmediate]{@link module:Utils.setImmediate} - * @category Util - * @param {Function} callback - The function to call on a later loop around - * the event loop. Invoked with (args...). - * @param {...*} args... - any number of additional arguments to pass to the - * callback on the next tick. - * @example - * - * var call_order = []; - * async.nextTick(function() { - * call_order.push('two'); - * // call_order now equals ['one','two'] - * }); - * call_order.push('one'); - * - * async.setImmediate(function (a, b, c) { - * // a, b, and c equal 1, 2, and 3 - * }, 1, 2, 3); - */ -var _defer$1; - -if (hasNextTick) { - _defer$1 = process.nextTick; -} else if (hasSetImmediate) { - _defer$1 = setImmediate; -} else { - _defer$1 = fallback; -} - -var nextTick = wrap(_defer$1); - -function _parallel(eachfn, tasks, callback) { - callback = callback || noop; - var results = isArrayLike(tasks) ? [] : {}; - - eachfn(tasks, function (task, key, callback) { - wrapAsync(task)(function (err, result) { - if (arguments.length > 2) { - result = slice(arguments, 1); - } - results[key] = result; - callback(err); - }); - }, function (err) { - callback(err, results); - }); -} - -/** - * Run the `tasks` collection of functions in parallel, without waiting until - * the previous function has completed. If any of the functions pass an error to - * its callback, the main `callback` is immediately called with the value of the - * error. Once the `tasks` have completed, the results are passed to the final - * `callback` as an array. - * - * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about - * parallel execution of code. If your tasks do not use any timers or perform - * any I/O, they will actually be executed in series. Any synchronous setup - * sections for each task will happen one after the other. JavaScript remains - * single-threaded. - * - * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the - * execution of other tasks when a task fails. 
- * - * It is also possible to use an object instead of an array. Each property will - * be run as a function and the results will be passed to the final `callback` - * as an object instead of an array. This can be a more readable way of handling - * results from {@link async.parallel}. - * - * @name parallel - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|Object} tasks - A collection of - * [async functions]{@link AsyncFunction} to run. - * Each async function can complete with any number of optional `result` values. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed successfully. This function gets a results array - * (or object) containing all the result arguments passed to the task callbacks. - * Invoked with (err, results). - * - * @example - * async.parallel([ - * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); - * } - * ], - * // optional callback - * function(err, results) { - * // the results array will equal ['one','two'] even though - * // the second function had a shorter timeout. - * }); - * - * // an example using an object instead of an array - * async.parallel({ - * one: function(callback) { - * setTimeout(function() { - * callback(null, 1); - * }, 200); - * }, - * two: function(callback) { - * setTimeout(function() { - * callback(null, 2); - * }, 100); - * } - * }, function(err, results) { - * // results is now equals to: {one: 1, two: 2} - * }); - */ -function parallelLimit(tasks, callback) { - _parallel(eachOf, tasks, callback); -} - -/** - * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a - * time. - * - * @name parallelLimit - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.parallel]{@link module:ControlFlow.parallel} - * @category Control Flow - * @param {Array|Iterable|Object} tasks - A collection of - * [async functions]{@link AsyncFunction} to run. - * Each async function can complete with any number of optional `result` values. - * @param {number} limit - The maximum number of async operations at a time. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed successfully. This function gets a results array - * (or object) containing all the result arguments passed to the task callbacks. - * Invoked with (err, results). - */ -function parallelLimit$1(tasks, limit, callback) { - _parallel(_eachOfLimit(limit), tasks, callback); -} - -/** - * A queue of tasks for the worker function to complete. - * @typedef {Object} QueueObject - * @memberOf module:ControlFlow - * @property {Function} length - a function returning the number of items - * waiting to be processed. Invoke with `queue.length()`. - * @property {boolean} started - a boolean indicating whether or not any - * items have been pushed and processed by the queue. - * @property {Function} running - a function returning the number of items - * currently being processed. Invoke with `queue.running()`. - * @property {Function} workersList - a function returning the array of items - * currently being processed. Invoke with `queue.workersList()`. - * @property {Function} idle - a function returning false if there are items - * waiting or being processed, or true if not. Invoke with `queue.idle()`. 
- * @property {number} concurrency - an integer for determining how many `worker` - * functions should be run in parallel. This property can be changed after a - * `queue` is created to alter the concurrency on-the-fly. - * @property {Function} push - add a new task to the `queue`. Calls `callback` - * once the `worker` has finished processing the task. Instead of a single task, - * a `tasks` array can be submitted. The respective callback is used for every - * task in the list. Invoke with `queue.push(task, [callback])`, - * @property {Function} unshift - add a new task to the front of the `queue`. - * Invoke with `queue.unshift(task, [callback])`. - * @property {Function} remove - remove items from the queue that match a test - * function. The test function will be passed an object with a `data` property, - * and a `priority` property, if this is a - * [priorityQueue]{@link module:ControlFlow.priorityQueue} object. - * Invoked with `queue.remove(testFn)`, where `testFn` is of the form - * `function ({data, priority}) {}` and returns a Boolean. - * @property {Function} saturated - a callback that is called when the number of - * running workers hits the `concurrency` limit, and further tasks will be - * queued. - * @property {Function} unsaturated - a callback that is called when the number - * of running workers is less than the `concurrency` & `buffer` limits, and - * further tasks will not be queued. - * @property {number} buffer - A minimum threshold buffer in order to say that - * the `queue` is `unsaturated`. - * @property {Function} empty - a callback that is called when the last item - * from the `queue` is given to a `worker`. - * @property {Function} drain - a callback that is called when the last item - * from the `queue` has returned from the `worker`. - * @property {Function} error - a callback that is called when a task errors. - * Has the signature `function(error, task)`. - * @property {boolean} paused - a boolean for determining whether the queue is - * in a paused state. - * @property {Function} pause - a function that pauses the processing of tasks - * until `resume()` is called. Invoke with `queue.pause()`. - * @property {Function} resume - a function that resumes the processing of - * queued tasks when the queue is paused. Invoke with `queue.resume()`. - * @property {Function} kill - a function that removes the `drain` callback and - * empties remaining tasks from the queue forcing it to go idle. No more tasks - * should be pushed to the queue after calling this function. Invoke with `queue.kill()`. - */ - -/** - * Creates a `queue` object with the specified `concurrency`. Tasks added to the - * `queue` are processed in parallel (up to the `concurrency` limit). If all - * `worker`s are in progress, the task is queued until one becomes available. - * Once a `worker` completes a `task`, that `task`'s callback is called. - * - * @name queue - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {AsyncFunction} worker - An async function for processing a queued task. - * If you want to handle errors from an individual task, pass a callback to - * `q.push()`. Invoked with (task, callback). - * @param {number} [concurrency=1] - An `integer` for determining how many - * `worker` functions should be run in parallel. If omitted, the concurrency - * defaults to `1`. If the concurrency is `0`, an error is thrown. - * @returns {module:ControlFlow.QueueObject} A queue object to manage the tasks. 
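As the next lines note, callbacks can be attached as properties of the returned queue; a minimal sketch of the event hooks described in the QueueObject properties above, assuming the standard `async` module and a placeholder worker.

var async = require('async');

var q = async.queue(function(task, callback) {
    // placeholder worker: pretend each task takes 100 ms
    setTimeout(callback, 100);
}, 4);

// with concurrency 4 the default buffer is concurrency / 4, i.e. 1
q.saturated = function() {
    // all 4 workers are busy; further tasks will wait in the queue
};
q.unsaturated = function() {
    // running workers dropped to concurrency - buffer (3) or fewer
};
q.error = function(err, task) {
    // a worker passed an error to its callback for this task
};
q.drain = function() {
    // the last queued task has finished
};

q.push([1, 2, 3, 4, 5, 6]);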
Callbacks can - * attached as certain properties to listen for specific events during the - * lifecycle of the queue. - * @example - * - * // create a queue object with concurrency 2 - * var q = async.queue(function(task, callback) { - * console.log('hello ' + task.name); - * callback(); - * }, 2); - * - * // assign a callback - * q.drain = function() { - * console.log('all items have been processed'); - * }; - * - * // add some items to the queue - * q.push({name: 'foo'}, function(err) { - * console.log('finished processing foo'); - * }); - * q.push({name: 'bar'}, function (err) { - * console.log('finished processing bar'); - * }); - * - * // add some items to the queue (batch-wise) - * q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function(err) { - * console.log('finished processing item'); - * }); - * - * // add some items to the front of the queue - * q.unshift({name: 'bar'}, function (err) { - * console.log('finished processing bar'); - * }); - */ -var queue$1 = function (worker, concurrency) { - var _worker = wrapAsync(worker); - return queue(function (items, cb) { - _worker(items[0], cb); - }, concurrency, 1); -}; - -/** - * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and - * completed in ascending priority order. - * - * @name priorityQueue - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.queue]{@link module:ControlFlow.queue} - * @category Control Flow - * @param {AsyncFunction} worker - An async function for processing a queued task. - * If you want to handle errors from an individual task, pass a callback to - * `q.push()`. - * Invoked with (task, callback). - * @param {number} concurrency - An `integer` for determining how many `worker` - * functions should be run in parallel. If omitted, the concurrency defaults to - * `1`. If the concurrency is `0`, an error is thrown. - * @returns {module:ControlFlow.QueueObject} A priorityQueue object to manage the tasks. There are two - * differences between `queue` and `priorityQueue` objects: - * * `push(task, priority, [callback])` - `priority` should be a number. If an - * array of `tasks` is given, all tasks will be assigned the same priority. - * * The `unshift` method was removed. - */ -var priorityQueue = function(worker, concurrency) { - // Start with a normal queue - var q = queue$1(worker, concurrency); - - // Override push to accept second parameter representing priority - q.push = function(data, priority, callback) { - if (callback == null) callback = noop; - if (typeof callback !== 'function') { - throw new Error('task callback must be a function'); - } - q.started = true; - if (!isArray(data)) { - data = [data]; - } - if (data.length === 0) { - // call drain immediately if there are no tasks - return setImmediate$1(function() { - q.drain(); - }); - } - - priority = priority || 0; - var nextNode = q._tasks.head; - while (nextNode && priority >= nextNode.priority) { - nextNode = nextNode.next; - } - - for (var i = 0, l = data.length; i < l; i++) { - var item = { - data: data[i], - priority: priority, - callback: callback - }; - - if (nextNode) { - q._tasks.insertBefore(nextNode, item); - } else { - q._tasks.push(item); - } - } - setImmediate$1(q.process); - }; - - // Remove unshift function - delete q.unshift; - - return q; -}; - -/** - * Runs the `tasks` array of functions in parallel, without waiting until the - * previous function has completed. 
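Returning briefly to the `priorityQueue` defined just above, which has no example of its own: a minimal usage sketch, assuming the standard `async` module; `push` takes a priority argument and `unshift` is not available.

var async = require('async');

var pq = async.priorityQueue(function(task, callback) {
    console.log('processing ' + task.name);
    callback();
}, 1);

// lower priority numbers are processed first, regardless of push order
pq.push({name: 'routine'}, 2, function(err) {
    console.log('finished routine task');
});
pq.push([{name: 'urgent'}, {name: 'also urgent'}], 1, function(err) {
    console.log('finished an urgent task');
});
// there is no pq.unshift; priority ordering takes its place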
Once any of the `tasks` complete or pass an - * error to its callback, the main `callback` is immediately called. It's - * equivalent to `Promise.race()`. - * - * @name race - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array} tasks - An array containing [async functions]{@link AsyncFunction} - * to run. Each function can complete with an optional `result` value. - * @param {Function} callback - A callback to run once any of the functions have - * completed. This function gets an error or result from the first function that - * completed. Invoked with (err, result). - * @returns undefined - * @example - * - * async.race([ - * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); - * } - * ], - * // main callback - * function(err, result) { - * // the result will be equal to 'two' as it finishes earlier - * }); - */ -function race(tasks, callback) { - callback = once(callback || noop); - if (!isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); - if (!tasks.length) return callback(); - for (var i = 0, l = tasks.length; i < l; i++) { - wrapAsync(tasks[i])(callback); - } -} - -/** - * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. - * - * @name reduceRight - * @static - * @memberOf module:Collections - * @method - * @see [async.reduce]{@link module:Collections.reduce} - * @alias foldr - * @category Collection - * @param {Array} array - A collection to iterate over. - * @param {*} memo - The initial state of the reduction. - * @param {AsyncFunction} iteratee - A function applied to each item in the - * array to produce the next step in the reduction. - * The `iteratee` should complete with the next state of the reduction. - * If the iteratee complete with an error, the reduction is stopped and the - * main `callback` is immediately called with the error. - * Invoked with (memo, item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result is the reduced value. Invoked with - * (err, result). - */ -function reduceRight (array, memo, iteratee, callback) { - var reversed = slice(array).reverse(); - reduce(reversed, memo, iteratee, callback); -} - -/** - * Wraps the async function in another function that always completes with a - * result object, even when it errors. - * - * The result object has either the property `error` or `value`. - * - * @name reflect - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} fn - The async function you want to wrap - * @returns {Function} - A function that always passes null to it's callback as - * the error. The second argument to the callback will be an `object` with - * either an `error` or a `value` property. - * @example - * - * async.parallel([ - * async.reflect(function(callback) { - * // do some stuff ... - * callback(null, 'one'); - * }), - * async.reflect(function(callback) { - * // do some more stuff but error ... - * callback('bad stuff happened'); - * }), - * async.reflect(function(callback) { - * // do some more stuff ... 
- * callback(null, 'two'); - * }) - * ], - * // optional callback - * function(err, results) { - * // values - * // results[0].value = 'one' - * // results[1].error = 'bad stuff happened' - * // results[2].value = 'two' - * }); - */ -function reflect(fn) { - var _fn = wrapAsync(fn); - return initialParams(function reflectOn(args, reflectCallback) { - args.push(function callback(error, cbArg) { - if (error) { - reflectCallback(null, { error: error }); - } else { - var value; - if (arguments.length <= 2) { - value = cbArg; - } else { - value = slice(arguments, 1); - } - reflectCallback(null, { value: value }); - } - }); - - return _fn.apply(this, args); - }); -} - -/** - * A helper function that wraps an array or an object of functions with `reflect`. - * - * @name reflectAll - * @static - * @memberOf module:Utils - * @method - * @see [async.reflect]{@link module:Utils.reflect} - * @category Util - * @param {Array|Object|Iterable} tasks - The collection of - * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. - * @returns {Array} Returns an array of async functions, each wrapped in - * `async.reflect` - * @example - * - * let tasks = [ - * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * // do some more stuff but error ... - * callback(new Error('bad stuff happened')); - * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); - * } - * ]; - * - * async.parallel(async.reflectAll(tasks), - * // optional callback - * function(err, results) { - * // values - * // results[0].value = 'one' - * // results[1].error = Error('bad stuff happened') - * // results[2].value = 'two' - * }); - * - * // an example using an object instead of an array - * let tasks = { - * one: function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * two: function(callback) { - * callback('two'); - * }, - * three: function(callback) { - * setTimeout(function() { - * callback(null, 'three'); - * }, 100); - * } - * }; - * - * async.parallel(async.reflectAll(tasks), - * // optional callback - * function(err, results) { - * // values - * // results.one.value = 'one' - * // results.two.error = 'two' - * // results.three.value = 'three' - * }); - */ -function reflectAll(tasks) { - var results; - if (isArray(tasks)) { - results = arrayMap(tasks, reflect); - } else { - results = {}; - baseForOwn(tasks, function(task, key) { - results[key] = reflect.call(this, task); - }); - } - return results; -} - -function reject$1(eachfn, arr, iteratee, callback) { - _filter(eachfn, arr, function(value, cb) { - iteratee(value, function(err, v) { - cb(err, !v); - }); - }, callback); -} - -/** - * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test. - * - * @name reject - * @static - * @memberOf module:Collections - * @method - * @see [async.filter]{@link module:Collections.filter} - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - An async truth test to apply to each item in - * `coll`. - * The should complete with a boolean value as its `result`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). 
- * @example - * - * async.reject(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, results) { - * // results now equals an array of missing files - * createFiles(results); - * }); - */ -var reject = doParallel(reject$1); - -/** - * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a - * time. - * - * @name rejectLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.reject]{@link module:Collections.reject} - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {Function} iteratee - An async truth test to apply to each item in - * `coll`. - * The should complete with a boolean value as its `result`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - */ -var rejectLimit = doParallelLimit(reject$1); - -/** - * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. - * - * @name rejectSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.reject]{@link module:Collections.reject} - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - An async truth test to apply to each item in - * `coll`. - * The should complete with a boolean value as its `result`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - */ -var rejectSeries = doLimit(rejectLimit, 1); - -/** - * Creates a function that returns `value`. - * - * @static - * @memberOf _ - * @since 2.4.0 - * @category Util - * @param {*} value The value to return from the new function. - * @returns {Function} Returns the new constant function. - * @example - * - * var objects = _.times(2, _.constant({ 'a': 1 })); - * - * console.log(objects); - * // => [{ 'a': 1 }, { 'a': 1 }] - * - * console.log(objects[0] === objects[1]); - * // => true - */ -function constant$1(value) { - return function() { - return value; - }; -} - -/** - * Attempts to get a successful response from `task` no more than `times` times - * before returning an error. If the task is successful, the `callback` will be - * passed the result of the successful task. If all attempts fail, the callback - * will be passed the error and result (if any) of the final attempt. - * - * @name retry - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @see [async.retryable]{@link module:ControlFlow.retryable} - * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an - * object with `times` and `interval` or a number. - * * `times` - The number of attempts to make before giving up. The default - * is `5`. - * * `interval` - The time to wait between retries, in milliseconds. The - * default is `0`. The interval may also be specified as a function of the - * retry count (see example). - * * `errorFilter` - An optional synchronous function that is invoked on - * erroneous result. 
If it returns `true` the retry attempts will continue; - * if the function returns `false` the retry flow is aborted with the current - * attempt's error and result being returned to the final callback. - * Invoked with (err). - * * If `opts` is a number, the number specifies the number of times to retry, - * with the default interval of `0`. - * @param {AsyncFunction} task - An async function to retry. - * Invoked with (callback). - * @param {Function} [callback] - An optional callback which is called when the - * task has succeeded, or after the final failed attempt. It receives the `err` - * and `result` arguments of the last attempt at completing the `task`. Invoked - * with (err, results). - * - * @example - * - * // The `retry` function can be used as a stand-alone control flow by passing - * // a callback, as shown below: - * - * // try calling apiMethod 3 times - * async.retry(3, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod 3 times, waiting 200 ms between each retry - * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod 10 times with exponential backoff - * // (i.e. intervals of 100, 200, 400, 800, 1600, ... milliseconds) - * async.retry({ - * times: 10, - * interval: function(retryCount) { - * return 50 * Math.pow(2, retryCount); - * } - * }, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod the default 5 times no delay between each retry - * async.retry(apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod only when error condition satisfies, all other - * // errors will abort the retry control flow and return to final callback - * async.retry({ - * errorFilter: function(err) { - * return err.message === 'Temporary error'; // only retry on a specific error - * } - * }, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // to retry individual methods that are not as reliable within other - * // control flow functions, use the `retryable` wrapper: - * async.auto({ - * users: api.getUsers.bind(api), - * payments: async.retryable(3, api.getPayments.bind(api)) - * }, function(err, results) { - * // do something with the results - * }); - * - */ -function retry(opts, task, callback) { - var DEFAULT_TIMES = 5; - var DEFAULT_INTERVAL = 0; - - var options = { - times: DEFAULT_TIMES, - intervalFunc: constant$1(DEFAULT_INTERVAL) - }; - - function parseTimes(acc, t) { - if (typeof t === 'object') { - acc.times = +t.times || DEFAULT_TIMES; - - acc.intervalFunc = typeof t.interval === 'function' ? 
- t.interval : - constant$1(+t.interval || DEFAULT_INTERVAL); - - acc.errorFilter = t.errorFilter; - } else if (typeof t === 'number' || typeof t === 'string') { - acc.times = +t || DEFAULT_TIMES; - } else { - throw new Error("Invalid arguments for async.retry"); - } - } - - if (arguments.length < 3 && typeof opts === 'function') { - callback = task || noop; - task = opts; - } else { - parseTimes(options, opts); - callback = callback || noop; - } - - if (typeof task !== 'function') { - throw new Error("Invalid arguments for async.retry"); - } - - var _task = wrapAsync(task); - - var attempt = 1; - function retryAttempt() { - _task(function(err) { - if (err && attempt++ < options.times && - (typeof options.errorFilter != 'function' || - options.errorFilter(err))) { - setTimeout(retryAttempt, options.intervalFunc(attempt)); - } else { - callback.apply(null, arguments); - } - }); - } - - retryAttempt(); -} - -/** - * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method - * wraps a task and makes it retryable, rather than immediately calling it - * with retries. - * - * @name retryable - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.retry]{@link module:ControlFlow.retry} - * @category Control Flow - * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional - * options, exactly the same as from `retry` - * @param {AsyncFunction} task - the asynchronous function to wrap. - * This function will be passed any arguments passed to the returned wrapper. - * Invoked with (...args, callback). - * @returns {AsyncFunction} The wrapped function, which when invoked, will - * retry on an error, based on the parameters specified in `opts`. - * This function will accept the same parameters as `task`. - * @example - * - * async.auto({ - * dep1: async.retryable(3, getFromFlakyService), - * process: ["dep1", async.retryable(3, function (results, cb) { - * maybeProcessData(results.dep1, cb); - * })] - * }, callback); - */ -var retryable = function (opts, task) { - if (!task) { - task = opts; - opts = null; - } - var _task = wrapAsync(task); - return initialParams(function (args, callback) { - function taskFn(cb) { - _task.apply(null, args.concat(cb)); - } - - if (opts) retry(opts, taskFn, callback); - else retry(taskFn, callback); - - }); -}; - -/** - * Run the functions in the `tasks` collection in series, each one running once - * the previous function has completed. If any functions in the series pass an - * error to its callback, no more functions are run, and `callback` is - * immediately called with the value of the error. Otherwise, `callback` - * receives an array of results when `tasks` have completed. - * - * It is also possible to use an object instead of an array. Each property will - * be run as a function, and the results will be passed to the final `callback` - * as an object instead of an array. This can be a more readable way of handling - * results from {@link async.series}. - * - * **Note** that while many implementations preserve the order of object - * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) - * explicitly states that - * - * > The mechanics and order of enumerating the properties is not specified. - * - * So if you rely on the order in which your series of functions are executed, - * and want this to work on all platforms, consider using an array. 
- * - * @name series - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|Object} tasks - A collection containing - * [async functions]{@link AsyncFunction} to run in series. - * Each function can complete with any number of optional `result` values. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed. This function gets a results array (or object) - * containing all the result arguments passed to the `task` callbacks. Invoked - * with (err, result). - * @example - * async.series([ - * function(callback) { - * // do some stuff ... - * callback(null, 'one'); - * }, - * function(callback) { - * // do some more stuff ... - * callback(null, 'two'); - * } - * ], - * // optional callback - * function(err, results) { - * // results is now equal to ['one', 'two'] - * }); - * - * async.series({ - * one: function(callback) { - * setTimeout(function() { - * callback(null, 1); - * }, 200); - * }, - * two: function(callback){ - * setTimeout(function() { - * callback(null, 2); - * }, 100); - * } - * }, function(err, results) { - * // results is now equal to: {one: 1, two: 2} - * }); - */ -function series(tasks, callback) { - _parallel(eachOfSeries, tasks, callback); -} - -/** - * Returns `true` if at least one element in the `coll` satisfies an async test. - * If any iteratee call returns `true`, the main `callback` is immediately - * called. - * - * @name some - * @static - * @memberOf module:Collections - * @method - * @alias any - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in parallel. - * The iteratee should complete with a boolean `result` value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. - * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). - * @example - * - * async.some(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, result) { - * // if result is true then at least one of the files exists - * }); - */ -var some = doParallel(_createTester(Boolean, identity)); - -/** - * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. - * - * @name someLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.some]{@link module:Collections.some} - * @alias anyLimit - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in parallel. - * The iteratee should complete with a boolean `result` value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. - * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). 
- */ -var someLimit = doParallelLimit(_createTester(Boolean, identity)); - -/** - * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. - * - * @name someSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.some]{@link module:Collections.some} - * @alias anySeries - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in series. - * The iteratee should complete with a boolean `result` value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. - * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). - */ -var someSeries = doLimit(someLimit, 1); - -/** - * Sorts a list by the results of running each `coll` value through an async - * `iteratee`. - * - * @name sortBy - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a value to use as the sort criteria as - * its `result`. - * Invoked with (item, callback). - * @param {Function} callback - A callback which is called after all the - * `iteratee` functions have finished, or an error occurs. Results is the items - * from the original `coll` sorted by the values returned by the `iteratee` - * calls. Invoked with (err, results). - * @example - * - * async.sortBy(['file1','file2','file3'], function(file, callback) { - * fs.stat(file, function(err, stats) { - * callback(err, stats.mtime); - * }); - * }, function(err, results) { - * // results is now the original array of files sorted by - * // modified date - * }); - * - * // By modifying the callback parameter the - * // sorting order can be influenced: - * - * // ascending order - * async.sortBy([1,9,3,5], function(x, callback) { - * callback(null, x); - * }, function(err,result) { - * // result callback - * }); - * - * // descending order - * async.sortBy([1,9,3,5], function(x, callback) { - * callback(null, x*-1); //<- x*-1 instead of x, turns the order around - * }, function(err,result) { - * // result callback - * }); - */ -function sortBy (coll, iteratee, callback) { - var _iteratee = wrapAsync(iteratee); - map(coll, function (x, callback) { - _iteratee(x, function (err, criteria) { - if (err) return callback(err); - callback(null, {value: x, criteria: criteria}); - }); - }, function (err, results) { - if (err) return callback(err); - callback(null, arrayMap(results.sort(comparator), baseProperty('value'))); - }); - - function comparator(left, right) { - var a = left.criteria, b = right.criteria; - return a < b ? -1 : a > b ? 1 : 0; - } -} - -/** - * Sets a time limit on an asynchronous function. If the function does not call - * its callback within the specified milliseconds, it will be called with a - * timeout error. The code property for the error object will be `'ETIMEDOUT'`. - * - * @name timeout - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} asyncFn - The async function to limit in time. - * @param {number} milliseconds - The specified time limit. 
- * @param {*} [info] - Any variable you want attached (`string`, `object`, etc) - * to timeout Error for more information.. - * @returns {AsyncFunction} Returns a wrapped function that can be used with any - * of the control flow functions. - * Invoke this function with the same parameters as you would `asyncFunc`. - * @example - * - * function myFunction(foo, callback) { - * doAsyncTask(foo, function(err, data) { - * // handle errors - * if (err) return callback(err); - * - * // do some stuff ... - * - * // return processed data - * return callback(null, data); - * }); - * } - * - * var wrapped = async.timeout(myFunction, 1000); - * - * // call `wrapped` as you would `myFunction` - * wrapped({ bar: 'bar' }, function(err, data) { - * // if `myFunction` takes < 1000 ms to execute, `err` - * // and `data` will have their expected values - * - * // else `err` will be an Error with the code 'ETIMEDOUT' - * }); - */ -function timeout(asyncFn, milliseconds, info) { - var fn = wrapAsync(asyncFn); - - return initialParams(function (args, callback) { - var timedOut = false; - var timer; - - function timeoutCallback() { - var name = asyncFn.name || 'anonymous'; - var error = new Error('Callback function "' + name + '" timed out.'); - error.code = 'ETIMEDOUT'; - if (info) { - error.info = info; - } - timedOut = true; - callback(error); - } - - args.push(function () { - if (!timedOut) { - callback.apply(null, arguments); - clearTimeout(timer); - } - }); - - // setup timer and call original function - timer = setTimeout(timeoutCallback, milliseconds); - fn.apply(null, args); - }); -} - -/* Built-in method references for those with the same name as other `lodash` methods. */ -var nativeCeil = Math.ceil; -var nativeMax = Math.max; - -/** - * The base implementation of `_.range` and `_.rangeRight` which doesn't - * coerce arguments. - * - * @private - * @param {number} start The start of the range. - * @param {number} end The end of the range. - * @param {number} step The value to increment or decrement by. - * @param {boolean} [fromRight] Specify iterating from right to left. - * @returns {Array} Returns the range of numbers. - */ -function baseRange(start, end, step, fromRight) { - var index = -1, - length = nativeMax(nativeCeil((end - start) / (step || 1)), 0), - result = Array(length); - - while (length--) { - result[fromRight ? length : ++index] = start; - start += step; - } - return result; -} - -/** - * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a - * time. - * - * @name timesLimit - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.times]{@link module:ControlFlow.times} - * @category Control Flow - * @param {number} count - The number of times to run the function. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - The async function to call `n` times. - * Invoked with the iteration index and a callback: (n, next). - * @param {Function} callback - see [async.map]{@link module:Collections.map}. - */ -function timeLimit(count, limit, iteratee, callback) { - var _iteratee = wrapAsync(iteratee); - mapLimit(baseRange(0, count, 1), limit, _iteratee, callback); -} - -/** - * Calls the `iteratee` function `n` times, and accumulates results in the same - * manner you would use with [map]{@link module:Collections.map}. 
- * - * @name times - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.map]{@link module:Collections.map} - * @category Control Flow - * @param {number} n - The number of times to run the function. - * @param {AsyncFunction} iteratee - The async function to call `n` times. - * Invoked with the iteration index and a callback: (n, next). - * @param {Function} callback - see {@link module:Collections.map}. - * @example - * - * // Pretend this is some complicated async factory - * var createUser = function(id, callback) { - * callback(null, { - * id: 'user' + id - * }); - * }; - * - * // generate 5 users - * async.times(5, function(n, next) { - * createUser(n, function(err, user) { - * next(err, user); - * }); - * }, function(err, users) { - * // we should now have 5 users - * }); - */ -var times = doLimit(timeLimit, Infinity); - -/** - * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. - * - * @name timesSeries - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.times]{@link module:ControlFlow.times} - * @category Control Flow - * @param {number} n - The number of times to run the function. - * @param {AsyncFunction} iteratee - The async function to call `n` times. - * Invoked with the iteration index and a callback: (n, next). - * @param {Function} callback - see {@link module:Collections.map}. - */ -var timesSeries = doLimit(timeLimit, 1); - -/** - * A relative of `reduce`. Takes an Object or Array, and iterates over each - * element in series, each step potentially mutating an `accumulator` value. - * The type of the accumulator defaults to the type of collection passed in. - * - * @name transform - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {*} [accumulator] - The initial state of the transform. If omitted, - * it will default to an empty Object or Array, depending on the type of `coll` - * @param {AsyncFunction} iteratee - A function applied to each item in the - * collection that potentially modifies the accumulator. - * Invoked with (accumulator, item, key, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result is the transformed accumulator. - * Invoked with (err, result). - * @example - * - * async.transform([1,2,3], function(acc, item, index, callback) { - * // pointless async: - * process.nextTick(function() { - * acc.push(item * 2) - * callback(null) - * }); - * }, function(err, result) { - * // result is now equal to [2, 4, 6] - * }); - * - * @example - * - * async.transform({a: 1, b: 2, c: 3}, function (obj, val, key, callback) { - * setImmediate(function () { - * obj[key] = val * 2; - * callback(); - * }) - * }, function (err, result) { - * // result is equal to {a: 2, b: 4, c: 6} - * }) - */ -function transform (coll, accumulator, iteratee, callback) { - if (arguments.length <= 3) { - callback = iteratee; - iteratee = accumulator; - accumulator = isArray(coll) ? [] : {}; - } - callback = once(callback || noop); - var _iteratee = wrapAsync(iteratee); - - eachOf(coll, function(v, k, cb) { - _iteratee(accumulator, v, k, cb); - }, function(err) { - callback(err, accumulator); - }); -} - -/** - * It runs each task in series but stops whenever any of the functions were - * successful. If one of the tasks were successful, the `callback` will be - * passed the result of the successful task. 
If all tasks fail, the callback - * will be passed the error and result (if any) of the final attempt. - * - * @name tryEach - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|Object} tasks - A collection containing functions to - * run, each function is passed a `callback(err, result)` it must call on - * completion with an error `err` (which can be `null`) and an optional `result` - * value. - * @param {Function} [callback] - An optional callback which is called when one - * of the tasks has succeeded, or all have failed. It receives the `err` and - * `result` arguments of the last attempt at completing the `task`. Invoked with - * (err, results). - * @example - * async.tryEach([ - * function getDataFromFirstWebsite(callback) { - * // Try getting the data from the first website - * callback(err, data); - * }, - * function getDataFromSecondWebsite(callback) { - * // First website failed, - * // Try getting the data from the backup website - * callback(err, data); - * } - * ], - * // optional callback - * function(err, results) { - * Now do something with the data. - * }); - * - */ -function tryEach(tasks, callback) { - var error = null; - var result; - callback = callback || noop; - eachSeries(tasks, function(task, callback) { - wrapAsync(task)(function (err, res/*, ...args*/) { - if (arguments.length > 2) { - result = slice(arguments, 1); - } else { - result = res; - } - error = err; - callback(!err); - }); - }, function () { - callback(error, result); - }); -} - -/** - * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, - * unmemoized form. Handy for testing. - * - * @name unmemoize - * @static - * @memberOf module:Utils - * @method - * @see [async.memoize]{@link module:Utils.memoize} - * @category Util - * @param {AsyncFunction} fn - the memoized function - * @returns {AsyncFunction} a function that calls the original unmemoized function - */ -function unmemoize(fn) { - return function () { - return (fn.unmemoized || fn).apply(null, arguments); - }; -} - -/** - * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when - * stopped, or an error occurs. - * - * @name whilst - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Function} test - synchronous truth test to perform before each - * execution of `iteratee`. Invoked with (). - * @param {AsyncFunction} iteratee - An async function which is called each time - * `test` passes. Invoked with (callback). - * @param {Function} [callback] - A callback which is called after the test - * function has failed and repeated execution of `iteratee` has stopped. `callback` - * will be passed an error and any arguments passed to the final `iteratee`'s - * callback. 
Invoked with (err, [results]); - * @returns undefined - * @example - * - * var count = 0; - * async.whilst( - * function() { return count < 5; }, - * function(callback) { - * count++; - * setTimeout(function() { - * callback(null, count); - * }, 1000); - * }, - * function (err, n) { - * // 5 seconds have passed, n = 5 - * } - * ); - */ -function whilst(test, iteratee, callback) { - callback = onlyOnce(callback || noop); - var _iteratee = wrapAsync(iteratee); - if (!test()) return callback(null); - var next = function(err/*, ...args*/) { - if (err) return callback(err); - if (test()) return _iteratee(next); - var args = slice(arguments, 1); - callback.apply(null, [null].concat(args)); - }; - _iteratee(next); -} - -/** - * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when - * stopped, or an error occurs. `callback` will be passed an error and any - * arguments passed to the final `iteratee`'s callback. - * - * The inverse of [whilst]{@link module:ControlFlow.whilst}. - * - * @name until - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.whilst]{@link module:ControlFlow.whilst} - * @category Control Flow - * @param {Function} test - synchronous truth test to perform before each - * execution of `iteratee`. Invoked with (). - * @param {AsyncFunction} iteratee - An async function which is called each time - * `test` fails. Invoked with (callback). - * @param {Function} [callback] - A callback which is called after the test - * function has passed and repeated execution of `iteratee` has stopped. `callback` - * will be passed an error and any arguments passed to the final `iteratee`'s - * callback. Invoked with (err, [results]); - */ -function until(test, iteratee, callback) { - whilst(function() { - return !test.apply(this, arguments); - }, iteratee, callback); -} - -/** - * Runs the `tasks` array of functions in series, each passing their results to - * the next in the array. However, if any of the `tasks` pass an error to their - * own callback, the next function is not executed, and the main `callback` is - * immediately called with the error. - * - * @name waterfall - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} - * to run. - * Each function should complete with any number of `result` values. - * The `result` values will be passed as arguments, in order, to the next task. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed. This will be passed the results of the last task's - * callback. Invoked with (err, [results]). 
- * @returns undefined - * @example - * - * async.waterfall([ - * function(callback) { - * callback(null, 'one', 'two'); - * }, - * function(arg1, arg2, callback) { - * // arg1 now equals 'one' and arg2 now equals 'two' - * callback(null, 'three'); - * }, - * function(arg1, callback) { - * // arg1 now equals 'three' - * callback(null, 'done'); - * } - * ], function (err, result) { - * // result now equals 'done' - * }); - * - * // Or, with named functions: - * async.waterfall([ - * myFirstFunction, - * mySecondFunction, - * myLastFunction, - * ], function (err, result) { - * // result now equals 'done' - * }); - * function myFirstFunction(callback) { - * callback(null, 'one', 'two'); - * } - * function mySecondFunction(arg1, arg2, callback) { - * // arg1 now equals 'one' and arg2 now equals 'two' - * callback(null, 'three'); - * } - * function myLastFunction(arg1, callback) { - * // arg1 now equals 'three' - * callback(null, 'done'); - * } - */ -var waterfall = function(tasks, callback) { - callback = once(callback || noop); - if (!isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); - if (!tasks.length) return callback(); - var taskIndex = 0; - - function nextTask(args) { - var task = wrapAsync(tasks[taskIndex++]); - args.push(onlyOnce(next)); - task.apply(null, args); - } - - function next(err/*, ...args*/) { - if (err || taskIndex === tasks.length) { - return callback.apply(null, arguments); - } - nextTask(slice(arguments, 1)); - } - - nextTask([]); -}; - -/** - * An "async function" in the context of Async is an asynchronous function with - * a variable number of parameters, with the final parameter being a callback. - * (`function (arg1, arg2, ..., callback) {}`) - * The final callback is of the form `callback(err, results...)`, which must be - * called once the function is completed. The callback should be called with a - * Error as its first argument to signal that an error occurred. - * Otherwise, if no error occurred, it should be called with `null` as the first - * argument, and any additional `result` arguments that may apply, to signal - * successful completion. - * The callback must be called exactly once, ideally on a later tick of the - * JavaScript event loop. - * - * This type of function is also referred to as a "Node-style async function", - * or a "continuation passing-style function" (CPS). Most of the methods of this - * library are themselves CPS/Node-style async functions, or functions that - * return CPS/Node-style async functions. - * - * Wherever we accept a Node-style async function, we also directly accept an - * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}. - * In this case, the `async` function will not be passed a final callback - * argument, and any thrown error will be used as the `err` argument of the - * implicit callback, and the return value will be used as the `result` value. - * (i.e. a `rejected` of the returned Promise becomes the `err` callback - * argument, and a `resolved` value becomes the `result`.) - * - * Note, due to JavaScript limitations, we can only detect native `async` - * functions and not transpilied implementations. - * Your environment must have `async`/`await` support for this to work. - * (e.g. Node > v7.6, or a recent version of a modern browser). - * If you are using `async` functions through a transpiler (e.g. 
Babel), you - * must still wrap the function with [asyncify]{@link module:Utils.asyncify}, - * because the `async function` will be compiled to an ordinary function that - * returns a promise. - * - * @typedef {Function} AsyncFunction - * @static - */ - -/** - * Async is a utility module which provides straight-forward, powerful functions - * for working with asynchronous JavaScript. Although originally designed for - * use with [Node.js](http://nodejs.org) and installable via - * `npm install --save async`, it can also be used directly in the browser. - * @module async - * @see AsyncFunction - */ - - -/** - * A collection of `async` functions for manipulating collections, such as - * arrays and objects. - * @module Collections - */ - -/** - * A collection of `async` functions for controlling the flow through a script. - * @module ControlFlow - */ - -/** - * A collection of `async` utility functions. - * @module Utils - */ - -var index = { - apply: apply, - applyEach: applyEach, - applyEachSeries: applyEachSeries, - asyncify: asyncify, - auto: auto, - autoInject: autoInject, - cargo: cargo, - compose: compose, - concat: concat, - concatLimit: concatLimit, - concatSeries: concatSeries, - constant: constant, - detect: detect, - detectLimit: detectLimit, - detectSeries: detectSeries, - dir: dir, - doDuring: doDuring, - doUntil: doUntil, - doWhilst: doWhilst, - during: during, - each: eachLimit, - eachLimit: eachLimit$1, - eachOf: eachOf, - eachOfLimit: eachOfLimit, - eachOfSeries: eachOfSeries, - eachSeries: eachSeries, - ensureAsync: ensureAsync, - every: every, - everyLimit: everyLimit, - everySeries: everySeries, - filter: filter, - filterLimit: filterLimit, - filterSeries: filterSeries, - forever: forever, - groupBy: groupBy, - groupByLimit: groupByLimit, - groupBySeries: groupBySeries, - log: log, - map: map, - mapLimit: mapLimit, - mapSeries: mapSeries, - mapValues: mapValues, - mapValuesLimit: mapValuesLimit, - mapValuesSeries: mapValuesSeries, - memoize: memoize, - nextTick: nextTick, - parallel: parallelLimit, - parallelLimit: parallelLimit$1, - priorityQueue: priorityQueue, - queue: queue$1, - race: race, - reduce: reduce, - reduceRight: reduceRight, - reflect: reflect, - reflectAll: reflectAll, - reject: reject, - rejectLimit: rejectLimit, - rejectSeries: rejectSeries, - retry: retry, - retryable: retryable, - seq: seq, - series: series, - setImmediate: setImmediate$1, - some: some, - someLimit: someLimit, - someSeries: someSeries, - sortBy: sortBy, - timeout: timeout, - times: times, - timesLimit: timeLimit, - timesSeries: timesSeries, - transform: transform, - tryEach: tryEach, - unmemoize: unmemoize, - until: until, - waterfall: waterfall, - whilst: whilst, - - // aliases - all: every, - allLimit: everyLimit, - allSeries: everySeries, - any: some, - anyLimit: someLimit, - anySeries: someSeries, - find: detect, - findLimit: detectLimit, - findSeries: detectSeries, - forEach: eachLimit, - forEachSeries: eachSeries, - forEachLimit: eachLimit$1, - forEachOf: eachOf, - forEachOfSeries: eachOfSeries, - forEachOfLimit: eachOfLimit, - inject: reduce, - foldl: reduce, - foldr: reduceRight, - select: filter, - selectLimit: filterLimit, - selectSeries: filterSeries, - wrapSync: asyncify -}; - -exports['default'] = index; -exports.apply = apply; -exports.applyEach = applyEach; -exports.applyEachSeries = applyEachSeries; -exports.asyncify = asyncify; -exports.auto = auto; -exports.autoInject = autoInject; -exports.cargo = cargo; -exports.compose = compose; -exports.concat = concat; 
-exports.concatLimit = concatLimit; -exports.concatSeries = concatSeries; -exports.constant = constant; -exports.detect = detect; -exports.detectLimit = detectLimit; -exports.detectSeries = detectSeries; -exports.dir = dir; -exports.doDuring = doDuring; -exports.doUntil = doUntil; -exports.doWhilst = doWhilst; -exports.during = during; -exports.each = eachLimit; -exports.eachLimit = eachLimit$1; -exports.eachOf = eachOf; -exports.eachOfLimit = eachOfLimit; -exports.eachOfSeries = eachOfSeries; -exports.eachSeries = eachSeries; -exports.ensureAsync = ensureAsync; -exports.every = every; -exports.everyLimit = everyLimit; -exports.everySeries = everySeries; -exports.filter = filter; -exports.filterLimit = filterLimit; -exports.filterSeries = filterSeries; -exports.forever = forever; -exports.groupBy = groupBy; -exports.groupByLimit = groupByLimit; -exports.groupBySeries = groupBySeries; -exports.log = log; -exports.map = map; -exports.mapLimit = mapLimit; -exports.mapSeries = mapSeries; -exports.mapValues = mapValues; -exports.mapValuesLimit = mapValuesLimit; -exports.mapValuesSeries = mapValuesSeries; -exports.memoize = memoize; -exports.nextTick = nextTick; -exports.parallel = parallelLimit; -exports.parallelLimit = parallelLimit$1; -exports.priorityQueue = priorityQueue; -exports.queue = queue$1; -exports.race = race; -exports.reduce = reduce; -exports.reduceRight = reduceRight; -exports.reflect = reflect; -exports.reflectAll = reflectAll; -exports.reject = reject; -exports.rejectLimit = rejectLimit; -exports.rejectSeries = rejectSeries; -exports.retry = retry; -exports.retryable = retryable; -exports.seq = seq; -exports.series = series; -exports.setImmediate = setImmediate$1; -exports.some = some; -exports.someLimit = someLimit; -exports.someSeries = someSeries; -exports.sortBy = sortBy; -exports.timeout = timeout; -exports.times = times; -exports.timesLimit = timeLimit; -exports.timesSeries = timesSeries; -exports.transform = transform; -exports.tryEach = tryEach; -exports.unmemoize = unmemoize; -exports.until = until; -exports.waterfall = waterfall; -exports.whilst = whilst; -exports.all = every; -exports.allLimit = everyLimit; -exports.allSeries = everySeries; -exports.any = some; -exports.anyLimit = someLimit; -exports.anySeries = someSeries; -exports.find = detect; -exports.findLimit = detectLimit; -exports.findSeries = detectSeries; -exports.forEach = eachLimit; -exports.forEachSeries = eachSeries; -exports.forEachLimit = eachLimit$1; -exports.forEachOf = eachOf; -exports.forEachOfSeries = eachOfSeries; -exports.forEachOfLimit = eachOfLimit; -exports.inject = reduce; -exports.foldl = reduce; -exports.foldr = reduceRight; -exports.select = filter; -exports.selectLimit = filterLimit; -exports.selectSeries = filterSeries; -exports.wrapSync = asyncify; - -Object.defineProperty(exports, '__esModule', { value: true }); - -}))); diff --git a/node_modules/async/dist/async.min.js b/node_modules/async/dist/async.min.js deleted file mode 100644 index 0b9e113..0000000 --- a/node_modules/async/dist/async.min.js +++ /dev/null @@ -1,2 +0,0 @@ -!function(n,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t(n.async=n.async||{})}(this,function(n){"use strict";function t(n,t){t|=0;for(var e=Math.max(n.length-t,0),r=Array(e),u=0;u-1&&n%1==0&&n<=Tt}function d(n){return null!=n&&v(n.length)&&!y(n)}function m(){}function g(n){return function(){if(null!==n){var t=n;n=null,t.apply(this,arguments)}}}function b(n,t){for(var 
e=-1,r=Array(n);++e-1&&n%1==0&&nu?0:u+t),e=e>u?u:e,e<0&&(e+=u),u=t>e?0:e-t>>>0,t>>>=0;for(var i=Array(u);++r=r?n:Z(n,t,e)}function tn(n,t){for(var e=n.length;e--&&J(t,n[e],0)>-1;);return e}function en(n,t){for(var e=-1,r=n.length;++e-1;);return e}function rn(n){return n.split("")}function un(n){return Xe.test(n)}function on(n){return n.match(mr)||[]}function cn(n){return un(n)?on(n):rn(n)}function fn(n){return null==n?"":Y(n)}function an(n,t,e){if(n=fn(n),n&&(e||void 0===t))return n.replace(gr,"");if(!n||!(t=Y(t)))return n;var r=cn(n),u=cn(t),i=en(r,u),o=tn(r,u)+1;return nn(r,i,o).join("")}function ln(n){return n=n.toString().replace(kr,""),n=n.match(br)[2].replace(" ",""),n=n?n.split(jr):[],n=n.map(function(n){return an(n.replace(Sr,""))})}function sn(n,t){var e={};N(n,function(n,t){function r(t,e){var r=K(u,function(n){return t[n]});r.push(e),a(n).apply(null,r)}var u,i=f(n),o=!i&&1===n.length||i&&0===n.length;if(Pt(n))u=n.slice(0,-1),n=n[n.length-1],e[t]=u.concat(u.length>0?r:n);else if(o)e[t]=n;else{if(u=ln(n),0===n.length&&!i&&0===u.length)throw new Error("autoInject task functions require explicit parameters.");i||u.pop(),e[t]=u.concat(r)}}),Ve(e,t)}function pn(){this.head=this.tail=null,this.length=0}function hn(n,t){n.length=1,n.head=n.tail=t}function yn(n,t,e){function r(n,t,e){if(null!=e&&"function"!=typeof e)throw new Error("task callback must be a function");if(s.started=!0,Pt(n)||(n=[n]),0===n.length&&s.idle())return lt(function(){s.drain()});for(var r=0,u=n.length;r0&&c.splice(i,1),u.callback.apply(u,arguments),null!=t&&s.error(t,u.data)}o<=s.concurrency-s.buffer&&s.unsaturated(),s.idle()&&s.drain(),s.process()}}if(null==t)t=1;else if(0===t)throw new Error("Concurrency must not be zero");var i=a(n),o=0,c=[],f=!1,l=!1,s={_tasks:new pn,concurrency:t,payload:e,saturated:m,unsaturated:m,buffer:t/4,empty:m,drain:m,error:m,started:!1,paused:!1,push:function(n,t){r(n,!1,t)},kill:function(){s.drain=m,s._tasks.empty()},unshift:function(n,t){r(n,!0,t)},remove:function(n){s._tasks.remove(n)},process:function(){if(!l){for(l=!0;!s.paused&&o2&&(i=t(arguments,1)),u[e]=i,r(n)})},function(n){r(n,u)})}function Dn(n,t){Vn(Fe,n,t)}function Rn(n,t,e){Vn(q(t),n,e)}function Cn(n,t){if(t=g(t||m),!Pt(n))return t(new TypeError("First argument to race must be an array of functions"));if(!n.length)return t();for(var e=0,r=n.length;er?1:0}var u=a(t);Ie(n,function(n,t){u(n,function(e,r){return e?t(e):void t(null,{value:n,criteria:r})})},function(n,t){return n?e(n):void e(null,K(t.sort(r),Bn("value")))})}function Xn(n,t,e){var r=a(n);return ct(function(u,i){function o(){var t=n.name||"anonymous",r=new Error('Callback function "'+t+'" timed out.');r.code="ETIMEDOUT",e&&(r.info=e),f=!0,i(r)}var c,f=!1;u.push(function(){f||(i.apply(null,arguments),clearTimeout(c))}),c=setTimeout(o,t),r.apply(null,u)})}function Yn(n,t,e,r){for(var u=-1,i=iu(uu((t-n)/(e||1)),0),o=Array(i);i--;)o[r?i:++u]=n,n+=e;return o}function Zn(n,t,e,r){var u=a(e);Ue(Yn(0,n,1),t,u,r)}function nt(n,t,e,r){arguments.length<=3&&(r=e,e=t,t=Pt(n)?[]:{}),r=g(r||m);var u=a(e);Fe(n,function(n,e,r){u(t,n,e,r)},function(n){r(n,t)})}function tt(n,e){var r,u=null;e=e||m,Ur(n,function(n,e){a(n)(function(n,i){r=arguments.length>2?t(arguments,1):i,u=n,e(!n)})},function(){e(u,r)})}function et(n){return function(){return(n.unmemoized||n).apply(null,arguments)}}function rt(n,e,r){r=U(r||m);var u=a(e);if(!n())return r(null);var i=function(e){if(e)return r(e);if(n())return u(i);var o=t(arguments,1);r.apply(null,[null].concat(o))};u(i)}function 
ut(n,t,e){rt(function(){return!n.apply(this,arguments)},t,e)}var it,ot=function(n){var e=t(arguments,1);return function(){var r=t(arguments);return n.apply(null,e.concat(r))}},ct=function(n){return function(){var e=t(arguments),r=e.pop();n.call(this,e,r)}},ft="function"==typeof setImmediate&&setImmediate,at="object"==typeof process&&"function"==typeof process.nextTick;it=ft?setImmediate:at?process.nextTick:r;var lt=u(it),st="function"==typeof Symbol,pt="object"==typeof global&&global&&global.Object===Object&&global,ht="object"==typeof self&&self&&self.Object===Object&&self,yt=pt||ht||Function("return this")(),vt=yt.Symbol,dt=Object.prototype,mt=dt.hasOwnProperty,gt=dt.toString,bt=vt?vt.toStringTag:void 0,jt=Object.prototype,St=jt.toString,kt="[object Null]",Lt="[object Undefined]",Ot=vt?vt.toStringTag:void 0,wt="[object AsyncFunction]",xt="[object Function]",Et="[object GeneratorFunction]",At="[object Proxy]",Tt=9007199254740991,_t={},Bt="function"==typeof Symbol&&Symbol.iterator,Ft=function(n){return Bt&&n[Bt]&&n[Bt]()},It="[object Arguments]",Mt=Object.prototype,Ut=Mt.hasOwnProperty,qt=Mt.propertyIsEnumerable,zt=S(function(){return arguments}())?S:function(n){return j(n)&&Ut.call(n,"callee")&&!qt.call(n,"callee")},Pt=Array.isArray,Vt="object"==typeof n&&n&&!n.nodeType&&n,Dt=Vt&&"object"==typeof module&&module&&!module.nodeType&&module,Rt=Dt&&Dt.exports===Vt,Ct=Rt?yt.Buffer:void 0,$t=Ct?Ct.isBuffer:void 0,Wt=$t||k,Nt=9007199254740991,Qt=/^(?:0|[1-9]\d*)$/,Gt="[object Arguments]",Ht="[object Array]",Jt="[object Boolean]",Kt="[object Date]",Xt="[object Error]",Yt="[object Function]",Zt="[object Map]",ne="[object Number]",te="[object Object]",ee="[object RegExp]",re="[object Set]",ue="[object String]",ie="[object WeakMap]",oe="[object ArrayBuffer]",ce="[object DataView]",fe="[object Float32Array]",ae="[object Float64Array]",le="[object Int8Array]",se="[object Int16Array]",pe="[object Int32Array]",he="[object Uint8Array]",ye="[object Uint8ClampedArray]",ve="[object Uint16Array]",de="[object Uint32Array]",me={};me[fe]=me[ae]=me[le]=me[se]=me[pe]=me[he]=me[ye]=me[ve]=me[de]=!0,me[Gt]=me[Ht]=me[oe]=me[Jt]=me[ce]=me[Kt]=me[Xt]=me[Yt]=me[Zt]=me[ne]=me[te]=me[ee]=me[re]=me[ue]=me[ie]=!1;var ge="object"==typeof n&&n&&!n.nodeType&&n,be=ge&&"object"==typeof module&&module&&!module.nodeType&&module,je=be&&be.exports===ge,Se=je&&pt.process,ke=function(){try{var n=be&&be.require&&be.require("util").types;return n?n:Se&&Se.binding&&Se.binding("util")}catch(n){}}(),Le=ke&&ke.isTypedArray,Oe=Le?w(Le):O,we=Object.prototype,xe=we.hasOwnProperty,Ee=Object.prototype,Ae=A(Object.keys,Object),Te=Object.prototype,_e=Te.hasOwnProperty,Be=P(z,1/0),Fe=function(n,t,e){var r=d(n)?V:Be;r(n,a(t),e)},Ie=D(R),Me=l(Ie),Ue=C(R),qe=P(Ue,1),ze=l(qe),Pe=W(),Ve=function(n,e,r){function u(n,t){j.push(function(){f(n,t)})}function i(){if(0===j.length&&0===v)return r(null,y);for(;j.length&&v2&&(u=t(arguments,1)),e){var i={};N(y,function(n,t){i[t]=n}),i[n]=u,d=!0,b=Object.create(null),r(e,i)}else y[n]=u,c(n)});v++;var i=a(e[e.length-1]);e.length>1?i(y,u):i(u)}}function l(){for(var n,t=0;S.length;)n=S.pop(),t++,$(s(n),function(n){0===--k[n]&&S.push(n)});if(t!==h)throw new Error("async.auto cannot execute tasks due to a recursive dependency")}function s(t){var e=[];return N(n,function(n,r){Pt(n)&&J(n,t,0)>=0&&e.push(r)}),e}"function"==typeof e&&(r=e,e=null),r=g(r||m);var p=_(n),h=p.length;if(!h)return r(null);e||(e=h);var y={},v=0,d=!1,b=Object.create(null),j=[],S=[],k={};N(n,function(t,e){if(!Pt(t))return u(e,[t]),void S.push(e);var 
r=t.slice(0,t.length-1),i=r.length;return 0===i?(u(e,t),void S.push(e)):(k[e]=i,void $(r,function(c){if(!n[c])throw new Error("async.auto task `"+e+"` has a non-existent dependency `"+c+"` in "+r.join(", "));o(c,function(){i--,0===i&&u(e,t)})}))}),l(),i()},De="[object Symbol]",Re=1/0,Ce=vt?vt.prototype:void 0,$e=Ce?Ce.toString:void 0,We="\\ud800-\\udfff",Ne="\\u0300-\\u036f",Qe="\\ufe20-\\ufe2f",Ge="\\u20d0-\\u20ff",He=Ne+Qe+Ge,Je="\\ufe0e\\ufe0f",Ke="\\u200d",Xe=RegExp("["+Ke+We+He+Je+"]"),Ye="\\ud800-\\udfff",Ze="\\u0300-\\u036f",nr="\\ufe20-\\ufe2f",tr="\\u20d0-\\u20ff",er=Ze+nr+tr,rr="\\ufe0e\\ufe0f",ur="["+Ye+"]",ir="["+er+"]",or="\\ud83c[\\udffb-\\udfff]",cr="(?:"+ir+"|"+or+")",fr="[^"+Ye+"]",ar="(?:\\ud83c[\\udde6-\\uddff]){2}",lr="[\\ud800-\\udbff][\\udc00-\\udfff]",sr="\\u200d",pr=cr+"?",hr="["+rr+"]?",yr="(?:"+sr+"(?:"+[fr,ar,lr].join("|")+")"+hr+pr+")*",vr=hr+pr+yr,dr="(?:"+[fr+ir+"?",ir,ar,lr,ur].join("|")+")",mr=RegExp(or+"(?="+or+")|"+dr+vr,"g"),gr=/^\s+|\s+$/g,br=/^(?:async\s+)?(function)?\s*[^\(]*\(\s*([^\)]*)\)/m,jr=/,/,Sr=/(=.+)?(\s*)$/,kr=/((\/\/.*$)|(\/\*[\s\S]*?\*\/))/gm;pn.prototype.removeLink=function(n){return n.prev?n.prev.next=n.next:this.head=n.next,n.next?n.next.prev=n.prev:this.tail=n.prev,n.prev=n.next=null,this.length-=1,n},pn.prototype.empty=function(){for(;this.head;)this.shift();return this},pn.prototype.insertAfter=function(n,t){t.prev=n,t.next=n.next,n.next?n.next.prev=t:this.tail=t,n.next=t,this.length+=1},pn.prototype.insertBefore=function(n,t){t.prev=n.prev,t.next=n,n.prev?n.prev.next=t:this.head=t,n.prev=t,this.length+=1},pn.prototype.unshift=function(n){this.head?this.insertBefore(this.head,n):hn(this,n)},pn.prototype.push=function(n){this.tail?this.insertAfter(this.tail,n):hn(this,n)},pn.prototype.shift=function(){return this.head&&this.removeLink(this.head)},pn.prototype.pop=function(){return this.tail&&this.removeLink(this.tail)},pn.prototype.toArray=function(){for(var n=Array(this.length),t=this.head,e=0;e=u.priority;)u=u.next;for(var i=0,o=n.length;i 32 ) { - * console.log('This file name is too long'); - * callback('File name too long'); - * } else { - * // Do work to process file here - * console.log('File processed'); - * callback(); - * } - * }, function(err) { - * // if any of the file processing produced an error, err would equal that error - * if( err ) { - * // One of the iterations produced an error. - * // All processing will now stop. - * console.log('A file failed to process'); - * } else { - * console.log('All files have been processed successfully'); - * } - * }); - */ -function eachLimit(coll, iteratee, callback) { - (0, _eachOf2.default)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/eachLimit.js b/node_modules/async/eachLimit.js deleted file mode 100644 index fff721b..0000000 --- a/node_modules/async/eachLimit.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = eachLimit; - -var _eachOfLimit = require('./internal/eachOfLimit'); - -var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); - -var _withoutIndex = require('./internal/withoutIndex'); - -var _withoutIndex2 = _interopRequireDefault(_withoutIndex); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -/** - * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. - * - * @name eachLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.each]{@link module:Collections.each} - * @alias forEachLimit - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The array index is not passed to the iteratee. - * If you need the index, use `eachOfLimit`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - */ -function eachLimit(coll, limit, iteratee, callback) { - (0, _eachOfLimit2.default)(limit)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/eachOf.js b/node_modules/async/eachOf.js deleted file mode 100644 index 055b9bd..0000000 --- a/node_modules/async/eachOf.js +++ /dev/null @@ -1,111 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -exports.default = function (coll, iteratee, callback) { - var eachOfImplementation = (0, _isArrayLike2.default)(coll) ? eachOfArrayLike : eachOfGeneric; - eachOfImplementation(coll, (0, _wrapAsync2.default)(iteratee), callback); -}; - -var _isArrayLike = require('lodash/isArrayLike'); - -var _isArrayLike2 = _interopRequireDefault(_isArrayLike); - -var _breakLoop = require('./internal/breakLoop'); - -var _breakLoop2 = _interopRequireDefault(_breakLoop); - -var _eachOfLimit = require('./eachOfLimit'); - -var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _once = require('./internal/once'); - -var _once2 = _interopRequireDefault(_once); - -var _onlyOnce = require('./internal/onlyOnce'); - -var _onlyOnce2 = _interopRequireDefault(_onlyOnce); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -// eachOf implementation optimized for array-likes -function eachOfArrayLike(coll, iteratee, callback) { - callback = (0, _once2.default)(callback || _noop2.default); - var index = 0, - completed = 0, - length = coll.length; - if (length === 0) { - callback(null); - } - - function iteratorCallback(err, value) { - if (err) { - callback(err); - } else if (++completed === length || value === _breakLoop2.default) { - callback(null); - } - } - - for (; index < length; index++) { - iteratee(coll[index], index, (0, _onlyOnce2.default)(iteratorCallback)); - } -} - -// a generic version of eachOf which can handle array, object, and iterator cases. -var eachOfGeneric = (0, _doLimit2.default)(_eachOfLimit2.default, Infinity); - -/** - * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument - * to the iteratee. 
- * - * @name eachOf - * @static - * @memberOf module:Collections - * @method - * @alias forEachOf - * @category Collection - * @see [async.each]{@link module:Collections.each} - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each - * item in `coll`. - * The `key` is the item's key, or index in the case of an array. - * Invoked with (item, key, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - * @example - * - * var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"}; - * var configs = {}; - * - * async.forEachOf(obj, function (value, key, callback) { - * fs.readFile(__dirname + value, "utf8", function (err, data) { - * if (err) return callback(err); - * try { - * configs[key] = JSON.parse(data); - * } catch (e) { - * return callback(e); - * } - * callback(); - * }); - * }, function (err) { - * if (err) console.error(err.message); - * // configs is now a map of JSON data - * doSomethingWith(configs); - * }); - */ -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/eachOfLimit.js b/node_modules/async/eachOfLimit.js deleted file mode 100644 index 30a1329..0000000 --- a/node_modules/async/eachOfLimit.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = eachOfLimit; - -var _eachOfLimit2 = require('./internal/eachOfLimit'); - -var _eachOfLimit3 = _interopRequireDefault(_eachOfLimit2); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a - * time. - * - * @name eachOfLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.eachOf]{@link module:Collections.eachOf} - * @alias forEachOfLimit - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each - * item in `coll`. The `key` is the item's key, or index in the case of an - * array. - * Invoked with (item, key, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - */ -function eachOfLimit(coll, limit, iteratee, callback) { - (0, _eachOfLimit3.default)(limit)(coll, (0, _wrapAsync2.default)(iteratee), callback); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/eachOfSeries.js b/node_modules/async/eachOfSeries.js deleted file mode 100644 index 9dfd711..0000000 --- a/node_modules/async/eachOfSeries.js +++ /dev/null @@ -1,35 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _eachOfLimit = require('./eachOfLimit'); - -var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -/** - * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. - * - * @name eachOfSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.eachOf]{@link module:Collections.eachOf} - * @alias forEachOfSeries - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * Invoked with (item, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Invoked with (err). - */ -exports.default = (0, _doLimit2.default)(_eachOfLimit2.default, 1); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/eachSeries.js b/node_modules/async/eachSeries.js deleted file mode 100644 index 55c7840..0000000 --- a/node_modules/async/eachSeries.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _eachLimit = require('./eachLimit'); - -var _eachLimit2 = _interopRequireDefault(_eachLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. - * - * @name eachSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.each]{@link module:Collections.each} - * @alias forEachSeries - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each - * item in `coll`. - * The array index is not passed to the iteratee. - * If you need the index, use `eachOfSeries`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - */ -exports.default = (0, _doLimit2.default)(_eachLimit2.default, 1); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/ensureAsync.js b/node_modules/async/ensureAsync.js deleted file mode 100644 index 1f57aec..0000000 --- a/node_modules/async/ensureAsync.js +++ /dev/null @@ -1,73 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = ensureAsync; - -var _setImmediate = require('./internal/setImmediate'); - -var _setImmediate2 = _interopRequireDefault(_setImmediate); - -var _initialParams = require('./internal/initialParams'); - -var _initialParams2 = _interopRequireDefault(_initialParams); - -var _wrapAsync = require('./internal/wrapAsync'); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Wrap an async function and ensure it calls its callback on a later tick of - * the event loop. If the function already calls its callback on a next tick, - * no extra deferral is added. This is useful for preventing stack overflows - * (`RangeError: Maximum call stack size exceeded`) and generally keeping - * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) - * contained. 
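A usage sketch of `eachOfSeries` as documented above, iterating an object one entry at a time; the table map and the `backupTable` helper are illustrative assumptions, not part of this module.

var async = require('async');

// Hypothetical helper: backs up one table, then calls cb(err).
function backupTable(name, cb) {
    console.log('backing up ' + name);
    setTimeout(cb, 100); // stand-in for real async work
}

// The iteratee receives (value, key, callback) and runs strictly in series.
async.eachOfSeries({ users: 'users.db', posts: 'posts.db' }, function (file, table, callback) {
    backupTable(table, callback);
}, function (err) {
    if (err) console.error('backup failed:', err);
});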
ES2017 `async` functions are returned as-is -- they are immune - * to Zalgo's corrupting influences, as they always resolve on a later tick. - * - * @name ensureAsync - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} fn - an async function, one that expects a node-style - * callback as its last argument. - * @returns {AsyncFunction} Returns a wrapped function with the exact same call - * signature as the function passed in. - * @example - * - * function sometimesAsync(arg, callback) { - * if (cache[arg]) { - * return callback(null, cache[arg]); // this would be synchronous!! - * } else { - * doSomeIO(arg, callback); // this IO would be asynchronous - * } - * } - * - * // this has a risk of stack overflows if many results are cached in a row - * async.mapSeries(args, sometimesAsync, done); - * - * // this will defer sometimesAsync's callback if necessary, - * // preventing stack overflows - * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); - */ -function ensureAsync(fn) { - if ((0, _wrapAsync.isAsync)(fn)) return fn; - return (0, _initialParams2.default)(function (args, callback) { - var sync = true; - args.push(function () { - var innerArgs = arguments; - if (sync) { - (0, _setImmediate2.default)(function () { - callback.apply(null, innerArgs); - }); - } else { - callback.apply(null, innerArgs); - } - }); - fn.apply(this, args); - sync = false; - }); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/every.js b/node_modules/async/every.js deleted file mode 100644 index d0565b0..0000000 --- a/node_modules/async/every.js +++ /dev/null @@ -1,50 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _createTester = require('./internal/createTester'); - -var _createTester2 = _interopRequireDefault(_createTester); - -var _doParallel = require('./internal/doParallel'); - -var _doParallel2 = _interopRequireDefault(_doParallel); - -var _notId = require('./internal/notId'); - -var _notId2 = _interopRequireDefault(_notId); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Returns `true` if every element in `coll` satisfies an async test. If any - * iteratee call returns `false`, the main `callback` is immediately called. - * - * @name every - * @static - * @memberOf module:Collections - * @method - * @alias all - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in parallel. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result). 
- * @example - * - * async.every(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, result) { - * // if result is true then every file exists - * }); - */ -exports.default = (0, _doParallel2.default)((0, _createTester2.default)(_notId2.default, _notId2.default)); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/everyLimit.js b/node_modules/async/everyLimit.js deleted file mode 100644 index a1a759a..0000000 --- a/node_modules/async/everyLimit.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _createTester = require('./internal/createTester'); - -var _createTester2 = _interopRequireDefault(_createTester); - -var _doParallelLimit = require('./internal/doParallelLimit'); - -var _doParallelLimit2 = _interopRequireDefault(_doParallelLimit); - -var _notId = require('./internal/notId'); - -var _notId2 = _interopRequireDefault(_notId); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. - * - * @name everyLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.every]{@link module:Collections.every} - * @alias allLimit - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in parallel. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result). - */ -exports.default = (0, _doParallelLimit2.default)((0, _createTester2.default)(_notId2.default, _notId2.default)); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/everySeries.js b/node_modules/async/everySeries.js deleted file mode 100644 index 23bfebb..0000000 --- a/node_modules/async/everySeries.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _everyLimit = require('./everyLimit'); - -var _everyLimit2 = _interopRequireDefault(_everyLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. - * - * @name everySeries - * @static - * @memberOf module:Collections - * @method - * @see [async.every]{@link module:Collections.every} - * @alias allSeries - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in series. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. 
Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result). - */ -exports.default = (0, _doLimit2.default)(_everyLimit2.default, 1); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/filter.js b/node_modules/async/filter.js deleted file mode 100644 index 54772d5..0000000 --- a/node_modules/async/filter.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _filter = require('./internal/filter'); - -var _filter2 = _interopRequireDefault(_filter); - -var _doParallel = require('./internal/doParallel'); - -var _doParallel2 = _interopRequireDefault(_doParallel); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Returns a new array of all the values in `coll` which pass an async truth - * test. This operation is performed in parallel, but the results array will be - * in the same order as the original. - * - * @name filter - * @static - * @memberOf module:Collections - * @method - * @alias select - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - * @example - * - * async.filter(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, results) { - * // results now equals an array of the existing files - * }); - */ -exports.default = (0, _doParallel2.default)(_filter2.default); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/filterLimit.js b/node_modules/async/filterLimit.js deleted file mode 100644 index 06216f7..0000000 --- a/node_modules/async/filterLimit.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _filter = require('./internal/filter'); - -var _filter2 = _interopRequireDefault(_filter); - -var _doParallelLimit = require('./internal/doParallelLimit'); - -var _doParallelLimit2 = _interopRequireDefault(_doParallelLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a - * time. - * - * @name filterLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.filter]{@link module:Collections.filter} - * @alias selectLimit - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). 
- */ -exports.default = (0, _doParallelLimit2.default)(_filter2.default); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/filterSeries.js b/node_modules/async/filterSeries.js deleted file mode 100644 index e48d966..0000000 --- a/node_modules/async/filterSeries.js +++ /dev/null @@ -1,35 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _filterLimit = require('./filterLimit'); - -var _filterLimit2 = _interopRequireDefault(_filterLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. - * - * @name filterSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.filter]{@link module:Collections.filter} - * @alias selectSeries - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results) - */ -exports.default = (0, _doLimit2.default)(_filterLimit2.default, 1); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/find.js b/node_modules/async/find.js deleted file mode 100644 index db46783..0000000 --- a/node_modules/async/find.js +++ /dev/null @@ -1,61 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _identity = require('lodash/identity'); - -var _identity2 = _interopRequireDefault(_identity); - -var _createTester = require('./internal/createTester'); - -var _createTester2 = _interopRequireDefault(_createTester); - -var _doParallel = require('./internal/doParallel'); - -var _doParallel2 = _interopRequireDefault(_doParallel); - -var _findGetResult = require('./internal/findGetResult'); - -var _findGetResult2 = _interopRequireDefault(_findGetResult); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Returns the first value in `coll` that passes an async truth test. The - * `iteratee` is applied in parallel, meaning the first iteratee to return - * `true` will fire the detect `callback` with that result. That means the - * result might not be the first item in the original `coll` (in terms of order) - * that passes the test. - - * If order within the original `coll` is important, then look at - * [`detectSeries`]{@link module:Collections.detectSeries}. - * - * @name detect - * @static - * @memberOf module:Collections - * @method - * @alias find - * @category Collections - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. - * The iteratee must complete with a boolean value as its result. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the `iteratee` functions have finished. 
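A usage sketch of `filterLimit` with the signature documented above; the file list and the limit of 2 are illustrative assumptions.

var fs = require('fs');
var async = require('async');

// At most 2 fs.access checks run at the same time.
async.filterLimit(['file1', 'file2', 'file3', 'file4'], 2, function (filePath, callback) {
    fs.access(filePath, function (err) {
        callback(null, !err);
    });
}, function (err, existing) {
    // existing contains the paths that passed the truth test, in input order
    console.log(existing);
});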
- * Result will be the first item in the array that passes the truth test - * (iteratee) or the value `undefined` if none passed. Invoked with - * (err, result). - * @example - * - * async.detect(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, result) { - * // result now equals the first file in the list that exists - * }); - */ -exports.default = (0, _doParallel2.default)((0, _createTester2.default)(_identity2.default, _findGetResult2.default)); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/findLimit.js b/node_modules/async/findLimit.js deleted file mode 100644 index 6bf6560..0000000 --- a/node_modules/async/findLimit.js +++ /dev/null @@ -1,48 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _identity = require('lodash/identity'); - -var _identity2 = _interopRequireDefault(_identity); - -var _createTester = require('./internal/createTester'); - -var _createTester2 = _interopRequireDefault(_createTester); - -var _doParallelLimit = require('./internal/doParallelLimit'); - -var _doParallelLimit2 = _interopRequireDefault(_doParallelLimit); - -var _findGetResult = require('./internal/findGetResult'); - -var _findGetResult2 = _interopRequireDefault(_findGetResult); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a - * time. - * - * @name detectLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.detect]{@link module:Collections.detect} - * @alias findLimit - * @category Collections - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. - * The iteratee must complete with a boolean value as its result. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the `iteratee` functions have finished. - * Result will be the first item in the array that passes the truth test - * (iteratee) or the value `undefined` if none passed. Invoked with - * (err, result). - */ -exports.default = (0, _doParallelLimit2.default)((0, _createTester2.default)(_identity2.default, _findGetResult2.default)); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/findSeries.js b/node_modules/async/findSeries.js deleted file mode 100644 index 6fe16c9..0000000 --- a/node_modules/async/findSeries.js +++ /dev/null @@ -1,38 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _detectLimit = require('./detectLimit'); - -var _detectLimit2 = _interopRequireDefault(_detectLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. 
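A usage sketch of `detectLimit`, reusing the file-existence test from the `detect` example above; the limit of 2 is an arbitrary illustration.

var fs = require('fs');
var async = require('async');

// Check at most 2 files in parallel; the first item to pass the test wins.
async.detectLimit(['file1', 'file2', 'file3'], 2, function (filePath, callback) {
    fs.access(filePath, function (err) {
        callback(null, !err);
    });
}, function (err, result) {
    // result is one existing file, or undefined if none exist
    console.log(result);
});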
- * - * @name detectSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.detect]{@link module:Collections.detect} - * @alias findSeries - * @category Collections - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. - * The iteratee must complete with a boolean value as its result. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the `iteratee` functions have finished. - * Result will be the first item in the array that passes the truth test - * (iteratee) or the value `undefined` if none passed. Invoked with - * (err, result). - */ -exports.default = (0, _doLimit2.default)(_detectLimit2.default, 1); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/foldl.js b/node_modules/async/foldl.js deleted file mode 100644 index 3fb8019..0000000 --- a/node_modules/async/foldl.js +++ /dev/null @@ -1,78 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = reduce; - -var _eachOfSeries = require('./eachOfSeries'); - -var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _once = require('./internal/once'); - -var _once2 = _interopRequireDefault(_once); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Reduces `coll` into a single value using an async `iteratee` to return each - * successive step. `memo` is the initial state of the reduction. This function - * only operates in series. - * - * For performance reasons, it may make sense to split a call to this function - * into a parallel map, and then use the normal `Array.prototype.reduce` on the - * results. This function is for situations where each step in the reduction - * needs to be async; if you can get the data before reducing it, then it's - * probably a good idea to do so. - * - * @name reduce - * @static - * @memberOf module:Collections - * @method - * @alias inject - * @alias foldl - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {*} memo - The initial state of the reduction. - * @param {AsyncFunction} iteratee - A function applied to each item in the - * array to produce the next step in the reduction. - * The `iteratee` should complete with the next state of the reduction. - * If the iteratee complete with an error, the reduction is stopped and the - * main `callback` is immediately called with the error. - * Invoked with (memo, item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result is the reduced value. Invoked with - * (err, result). 
- * @example - * - * async.reduce([1,2,3], 0, function(memo, item, callback) { - * // pointless async: - * process.nextTick(function() { - * callback(null, memo + item) - * }); - * }, function(err, result) { - * // result is now equal to the last value of memo, which is 6 - * }); - */ -function reduce(coll, memo, iteratee, callback) { - callback = (0, _once2.default)(callback || _noop2.default); - var _iteratee = (0, _wrapAsync2.default)(iteratee); - (0, _eachOfSeries2.default)(coll, function (x, i, callback) { - _iteratee(memo, x, function (err, v) { - memo = v; - callback(err); - }); - }, function (err) { - callback(err, memo); - }); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/foldr.js b/node_modules/async/foldr.js deleted file mode 100644 index 3d17d32..0000000 --- a/node_modules/async/foldr.js +++ /dev/null @@ -1,44 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = reduceRight; - -var _reduce = require('./reduce'); - -var _reduce2 = _interopRequireDefault(_reduce); - -var _slice = require('./internal/slice'); - -var _slice2 = _interopRequireDefault(_slice); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. - * - * @name reduceRight - * @static - * @memberOf module:Collections - * @method - * @see [async.reduce]{@link module:Collections.reduce} - * @alias foldr - * @category Collection - * @param {Array} array - A collection to iterate over. - * @param {*} memo - The initial state of the reduction. - * @param {AsyncFunction} iteratee - A function applied to each item in the - * array to produce the next step in the reduction. - * The `iteratee` should complete with the next state of the reduction. - * If the iteratee complete with an error, the reduction is stopped and the - * main `callback` is immediately called with the error. - * Invoked with (memo, item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result is the reduced value. Invoked with - * (err, result). - */ -function reduceRight(array, memo, iteratee, callback) { - var reversed = (0, _slice2.default)(array).reverse(); - (0, _reduce2.default)(reversed, memo, iteratee, callback); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEach.js b/node_modules/async/forEach.js deleted file mode 100644 index 4b20af3..0000000 --- a/node_modules/async/forEach.js +++ /dev/null @@ -1,82 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = eachLimit; - -var _eachOf = require('./eachOf'); - -var _eachOf2 = _interopRequireDefault(_eachOf); - -var _withoutIndex = require('./internal/withoutIndex'); - -var _withoutIndex2 = _interopRequireDefault(_withoutIndex); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Applies the function `iteratee` to each item in `coll`, in parallel. - * The `iteratee` is called with an item from the list, and a callback for when - * it has finished. If the `iteratee` passes an error to its `callback`, the - * main `callback` (for the `each` function) is immediately called with the - * error. 
- * - * Note, that since this function applies `iteratee` to each item in parallel, - * there is no guarantee that the iteratee functions will complete in order. - * - * @name each - * @static - * @memberOf module:Collections - * @method - * @alias forEach - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to - * each item in `coll`. Invoked with (item, callback). - * The array index is not passed to the iteratee. - * If you need the index, use `eachOf`. - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - * @example - * - * // assuming openFiles is an array of file names and saveFile is a function - * // to save the modified contents of that file: - * - * async.each(openFiles, saveFile, function(err){ - * // if any of the saves produced an error, err would equal that error - * }); - * - * // assuming openFiles is an array of file names - * async.each(openFiles, function(file, callback) { - * - * // Perform operation on file here. - * console.log('Processing file ' + file); - * - * if( file.length > 32 ) { - * console.log('This file name is too long'); - * callback('File name too long'); - * } else { - * // Do work to process file here - * console.log('File processed'); - * callback(); - * } - * }, function(err) { - * // if any of the file processing produced an error, err would equal that error - * if( err ) { - * // One of the iterations produced an error. - * // All processing will now stop. - * console.log('A file failed to process'); - * } else { - * console.log('All files have been processed successfully'); - * } - * }); - */ -function eachLimit(coll, iteratee, callback) { - (0, _eachOf2.default)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEachLimit.js b/node_modules/async/forEachLimit.js deleted file mode 100644 index fff721b..0000000 --- a/node_modules/async/forEachLimit.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = eachLimit; - -var _eachOfLimit = require('./internal/eachOfLimit'); - -var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); - -var _withoutIndex = require('./internal/withoutIndex'); - -var _withoutIndex2 = _interopRequireDefault(_withoutIndex); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. - * - * @name eachLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.each]{@link module:Collections.each} - * @alias forEachLimit - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The array index is not passed to the iteratee. - * If you need the index, use `eachOfLimit`. - * Invoked with (item, callback). 
- * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - */ -function eachLimit(coll, limit, iteratee, callback) { - (0, _eachOfLimit2.default)(limit)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEachOf.js b/node_modules/async/forEachOf.js deleted file mode 100644 index 055b9bd..0000000 --- a/node_modules/async/forEachOf.js +++ /dev/null @@ -1,111 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -exports.default = function (coll, iteratee, callback) { - var eachOfImplementation = (0, _isArrayLike2.default)(coll) ? eachOfArrayLike : eachOfGeneric; - eachOfImplementation(coll, (0, _wrapAsync2.default)(iteratee), callback); -}; - -var _isArrayLike = require('lodash/isArrayLike'); - -var _isArrayLike2 = _interopRequireDefault(_isArrayLike); - -var _breakLoop = require('./internal/breakLoop'); - -var _breakLoop2 = _interopRequireDefault(_breakLoop); - -var _eachOfLimit = require('./eachOfLimit'); - -var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _once = require('./internal/once'); - -var _once2 = _interopRequireDefault(_once); - -var _onlyOnce = require('./internal/onlyOnce'); - -var _onlyOnce2 = _interopRequireDefault(_onlyOnce); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -// eachOf implementation optimized for array-likes -function eachOfArrayLike(coll, iteratee, callback) { - callback = (0, _once2.default)(callback || _noop2.default); - var index = 0, - completed = 0, - length = coll.length; - if (length === 0) { - callback(null); - } - - function iteratorCallback(err, value) { - if (err) { - callback(err); - } else if (++completed === length || value === _breakLoop2.default) { - callback(null); - } - } - - for (; index < length; index++) { - iteratee(coll[index], index, (0, _onlyOnce2.default)(iteratorCallback)); - } -} - -// a generic version of eachOf which can handle array, object, and iterator cases. -var eachOfGeneric = (0, _doLimit2.default)(_eachOfLimit2.default, Infinity); - -/** - * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument - * to the iteratee. - * - * @name eachOf - * @static - * @memberOf module:Collections - * @method - * @alias forEachOf - * @category Collection - * @see [async.each]{@link module:Collections.each} - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each - * item in `coll`. - * The `key` is the item's key, or index in the case of an array. - * Invoked with (item, key, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
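A short usage sketch of `eachLimit` with the signature documented above (coll, limit, iteratee, callback); the file list, the limit of 5, and the `saveFile` helper are illustrative assumptions rather than part of this module.

var async = require('async');

var openFiles = ['a.txt', 'b.txt', 'c.txt']; // illustrative input

// Hypothetical async worker: processes one file, then calls cb(err).
function saveFile(file, cb) {
    setTimeout(cb, 50); // stand-in for real I/O
}

// No more than 5 saveFile calls are in flight at once.
async.eachLimit(openFiles, 5, saveFile, function (err) {
    if (err) console.error('a save failed:', err);
});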
- * @example - * - * var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"}; - * var configs = {}; - * - * async.forEachOf(obj, function (value, key, callback) { - * fs.readFile(__dirname + value, "utf8", function (err, data) { - * if (err) return callback(err); - * try { - * configs[key] = JSON.parse(data); - * } catch (e) { - * return callback(e); - * } - * callback(); - * }); - * }, function (err) { - * if (err) console.error(err.message); - * // configs is now a map of JSON data - * doSomethingWith(configs); - * }); - */ -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEachOfLimit.js b/node_modules/async/forEachOfLimit.js deleted file mode 100644 index 30a1329..0000000 --- a/node_modules/async/forEachOfLimit.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = eachOfLimit; - -var _eachOfLimit2 = require('./internal/eachOfLimit'); - -var _eachOfLimit3 = _interopRequireDefault(_eachOfLimit2); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a - * time. - * - * @name eachOfLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.eachOf]{@link module:Collections.eachOf} - * @alias forEachOfLimit - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each - * item in `coll`. The `key` is the item's key, or index in the case of an - * array. - * Invoked with (item, key, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - */ -function eachOfLimit(coll, limit, iteratee, callback) { - (0, _eachOfLimit3.default)(limit)(coll, (0, _wrapAsync2.default)(iteratee), callback); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEachOfSeries.js b/node_modules/async/forEachOfSeries.js deleted file mode 100644 index 9dfd711..0000000 --- a/node_modules/async/forEachOfSeries.js +++ /dev/null @@ -1,35 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _eachOfLimit = require('./eachOfLimit'); - -var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. - * - * @name eachOfSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.eachOf]{@link module:Collections.eachOf} - * @alias forEachOfSeries - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * Invoked with (item, key, callback). 
- * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Invoked with (err). - */ -exports.default = (0, _doLimit2.default)(_eachOfLimit2.default, 1); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEachSeries.js b/node_modules/async/forEachSeries.js deleted file mode 100644 index 55c7840..0000000 --- a/node_modules/async/forEachSeries.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _eachLimit = require('./eachLimit'); - -var _eachLimit2 = _interopRequireDefault(_eachLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. - * - * @name eachSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.each]{@link module:Collections.each} - * @alias forEachSeries - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each - * item in `coll`. - * The array index is not passed to the iteratee. - * If you need the index, use `eachOfSeries`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - */ -exports.default = (0, _doLimit2.default)(_eachLimit2.default, 1); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forever.js b/node_modules/async/forever.js deleted file mode 100644 index 6c7b8a4..0000000 --- a/node_modules/async/forever.js +++ /dev/null @@ -1,65 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = forever; - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _onlyOnce = require('./internal/onlyOnce'); - -var _onlyOnce2 = _interopRequireDefault(_onlyOnce); - -var _ensureAsync = require('./ensureAsync'); - -var _ensureAsync2 = _interopRequireDefault(_ensureAsync); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Calls the asynchronous function `fn` with a callback parameter that allows it - * to call itself again, in series, indefinitely. - - * If an error is passed to the callback then `errback` is called with the - * error, and execution stops, otherwise it will never be called. - * - * @name forever - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {AsyncFunction} fn - an async function to call repeatedly. - * Invoked with (next). - * @param {Function} [errback] - when `fn` passes an error to it's callback, - * this function will be called, and execution stops. Invoked with (err). - * @example - * - * async.forever( - * function(next) { - * // next is suitable for passing to things that need a callback(err [, whatever]); - * // it will result in this function being called again. 
- * }, - * function(err) { - * // if next is called with a value in its first parameter, it will appear - * // in here as 'err', and execution will stop. - * } - * ); - */ -function forever(fn, errback) { - var done = (0, _onlyOnce2.default)(errback || _noop2.default); - var task = (0, _wrapAsync2.default)((0, _ensureAsync2.default)(fn)); - - function next(err) { - if (err) return done(err); - task(next); - } - next(); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/groupBy.js b/node_modules/async/groupBy.js deleted file mode 100644 index 755cba7..0000000 --- a/node_modules/async/groupBy.js +++ /dev/null @@ -1,54 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -var _groupByLimit = require('./groupByLimit'); - -var _groupByLimit2 = _interopRequireDefault(_groupByLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Returns a new object, where each value corresponds to an array of items, from - * `coll`, that returned the corresponding key. That is, the keys of the object - * correspond to the values passed to the `iteratee` callback. - * - * Note: Since this function applies the `iteratee` to each item in parallel, - * there is no guarantee that the `iteratee` functions will complete in order. - * However, the values for each key in the `result` will be in the same order as - * the original `coll`. For Objects, the values will roughly be in the order of - * the original Objects' keys (but this can vary across JavaScript engines). - * - * @name groupBy - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a `key` to group the value under. - * Invoked with (value, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Result is an `Object` whoses - * properties are arrays of values which returned the corresponding key. - * @example - * - * async.groupBy(['userId1', 'userId2', 'userId3'], function(userId, callback) { - * db.findById(userId, function(err, user) { - * if (err) return callback(err); - * return callback(null, user.age); - * }); - * }, function(err, result) { - * // result is object containing the userIds grouped by age - * // e.g. 
{ 30: ['userId1', 'userId3'], 42: ['userId2']}; - * }); - */ -exports.default = (0, _doLimit2.default)(_groupByLimit2.default, Infinity); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/groupByLimit.js b/node_modules/async/groupByLimit.js deleted file mode 100644 index fec13f8..0000000 --- a/node_modules/async/groupByLimit.js +++ /dev/null @@ -1,71 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -exports.default = function (coll, limit, iteratee, callback) { - callback = callback || _noop2.default; - var _iteratee = (0, _wrapAsync2.default)(iteratee); - (0, _mapLimit2.default)(coll, limit, function (val, callback) { - _iteratee(val, function (err, key) { - if (err) return callback(err); - return callback(null, { key: key, val: val }); - }); - }, function (err, mapResults) { - var result = {}; - // from MDN, handle object having an `hasOwnProperty` prop - var hasOwnProperty = Object.prototype.hasOwnProperty; - - for (var i = 0; i < mapResults.length; i++) { - if (mapResults[i]) { - var key = mapResults[i].key; - var val = mapResults[i].val; - - if (hasOwnProperty.call(result, key)) { - result[key].push(val); - } else { - result[key] = [val]; - } - } - } - - return callback(err, result); - }); -}; - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _mapLimit = require('./mapLimit'); - -var _mapLimit2 = _interopRequireDefault(_mapLimit); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -; -/** - * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time. - * - * @name groupByLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.groupBy]{@link module:Collections.groupBy} - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a `key` to group the value under. - * Invoked with (value, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Result is an `Object` whoses - * properties are arrays of values which returned the corresponding key. - */ -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/groupBySeries.js b/node_modules/async/groupBySeries.js deleted file mode 100644 index b94805e..0000000 --- a/node_modules/async/groupBySeries.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -var _groupByLimit = require('./groupByLimit'); - -var _groupByLimit2 = _interopRequireDefault(_groupByLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time. 
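A usage sketch of `groupByLimit` following the signature above; `db` is a stubbed stand-in for the hypothetical store used in the `groupBy` example, and the limit of 5 is an arbitrary illustration.

var async = require('async');

// Stub store standing in for the hypothetical db.findById from the groupBy example.
var db = {
    findById: function (id, cb) {
        setImmediate(cb, null, { age: id === 'userId2' ? 42 : 30 });
    }
};

// At most 5 lookups run concurrently; values are still grouped under the returned key.
async.groupByLimit(['userId1', 'userId2', 'userId3'], 5, function (userId, callback) {
    db.findById(userId, function (err, user) {
        if (err) return callback(err);
        callback(null, user.age);
    });
}, function (err, usersByAge) {
    // e.g. { 30: ['userId1', 'userId3'], 42: ['userId2'] }
    console.log(usersByAge);
});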
- * - * @name groupBySeries - * @static - * @memberOf module:Collections - * @method - * @see [async.groupBy]{@link module:Collections.groupBy} - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a `key` to group the value under. - * Invoked with (value, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Result is an `Object` whoses - * properties are arrays of values which returned the corresponding key. - */ -exports.default = (0, _doLimit2.default)(_groupByLimit2.default, 1); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/index.js b/node_modules/async/index.js deleted file mode 100644 index c39d8d8..0000000 --- a/node_modules/async/index.js +++ /dev/null @@ -1,582 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.wrapSync = exports.selectSeries = exports.selectLimit = exports.select = exports.foldr = exports.foldl = exports.inject = exports.forEachOfLimit = exports.forEachOfSeries = exports.forEachOf = exports.forEachLimit = exports.forEachSeries = exports.forEach = exports.findSeries = exports.findLimit = exports.find = exports.anySeries = exports.anyLimit = exports.any = exports.allSeries = exports.allLimit = exports.all = exports.whilst = exports.waterfall = exports.until = exports.unmemoize = exports.tryEach = exports.transform = exports.timesSeries = exports.timesLimit = exports.times = exports.timeout = exports.sortBy = exports.someSeries = exports.someLimit = exports.some = exports.setImmediate = exports.series = exports.seq = exports.retryable = exports.retry = exports.rejectSeries = exports.rejectLimit = exports.reject = exports.reflectAll = exports.reflect = exports.reduceRight = exports.reduce = exports.race = exports.queue = exports.priorityQueue = exports.parallelLimit = exports.parallel = exports.nextTick = exports.memoize = exports.mapValuesSeries = exports.mapValuesLimit = exports.mapValues = exports.mapSeries = exports.mapLimit = exports.map = exports.log = exports.groupBySeries = exports.groupByLimit = exports.groupBy = exports.forever = exports.filterSeries = exports.filterLimit = exports.filter = exports.everySeries = exports.everyLimit = exports.every = exports.ensureAsync = exports.eachSeries = exports.eachOfSeries = exports.eachOfLimit = exports.eachOf = exports.eachLimit = exports.each = exports.during = exports.doWhilst = exports.doUntil = exports.doDuring = exports.dir = exports.detectSeries = exports.detectLimit = exports.detect = exports.constant = exports.concatSeries = exports.concatLimit = exports.concat = exports.compose = exports.cargo = exports.autoInject = exports.auto = exports.asyncify = exports.applyEachSeries = exports.applyEach = exports.apply = undefined; - -var _apply = require('./apply'); - -var _apply2 = _interopRequireDefault(_apply); - -var _applyEach = require('./applyEach'); - -var _applyEach2 = _interopRequireDefault(_applyEach); - -var _applyEachSeries = require('./applyEachSeries'); - -var _applyEachSeries2 = _interopRequireDefault(_applyEachSeries); - -var _asyncify = require('./asyncify'); - -var _asyncify2 = _interopRequireDefault(_asyncify); - -var _auto = require('./auto'); - -var _auto2 = 
_interopRequireDefault(_auto); - -var _autoInject = require('./autoInject'); - -var _autoInject2 = _interopRequireDefault(_autoInject); - -var _cargo = require('./cargo'); - -var _cargo2 = _interopRequireDefault(_cargo); - -var _compose = require('./compose'); - -var _compose2 = _interopRequireDefault(_compose); - -var _concat = require('./concat'); - -var _concat2 = _interopRequireDefault(_concat); - -var _concatLimit = require('./concatLimit'); - -var _concatLimit2 = _interopRequireDefault(_concatLimit); - -var _concatSeries = require('./concatSeries'); - -var _concatSeries2 = _interopRequireDefault(_concatSeries); - -var _constant = require('./constant'); - -var _constant2 = _interopRequireDefault(_constant); - -var _detect = require('./detect'); - -var _detect2 = _interopRequireDefault(_detect); - -var _detectLimit = require('./detectLimit'); - -var _detectLimit2 = _interopRequireDefault(_detectLimit); - -var _detectSeries = require('./detectSeries'); - -var _detectSeries2 = _interopRequireDefault(_detectSeries); - -var _dir = require('./dir'); - -var _dir2 = _interopRequireDefault(_dir); - -var _doDuring = require('./doDuring'); - -var _doDuring2 = _interopRequireDefault(_doDuring); - -var _doUntil = require('./doUntil'); - -var _doUntil2 = _interopRequireDefault(_doUntil); - -var _doWhilst = require('./doWhilst'); - -var _doWhilst2 = _interopRequireDefault(_doWhilst); - -var _during = require('./during'); - -var _during2 = _interopRequireDefault(_during); - -var _each = require('./each'); - -var _each2 = _interopRequireDefault(_each); - -var _eachLimit = require('./eachLimit'); - -var _eachLimit2 = _interopRequireDefault(_eachLimit); - -var _eachOf = require('./eachOf'); - -var _eachOf2 = _interopRequireDefault(_eachOf); - -var _eachOfLimit = require('./eachOfLimit'); - -var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); - -var _eachOfSeries = require('./eachOfSeries'); - -var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); - -var _eachSeries = require('./eachSeries'); - -var _eachSeries2 = _interopRequireDefault(_eachSeries); - -var _ensureAsync = require('./ensureAsync'); - -var _ensureAsync2 = _interopRequireDefault(_ensureAsync); - -var _every = require('./every'); - -var _every2 = _interopRequireDefault(_every); - -var _everyLimit = require('./everyLimit'); - -var _everyLimit2 = _interopRequireDefault(_everyLimit); - -var _everySeries = require('./everySeries'); - -var _everySeries2 = _interopRequireDefault(_everySeries); - -var _filter = require('./filter'); - -var _filter2 = _interopRequireDefault(_filter); - -var _filterLimit = require('./filterLimit'); - -var _filterLimit2 = _interopRequireDefault(_filterLimit); - -var _filterSeries = require('./filterSeries'); - -var _filterSeries2 = _interopRequireDefault(_filterSeries); - -var _forever = require('./forever'); - -var _forever2 = _interopRequireDefault(_forever); - -var _groupBy = require('./groupBy'); - -var _groupBy2 = _interopRequireDefault(_groupBy); - -var _groupByLimit = require('./groupByLimit'); - -var _groupByLimit2 = _interopRequireDefault(_groupByLimit); - -var _groupBySeries = require('./groupBySeries'); - -var _groupBySeries2 = _interopRequireDefault(_groupBySeries); - -var _log = require('./log'); - -var _log2 = _interopRequireDefault(_log); - -var _map = require('./map'); - -var _map2 = _interopRequireDefault(_map); - -var _mapLimit = require('./mapLimit'); - -var _mapLimit2 = _interopRequireDefault(_mapLimit); - -var _mapSeries = require('./mapSeries'); - -var _mapSeries2 = 
_interopRequireDefault(_mapSeries); - -var _mapValues = require('./mapValues'); - -var _mapValues2 = _interopRequireDefault(_mapValues); - -var _mapValuesLimit = require('./mapValuesLimit'); - -var _mapValuesLimit2 = _interopRequireDefault(_mapValuesLimit); - -var _mapValuesSeries = require('./mapValuesSeries'); - -var _mapValuesSeries2 = _interopRequireDefault(_mapValuesSeries); - -var _memoize = require('./memoize'); - -var _memoize2 = _interopRequireDefault(_memoize); - -var _nextTick = require('./nextTick'); - -var _nextTick2 = _interopRequireDefault(_nextTick); - -var _parallel = require('./parallel'); - -var _parallel2 = _interopRequireDefault(_parallel); - -var _parallelLimit = require('./parallelLimit'); - -var _parallelLimit2 = _interopRequireDefault(_parallelLimit); - -var _priorityQueue = require('./priorityQueue'); - -var _priorityQueue2 = _interopRequireDefault(_priorityQueue); - -var _queue = require('./queue'); - -var _queue2 = _interopRequireDefault(_queue); - -var _race = require('./race'); - -var _race2 = _interopRequireDefault(_race); - -var _reduce = require('./reduce'); - -var _reduce2 = _interopRequireDefault(_reduce); - -var _reduceRight = require('./reduceRight'); - -var _reduceRight2 = _interopRequireDefault(_reduceRight); - -var _reflect = require('./reflect'); - -var _reflect2 = _interopRequireDefault(_reflect); - -var _reflectAll = require('./reflectAll'); - -var _reflectAll2 = _interopRequireDefault(_reflectAll); - -var _reject = require('./reject'); - -var _reject2 = _interopRequireDefault(_reject); - -var _rejectLimit = require('./rejectLimit'); - -var _rejectLimit2 = _interopRequireDefault(_rejectLimit); - -var _rejectSeries = require('./rejectSeries'); - -var _rejectSeries2 = _interopRequireDefault(_rejectSeries); - -var _retry = require('./retry'); - -var _retry2 = _interopRequireDefault(_retry); - -var _retryable = require('./retryable'); - -var _retryable2 = _interopRequireDefault(_retryable); - -var _seq = require('./seq'); - -var _seq2 = _interopRequireDefault(_seq); - -var _series = require('./series'); - -var _series2 = _interopRequireDefault(_series); - -var _setImmediate = require('./setImmediate'); - -var _setImmediate2 = _interopRequireDefault(_setImmediate); - -var _some = require('./some'); - -var _some2 = _interopRequireDefault(_some); - -var _someLimit = require('./someLimit'); - -var _someLimit2 = _interopRequireDefault(_someLimit); - -var _someSeries = require('./someSeries'); - -var _someSeries2 = _interopRequireDefault(_someSeries); - -var _sortBy = require('./sortBy'); - -var _sortBy2 = _interopRequireDefault(_sortBy); - -var _timeout = require('./timeout'); - -var _timeout2 = _interopRequireDefault(_timeout); - -var _times = require('./times'); - -var _times2 = _interopRequireDefault(_times); - -var _timesLimit = require('./timesLimit'); - -var _timesLimit2 = _interopRequireDefault(_timesLimit); - -var _timesSeries = require('./timesSeries'); - -var _timesSeries2 = _interopRequireDefault(_timesSeries); - -var _transform = require('./transform'); - -var _transform2 = _interopRequireDefault(_transform); - -var _tryEach = require('./tryEach'); - -var _tryEach2 = _interopRequireDefault(_tryEach); - -var _unmemoize = require('./unmemoize'); - -var _unmemoize2 = _interopRequireDefault(_unmemoize); - -var _until = require('./until'); - -var _until2 = _interopRequireDefault(_until); - -var _waterfall = require('./waterfall'); - -var _waterfall2 = _interopRequireDefault(_waterfall); - -var _whilst = require('./whilst'); - -var _whilst2 = 
_interopRequireDefault(_whilst); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -exports.default = { - apply: _apply2.default, - applyEach: _applyEach2.default, - applyEachSeries: _applyEachSeries2.default, - asyncify: _asyncify2.default, - auto: _auto2.default, - autoInject: _autoInject2.default, - cargo: _cargo2.default, - compose: _compose2.default, - concat: _concat2.default, - concatLimit: _concatLimit2.default, - concatSeries: _concatSeries2.default, - constant: _constant2.default, - detect: _detect2.default, - detectLimit: _detectLimit2.default, - detectSeries: _detectSeries2.default, - dir: _dir2.default, - doDuring: _doDuring2.default, - doUntil: _doUntil2.default, - doWhilst: _doWhilst2.default, - during: _during2.default, - each: _each2.default, - eachLimit: _eachLimit2.default, - eachOf: _eachOf2.default, - eachOfLimit: _eachOfLimit2.default, - eachOfSeries: _eachOfSeries2.default, - eachSeries: _eachSeries2.default, - ensureAsync: _ensureAsync2.default, - every: _every2.default, - everyLimit: _everyLimit2.default, - everySeries: _everySeries2.default, - filter: _filter2.default, - filterLimit: _filterLimit2.default, - filterSeries: _filterSeries2.default, - forever: _forever2.default, - groupBy: _groupBy2.default, - groupByLimit: _groupByLimit2.default, - groupBySeries: _groupBySeries2.default, - log: _log2.default, - map: _map2.default, - mapLimit: _mapLimit2.default, - mapSeries: _mapSeries2.default, - mapValues: _mapValues2.default, - mapValuesLimit: _mapValuesLimit2.default, - mapValuesSeries: _mapValuesSeries2.default, - memoize: _memoize2.default, - nextTick: _nextTick2.default, - parallel: _parallel2.default, - parallelLimit: _parallelLimit2.default, - priorityQueue: _priorityQueue2.default, - queue: _queue2.default, - race: _race2.default, - reduce: _reduce2.default, - reduceRight: _reduceRight2.default, - reflect: _reflect2.default, - reflectAll: _reflectAll2.default, - reject: _reject2.default, - rejectLimit: _rejectLimit2.default, - rejectSeries: _rejectSeries2.default, - retry: _retry2.default, - retryable: _retryable2.default, - seq: _seq2.default, - series: _series2.default, - setImmediate: _setImmediate2.default, - some: _some2.default, - someLimit: _someLimit2.default, - someSeries: _someSeries2.default, - sortBy: _sortBy2.default, - timeout: _timeout2.default, - times: _times2.default, - timesLimit: _timesLimit2.default, - timesSeries: _timesSeries2.default, - transform: _transform2.default, - tryEach: _tryEach2.default, - unmemoize: _unmemoize2.default, - until: _until2.default, - waterfall: _waterfall2.default, - whilst: _whilst2.default, - - // aliases - all: _every2.default, - allLimit: _everyLimit2.default, - allSeries: _everySeries2.default, - any: _some2.default, - anyLimit: _someLimit2.default, - anySeries: _someSeries2.default, - find: _detect2.default, - findLimit: _detectLimit2.default, - findSeries: _detectSeries2.default, - forEach: _each2.default, - forEachSeries: _eachSeries2.default, - forEachLimit: _eachLimit2.default, - forEachOf: _eachOf2.default, - forEachOfSeries: _eachOfSeries2.default, - forEachOfLimit: _eachOfLimit2.default, - inject: _reduce2.default, - foldl: _reduce2.default, - foldr: _reduceRight2.default, - select: _filter2.default, - selectLimit: _filterLimit2.default, - selectSeries: _filterSeries2.default, - wrapSync: _asyncify2.default -}; /** - * An "async function" in the context of Async is an asynchronous function with - * a variable number of parameters, with the final 
parameter being a callback. - * (`function (arg1, arg2, ..., callback) {}`) - * The final callback is of the form `callback(err, results...)`, which must be - * called once the function is completed. The callback should be called with an - * Error as its first argument to signal that an error occurred. - * Otherwise, if no error occurred, it should be called with `null` as the first - * argument, and any additional `result` arguments that may apply, to signal - * successful completion. - * The callback must be called exactly once, ideally on a later tick of the - * JavaScript event loop. - * - * This type of function is also referred to as a "Node-style async function", - * or a "continuation-passing style function" (CPS). Most of the methods of this - * library are themselves CPS/Node-style async functions, or functions that - * return CPS/Node-style async functions. - * - * Wherever we accept a Node-style async function, we also directly accept an - * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}. - * In this case, the `async` function will not be passed a final callback - * argument, and any thrown error will be used as the `err` argument of the - * implicit callback, and the return value will be used as the `result` value. - * (i.e. a rejection of the returned Promise becomes the `err` callback - * argument, and a resolved value becomes the `result`.) - * - * Note, due to JavaScript limitations, we can only detect native `async` - * functions and not transpiled implementations. - * Your environment must have `async`/`await` support for this to work. - * (e.g. Node > v7.6, or a recent version of a modern browser). - * If you are using `async` functions through a transpiler (e.g. Babel), you - * must still wrap the function with [asyncify]{@link module:Utils.asyncify}, - * because the `async function` will be compiled to an ordinary function that - * returns a promise. - * - * @typedef {Function} AsyncFunction - * @static - */ - -/** - * Async is a utility module which provides straightforward, powerful functions - * for working with asynchronous JavaScript. Although originally designed for - * use with [Node.js](http://nodejs.org) and installable via - * `npm install --save async`, it can also be used directly in the browser. - * @module async - * @see AsyncFunction - */ - -/** - * A collection of `async` functions for manipulating collections, such as - * arrays and objects. - * @module Collections - */ - -/** - * A collection of `async` functions for controlling the flow through a script. - * @module ControlFlow - */ - -/** - * A collection of `async` utility functions. 
- * @module Utils - */ - -exports.apply = _apply2.default; -exports.applyEach = _applyEach2.default; -exports.applyEachSeries = _applyEachSeries2.default; -exports.asyncify = _asyncify2.default; -exports.auto = _auto2.default; -exports.autoInject = _autoInject2.default; -exports.cargo = _cargo2.default; -exports.compose = _compose2.default; -exports.concat = _concat2.default; -exports.concatLimit = _concatLimit2.default; -exports.concatSeries = _concatSeries2.default; -exports.constant = _constant2.default; -exports.detect = _detect2.default; -exports.detectLimit = _detectLimit2.default; -exports.detectSeries = _detectSeries2.default; -exports.dir = _dir2.default; -exports.doDuring = _doDuring2.default; -exports.doUntil = _doUntil2.default; -exports.doWhilst = _doWhilst2.default; -exports.during = _during2.default; -exports.each = _each2.default; -exports.eachLimit = _eachLimit2.default; -exports.eachOf = _eachOf2.default; -exports.eachOfLimit = _eachOfLimit2.default; -exports.eachOfSeries = _eachOfSeries2.default; -exports.eachSeries = _eachSeries2.default; -exports.ensureAsync = _ensureAsync2.default; -exports.every = _every2.default; -exports.everyLimit = _everyLimit2.default; -exports.everySeries = _everySeries2.default; -exports.filter = _filter2.default; -exports.filterLimit = _filterLimit2.default; -exports.filterSeries = _filterSeries2.default; -exports.forever = _forever2.default; -exports.groupBy = _groupBy2.default; -exports.groupByLimit = _groupByLimit2.default; -exports.groupBySeries = _groupBySeries2.default; -exports.log = _log2.default; -exports.map = _map2.default; -exports.mapLimit = _mapLimit2.default; -exports.mapSeries = _mapSeries2.default; -exports.mapValues = _mapValues2.default; -exports.mapValuesLimit = _mapValuesLimit2.default; -exports.mapValuesSeries = _mapValuesSeries2.default; -exports.memoize = _memoize2.default; -exports.nextTick = _nextTick2.default; -exports.parallel = _parallel2.default; -exports.parallelLimit = _parallelLimit2.default; -exports.priorityQueue = _priorityQueue2.default; -exports.queue = _queue2.default; -exports.race = _race2.default; -exports.reduce = _reduce2.default; -exports.reduceRight = _reduceRight2.default; -exports.reflect = _reflect2.default; -exports.reflectAll = _reflectAll2.default; -exports.reject = _reject2.default; -exports.rejectLimit = _rejectLimit2.default; -exports.rejectSeries = _rejectSeries2.default; -exports.retry = _retry2.default; -exports.retryable = _retryable2.default; -exports.seq = _seq2.default; -exports.series = _series2.default; -exports.setImmediate = _setImmediate2.default; -exports.some = _some2.default; -exports.someLimit = _someLimit2.default; -exports.someSeries = _someSeries2.default; -exports.sortBy = _sortBy2.default; -exports.timeout = _timeout2.default; -exports.times = _times2.default; -exports.timesLimit = _timesLimit2.default; -exports.timesSeries = _timesSeries2.default; -exports.transform = _transform2.default; -exports.tryEach = _tryEach2.default; -exports.unmemoize = _unmemoize2.default; -exports.until = _until2.default; -exports.waterfall = _waterfall2.default; -exports.whilst = _whilst2.default; -exports.all = _every2.default; -exports.allLimit = _everyLimit2.default; -exports.allSeries = _everySeries2.default; -exports.any = _some2.default; -exports.anyLimit = _someLimit2.default; -exports.anySeries = _someSeries2.default; -exports.find = _detect2.default; -exports.findLimit = _detectLimit2.default; -exports.findSeries = _detectSeries2.default; -exports.forEach = _each2.default; 
-exports.forEachSeries = _eachSeries2.default; -exports.forEachLimit = _eachLimit2.default; -exports.forEachOf = _eachOf2.default; -exports.forEachOfSeries = _eachOfSeries2.default; -exports.forEachOfLimit = _eachOfLimit2.default; -exports.inject = _reduce2.default; -exports.foldl = _reduce2.default; -exports.foldr = _reduceRight2.default; -exports.select = _filter2.default; -exports.selectLimit = _filterLimit2.default; -exports.selectSeries = _filterSeries2.default; -exports.wrapSync = _asyncify2.default; \ No newline at end of file diff --git a/node_modules/async/inject.js b/node_modules/async/inject.js deleted file mode 100644 index 3fb8019..0000000 --- a/node_modules/async/inject.js +++ /dev/null @@ -1,78 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = reduce; - -var _eachOfSeries = require('./eachOfSeries'); - -var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _once = require('./internal/once'); - -var _once2 = _interopRequireDefault(_once); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Reduces `coll` into a single value using an async `iteratee` to return each - * successive step. `memo` is the initial state of the reduction. This function - * only operates in series. - * - * For performance reasons, it may make sense to split a call to this function - * into a parallel map, and then use the normal `Array.prototype.reduce` on the - * results. This function is for situations where each step in the reduction - * needs to be async; if you can get the data before reducing it, then it's - * probably a good idea to do so. - * - * @name reduce - * @static - * @memberOf module:Collections - * @method - * @alias inject - * @alias foldl - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {*} memo - The initial state of the reduction. - * @param {AsyncFunction} iteratee - A function applied to each item in the - * array to produce the next step in the reduction. - * The `iteratee` should complete with the next state of the reduction. - * If the iteratee completes with an error, the reduction is stopped and the - * main `callback` is immediately called with the error. - * Invoked with (memo, item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result is the reduced value. Invoked with - * (err, result). 
- * @example - * - * async.reduce([1,2,3], 0, function(memo, item, callback) { - * // pointless async: - * process.nextTick(function() { - * callback(null, memo + item) - * }); - * }, function(err, result) { - * // result is now equal to the last value of memo, which is 6 - * }); - */ -function reduce(coll, memo, iteratee, callback) { - callback = (0, _once2.default)(callback || _noop2.default); - var _iteratee = (0, _wrapAsync2.default)(iteratee); - (0, _eachOfSeries2.default)(coll, function (x, i, callback) { - _iteratee(memo, x, function (err, v) { - memo = v; - callback(err); - }); - }, function (err) { - callback(err, memo); - }); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/DoublyLinkedList.js b/node_modules/async/internal/DoublyLinkedList.js deleted file mode 100644 index 7e71728..0000000 --- a/node_modules/async/internal/DoublyLinkedList.js +++ /dev/null @@ -1,88 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = DLL; -// Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation -// used for queues. This implementation assumes that the node provided by the user can be modified -// to adjust the next and last properties. We implement only the minimal functionality -// for queue support. -function DLL() { - this.head = this.tail = null; - this.length = 0; -} - -function setInitial(dll, node) { - dll.length = 1; - dll.head = dll.tail = node; -} - -DLL.prototype.removeLink = function (node) { - if (node.prev) node.prev.next = node.next;else this.head = node.next; - if (node.next) node.next.prev = node.prev;else this.tail = node.prev; - - node.prev = node.next = null; - this.length -= 1; - return node; -}; - -DLL.prototype.empty = function () { - while (this.head) this.shift(); - return this; -}; - -DLL.prototype.insertAfter = function (node, newNode) { - newNode.prev = node; - newNode.next = node.next; - if (node.next) node.next.prev = newNode;else this.tail = newNode; - node.next = newNode; - this.length += 1; -}; - -DLL.prototype.insertBefore = function (node, newNode) { - newNode.prev = node.prev; - newNode.next = node; - if (node.prev) node.prev.next = newNode;else this.head = newNode; - node.prev = newNode; - this.length += 1; -}; - -DLL.prototype.unshift = function (node) { - if (this.head) this.insertBefore(this.head, node);else setInitial(this, node); -}; - -DLL.prototype.push = function (node) { - if (this.tail) this.insertAfter(this.tail, node);else setInitial(this, node); -}; - -DLL.prototype.shift = function () { - return this.head && this.removeLink(this.head); -}; - -DLL.prototype.pop = function () { - return this.tail && this.removeLink(this.tail); -}; - -DLL.prototype.toArray = function () { - var arr = Array(this.length); - var curr = this.head; - for (var idx = 0; idx < this.length; idx++) { - arr[idx] = curr.data; - curr = curr.next; - } - return arr; -}; - -DLL.prototype.remove = function (testFn) { - var curr = this.head; - while (!!curr) { - var next = curr.next; - if (testFn(curr)) { - this.removeLink(curr); - } - curr = next; - } - return this; -}; -module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/applyEach.js b/node_modules/async/internal/applyEach.js deleted file mode 100644 index 322e03c..0000000 --- a/node_modules/async/internal/applyEach.js +++ /dev/null @@ -1,38 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: 
true -}); -exports.default = applyEach; - -var _slice = require('./slice'); - -var _slice2 = _interopRequireDefault(_slice); - -var _initialParams = require('./initialParams'); - -var _initialParams2 = _interopRequireDefault(_initialParams); - -var _wrapAsync = require('./wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function applyEach(eachfn) { - return function (fns /*, ...args*/) { - var args = (0, _slice2.default)(arguments, 1); - var go = (0, _initialParams2.default)(function (args, callback) { - var that = this; - return eachfn(fns, function (fn, cb) { - (0, _wrapAsync2.default)(fn).apply(that, args.concat(cb)); - }, callback); - }); - if (args.length) { - return go.apply(this, args); - } else { - return go; - } - }; -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/breakLoop.js b/node_modules/async/internal/breakLoop.js deleted file mode 100644 index 1065058..0000000 --- a/node_modules/async/internal/breakLoop.js +++ /dev/null @@ -1,9 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -// A temporary value used to identify if the loop should be broken. -// See #1064, #1293 -exports.default = {}; -module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/consoleFunc.js b/node_modules/async/internal/consoleFunc.js deleted file mode 100644 index 603f48e..0000000 --- a/node_modules/async/internal/consoleFunc.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = consoleFunc; - -var _arrayEach = require('lodash/_arrayEach'); - -var _arrayEach2 = _interopRequireDefault(_arrayEach); - -var _slice = require('./slice'); - -var _slice2 = _interopRequireDefault(_slice); - -var _wrapAsync = require('./wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function consoleFunc(name) { - return function (fn /*, ...args*/) { - var args = (0, _slice2.default)(arguments, 1); - args.push(function (err /*, ...args*/) { - var args = (0, _slice2.default)(arguments, 1); - if (typeof console === 'object') { - if (err) { - if (console.error) { - console.error(err); - } - } else if (console[name]) { - (0, _arrayEach2.default)(args, function (x) { - console[name](x); - }); - } - } - }); - (0, _wrapAsync2.default)(fn).apply(null, args); - }; -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/createTester.js b/node_modules/async/internal/createTester.js deleted file mode 100644 index ce96e8b..0000000 --- a/node_modules/async/internal/createTester.js +++ /dev/null @@ -1,44 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = _createTester; - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _breakLoop = require('./breakLoop'); - -var _breakLoop2 = _interopRequireDefault(_breakLoop); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function _createTester(check, getResult) { - return function (eachfn, arr, iteratee, cb) { - cb = cb || _noop2.default; - var testPassed = false; - var testResult; - eachfn(arr, function (value, _, callback) { - iteratee(value, function (err, result) { - if (err) { - callback(err); - } else if (check(result) && !testResult) { - testPassed = true; - testResult = getResult(true, value); - callback(null, _breakLoop2.default); - } else { - callback(); - } - }); - }, function (err) { - if (err) { - cb(err); - } else { - cb(null, testPassed ? testResult : getResult(false)); - } - }); - }; -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/doLimit.js b/node_modules/async/internal/doLimit.js deleted file mode 100644 index 963c608..0000000 --- a/node_modules/async/internal/doLimit.js +++ /dev/null @@ -1,12 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = doLimit; -function doLimit(fn, limit) { - return function (iterable, iteratee, callback) { - return fn(iterable, limit, iteratee, callback); - }; -} -module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/doParallel.js b/node_modules/async/internal/doParallel.js deleted file mode 100644 index bb40207..0000000 --- a/node_modules/async/internal/doParallel.js +++ /dev/null @@ -1,23 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = doParallel; - -var _eachOf = require('../eachOf'); - -var _eachOf2 = _interopRequireDefault(_eachOf); - -var _wrapAsync = require('./wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function doParallel(fn) { - return function (obj, iteratee, callback) { - return fn(_eachOf2.default, obj, (0, _wrapAsync2.default)(iteratee), callback); - }; -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/doParallelLimit.js b/node_modules/async/internal/doParallelLimit.js deleted file mode 100644 index a7e963d..0000000 --- a/node_modules/async/internal/doParallelLimit.js +++ /dev/null @@ -1,23 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = doParallelLimit; - -var _eachOfLimit = require('./eachOfLimit'); - -var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); - -var _wrapAsync = require('./wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function doParallelLimit(fn) { - return function (obj, limit, iteratee, callback) { - return fn((0, _eachOfLimit2.default)(limit), obj, (0, _wrapAsync2.default)(iteratee), callback); - }; -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/eachOfLimit.js b/node_modules/async/internal/eachOfLimit.js deleted file mode 100644 index 6f6fe55..0000000 --- a/node_modules/async/internal/eachOfLimit.js +++ /dev/null @@ -1,74 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = _eachOfLimit; - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _once = require('./once'); - -var _once2 = _interopRequireDefault(_once); - -var _iterator = require('./iterator'); - -var _iterator2 = _interopRequireDefault(_iterator); - -var _onlyOnce = require('./onlyOnce'); - -var _onlyOnce2 = _interopRequireDefault(_onlyOnce); - -var _breakLoop = require('./breakLoop'); - -var _breakLoop2 = _interopRequireDefault(_breakLoop); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function _eachOfLimit(limit) { - return function (obj, iteratee, callback) { - callback = (0, _once2.default)(callback || _noop2.default); - if (limit <= 0 || !obj) { - return callback(null); - } - var nextElem = (0, _iterator2.default)(obj); - var done = false; - var running = 0; - var looping = false; - - function iterateeCallback(err, value) { - running -= 1; - if (err) { - done = true; - callback(err); - } else if (value === _breakLoop2.default || done && running <= 0) { - done = true; - return callback(null); - } else if (!looping) { - replenish(); - } - } - - function replenish() { - looping = true; - while (running < limit && !done) { - var elem = nextElem(); - if (elem === null) { - done = true; - if (running <= 0) { - callback(null); - } - return; - } - running += 1; - iteratee(elem.value, elem.key, (0, _onlyOnce2.default)(iterateeCallback)); - } - looping = false; - } - - replenish(); - }; -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/filter.js b/node_modules/async/internal/filter.js deleted file mode 100644 index 74f3986..0000000 --- a/node_modules/async/internal/filter.js +++ /dev/null @@ -1,75 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = _filter; - -var _arrayMap = require('lodash/_arrayMap'); - -var _arrayMap2 = _interopRequireDefault(_arrayMap); - -var _isArrayLike = require('lodash/isArrayLike'); - -var _isArrayLike2 = _interopRequireDefault(_isArrayLike); - -var _baseProperty = require('lodash/_baseProperty'); - -var _baseProperty2 = _interopRequireDefault(_baseProperty); - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _wrapAsync = require('./wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function filterArray(eachfn, arr, iteratee, callback) { - var truthValues = new Array(arr.length); - eachfn(arr, function (x, index, callback) { - iteratee(x, function (err, v) { - truthValues[index] = !!v; - callback(err); - }); - }, function (err) { - if (err) return callback(err); - var results = []; - for (var i = 0; i < arr.length; i++) { - if (truthValues[i]) results.push(arr[i]); - } - callback(null, results); - }); -} - -function filterGeneric(eachfn, coll, iteratee, callback) { - var results = []; - eachfn(coll, function (x, index, callback) { - iteratee(x, function (err, v) { - if (err) { - callback(err); - } else { - if (v) { - results.push({ index: index, value: x }); - } - callback(); - } - }); - }, function (err) { - if (err) { - callback(err); - } else { - callback(null, (0, _arrayMap2.default)(results.sort(function (a, b) { - return a.index - b.index; - }), (0, _baseProperty2.default)('value'))); - } - }); -} - -function _filter(eachfn, coll, iteratee, callback) { - var filter = (0, _isArrayLike2.default)(coll) ? filterArray : filterGeneric; - filter(eachfn, coll, (0, _wrapAsync2.default)(iteratee), callback || _noop2.default); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/findGetResult.js b/node_modules/async/internal/findGetResult.js deleted file mode 100644 index f8d3fe0..0000000 --- a/node_modules/async/internal/findGetResult.js +++ /dev/null @@ -1,10 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = _findGetResult; -function _findGetResult(v, x) { - return x; -} -module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/getIterator.js b/node_modules/async/internal/getIterator.js deleted file mode 100644 index 3eadd24..0000000 --- a/node_modules/async/internal/getIterator.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -exports.default = function (coll) { - return iteratorSymbol && coll[iteratorSymbol] && coll[iteratorSymbol](); -}; - -var iteratorSymbol = typeof Symbol === 'function' && Symbol.iterator; - -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/initialParams.js b/node_modules/async/internal/initialParams.js deleted file mode 100644 index df02cb1..0000000 --- a/node_modules/async/internal/initialParams.js +++ /dev/null @@ -1,21 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -exports.default = function (fn) { - return function () /*...args, callback*/{ - var args = (0, _slice2.default)(arguments); - var callback = args.pop(); - fn.call(this, args, callback); - }; -}; - -var _slice = require('./slice'); - -var _slice2 = _interopRequireDefault(_slice); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/iterator.js b/node_modules/async/internal/iterator.js deleted file mode 100644 index 886beb5..0000000 --- a/node_modules/async/internal/iterator.js +++ /dev/null @@ -1,61 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = iterator; - -var _isArrayLike = require('lodash/isArrayLike'); - -var _isArrayLike2 = _interopRequireDefault(_isArrayLike); - -var _getIterator = require('./getIterator'); - -var _getIterator2 = _interopRequireDefault(_getIterator); - -var _keys = require('lodash/keys'); - -var _keys2 = _interopRequireDefault(_keys); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function createArrayIterator(coll) { - var i = -1; - var len = coll.length; - return function next() { - return ++i < len ? { value: coll[i], key: i } : null; - }; -} - -function createES2015Iterator(iterator) { - var i = -1; - return function next() { - var item = iterator.next(); - if (item.done) return null; - i++; - return { value: item.value, key: i }; - }; -} - -function createObjectIterator(obj) { - var okeys = (0, _keys2.default)(obj); - var i = -1; - var len = okeys.length; - return function next() { - var key = okeys[++i]; - if (key === '__proto__') { - return next(); - } - return i < len ? { value: obj[key], key: key } : null; - }; -} - -function iterator(coll) { - if ((0, _isArrayLike2.default)(coll)) { - return createArrayIterator(coll); - } - - var iterator = (0, _getIterator2.default)(coll); - return iterator ? createES2015Iterator(iterator) : createObjectIterator(coll); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/map.js b/node_modules/async/internal/map.js deleted file mode 100644 index f4f2aa5..0000000 --- a/node_modules/async/internal/map.js +++ /dev/null @@ -1,35 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = _asyncMap; - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _wrapAsync = require('./wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function _asyncMap(eachfn, arr, iteratee, callback) { - callback = callback || _noop2.default; - arr = arr || []; - var results = []; - var counter = 0; - var _iteratee = (0, _wrapAsync2.default)(iteratee); - - eachfn(arr, function (value, _, callback) { - var index = counter++; - _iteratee(value, function (err, v) { - results[index] = v; - callback(err); - }); - }, function (err) { - callback(err, results); - }); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/notId.js b/node_modules/async/internal/notId.js deleted file mode 100644 index 0106c92..0000000 --- a/node_modules/async/internal/notId.js +++ /dev/null @@ -1,10 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = notId; -function notId(v) { - return !v; -} -module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/once.js b/node_modules/async/internal/once.js deleted file mode 100644 index f0c379f..0000000 --- a/node_modules/async/internal/once.js +++ /dev/null @@ -1,15 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = once; -function once(fn) { - return function () { - if (fn === null) return; - var callFn = fn; - fn = null; - callFn.apply(this, arguments); - }; -} -module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/onlyOnce.js b/node_modules/async/internal/onlyOnce.js deleted file mode 100644 index f2e3001..0000000 --- a/node_modules/async/internal/onlyOnce.js +++ /dev/null @@ -1,15 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = onlyOnce; -function onlyOnce(fn) { - return function () { - if (fn === null) throw new Error("Callback was already called."); - var callFn = fn; - fn = null; - callFn.apply(this, arguments); - }; -} -module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/parallel.js b/node_modules/async/internal/parallel.js deleted file mode 100644 index c97293b..0000000 --- a/node_modules/async/internal/parallel.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = _parallel; - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _isArrayLike = require('lodash/isArrayLike'); - -var _isArrayLike2 = _interopRequireDefault(_isArrayLike); - -var _slice = require('./slice'); - -var _slice2 = _interopRequireDefault(_slice); - -var _wrapAsync = require('./wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function _parallel(eachfn, tasks, callback) { - callback = callback || _noop2.default; - var results = (0, _isArrayLike2.default)(tasks) ? 
[] : {}; - - eachfn(tasks, function (task, key, callback) { - (0, _wrapAsync2.default)(task)(function (err, result) { - if (arguments.length > 2) { - result = (0, _slice2.default)(arguments, 1); - } - results[key] = result; - callback(err); - }); - }, function (err) { - callback(err, results); - }); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/queue.js b/node_modules/async/internal/queue.js deleted file mode 100644 index 19534a7..0000000 --- a/node_modules/async/internal/queue.js +++ /dev/null @@ -1,204 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = queue; - -var _baseIndexOf = require('lodash/_baseIndexOf'); - -var _baseIndexOf2 = _interopRequireDefault(_baseIndexOf); - -var _isArray = require('lodash/isArray'); - -var _isArray2 = _interopRequireDefault(_isArray); - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _onlyOnce = require('./onlyOnce'); - -var _onlyOnce2 = _interopRequireDefault(_onlyOnce); - -var _setImmediate = require('./setImmediate'); - -var _setImmediate2 = _interopRequireDefault(_setImmediate); - -var _DoublyLinkedList = require('./DoublyLinkedList'); - -var _DoublyLinkedList2 = _interopRequireDefault(_DoublyLinkedList); - -var _wrapAsync = require('./wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function queue(worker, concurrency, payload) { - if (concurrency == null) { - concurrency = 1; - } else if (concurrency === 0) { - throw new Error('Concurrency must not be zero'); - } - - var _worker = (0, _wrapAsync2.default)(worker); - var numRunning = 0; - var workersList = []; - - var processingScheduled = false; - function _insert(data, insertAtFront, callback) { - if (callback != null && typeof callback !== 'function') { - throw new Error('task callback must be a function'); - } - q.started = true; - if (!(0, _isArray2.default)(data)) { - data = [data]; - } - if (data.length === 0 && q.idle()) { - // call drain immediately if there are no tasks - return (0, _setImmediate2.default)(function () { - q.drain(); - }); - } - - for (var i = 0, l = data.length; i < l; i++) { - var item = { - data: data[i], - callback: callback || _noop2.default - }; - - if (insertAtFront) { - q._tasks.unshift(item); - } else { - q._tasks.push(item); - } - } - - if (!processingScheduled) { - processingScheduled = true; - (0, _setImmediate2.default)(function () { - processingScheduled = false; - q.process(); - }); - } - } - - function _next(tasks) { - return function (err) { - numRunning -= 1; - - for (var i = 0, l = tasks.length; i < l; i++) { - var task = tasks[i]; - - var index = (0, _baseIndexOf2.default)(workersList, task, 0); - if (index === 0) { - workersList.shift(); - } else if (index > 0) { - workersList.splice(index, 1); - } - - task.callback.apply(task, arguments); - - if (err != null) { - q.error(err, task.data); - } - } - - if (numRunning <= q.concurrency - q.buffer) { - q.unsaturated(); - } - - if (q.idle()) { - q.drain(); - } - q.process(); - }; - } - - var isProcessing = false; - var q = { - _tasks: new _DoublyLinkedList2.default(), - concurrency: concurrency, - payload: payload, - saturated: _noop2.default, - unsaturated: _noop2.default, - buffer: concurrency / 4, - empty: _noop2.default, - drain: _noop2.default, - error: _noop2.default, - started: false, - paused: false, - push: function 
(data, callback) { - _insert(data, false, callback); - }, - kill: function () { - q.drain = _noop2.default; - q._tasks.empty(); - }, - unshift: function (data, callback) { - _insert(data, true, callback); - }, - remove: function (testFn) { - q._tasks.remove(testFn); - }, - process: function () { - // Avoid trying to start too many processing operations. This can occur - // when callbacks resolve synchronously (#1267). - if (isProcessing) { - return; - } - isProcessing = true; - while (!q.paused && numRunning < q.concurrency && q._tasks.length) { - var tasks = [], - data = []; - var l = q._tasks.length; - if (q.payload) l = Math.min(l, q.payload); - for (var i = 0; i < l; i++) { - var node = q._tasks.shift(); - tasks.push(node); - workersList.push(node); - data.push(node.data); - } - - numRunning += 1; - - if (q._tasks.length === 0) { - q.empty(); - } - - if (numRunning === q.concurrency) { - q.saturated(); - } - - var cb = (0, _onlyOnce2.default)(_next(tasks)); - _worker(data, cb); - } - isProcessing = false; - }, - length: function () { - return q._tasks.length; - }, - running: function () { - return numRunning; - }, - workersList: function () { - return workersList; - }, - idle: function () { - return q._tasks.length + numRunning === 0; - }, - pause: function () { - q.paused = true; - }, - resume: function () { - if (q.paused === false) { - return; - } - q.paused = false; - (0, _setImmediate2.default)(q.process); - } - }; - return q; -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/reject.js b/node_modules/async/internal/reject.js deleted file mode 100644 index 5dbfcfb..0000000 --- a/node_modules/async/internal/reject.js +++ /dev/null @@ -1,21 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = reject; - -var _filter = require('./filter'); - -var _filter2 = _interopRequireDefault(_filter); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function reject(eachfn, arr, iteratee, callback) { - (0, _filter2.default)(eachfn, arr, function (value, cb) { - iteratee(value, function (err, v) { - cb(err, !v); - }); - }, callback); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/setImmediate.js b/node_modules/async/internal/setImmediate.js deleted file mode 100644 index 3545f2b..0000000 --- a/node_modules/async/internal/setImmediate.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.hasNextTick = exports.hasSetImmediate = undefined; -exports.fallback = fallback; -exports.wrap = wrap; - -var _slice = require('./slice'); - -var _slice2 = _interopRequireDefault(_slice); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -var hasSetImmediate = exports.hasSetImmediate = typeof setImmediate === 'function' && setImmediate; -var hasNextTick = exports.hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; - -function fallback(fn) { - setTimeout(fn, 0); -} - -function wrap(defer) { - return function (fn /*, ...args*/) { - var args = (0, _slice2.default)(arguments, 1); - defer(function () { - fn.apply(null, args); - }); - }; -} - -var _defer; - -if (hasSetImmediate) { - _defer = setImmediate; -} else if (hasNextTick) { - _defer = process.nextTick; -} else { - _defer = fallback; -} - -exports.default = wrap(_defer); \ No newline at end of file diff --git a/node_modules/async/internal/slice.js b/node_modules/async/internal/slice.js deleted file mode 100644 index 56f10c0..0000000 --- a/node_modules/async/internal/slice.js +++ /dev/null @@ -1,16 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = slice; -function slice(arrayLike, start) { - start = start | 0; - var newLen = Math.max(arrayLike.length - start, 0); - var newArr = Array(newLen); - for (var idx = 0; idx < newLen; idx++) { - newArr[idx] = arrayLike[start + idx]; - } - return newArr; -} -module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/withoutIndex.js b/node_modules/async/internal/withoutIndex.js deleted file mode 100644 index 2bd3579..0000000 --- a/node_modules/async/internal/withoutIndex.js +++ /dev/null @@ -1,12 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = _withoutIndex; -function _withoutIndex(iteratee) { - return function (value, index, callback) { - return iteratee(value, callback); - }; -} -module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/wrapAsync.js b/node_modules/async/internal/wrapAsync.js deleted file mode 100644 index bc6c966..0000000 --- a/node_modules/async/internal/wrapAsync.js +++ /dev/null @@ -1,25 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.isAsync = undefined; - -var _asyncify = require('../asyncify'); - -var _asyncify2 = _interopRequireDefault(_asyncify); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -var supportsSymbol = typeof Symbol === 'function'; - -function isAsync(fn) { - return supportsSymbol && fn[Symbol.toStringTag] === 'AsyncFunction'; -} - -function wrapAsync(asyncFn) { - return isAsync(asyncFn) ? (0, _asyncify2.default)(asyncFn) : asyncFn; -} - -exports.default = wrapAsync; -exports.isAsync = isAsync; \ No newline at end of file diff --git a/node_modules/async/log.js b/node_modules/async/log.js deleted file mode 100644 index c643867..0000000 --- a/node_modules/async/log.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _consoleFunc = require('./internal/consoleFunc'); - -var _consoleFunc2 = _interopRequireDefault(_consoleFunc); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Logs the result of an `async` function to the `console`. Only works in - * Node.js or in browsers that support `console.log` and `console.error` (such - * as FF and Chrome). If multiple arguments are returned from the async - * function, `console.log` is called on each argument in order. 
- * - * @name log - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} function - The function you want to eventually apply - * all arguments to. - * @param {...*} arguments... - Any number of arguments to apply to the function. - * @example - * - * // in a module - * var hello = function(name, callback) { - * setTimeout(function() { - * callback(null, 'hello ' + name); - * }, 1000); - * }; - * - * // in the node repl - * node> async.log(hello, 'world'); - * 'hello world' - */ -exports.default = (0, _consoleFunc2.default)('log'); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/map.js b/node_modules/async/map.js deleted file mode 100644 index 67c9cda..0000000 --- a/node_modules/async/map.js +++ /dev/null @@ -1,54 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _doParallel = require('./internal/doParallel'); - -var _doParallel2 = _interopRequireDefault(_doParallel); - -var _map = require('./internal/map'); - -var _map2 = _interopRequireDefault(_map); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Produces a new collection of values by mapping each value in `coll` through - * the `iteratee` function. The `iteratee` is called with an item from `coll` - * and a callback for when it has finished processing. Each of these callbacks - * takes 2 arguments: an `error`, and the transformed item from `coll`. If - * `iteratee` passes an error to its callback, the main `callback` (for the - * `map` function) is immediately called with the error. - * - * Note that, since this function applies the `iteratee` to each item in - * parallel, there is no guarantee that the `iteratee` functions will complete - * in order. However, the results array will be in the same order as the - * original `coll`. - * - * If `map` is passed an Object, the results will be an Array. The results - * will roughly be in the order of the original Objects' keys (but this can - * vary across JavaScript engines). - * - * @name map - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with the transformed item. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Results is an Array of the - * transformed items from the `coll`. Invoked with (err, results). - * @example - * - * async.map(['file1','file2','file3'], fs.stat, function(err, results) { - * // results is now an array of stats for each file - * }); - */ -exports.default = (0, _doParallel2.default)(_map2.default); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/mapLimit.js b/node_modules/async/mapLimit.js deleted file mode 100644 index c8b60d8..0000000 --- a/node_modules/async/mapLimit.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _doParallelLimit = require('./internal/doParallelLimit'); - -var _doParallelLimit2 = _interopRequireDefault(_doParallelLimit); - -var _map = require('./internal/map'); - -var _map2 = _interopRequireDefault(_map); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -/** - * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. - * - * @name mapLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.map]{@link module:Collections.map} - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with the transformed item. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Results is an array of the - * transformed items from the `coll`. Invoked with (err, results). - */ -exports.default = (0, _doParallelLimit2.default)(_map2.default); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/mapSeries.js b/node_modules/async/mapSeries.js deleted file mode 100644 index 61b42d0..0000000 --- a/node_modules/async/mapSeries.js +++ /dev/null @@ -1,36 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _mapLimit = require('./mapLimit'); - -var _mapLimit2 = _interopRequireDefault(_mapLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time. - * - * @name mapSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.map]{@link module:Collections.map} - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with the transformed item. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Results is an array of the - * transformed items from the `coll`. Invoked with (err, results). - */ -exports.default = (0, _doLimit2.default)(_mapLimit2.default, 1); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/mapValues.js b/node_modules/async/mapValues.js deleted file mode 100644 index 3d838ca..0000000 --- a/node_modules/async/mapValues.js +++ /dev/null @@ -1,63 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _mapValuesLimit = require('./mapValuesLimit'); - -var _mapValuesLimit2 = _interopRequireDefault(_mapValuesLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. - * - * Produces a new Object by mapping each value of `obj` through the `iteratee` - * function. The `iteratee` is called with each `value` and `key` from `obj` and a - * callback for when it has finished processing. Each of these callbacks takes - * two arguments: an `error`, and the transformed item from `obj`. 
If `iteratee` - * passes an error to its callback, the main `callback` (for the `mapValues` - * function) is immediately called with the error. - * - * Note, the order of the keys in the result is not guaranteed. The keys will - * be roughly in the order they complete, (but this is very engine-specific) - * - * @name mapValues - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Object} obj - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each value and key - * in `coll`. - * The iteratee should complete with the transformed value as its result. - * Invoked with (value, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. `result` is a new object consisting - * of each key from `obj`, with each transformed value on the right-hand side. - * Invoked with (err, result). - * @example - * - * async.mapValues({ - * f1: 'file1', - * f2: 'file2', - * f3: 'file3' - * }, function (file, key, callback) { - * fs.stat(file, callback); - * }, function(err, result) { - * // result is now a map of stats for each file, e.g. - * // { - * // f1: [stats for file1], - * // f2: [stats for file2], - * // f3: [stats for file3] - * // } - * }); - */ - -exports.default = (0, _doLimit2.default)(_mapValuesLimit2.default, Infinity); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/mapValuesLimit.js b/node_modules/async/mapValuesLimit.js deleted file mode 100644 index 912a8b5..0000000 --- a/node_modules/async/mapValuesLimit.js +++ /dev/null @@ -1,61 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = mapValuesLimit; - -var _eachOfLimit = require('./eachOfLimit'); - -var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _once = require('./internal/once'); - -var _once2 = _interopRequireDefault(_once); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a - * time. - * - * @name mapValuesLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.mapValues]{@link module:Collections.mapValues} - * @category Collection - * @param {Object} obj - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - A function to apply to each value and key - * in `coll`. - * The iteratee should complete with the transformed value as its result. - * Invoked with (value, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. `result` is a new object consisting - * of each key from `obj`, with each transformed value on the right-hand side. - * Invoked with (err, result). 
- */ -function mapValuesLimit(obj, limit, iteratee, callback) { - callback = (0, _once2.default)(callback || _noop2.default); - var newObj = {}; - var _iteratee = (0, _wrapAsync2.default)(iteratee); - (0, _eachOfLimit2.default)(obj, limit, function (val, key, next) { - _iteratee(val, key, function (err, result) { - if (err) return next(err); - newObj[key] = result; - next(); - }); - }, function (err) { - callback(err, newObj); - }); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/mapValuesSeries.js b/node_modules/async/mapValuesSeries.js deleted file mode 100644 index b378c4a..0000000 --- a/node_modules/async/mapValuesSeries.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _mapValuesLimit = require('./mapValuesLimit'); - -var _mapValuesLimit2 = _interopRequireDefault(_mapValuesLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time. - * - * @name mapValuesSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.mapValues]{@link module:Collections.mapValues} - * @category Collection - * @param {Object} obj - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each value and key - * in `coll`. - * The iteratee should complete with the transformed value as its result. - * Invoked with (value, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. `result` is a new object consisting - * of each key from `obj`, with each transformed value on the right-hand side. - * Invoked with (err, result). - */ -exports.default = (0, _doLimit2.default)(_mapValuesLimit2.default, 1); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/memoize.js b/node_modules/async/memoize.js deleted file mode 100644 index 1f2b566..0000000 --- a/node_modules/async/memoize.js +++ /dev/null @@ -1,101 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = memoize; - -var _identity = require('lodash/identity'); - -var _identity2 = _interopRequireDefault(_identity); - -var _slice = require('./internal/slice'); - -var _slice2 = _interopRequireDefault(_slice); - -var _setImmediate = require('./internal/setImmediate'); - -var _setImmediate2 = _interopRequireDefault(_setImmediate); - -var _initialParams = require('./internal/initialParams'); - -var _initialParams2 = _interopRequireDefault(_initialParams); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function has(obj, key) { - return key in obj; -} - -/** - * Caches the results of an async function. When creating a hash to store - * function results against, the callback is omitted from the hash and an - * optional hash function can be used. - * - * If no hash function is specified, the first argument is used as a hash key, - * which may work reasonably if it is a string or a data type that converts to a - * distinct string. 
Note that objects and arrays will not behave reasonably. - * Neither will cases where the other arguments are significant. In such cases, - * specify your own hash function. - * - * The cache of results is exposed as the `memo` property of the function - * returned by `memoize`. - * - * @name memoize - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} fn - The async function to proxy and cache results from. - * @param {Function} hasher - An optional function for generating a custom hash - * for storing results. It has all the arguments applied to it apart from the - * callback, and must be synchronous. - * @returns {AsyncFunction} a memoized version of `fn` - * @example - * - * var slow_fn = function(name, callback) { - * // do something - * callback(null, result); - * }; - * var fn = async.memoize(slow_fn); - * - * // fn can now be used as if it were slow_fn - * fn('some name', function() { - * // callback - * }); - */ -function memoize(fn, hasher) { - var memo = Object.create(null); - var queues = Object.create(null); - hasher = hasher || _identity2.default; - var _fn = (0, _wrapAsync2.default)(fn); - var memoized = (0, _initialParams2.default)(function memoized(args, callback) { - var key = hasher.apply(null, args); - if (has(memo, key)) { - (0, _setImmediate2.default)(function () { - callback.apply(null, memo[key]); - }); - } else if (has(queues, key)) { - queues[key].push(callback); - } else { - queues[key] = [callback]; - _fn.apply(null, args.concat(function () /*args*/{ - var args = (0, _slice2.default)(arguments); - memo[key] = args; - var q = queues[key]; - delete queues[key]; - for (var i = 0, l = q.length; i < l; i++) { - q[i].apply(null, args); - } - })); - } - }); - memoized.memo = memo; - memoized.unmemoized = fn; - return memoized; -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/nextTick.js b/node_modules/async/nextTick.js deleted file mode 100644 index 886f58e..0000000 --- a/node_modules/async/nextTick.js +++ /dev/null @@ -1,51 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _setImmediate = require('./internal/setImmediate'); - -/** - * Calls `callback` on a later loop around the event loop. In Node.js this just - * calls `process.nextTick`. In the browser it will use `setImmediate` if - * available, otherwise `setTimeout(callback, 0)`, which means other higher - * priority events may precede the execution of `callback`. - * - * This is used internally for browser-compatibility purposes. - * - * @name nextTick - * @static - * @memberOf module:Utils - * @method - * @see [async.setImmediate]{@link module:Utils.setImmediate} - * @category Util - * @param {Function} callback - The function to call on a later loop around - * the event loop. Invoked with (args...). - * @param {...*} args... - any number of additional arguments to pass to the - * callback on the next tick. 
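// Illustrative sketch, not part of the deleted file: the memoize docs above
// recommend a custom hasher when the first argument is not a usable string key.
// lookupUser and its query shape are hypothetical.
var async = require('async');

function lookupUser(query, callback) {
    // query is an object such as {id: 42, fields: ['name']}
    setTimeout(function () {
        callback(null, { id: query.id, name: 'user-' + query.id });
    }, 10);
}

// Hash on the stable parts of the argument; the default hash (the first
// argument itself) would collapse every object key to '[object Object]'.
var cachedLookup = async.memoize(lookupUser, function (query) {
    return query.id + ':' + (query.fields || []).join(',');
});

cachedLookup({ id: 42, fields: ['name'] }, function (err, user) {
    // a repeat call with the same id/fields is answered from cachedLookup.memo
});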
- * @example - * - * var call_order = []; - * async.nextTick(function() { - * call_order.push('two'); - * // call_order now equals ['one','two'] - * }); - * call_order.push('one'); - * - * async.setImmediate(function (a, b, c) { - * // a, b, and c equal 1, 2, and 3 - * }, 1, 2, 3); - */ -var _defer; - -if (_setImmediate.hasNextTick) { - _defer = process.nextTick; -} else if (_setImmediate.hasSetImmediate) { - _defer = setImmediate; -} else { - _defer = _setImmediate.fallback; -} - -exports.default = (0, _setImmediate.wrap)(_defer); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/package.json b/node_modules/async/package.json deleted file mode 100644 index 76b3db7..0000000 --- a/node_modules/async/package.json +++ /dev/null @@ -1,80 +0,0 @@ -{ - "name": "async", - "description": "Higher-order functions and common patterns for asynchronous code", - "version": "2.6.4", - "main": "dist/async.js", - "author": "Caolan McMahon", - "homepage": "https://caolan.github.io/async/", - "repository": { - "type": "git", - "url": "https://github.com/caolan/async.git" - }, - "bugs": { - "url": "https://github.com/caolan/async/issues" - }, - "keywords": [ - "async", - "callback", - "module", - "utility" - ], - "dependencies": { - "lodash": "^4.17.14" - }, - "devDependencies": { - "babel-cli": "^6.24.0", - "babel-core": "^6.26.3", - "babel-plugin-add-module-exports": "^0.2.1", - "babel-plugin-istanbul": "^2.0.1", - "babel-plugin-transform-es2015-modules-commonjs": "^6.26.2", - "babel-preset-es2015": "^6.3.13", - "babel-preset-es2017": "^6.22.0", - "babelify": "^8.0.0", - "benchmark": "^2.1.1", - "bluebird": "^3.4.6", - "browserify": "^16.2.2", - "chai": "^4.1.2", - "cheerio": "^0.22.0", - "coveralls": "^3.0.1", - "es6-promise": "^2.3.0", - "eslint": "^2.13.1", - "fs-extra": "^0.26.7", - "gh-pages-deploy": "^0.5.0", - "jsdoc": "^3.4.0", - "karma": "^2.0.2", - "karma-browserify": "^5.2.0", - "karma-firefox-launcher": "^1.1.0", - "karma-mocha": "^1.2.0", - "karma-mocha-reporter": "^2.2.0", - "mocha": "^5.2.0", - "native-promise-only": "^0.8.0-a", - "nyc": "^11.8.0", - "rimraf": "^2.5.0", - "rollup": "^0.36.3", - "rollup-plugin-node-resolve": "^2.0.0", - "rollup-plugin-npm": "^2.0.0", - "rsvp": "^3.0.18", - "semver": "^5.5.0", - "uglify-js": "~2.7.3", - "yargs": "^11.0.0" - }, - "scripts": { - "coverage": "nyc npm run mocha-node-test -- --grep @nycinvalid --invert", - "coveralls": "npm run coverage && nyc report --reporter=text-lcov | coveralls", - "jsdoc": "jsdoc -c ./support/jsdoc/jsdoc.json && node support/jsdoc/jsdoc-fix-html.js", - "lint": "eslint lib/ mocha_test/ perf/memory.js perf/suites.js perf/benchmark.js support/build/ support/*.js karma.conf.js", - "mocha-browser-test": "karma start", - "mocha-node-test": "mocha mocha_test/ --compilers js:babel-core/register", - "mocha-test": "npm run mocha-node-test && npm run mocha-browser-test", - "test": "npm run lint && npm run mocha-node-test" - }, - "license": "MIT", - "gh-pages-deploy": { - "staticpath": "docs" - }, - "nyc": { - "exclude": [ - "mocha_test" - ] - } -} \ No newline at end of file diff --git a/node_modules/async/parallel.js b/node_modules/async/parallel.js deleted file mode 100644 index da28a4d..0000000 --- a/node_modules/async/parallel.js +++ /dev/null @@ -1,90 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = parallelLimit; - -var _eachOf = require('./eachOf'); - -var _eachOf2 = _interopRequireDefault(_eachOf); - -var _parallel = 
require('./internal/parallel'); - -var _parallel2 = _interopRequireDefault(_parallel); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Run the `tasks` collection of functions in parallel, without waiting until - * the previous function has completed. If any of the functions pass an error to - * its callback, the main `callback` is immediately called with the value of the - * error. Once the `tasks` have completed, the results are passed to the final - * `callback` as an array. - * - * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about - * parallel execution of code. If your tasks do not use any timers or perform - * any I/O, they will actually be executed in series. Any synchronous setup - * sections for each task will happen one after the other. JavaScript remains - * single-threaded. - * - * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the - * execution of other tasks when a task fails. - * - * It is also possible to use an object instead of an array. Each property will - * be run as a function and the results will be passed to the final `callback` - * as an object instead of an array. This can be a more readable way of handling - * results from {@link async.parallel}. - * - * @name parallel - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|Object} tasks - A collection of - * [async functions]{@link AsyncFunction} to run. - * Each async function can complete with any number of optional `result` values. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed successfully. This function gets a results array - * (or object) containing all the result arguments passed to the task callbacks. - * Invoked with (err, results). - * - * @example - * async.parallel([ - * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); - * } - * ], - * // optional callback - * function(err, results) { - * // the results array will equal ['one','two'] even though - * // the second function had a shorter timeout. - * }); - * - * // an example using an object instead of an array - * async.parallel({ - * one: function(callback) { - * setTimeout(function() { - * callback(null, 1); - * }, 200); - * }, - * two: function(callback) { - * setTimeout(function() { - * callback(null, 2); - * }, 100); - * } - * }, function(err, results) { - * // results is now equals to: {one: 1, two: 2} - * }); - */ -function parallelLimit(tasks, callback) { - (0, _parallel2.default)(_eachOf2.default, tasks, callback); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/parallelLimit.js b/node_modules/async/parallelLimit.js deleted file mode 100644 index a026526..0000000 --- a/node_modules/async/parallelLimit.js +++ /dev/null @@ -1,40 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = parallelLimit; - -var _eachOfLimit = require('./internal/eachOfLimit'); - -var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); - -var _parallel = require('./internal/parallel'); - -var _parallel2 = _interopRequireDefault(_parallel); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -/** - * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a - * time. - * - * @name parallelLimit - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.parallel]{@link module:ControlFlow.parallel} - * @category Control Flow - * @param {Array|Iterable|Object} tasks - A collection of - * [async functions]{@link AsyncFunction} to run. - * Each async function can complete with any number of optional `result` values. - * @param {number} limit - The maximum number of async operations at a time. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed successfully. This function gets a results array - * (or object) containing all the result arguments passed to the task callbacks. - * Invoked with (err, results). - */ -function parallelLimit(tasks, limit, callback) { - (0, _parallel2.default)((0, _eachOfLimit2.default)(limit), tasks, callback); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/priorityQueue.js b/node_modules/async/priorityQueue.js deleted file mode 100644 index 3a5f023..0000000 --- a/node_modules/async/priorityQueue.js +++ /dev/null @@ -1,98 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -exports.default = function (worker, concurrency) { - // Start with a normal queue - var q = (0, _queue2.default)(worker, concurrency); - - // Override push to accept second parameter representing priority - q.push = function (data, priority, callback) { - if (callback == null) callback = _noop2.default; - if (typeof callback !== 'function') { - throw new Error('task callback must be a function'); - } - q.started = true; - if (!(0, _isArray2.default)(data)) { - data = [data]; - } - if (data.length === 0) { - // call drain immediately if there are no tasks - return (0, _setImmediate2.default)(function () { - q.drain(); - }); - } - - priority = priority || 0; - var nextNode = q._tasks.head; - while (nextNode && priority >= nextNode.priority) { - nextNode = nextNode.next; - } - - for (var i = 0, l = data.length; i < l; i++) { - var item = { - data: data[i], - priority: priority, - callback: callback - }; - - if (nextNode) { - q._tasks.insertBefore(nextNode, item); - } else { - q._tasks.push(item); - } - } - (0, _setImmediate2.default)(q.process); - }; - - // Remove unshift function - delete q.unshift; - - return q; -}; - -var _isArray = require('lodash/isArray'); - -var _isArray2 = _interopRequireDefault(_isArray); - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _setImmediate = require('./setImmediate'); - -var _setImmediate2 = _interopRequireDefault(_setImmediate); - -var _queue = require('./queue'); - -var _queue2 = _interopRequireDefault(_queue); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -module.exports = exports['default']; - -/** - * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and - * completed in ascending priority order. - * - * @name priorityQueue - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.queue]{@link module:ControlFlow.queue} - * @category Control Flow - * @param {AsyncFunction} worker - An async function for processing a queued task. - * If you want to handle errors from an individual task, pass a callback to - * `q.push()`. - * Invoked with (task, callback). 
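// Illustrative sketch, not part of the deleted file: parallelLimit accepts the
// same task collection as parallel, with a concurrency cap as the second argument.
var async = require('async');

async.parallelLimit([
    function (callback) { setTimeout(function () { callback(null, 'one'); }, 200); },
    function (callback) { setTimeout(function () { callback(null, 'two'); }, 100); },
    function (callback) { setTimeout(function () { callback(null, 'three'); }, 50); }
], 2, function (err, results) {
    // at most 2 tasks ran at any time; results is still ['one', 'two', 'three']
});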
- * @param {number} concurrency - An `integer` for determining how many `worker` - * functions should be run in parallel. If omitted, the concurrency defaults to - * `1`. If the concurrency is `0`, an error is thrown. - * @returns {module:ControlFlow.QueueObject} A priorityQueue object to manage the tasks. There are two - * differences between `queue` and `priorityQueue` objects: - * * `push(task, priority, [callback])` - `priority` should be a number. If an - * array of `tasks` is given, all tasks will be assigned the same priority. - * * The `unshift` method was removed. - */ \ No newline at end of file diff --git a/node_modules/async/queue.js b/node_modules/async/queue.js deleted file mode 100644 index 0ca8ba2..0000000 --- a/node_modules/async/queue.js +++ /dev/null @@ -1,130 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -exports.default = function (worker, concurrency) { - var _worker = (0, _wrapAsync2.default)(worker); - return (0, _queue2.default)(function (items, cb) { - _worker(items[0], cb); - }, concurrency, 1); -}; - -var _queue = require('./internal/queue'); - -var _queue2 = _interopRequireDefault(_queue); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -module.exports = exports['default']; - -/** - * A queue of tasks for the worker function to complete. - * @typedef {Object} QueueObject - * @memberOf module:ControlFlow - * @property {Function} length - a function returning the number of items - * waiting to be processed. Invoke with `queue.length()`. - * @property {boolean} started - a boolean indicating whether or not any - * items have been pushed and processed by the queue. - * @property {Function} running - a function returning the number of items - * currently being processed. Invoke with `queue.running()`. - * @property {Function} workersList - a function returning the array of items - * currently being processed. Invoke with `queue.workersList()`. - * @property {Function} idle - a function returning false if there are items - * waiting or being processed, or true if not. Invoke with `queue.idle()`. - * @property {number} concurrency - an integer for determining how many `worker` - * functions should be run in parallel. This property can be changed after a - * `queue` is created to alter the concurrency on-the-fly. - * @property {Function} push - add a new task to the `queue`. Calls `callback` - * once the `worker` has finished processing the task. Instead of a single task, - * a `tasks` array can be submitted. The respective callback is used for every - * task in the list. Invoke with `queue.push(task, [callback])`, - * @property {Function} unshift - add a new task to the front of the `queue`. - * Invoke with `queue.unshift(task, [callback])`. - * @property {Function} remove - remove items from the queue that match a test - * function. The test function will be passed an object with a `data` property, - * and a `priority` property, if this is a - * [priorityQueue]{@link module:ControlFlow.priorityQueue} object. - * Invoked with `queue.remove(testFn)`, where `testFn` is of the form - * `function ({data, priority}) {}` and returns a Boolean. - * @property {Function} saturated - a callback that is called when the number of - * running workers hits the `concurrency` limit, and further tasks will be - * queued. 
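// Illustrative sketch, not part of the deleted file: priorityQueue is used like
// queue, except push() takes a numeric priority and lower numbers run first.
var async = require('async');

var pq = async.priorityQueue(function (task, callback) {
    console.log('processing ' + task.name);
    callback();
}, 1);

pq.drain = function () {
    console.log('all items have been processed');
};

pq.push({ name: 'routine' }, 5, function (err) { /* per-task callback */ });
pq.push({ name: 'urgent' }, 1);  // runs before 'routine' despite being pushed later
// Note there is no pq.unshift(); use a lower priority value instead.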
- * @property {Function} unsaturated - a callback that is called when the number - * of running workers is less than the `concurrency` & `buffer` limits, and - * further tasks will not be queued. - * @property {number} buffer - A minimum threshold buffer in order to say that - * the `queue` is `unsaturated`. - * @property {Function} empty - a callback that is called when the last item - * from the `queue` is given to a `worker`. - * @property {Function} drain - a callback that is called when the last item - * from the `queue` has returned from the `worker`. - * @property {Function} error - a callback that is called when a task errors. - * Has the signature `function(error, task)`. - * @property {boolean} paused - a boolean for determining whether the queue is - * in a paused state. - * @property {Function} pause - a function that pauses the processing of tasks - * until `resume()` is called. Invoke with `queue.pause()`. - * @property {Function} resume - a function that resumes the processing of - * queued tasks when the queue is paused. Invoke with `queue.resume()`. - * @property {Function} kill - a function that removes the `drain` callback and - * empties remaining tasks from the queue forcing it to go idle. No more tasks - * should be pushed to the queue after calling this function. Invoke with `queue.kill()`. - */ - -/** - * Creates a `queue` object with the specified `concurrency`. Tasks added to the - * `queue` are processed in parallel (up to the `concurrency` limit). If all - * `worker`s are in progress, the task is queued until one becomes available. - * Once a `worker` completes a `task`, that `task`'s callback is called. - * - * @name queue - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {AsyncFunction} worker - An async function for processing a queued task. - * If you want to handle errors from an individual task, pass a callback to - * `q.push()`. Invoked with (task, callback). - * @param {number} [concurrency=1] - An `integer` for determining how many - * `worker` functions should be run in parallel. If omitted, the concurrency - * defaults to `1`. If the concurrency is `0`, an error is thrown. - * @returns {module:ControlFlow.QueueObject} A queue object to manage the tasks. Callbacks can - * attached as certain properties to listen for specific events during the - * lifecycle of the queue. 
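// Illustrative sketch, not part of the deleted file: besides the push/drain
// usage shown in the example that follows, the lifecycle hooks described above
// are plain properties on the queue object in async 2.x.
var async = require('async');

var q = async.queue(function (task, callback) {
    setTimeout(callback, 50);
}, 2);

q.saturated = function () { console.log('all workers busy; new tasks will wait'); };
q.unsaturated = function () { console.log('capacity available again'); };
q.error = function (err, task) { console.error('task failed:', task, err); };

q.pause();                        // hold processing
q.push([{ id: 1 }, { id: 2 }, { id: 3 }]);
q.resume();                       // work through the backlog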
- * @example - * - * // create a queue object with concurrency 2 - * var q = async.queue(function(task, callback) { - * console.log('hello ' + task.name); - * callback(); - * }, 2); - * - * // assign a callback - * q.drain = function() { - * console.log('all items have been processed'); - * }; - * - * // add some items to the queue - * q.push({name: 'foo'}, function(err) { - * console.log('finished processing foo'); - * }); - * q.push({name: 'bar'}, function (err) { - * console.log('finished processing bar'); - * }); - * - * // add some items to the queue (batch-wise) - * q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function(err) { - * console.log('finished processing item'); - * }); - * - * // add some items to the front of the queue - * q.unshift({name: 'bar'}, function (err) { - * console.log('finished processing bar'); - * }); - */ \ No newline at end of file diff --git a/node_modules/async/race.js b/node_modules/async/race.js deleted file mode 100644 index 6713c74..0000000 --- a/node_modules/async/race.js +++ /dev/null @@ -1,70 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = race; - -var _isArray = require('lodash/isArray'); - -var _isArray2 = _interopRequireDefault(_isArray); - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _once = require('./internal/once'); - -var _once2 = _interopRequireDefault(_once); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Runs the `tasks` array of functions in parallel, without waiting until the - * previous function has completed. Once any of the `tasks` complete or pass an - * error to its callback, the main `callback` is immediately called. It's - * equivalent to `Promise.race()`. - * - * @name race - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array} tasks - An array containing [async functions]{@link AsyncFunction} - * to run. Each function can complete with an optional `result` value. - * @param {Function} callback - A callback to run once any of the functions have - * completed. This function gets an error or result from the first function that - * completed. Invoked with (err, result). 
- * @returns undefined - * @example - * - * async.race([ - * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); - * } - * ], - * // main callback - * function(err, result) { - * // the result will be equal to 'two' as it finishes earlier - * }); - */ -function race(tasks, callback) { - callback = (0, _once2.default)(callback || _noop2.default); - if (!(0, _isArray2.default)(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); - if (!tasks.length) return callback(); - for (var i = 0, l = tasks.length; i < l; i++) { - (0, _wrapAsync2.default)(tasks[i])(callback); - } -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/reduce.js b/node_modules/async/reduce.js deleted file mode 100644 index 3fb8019..0000000 --- a/node_modules/async/reduce.js +++ /dev/null @@ -1,78 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = reduce; - -var _eachOfSeries = require('./eachOfSeries'); - -var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _once = require('./internal/once'); - -var _once2 = _interopRequireDefault(_once); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Reduces `coll` into a single value using an async `iteratee` to return each - * successive step. `memo` is the initial state of the reduction. This function - * only operates in series. - * - * For performance reasons, it may make sense to split a call to this function - * into a parallel map, and then use the normal `Array.prototype.reduce` on the - * results. This function is for situations where each step in the reduction - * needs to be async; if you can get the data before reducing it, then it's - * probably a good idea to do so. - * - * @name reduce - * @static - * @memberOf module:Collections - * @method - * @alias inject - * @alias foldl - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {*} memo - The initial state of the reduction. - * @param {AsyncFunction} iteratee - A function applied to each item in the - * array to produce the next step in the reduction. - * The `iteratee` should complete with the next state of the reduction. - * If the iteratee complete with an error, the reduction is stopped and the - * main `callback` is immediately called with the error. - * Invoked with (memo, item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result is the reduced value. Invoked with - * (err, result). 
- * @example - * - * async.reduce([1,2,3], 0, function(memo, item, callback) { - * // pointless async: - * process.nextTick(function() { - * callback(null, memo + item) - * }); - * }, function(err, result) { - * // result is now equal to the last value of memo, which is 6 - * }); - */ -function reduce(coll, memo, iteratee, callback) { - callback = (0, _once2.default)(callback || _noop2.default); - var _iteratee = (0, _wrapAsync2.default)(iteratee); - (0, _eachOfSeries2.default)(coll, function (x, i, callback) { - _iteratee(memo, x, function (err, v) { - memo = v; - callback(err); - }); - }, function (err) { - callback(err, memo); - }); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/reduceRight.js b/node_modules/async/reduceRight.js deleted file mode 100644 index 3d17d32..0000000 --- a/node_modules/async/reduceRight.js +++ /dev/null @@ -1,44 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = reduceRight; - -var _reduce = require('./reduce'); - -var _reduce2 = _interopRequireDefault(_reduce); - -var _slice = require('./internal/slice'); - -var _slice2 = _interopRequireDefault(_slice); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. - * - * @name reduceRight - * @static - * @memberOf module:Collections - * @method - * @see [async.reduce]{@link module:Collections.reduce} - * @alias foldr - * @category Collection - * @param {Array} array - A collection to iterate over. - * @param {*} memo - The initial state of the reduction. - * @param {AsyncFunction} iteratee - A function applied to each item in the - * array to produce the next step in the reduction. - * The `iteratee` should complete with the next state of the reduction. - * If the iteratee complete with an error, the reduction is stopped and the - * main `callback` is immediately called with the error. - * Invoked with (memo, item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result is the reduced value. Invoked with - * (err, result). - */ -function reduceRight(array, memo, iteratee, callback) { - var reversed = (0, _slice2.default)(array).reverse(); - (0, _reduce2.default)(reversed, memo, iteratee, callback); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/reflect.js b/node_modules/async/reflect.js deleted file mode 100644 index 098ba86..0000000 --- a/node_modules/async/reflect.js +++ /dev/null @@ -1,81 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = reflect; - -var _initialParams = require('./internal/initialParams'); - -var _initialParams2 = _interopRequireDefault(_initialParams); - -var _slice = require('./internal/slice'); - -var _slice2 = _interopRequireDefault(_slice); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Wraps the async function in another function that always completes with a - * result object, even when it errors. - * - * The result object has either the property `error` or `value`. 
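// Illustrative sketch, not part of the deleted file: reduceRight, documented
// above, is reduce applied to a reversed copy of the array.
var async = require('async');

async.reduceRight([1, 2, 3], '', function (memo, item, callback) {
    process.nextTick(function () {
        callback(null, memo + item);
    });
}, function (err, result) {
    // result is '321' because the items were visited right to left
});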
- * - * @name reflect - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} fn - The async function you want to wrap - * @returns {Function} - A function that always passes null to it's callback as - * the error. The second argument to the callback will be an `object` with - * either an `error` or a `value` property. - * @example - * - * async.parallel([ - * async.reflect(function(callback) { - * // do some stuff ... - * callback(null, 'one'); - * }), - * async.reflect(function(callback) { - * // do some more stuff but error ... - * callback('bad stuff happened'); - * }), - * async.reflect(function(callback) { - * // do some more stuff ... - * callback(null, 'two'); - * }) - * ], - * // optional callback - * function(err, results) { - * // values - * // results[0].value = 'one' - * // results[1].error = 'bad stuff happened' - * // results[2].value = 'two' - * }); - */ -function reflect(fn) { - var _fn = (0, _wrapAsync2.default)(fn); - return (0, _initialParams2.default)(function reflectOn(args, reflectCallback) { - args.push(function callback(error, cbArg) { - if (error) { - reflectCallback(null, { error: error }); - } else { - var value; - if (arguments.length <= 2) { - value = cbArg; - } else { - value = (0, _slice2.default)(arguments, 1); - } - reflectCallback(null, { value: value }); - } - }); - - return _fn.apply(this, args); - }); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/reflectAll.js b/node_modules/async/reflectAll.js deleted file mode 100644 index 966e83d..0000000 --- a/node_modules/async/reflectAll.js +++ /dev/null @@ -1,105 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = reflectAll; - -var _reflect = require('./reflect'); - -var _reflect2 = _interopRequireDefault(_reflect); - -var _isArray = require('lodash/isArray'); - -var _isArray2 = _interopRequireDefault(_isArray); - -var _arrayMap2 = require('lodash/_arrayMap'); - -var _arrayMap3 = _interopRequireDefault(_arrayMap2); - -var _baseForOwn = require('lodash/_baseForOwn'); - -var _baseForOwn2 = _interopRequireDefault(_baseForOwn); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * A helper function that wraps an array or an object of functions with `reflect`. - * - * @name reflectAll - * @static - * @memberOf module:Utils - * @method - * @see [async.reflect]{@link module:Utils.reflect} - * @category Util - * @param {Array|Object|Iterable} tasks - The collection of - * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. - * @returns {Array} Returns an array of async functions, each wrapped in - * `async.reflect` - * @example - * - * let tasks = [ - * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * // do some more stuff but error ... 
- * callback(new Error('bad stuff happened')); - * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); - * } - * ]; - * - * async.parallel(async.reflectAll(tasks), - * // optional callback - * function(err, results) { - * // values - * // results[0].value = 'one' - * // results[1].error = Error('bad stuff happened') - * // results[2].value = 'two' - * }); - * - * // an example using an object instead of an array - * let tasks = { - * one: function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * two: function(callback) { - * callback('two'); - * }, - * three: function(callback) { - * setTimeout(function() { - * callback(null, 'three'); - * }, 100); - * } - * }; - * - * async.parallel(async.reflectAll(tasks), - * // optional callback - * function(err, results) { - * // values - * // results.one.value = 'one' - * // results.two.error = 'two' - * // results.three.value = 'three' - * }); - */ -function reflectAll(tasks) { - var results; - if ((0, _isArray2.default)(tasks)) { - results = (0, _arrayMap3.default)(tasks, _reflect2.default); - } else { - results = {}; - (0, _baseForOwn2.default)(tasks, function (task, key) { - results[key] = _reflect2.default.call(this, task); - }); - } - return results; -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/reject.js b/node_modules/async/reject.js deleted file mode 100644 index 53802b5..0000000 --- a/node_modules/async/reject.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _reject = require('./internal/reject'); - -var _reject2 = _interopRequireDefault(_reject); - -var _doParallel = require('./internal/doParallel'); - -var _doParallel2 = _interopRequireDefault(_doParallel); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test. - * - * @name reject - * @static - * @memberOf module:Collections - * @method - * @see [async.filter]{@link module:Collections.filter} - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - An async truth test to apply to each item in - * `coll`. - * The should complete with a boolean value as its `result`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). 
- * @example - * - * async.reject(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, results) { - * // results now equals an array of missing files - * createFiles(results); - * }); - */ -exports.default = (0, _doParallel2.default)(_reject2.default); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/rejectLimit.js b/node_modules/async/rejectLimit.js deleted file mode 100644 index 74bba7f..0000000 --- a/node_modules/async/rejectLimit.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _reject = require('./internal/reject'); - -var _reject2 = _interopRequireDefault(_reject); - -var _doParallelLimit = require('./internal/doParallelLimit'); - -var _doParallelLimit2 = _interopRequireDefault(_doParallelLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a - * time. - * - * @name rejectLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.reject]{@link module:Collections.reject} - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {Function} iteratee - An async truth test to apply to each item in - * `coll`. - * The should complete with a boolean value as its `result`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - */ -exports.default = (0, _doParallelLimit2.default)(_reject2.default); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/rejectSeries.js b/node_modules/async/rejectSeries.js deleted file mode 100644 index f905588..0000000 --- a/node_modules/async/rejectSeries.js +++ /dev/null @@ -1,35 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _rejectLimit = require('./rejectLimit'); - -var _rejectLimit2 = _interopRequireDefault(_rejectLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. - * - * @name rejectSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.reject]{@link module:Collections.reject} - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - An async truth test to apply to each item in - * `coll`. - * The should complete with a boolean value as its `result`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). 
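// Illustrative sketch, not part of the deleted file: rejectLimit and
// rejectSeries take the same arguments as the reject example above, with
// rejectLimit adding a concurrency cap and rejectSeries fixing it at 1.
var async = require('async');
var fs = require('fs');

async.rejectLimit(['file1', 'file2', 'file3'], 2, function (filePath, callback) {
    fs.access(filePath, function (err) {
        callback(null, !err);  // true (file exists) removes the item from the results
    });
}, function (err, results) {
    // results now contains only the missing files, probed at most 2 at a time
});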
- */ -exports.default = (0, _doLimit2.default)(_rejectLimit2.default, 1); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/retry.js b/node_modules/async/retry.js deleted file mode 100644 index 6a1aa1e..0000000 --- a/node_modules/async/retry.js +++ /dev/null @@ -1,156 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = retry; - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _constant = require('lodash/constant'); - -var _constant2 = _interopRequireDefault(_constant); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Attempts to get a successful response from `task` no more than `times` times - * before returning an error. If the task is successful, the `callback` will be - * passed the result of the successful task. If all attempts fail, the callback - * will be passed the error and result (if any) of the final attempt. - * - * @name retry - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @see [async.retryable]{@link module:ControlFlow.retryable} - * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an - * object with `times` and `interval` or a number. - * * `times` - The number of attempts to make before giving up. The default - * is `5`. - * * `interval` - The time to wait between retries, in milliseconds. The - * default is `0`. The interval may also be specified as a function of the - * retry count (see example). - * * `errorFilter` - An optional synchronous function that is invoked on - * erroneous result. If it returns `true` the retry attempts will continue; - * if the function returns `false` the retry flow is aborted with the current - * attempt's error and result being returned to the final callback. - * Invoked with (err). - * * If `opts` is a number, the number specifies the number of times to retry, - * with the default interval of `0`. - * @param {AsyncFunction} task - An async function to retry. - * Invoked with (callback). - * @param {Function} [callback] - An optional callback which is called when the - * task has succeeded, or after the final failed attempt. It receives the `err` - * and `result` arguments of the last attempt at completing the `task`. Invoked - * with (err, results). - * - * @example - * - * // The `retry` function can be used as a stand-alone control flow by passing - * // a callback, as shown below: - * - * // try calling apiMethod 3 times - * async.retry(3, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod 3 times, waiting 200 ms between each retry - * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod 10 times with exponential backoff - * // (i.e. intervals of 100, 200, 400, 800, 1600, ... 
milliseconds) - * async.retry({ - * times: 10, - * interval: function(retryCount) { - * return 50 * Math.pow(2, retryCount); - * } - * }, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod the default 5 times no delay between each retry - * async.retry(apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod only when error condition satisfies, all other - * // errors will abort the retry control flow and return to final callback - * async.retry({ - * errorFilter: function(err) { - * return err.message === 'Temporary error'; // only retry on a specific error - * } - * }, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // to retry individual methods that are not as reliable within other - * // control flow functions, use the `retryable` wrapper: - * async.auto({ - * users: api.getUsers.bind(api), - * payments: async.retryable(3, api.getPayments.bind(api)) - * }, function(err, results) { - * // do something with the results - * }); - * - */ -function retry(opts, task, callback) { - var DEFAULT_TIMES = 5; - var DEFAULT_INTERVAL = 0; - - var options = { - times: DEFAULT_TIMES, - intervalFunc: (0, _constant2.default)(DEFAULT_INTERVAL) - }; - - function parseTimes(acc, t) { - if (typeof t === 'object') { - acc.times = +t.times || DEFAULT_TIMES; - - acc.intervalFunc = typeof t.interval === 'function' ? t.interval : (0, _constant2.default)(+t.interval || DEFAULT_INTERVAL); - - acc.errorFilter = t.errorFilter; - } else if (typeof t === 'number' || typeof t === 'string') { - acc.times = +t || DEFAULT_TIMES; - } else { - throw new Error("Invalid arguments for async.retry"); - } - } - - if (arguments.length < 3 && typeof opts === 'function') { - callback = task || _noop2.default; - task = opts; - } else { - parseTimes(options, opts); - callback = callback || _noop2.default; - } - - if (typeof task !== 'function') { - throw new Error("Invalid arguments for async.retry"); - } - - var _task = (0, _wrapAsync2.default)(task); - - var attempt = 1; - function retryAttempt() { - _task(function (err) { - if (err && attempt++ < options.times && (typeof options.errorFilter != 'function' || options.errorFilter(err))) { - setTimeout(retryAttempt, options.intervalFunc(attempt)); - } else { - callback.apply(null, arguments); - } - }); - } - - retryAttempt(); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/retryable.js b/node_modules/async/retryable.js deleted file mode 100644 index 002bfb0..0000000 --- a/node_modules/async/retryable.js +++ /dev/null @@ -1,65 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -exports.default = function (opts, task) { - if (!task) { - task = opts; - opts = null; - } - var _task = (0, _wrapAsync2.default)(task); - return (0, _initialParams2.default)(function (args, callback) { - function taskFn(cb) { - _task.apply(null, args.concat(cb)); - } - - if (opts) (0, _retry2.default)(opts, taskFn, callback);else (0, _retry2.default)(taskFn, callback); - }); -}; - -var _retry = require('./retry'); - -var _retry2 = _interopRequireDefault(_retry); - -var _initialParams = require('./internal/initialParams'); - -var _initialParams2 = _interopRequireDefault(_initialParams); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule 
? obj : { default: obj }; } - -module.exports = exports['default']; - -/** - * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method - * wraps a task and makes it retryable, rather than immediately calling it - * with retries. - * - * @name retryable - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.retry]{@link module:ControlFlow.retry} - * @category Control Flow - * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional - * options, exactly the same as from `retry` - * @param {AsyncFunction} task - the asynchronous function to wrap. - * This function will be passed any arguments passed to the returned wrapper. - * Invoked with (...args, callback). - * @returns {AsyncFunction} The wrapped function, which when invoked, will - * retry on an error, based on the parameters specified in `opts`. - * This function will accept the same parameters as `task`. - * @example - * - * async.auto({ - * dep1: async.retryable(3, getFromFlakyService), - * process: ["dep1", async.retryable(3, function (results, cb) { - * maybeProcessData(results.dep1, cb); - * })] - * }, callback); - */ \ No newline at end of file diff --git a/node_modules/async/select.js b/node_modules/async/select.js deleted file mode 100644 index 54772d5..0000000 --- a/node_modules/async/select.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _filter = require('./internal/filter'); - -var _filter2 = _interopRequireDefault(_filter); - -var _doParallel = require('./internal/doParallel'); - -var _doParallel2 = _interopRequireDefault(_doParallel); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Returns a new array of all the values in `coll` which pass an async truth - * test. This operation is performed in parallel, but the results array will be - * in the same order as the original. - * - * @name filter - * @static - * @memberOf module:Collections - * @method - * @alias select - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - * @example - * - * async.filter(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, results) { - * // results now equals an array of the existing files - * }); - */ -exports.default = (0, _doParallel2.default)(_filter2.default); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/selectLimit.js b/node_modules/async/selectLimit.js deleted file mode 100644 index 06216f7..0000000 --- a/node_modules/async/selectLimit.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _filter = require('./internal/filter'); - -var _filter2 = _interopRequireDefault(_filter); - -var _doParallelLimit = require('./internal/doParallelLimit'); - -var _doParallelLimit2 = _interopRequireDefault(_doParallelLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -/** - * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a - * time. - * - * @name filterLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.filter]{@link module:Collections.filter} - * @alias selectLimit - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - */ -exports.default = (0, _doParallelLimit2.default)(_filter2.default); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/selectSeries.js b/node_modules/async/selectSeries.js deleted file mode 100644 index e48d966..0000000 --- a/node_modules/async/selectSeries.js +++ /dev/null @@ -1,35 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _filterLimit = require('./filterLimit'); - -var _filterLimit2 = _interopRequireDefault(_filterLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. - * - * @name filterSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.filter]{@link module:Collections.filter} - * @alias selectSeries - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results) - */ -exports.default = (0, _doLimit2.default)(_filterLimit2.default, 1); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/seq.js b/node_modules/async/seq.js deleted file mode 100644 index ff86ef9..0000000 --- a/node_modules/async/seq.js +++ /dev/null @@ -1,91 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = seq; - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _slice = require('./internal/slice'); - -var _slice2 = _interopRequireDefault(_slice); - -var _reduce = require('./reduce'); - -var _reduce2 = _interopRequireDefault(_reduce); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -var _arrayMap = require('lodash/_arrayMap'); - -var _arrayMap2 = _interopRequireDefault(_arrayMap); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Version of the compose function that is more natural to read. Each function - * consumes the return value of the previous function. 
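// Illustrative sketch, not part of the deleted file: filterLimit (alias
// selectLimit), documented above, is the filter example with a concurrency cap;
// filterSeries/selectSeries is the same call with an implicit limit of 1.
var async = require('async');
var fs = require('fs');

async.filterLimit(['file1', 'file2', 'file3'], 2, function (filePath, callback) {
    fs.access(filePath, function (err) {
        callback(null, !err);
    });
}, function (err, results) {
    // results lists the existing files, checked no more than 2 at a time
});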
It is the equivalent of - * [compose]{@link module:ControlFlow.compose} with the arguments reversed. - * - * Each function is executed with the `this` binding of the composed function. - * - * @name seq - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.compose]{@link module:ControlFlow.compose} - * @category Control Flow - * @param {...AsyncFunction} functions - the asynchronous functions to compose - * @returns {Function} a function that composes the `functions` in order - * @example - * - * // Requires lodash (or underscore), express3 and dresende's orm2. - * // Part of an app, that fetches cats of the logged user. - * // This example uses `seq` function to avoid overnesting and error - * // handling clutter. - * app.get('/cats', function(request, response) { - * var User = request.models.User; - * async.seq( - * _.bind(User.get, User), // 'User.get' has signature (id, callback(err, data)) - * function(user, fn) { - * user.getCats(fn); // 'getCats' has signature (callback(err, data)) - * } - * )(req.session.user_id, function (err, cats) { - * if (err) { - * console.error(err); - * response.json({ status: 'error', message: err.message }); - * } else { - * response.json({ status: 'ok', message: 'Cats found', data: cats }); - * } - * }); - * }); - */ -function seq() /*...functions*/{ - var _functions = (0, _arrayMap2.default)(arguments, _wrapAsync2.default); - return function () /*...args*/{ - var args = (0, _slice2.default)(arguments); - var that = this; - - var cb = args[args.length - 1]; - if (typeof cb == 'function') { - args.pop(); - } else { - cb = _noop2.default; - } - - (0, _reduce2.default)(_functions, args, function (newargs, fn, cb) { - fn.apply(that, newargs.concat(function (err /*, ...nextargs*/) { - var nextargs = (0, _slice2.default)(arguments, 1); - cb(err, nextargs); - })); - }, function (err, results) { - cb.apply(that, [err].concat(results)); - }); - }; -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/series.js b/node_modules/async/series.js deleted file mode 100644 index e8c2928..0000000 --- a/node_modules/async/series.js +++ /dev/null @@ -1,85 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = series; - -var _parallel = require('./internal/parallel'); - -var _parallel2 = _interopRequireDefault(_parallel); - -var _eachOfSeries = require('./eachOfSeries'); - -var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Run the functions in the `tasks` collection in series, each one running once - * the previous function has completed. If any functions in the series pass an - * error to its callback, no more functions are run, and `callback` is - * immediately called with the value of the error. Otherwise, `callback` - * receives an array of results when `tasks` have completed. - * - * It is also possible to use an object instead of an array. Each property will - * be run as a function, and the results will be passed to the final `callback` - * as an object instead of an array. This can be a more readable way of handling - * results from {@link async.series}. 
- * - * **Note** that while many implementations preserve the order of object - * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) - * explicitly states that - * - * > The mechanics and order of enumerating the properties is not specified. - * - * So if you rely on the order in which your series of functions are executed, - * and want this to work on all platforms, consider using an array. - * - * @name series - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|Object} tasks - A collection containing - * [async functions]{@link AsyncFunction} to run in series. - * Each function can complete with any number of optional `result` values. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed. This function gets a results array (or object) - * containing all the result arguments passed to the `task` callbacks. Invoked - * with (err, result). - * @example - * async.series([ - * function(callback) { - * // do some stuff ... - * callback(null, 'one'); - * }, - * function(callback) { - * // do some more stuff ... - * callback(null, 'two'); - * } - * ], - * // optional callback - * function(err, results) { - * // results is now equal to ['one', 'two'] - * }); - * - * async.series({ - * one: function(callback) { - * setTimeout(function() { - * callback(null, 1); - * }, 200); - * }, - * two: function(callback){ - * setTimeout(function() { - * callback(null, 2); - * }, 100); - * } - * }, function(err, results) { - * // results is now equal to: {one: 1, two: 2} - * }); - */ -function series(tasks, callback) { - (0, _parallel2.default)(_eachOfSeries2.default, tasks, callback); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/setImmediate.js b/node_modules/async/setImmediate.js deleted file mode 100644 index e52f7c5..0000000 --- a/node_modules/async/setImmediate.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _setImmediate = require('./internal/setImmediate'); - -var _setImmediate2 = _interopRequireDefault(_setImmediate); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Calls `callback` on a later loop around the event loop. In Node.js this just - * calls `setImmediate`. In the browser it will use `setImmediate` if - * available, otherwise `setTimeout(callback, 0)`, which means other higher - * priority events may precede the execution of `callback`. - * - * This is used internally for browser-compatibility purposes. - * - * @name setImmediate - * @static - * @memberOf module:Utils - * @method - * @see [async.nextTick]{@link module:Utils.nextTick} - * @category Util - * @param {Function} callback - The function to call on a later loop around - * the event loop. Invoked with (args...). - * @param {...*} args... - any number of additional arguments to pass to the - * callback on the next tick. 
- * @example - * - * var call_order = []; - * async.nextTick(function() { - * call_order.push('two'); - * // call_order now equals ['one','two'] - * }); - * call_order.push('one'); - * - * async.setImmediate(function (a, b, c) { - * // a, b, and c equal 1, 2, and 3 - * }, 1, 2, 3); - */ -exports.default = _setImmediate2.default; -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/some.js b/node_modules/async/some.js deleted file mode 100644 index a8e70f7..0000000 --- a/node_modules/async/some.js +++ /dev/null @@ -1,52 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _createTester = require('./internal/createTester'); - -var _createTester2 = _interopRequireDefault(_createTester); - -var _doParallel = require('./internal/doParallel'); - -var _doParallel2 = _interopRequireDefault(_doParallel); - -var _identity = require('lodash/identity'); - -var _identity2 = _interopRequireDefault(_identity); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Returns `true` if at least one element in the `coll` satisfies an async test. - * If any iteratee call returns `true`, the main `callback` is immediately - * called. - * - * @name some - * @static - * @memberOf module:Collections - * @method - * @alias any - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in parallel. - * The iteratee should complete with a boolean `result` value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. - * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). - * @example - * - * async.some(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, result) { - * // if result is true then at least one of the files exists - * }); - */ -exports.default = (0, _doParallel2.default)((0, _createTester2.default)(Boolean, _identity2.default)); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/someLimit.js b/node_modules/async/someLimit.js deleted file mode 100644 index 24ca3f4..0000000 --- a/node_modules/async/someLimit.js +++ /dev/null @@ -1,43 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _createTester = require('./internal/createTester'); - -var _createTester2 = _interopRequireDefault(_createTester); - -var _doParallelLimit = require('./internal/doParallelLimit'); - -var _doParallelLimit2 = _interopRequireDefault(_doParallelLimit); - -var _identity = require('lodash/identity'); - -var _identity2 = _interopRequireDefault(_identity); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. - * - * @name someLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.some]{@link module:Collections.some} - * @alias anyLimit - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. 
- * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in parallel. - * The iteratee should complete with a boolean `result` value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. - * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). - */ -exports.default = (0, _doParallelLimit2.default)((0, _createTester2.default)(Boolean, _identity2.default)); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/someSeries.js b/node_modules/async/someSeries.js deleted file mode 100644 index dc24ed2..0000000 --- a/node_modules/async/someSeries.js +++ /dev/null @@ -1,38 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _someLimit = require('./someLimit'); - -var _someLimit2 = _interopRequireDefault(_someLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. - * - * @name someSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.some]{@link module:Collections.some} - * @alias anySeries - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in series. - * The iteratee should complete with a boolean `result` value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. - * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). - */ -exports.default = (0, _doLimit2.default)(_someLimit2.default, 1); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/sortBy.js b/node_modules/async/sortBy.js deleted file mode 100644 index ee5e93d..0000000 --- a/node_modules/async/sortBy.js +++ /dev/null @@ -1,91 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = sortBy; - -var _arrayMap = require('lodash/_arrayMap'); - -var _arrayMap2 = _interopRequireDefault(_arrayMap); - -var _baseProperty = require('lodash/_baseProperty'); - -var _baseProperty2 = _interopRequireDefault(_baseProperty); - -var _map = require('./map'); - -var _map2 = _interopRequireDefault(_map); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Sorts a list by the results of running each `coll` value through an async - * `iteratee`. - * - * @name sortBy - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. 
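`someLimit` and `someSeries` are documented above without usage examples. A hedged sketch of `someLimit`, mirroring the file-existence check from the `some` example while capping concurrency at two:

```javascript
var async = require('async');
var fs = require('fs');

// at most 2 fs.access checks run at the same time
async.someLimit(['file1', 'file2', 'file3'], 2, function (filePath, callback) {
  fs.access(filePath, function (err) {
    callback(null, !err);
  });
}, function (err, result) {
  // result is true if at least one of the files exists
});
```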
- * The iteratee should complete with a value to use as the sort criteria as - * its `result`. - * Invoked with (item, callback). - * @param {Function} callback - A callback which is called after all the - * `iteratee` functions have finished, or an error occurs. Results is the items - * from the original `coll` sorted by the values returned by the `iteratee` - * calls. Invoked with (err, results). - * @example - * - * async.sortBy(['file1','file2','file3'], function(file, callback) { - * fs.stat(file, function(err, stats) { - * callback(err, stats.mtime); - * }); - * }, function(err, results) { - * // results is now the original array of files sorted by - * // modified date - * }); - * - * // By modifying the callback parameter the - * // sorting order can be influenced: - * - * // ascending order - * async.sortBy([1,9,3,5], function(x, callback) { - * callback(null, x); - * }, function(err,result) { - * // result callback - * }); - * - * // descending order - * async.sortBy([1,9,3,5], function(x, callback) { - * callback(null, x*-1); //<- x*-1 instead of x, turns the order around - * }, function(err,result) { - * // result callback - * }); - */ -function sortBy(coll, iteratee, callback) { - var _iteratee = (0, _wrapAsync2.default)(iteratee); - (0, _map2.default)(coll, function (x, callback) { - _iteratee(x, function (err, criteria) { - if (err) return callback(err); - callback(null, { value: x, criteria: criteria }); - }); - }, function (err, results) { - if (err) return callback(err); - callback(null, (0, _arrayMap2.default)(results.sort(comparator), (0, _baseProperty2.default)('value'))); - }); - - function comparator(left, right) { - var a = left.criteria, - b = right.criteria; - return a < b ? -1 : a > b ? 1 : 0; - } -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/timeout.js b/node_modules/async/timeout.js deleted file mode 100644 index b5cb505..0000000 --- a/node_modules/async/timeout.js +++ /dev/null @@ -1,89 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = timeout; - -var _initialParams = require('./internal/initialParams'); - -var _initialParams2 = _interopRequireDefault(_initialParams); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Sets a time limit on an asynchronous function. If the function does not call - * its callback within the specified milliseconds, it will be called with a - * timeout error. The code property for the error object will be `'ETIMEDOUT'`. - * - * @name timeout - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} asyncFn - The async function to limit in time. - * @param {number} milliseconds - The specified time limit. - * @param {*} [info] - Any variable you want attached (`string`, `object`, etc) - * to timeout Error for more information.. - * @returns {AsyncFunction} Returns a wrapped function that can be used with any - * of the control flow functions. - * Invoke this function with the same parameters as you would `asyncFunc`. - * @example - * - * function myFunction(foo, callback) { - * doAsyncTask(foo, function(err, data) { - * // handle errors - * if (err) return callback(err); - * - * // do some stuff ... 
- * - * // return processed data - * return callback(null, data); - * }); - * } - * - * var wrapped = async.timeout(myFunction, 1000); - * - * // call `wrapped` as you would `myFunction` - * wrapped({ bar: 'bar' }, function(err, data) { - * // if `myFunction` takes < 1000 ms to execute, `err` - * // and `data` will have their expected values - * - * // else `err` will be an Error with the code 'ETIMEDOUT' - * }); - */ -function timeout(asyncFn, milliseconds, info) { - var fn = (0, _wrapAsync2.default)(asyncFn); - - return (0, _initialParams2.default)(function (args, callback) { - var timedOut = false; - var timer; - - function timeoutCallback() { - var name = asyncFn.name || 'anonymous'; - var error = new Error('Callback function "' + name + '" timed out.'); - error.code = 'ETIMEDOUT'; - if (info) { - error.info = info; - } - timedOut = true; - callback(error); - } - - args.push(function () { - if (!timedOut) { - callback.apply(null, arguments); - clearTimeout(timer); - } - }); - - // setup timer and call original function - timer = setTimeout(timeoutCallback, milliseconds); - fn.apply(null, args); - }); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/times.js b/node_modules/async/times.js deleted file mode 100644 index b5ca24d..0000000 --- a/node_modules/async/times.js +++ /dev/null @@ -1,50 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _timesLimit = require('./timesLimit'); - -var _timesLimit2 = _interopRequireDefault(_timesLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Calls the `iteratee` function `n` times, and accumulates results in the same - * manner you would use with [map]{@link module:Collections.map}. - * - * @name times - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.map]{@link module:Collections.map} - * @category Control Flow - * @param {number} n - The number of times to run the function. - * @param {AsyncFunction} iteratee - The async function to call `n` times. - * Invoked with the iteration index and a callback: (n, next). - * @param {Function} callback - see {@link module:Collections.map}. 
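The `timeout` example above does not exercise the optional `info` argument, which the implementation attaches to the timeout error as `err.info`. A hedged sketch reusing the `myFunction` wrapper from that example, with an illustrative info object:

```javascript
var async = require('async');

// third argument is attached to the timeout Error as `err.info`
// (the { source: ... } payload here is purely illustrative)
var wrappedWithInfo = async.timeout(myFunction, 1000, { source: 'cat-service' });

wrappedWithInfo({ bar: 'bar' }, function (err, data) {
  if (err && err.code === 'ETIMEDOUT') {
    console.error('timed out:', err.info); // { source: 'cat-service' }
  }
});
```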
- * @example - * - * // Pretend this is some complicated async factory - * var createUser = function(id, callback) { - * callback(null, { - * id: 'user' + id - * }); - * }; - * - * // generate 5 users - * async.times(5, function(n, next) { - * createUser(n, function(err, user) { - * next(err, user); - * }); - * }, function(err, users) { - * // we should now have 5 users - * }); - */ -exports.default = (0, _doLimit2.default)(_timesLimit2.default, Infinity); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/timesLimit.js b/node_modules/async/timesLimit.js deleted file mode 100644 index aad8495..0000000 --- a/node_modules/async/timesLimit.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = timeLimit; - -var _mapLimit = require('./mapLimit'); - -var _mapLimit2 = _interopRequireDefault(_mapLimit); - -var _baseRange = require('lodash/_baseRange'); - -var _baseRange2 = _interopRequireDefault(_baseRange); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a - * time. - * - * @name timesLimit - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.times]{@link module:ControlFlow.times} - * @category Control Flow - * @param {number} count - The number of times to run the function. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - The async function to call `n` times. - * Invoked with the iteration index and a callback: (n, next). - * @param {Function} callback - see [async.map]{@link module:Collections.map}. - */ -function timeLimit(count, limit, iteratee, callback) { - var _iteratee = (0, _wrapAsync2.default)(iteratee); - (0, _mapLimit2.default)((0, _baseRange2.default)(0, count, 1), limit, _iteratee, callback); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/timesSeries.js b/node_modules/async/timesSeries.js deleted file mode 100644 index f187a35..0000000 --- a/node_modules/async/timesSeries.js +++ /dev/null @@ -1,32 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -var _timesLimit = require('./timesLimit'); - -var _timesLimit2 = _interopRequireDefault(_timesLimit); - -var _doLimit = require('./internal/doLimit'); - -var _doLimit2 = _interopRequireDefault(_doLimit); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. - * - * @name timesSeries - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.times]{@link module:ControlFlow.times} - * @category Control Flow - * @param {number} n - The number of times to run the function. - * @param {AsyncFunction} iteratee - The async function to call `n` times. - * Invoked with the iteration index and a callback: (n, next). - * @param {Function} callback - see {@link module:Collections.map}. 
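`timesLimit` above is documented without a usage example. A minimal sketch, reusing the pretend `createUser` factory from the `times` example and assuming a concurrency limit of ten:

```javascript
var async = require('async');

// generate 100 users, but run at most 10 createUser calls at a time
async.timesLimit(100, 10, function (n, next) {
  createUser(n, function (err, user) {
    next(err, user);
  });
}, function (err, users) {
  // users is an array of 100 results, kept in index order
});
```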
- */ -exports.default = (0, _doLimit2.default)(_timesLimit2.default, 1); -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/transform.js b/node_modules/async/transform.js deleted file mode 100644 index 84ee217..0000000 --- a/node_modules/async/transform.js +++ /dev/null @@ -1,87 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = transform; - -var _isArray = require('lodash/isArray'); - -var _isArray2 = _interopRequireDefault(_isArray); - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _eachOf = require('./eachOf'); - -var _eachOf2 = _interopRequireDefault(_eachOf); - -var _once = require('./internal/once'); - -var _once2 = _interopRequireDefault(_once); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * A relative of `reduce`. Takes an Object or Array, and iterates over each - * element in series, each step potentially mutating an `accumulator` value. - * The type of the accumulator defaults to the type of collection passed in. - * - * @name transform - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|Object} coll - A collection to iterate over. - * @param {*} [accumulator] - The initial state of the transform. If omitted, - * it will default to an empty Object or Array, depending on the type of `coll` - * @param {AsyncFunction} iteratee - A function applied to each item in the - * collection that potentially modifies the accumulator. - * Invoked with (accumulator, item, key, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result is the transformed accumulator. - * Invoked with (err, result). - * @example - * - * async.transform([1,2,3], function(acc, item, index, callback) { - * // pointless async: - * process.nextTick(function() { - * acc.push(item * 2) - * callback(null) - * }); - * }, function(err, result) { - * // result is now equal to [2, 4, 6] - * }); - * - * @example - * - * async.transform({a: 1, b: 2, c: 3}, function (obj, val, key, callback) { - * setImmediate(function () { - * obj[key] = val * 2; - * callback(); - * }) - * }, function (err, result) { - * // result is equal to {a: 2, b: 4, c: 6} - * }) - */ -function transform(coll, accumulator, iteratee, callback) { - if (arguments.length <= 3) { - callback = iteratee; - iteratee = accumulator; - accumulator = (0, _isArray2.default)(coll) ? 
[] : {}; - } - callback = (0, _once2.default)(callback || _noop2.default); - var _iteratee = (0, _wrapAsync2.default)(iteratee); - - (0, _eachOf2.default)(coll, function (v, k, cb) { - _iteratee(accumulator, v, k, cb); - }, function (err) { - callback(err, accumulator); - }); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/tryEach.js b/node_modules/async/tryEach.js deleted file mode 100644 index f4e4c97..0000000 --- a/node_modules/async/tryEach.js +++ /dev/null @@ -1,81 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = tryEach; - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _eachSeries = require('./eachSeries'); - -var _eachSeries2 = _interopRequireDefault(_eachSeries); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -var _slice = require('./internal/slice'); - -var _slice2 = _interopRequireDefault(_slice); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * It runs each task in series but stops whenever any of the functions were - * successful. If one of the tasks were successful, the `callback` will be - * passed the result of the successful task. If all tasks fail, the callback - * will be passed the error and result (if any) of the final attempt. - * - * @name tryEach - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|Object} tasks - A collection containing functions to - * run, each function is passed a `callback(err, result)` it must call on - * completion with an error `err` (which can be `null`) and an optional `result` - * value. - * @param {Function} [callback] - An optional callback which is called when one - * of the tasks has succeeded, or all have failed. It receives the `err` and - * `result` arguments of the last attempt at completing the `task`. Invoked with - * (err, results). - * @example - * async.tryEach([ - * function getDataFromFirstWebsite(callback) { - * // Try getting the data from the first website - * callback(err, data); - * }, - * function getDataFromSecondWebsite(callback) { - * // First website failed, - * // Try getting the data from the backup website - * callback(err, data); - * } - * ], - * // optional callback - * function(err, results) { - * Now do something with the data. - * }); - * - */ -function tryEach(tasks, callback) { - var error = null; - var result; - callback = callback || _noop2.default; - (0, _eachSeries2.default)(tasks, function (task, callback) { - (0, _wrapAsync2.default)(task)(function (err, res /*, ...args*/) { - if (arguments.length > 2) { - result = (0, _slice2.default)(arguments, 1); - } else { - result = res; - } - error = err; - callback(!err); - }); - }, function () { - callback(error, result); - }); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/unmemoize.js b/node_modules/async/unmemoize.js deleted file mode 100644 index 08f9f9f..0000000 --- a/node_modules/async/unmemoize.js +++ /dev/null @@ -1,25 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = unmemoize; -/** - * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, - * unmemoized form. Handy for testing. 
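A small sketch of the `memoize`/`unmemoize` round trip described above, assuming an illustrative `slowLookup` function:

```javascript
var async = require('async');

// illustrative async function (not from the deleted files)
function slowLookup(id, callback) {
  setTimeout(function () { callback(null, 'user-' + id); }, 100);
}

var memoized = async.memoize(slowLookup);
memoized(1, function (err, value) { /* computed once, then cached by id */ });

// in tests, drop the cache and go back to the original behaviour
var plain = async.unmemoize(memoized);
plain(1, function (err, value) { /* always calls slowLookup */ });
```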
- * - * @name unmemoize - * @static - * @memberOf module:Utils - * @method - * @see [async.memoize]{@link module:Utils.memoize} - * @category Util - * @param {AsyncFunction} fn - the memoized function - * @returns {AsyncFunction} a function that calls the original unmemoized function - */ -function unmemoize(fn) { - return function () { - return (fn.unmemoized || fn).apply(null, arguments); - }; -} -module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/until.js b/node_modules/async/until.js deleted file mode 100644 index 29955ab..0000000 --- a/node_modules/async/until.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = until; - -var _whilst = require('./whilst'); - -var _whilst2 = _interopRequireDefault(_whilst); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when - * stopped, or an error occurs. `callback` will be passed an error and any - * arguments passed to the final `iteratee`'s callback. - * - * The inverse of [whilst]{@link module:ControlFlow.whilst}. - * - * @name until - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.whilst]{@link module:ControlFlow.whilst} - * @category Control Flow - * @param {Function} test - synchronous truth test to perform before each - * execution of `iteratee`. Invoked with (). - * @param {AsyncFunction} iteratee - An async function which is called each time - * `test` fails. Invoked with (callback). - * @param {Function} [callback] - A callback which is called after the test - * function has passed and repeated execution of `iteratee` has stopped. `callback` - * will be passed an error and any arguments passed to the final `iteratee`'s - * callback. 
Invoked with (err, [results]); - */ -function until(test, iteratee, callback) { - (0, _whilst2.default)(function () { - return !test.apply(this, arguments); - }, iteratee, callback); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/waterfall.js b/node_modules/async/waterfall.js deleted file mode 100644 index d547d6b..0000000 --- a/node_modules/async/waterfall.js +++ /dev/null @@ -1,113 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); - -exports.default = function (tasks, callback) { - callback = (0, _once2.default)(callback || _noop2.default); - if (!(0, _isArray2.default)(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); - if (!tasks.length) return callback(); - var taskIndex = 0; - - function nextTask(args) { - var task = (0, _wrapAsync2.default)(tasks[taskIndex++]); - args.push((0, _onlyOnce2.default)(next)); - task.apply(null, args); - } - - function next(err /*, ...args*/) { - if (err || taskIndex === tasks.length) { - return callback.apply(null, arguments); - } - nextTask((0, _slice2.default)(arguments, 1)); - } - - nextTask([]); -}; - -var _isArray = require('lodash/isArray'); - -var _isArray2 = _interopRequireDefault(_isArray); - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _once = require('./internal/once'); - -var _once2 = _interopRequireDefault(_once); - -var _slice = require('./internal/slice'); - -var _slice2 = _interopRequireDefault(_slice); - -var _onlyOnce = require('./internal/onlyOnce'); - -var _onlyOnce2 = _interopRequireDefault(_onlyOnce); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -module.exports = exports['default']; - -/** - * Runs the `tasks` array of functions in series, each passing their results to - * the next in the array. However, if any of the `tasks` pass an error to their - * own callback, the next function is not executed, and the main `callback` is - * immediately called with the error. - * - * @name waterfall - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} - * to run. - * Each function should complete with any number of `result` values. - * The `result` values will be passed as arguments, in order, to the next task. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed. This will be passed the results of the last task's - * callback. Invoked with (err, [results]). 
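The `until` function defined above carries no usage example in its doc block. A hedged sketch, mirroring the counter-based example given for `whilst`:

```javascript
var async = require('async');

var count = 0;
async.until(
  // synchronous test; iteration stops once it returns true
  function () { return count >= 5; },
  function (callback) {
    count++;
    setTimeout(function () {
      callback(null, count);
    }, 100);
  },
  function (err, result) {
    // test has passed: count === 5, result === 5
  }
);
```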
- * @returns undefined - * @example - * - * async.waterfall([ - * function(callback) { - * callback(null, 'one', 'two'); - * }, - * function(arg1, arg2, callback) { - * // arg1 now equals 'one' and arg2 now equals 'two' - * callback(null, 'three'); - * }, - * function(arg1, callback) { - * // arg1 now equals 'three' - * callback(null, 'done'); - * } - * ], function (err, result) { - * // result now equals 'done' - * }); - * - * // Or, with named functions: - * async.waterfall([ - * myFirstFunction, - * mySecondFunction, - * myLastFunction, - * ], function (err, result) { - * // result now equals 'done' - * }); - * function myFirstFunction(callback) { - * callback(null, 'one', 'two'); - * } - * function mySecondFunction(arg1, arg2, callback) { - * // arg1 now equals 'one' and arg2 now equals 'two' - * callback(null, 'three'); - * } - * function myLastFunction(arg1, callback) { - * // arg1 now equals 'three' - * callback(null, 'done'); - * } - */ \ No newline at end of file diff --git a/node_modules/async/whilst.js b/node_modules/async/whilst.js deleted file mode 100644 index 9c4d8f6..0000000 --- a/node_modules/async/whilst.js +++ /dev/null @@ -1,72 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = whilst; - -var _noop = require('lodash/noop'); - -var _noop2 = _interopRequireDefault(_noop); - -var _slice = require('./internal/slice'); - -var _slice2 = _interopRequireDefault(_slice); - -var _onlyOnce = require('./internal/onlyOnce'); - -var _onlyOnce2 = _interopRequireDefault(_onlyOnce); - -var _wrapAsync = require('./internal/wrapAsync'); - -var _wrapAsync2 = _interopRequireDefault(_wrapAsync); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when - * stopped, or an error occurs. - * - * @name whilst - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Function} test - synchronous truth test to perform before each - * execution of `iteratee`. Invoked with (). - * @param {AsyncFunction} iteratee - An async function which is called each time - * `test` passes. Invoked with (callback). - * @param {Function} [callback] - A callback which is called after the test - * function has failed and repeated execution of `iteratee` has stopped. `callback` - * will be passed an error and any arguments passed to the final `iteratee`'s - * callback. 
Invoked with (err, [results]); - * @returns undefined - * @example - * - * var count = 0; - * async.whilst( - * function() { return count < 5; }, - * function(callback) { - * count++; - * setTimeout(function() { - * callback(null, count); - * }, 1000); - * }, - * function (err, n) { - * // 5 seconds have passed, n = 5 - * } - * ); - */ -function whilst(test, iteratee, callback) { - callback = (0, _onlyOnce2.default)(callback || _noop2.default); - var _iteratee = (0, _wrapAsync2.default)(iteratee); - if (!test()) return callback(null); - var next = function (err /*, ...args*/) { - if (err) return callback(err); - if (test()) return _iteratee(next); - var args = (0, _slice2.default)(arguments, 1); - callback.apply(null, [null].concat(args)); - }; - _iteratee(next); -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/wrapSync.js b/node_modules/async/wrapSync.js deleted file mode 100644 index 5e3fc91..0000000 --- a/node_modules/async/wrapSync.js +++ /dev/null @@ -1,110 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = asyncify; - -var _isObject = require('lodash/isObject'); - -var _isObject2 = _interopRequireDefault(_isObject); - -var _initialParams = require('./internal/initialParams'); - -var _initialParams2 = _interopRequireDefault(_initialParams); - -var _setImmediate = require('./internal/setImmediate'); - -var _setImmediate2 = _interopRequireDefault(_setImmediate); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Take a sync function and make it async, passing its return value to a - * callback. This is useful for plugging sync functions into a waterfall, - * series, or other async functions. Any arguments passed to the generated - * function will be passed to the wrapped function (except for the final - * callback argument). Errors thrown will be passed to the callback. - * - * If the function passed to `asyncify` returns a Promise, that promises's - * resolved/rejected state will be used to call the callback, rather than simply - * the synchronous return value. - * - * This also means you can asyncify ES2017 `async` functions. - * - * @name asyncify - * @static - * @memberOf module:Utils - * @method - * @alias wrapSync - * @category Util - * @param {Function} func - The synchronous function, or Promise-returning - * function to convert to an {@link AsyncFunction}. - * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be - * invoked with `(args..., callback)`. - * @example - * - * // passing a regular synchronous function - * async.waterfall([ - * async.apply(fs.readFile, filename, "utf8"), - * async.asyncify(JSON.parse), - * function (data, next) { - * // data is the result of parsing the text. - * // If there was a parsing error, it would have been caught. - * } - * ], callback); - * - * // passing a function returning a promise - * async.waterfall([ - * async.apply(fs.readFile, filename, "utf8"), - * async.asyncify(function (contents) { - * return db.model.create(contents); - * }), - * function (model, next) { - * // `model` is the instantiated model object. - * // If there was an error, this function would be skipped. 
- * } - * ], callback); - * - * // es2017 example, though `asyncify` is not needed if your JS environment - * // supports async functions out of the box - * var q = async.queue(async.asyncify(async function(file) { - * var intermediateStep = await processFile(file); - * return await somePromise(intermediateStep) - * })); - * - * q.push(files); - */ -function asyncify(func) { - return (0, _initialParams2.default)(function (args, callback) { - var result; - try { - result = func.apply(this, args); - } catch (e) { - return callback(e); - } - // if result is Promise object - if ((0, _isObject2.default)(result) && typeof result.then === 'function') { - result.then(function (value) { - invokeCallback(callback, null, value); - }, function (err) { - invokeCallback(callback, err.message ? err : new Error(err)); - }); - } else { - callback(null, result); - } - }); -} - -function invokeCallback(callback, error, value) { - try { - callback(error, value); - } catch (e) { - (0, _setImmediate2.default)(rethrow, e); - } -} - -function rethrow(error) { - throw error; -} -module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/asynckit/LICENSE b/node_modules/asynckit/LICENSE deleted file mode 100644 index c9eca5d..0000000 --- a/node_modules/asynckit/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2016 Alex Indigo - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/asynckit/README.md b/node_modules/asynckit/README.md deleted file mode 100644 index ddcc7e6..0000000 --- a/node_modules/asynckit/README.md +++ /dev/null @@ -1,233 +0,0 @@ -# asynckit [![NPM Module](https://img.shields.io/npm/v/asynckit.svg?style=flat)](https://www.npmjs.com/package/asynckit) - -Minimal async jobs utility library, with streams support. 
- -[![PhantomJS Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=browser&style=flat)](https://travis-ci.org/alexindigo/asynckit) -[![Linux Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=linux:0.12-6.x&style=flat)](https://travis-ci.org/alexindigo/asynckit) -[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/asynckit/v0.4.0.svg?label=windows:0.12-6.x&style=flat)](https://ci.appveyor.com/project/alexindigo/asynckit) - -[![Coverage Status](https://img.shields.io/coveralls/alexindigo/asynckit/v0.4.0.svg?label=code+coverage&style=flat)](https://coveralls.io/github/alexindigo/asynckit?branch=master) -[![Dependency Status](https://img.shields.io/david/alexindigo/asynckit/v0.4.0.svg?style=flat)](https://david-dm.org/alexindigo/asynckit) -[![bitHound Overall Score](https://www.bithound.io/github/alexindigo/asynckit/badges/score.svg)](https://www.bithound.io/github/alexindigo/asynckit) - - - -AsyncKit provides harness for `parallel` and `serial` iterators over list of items represented by arrays or objects. -Optionally it accepts abort function (should be synchronously return by iterator for each item), and terminates left over jobs upon an error event. For specific iteration order built-in (`ascending` and `descending`) and custom sort helpers also supported, via `asynckit.serialOrdered` method. - -It ensures async operations to keep behavior more stable and prevent `Maximum call stack size exceeded` errors, from sync iterators. - -| compression | size | -| :----------------- | -------: | -| asynckit.js | 12.34 kB | -| asynckit.min.js | 4.11 kB | -| asynckit.min.js.gz | 1.47 kB | - - -## Install - -```sh -$ npm install --save asynckit -``` - -## Examples - -### Parallel Jobs - -Runs iterator over provided array in parallel. Stores output in the `result` array, -on the matching positions. In unlikely event of an error from one of the jobs, -will terminate rest of the active jobs (if abort function is provided) -and return error along with salvaged data to the main callback function. - -#### Input Array - -```javascript -var parallel = require('asynckit').parallel - , assert = require('assert') - ; - -var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] - , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] - , expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ] - , target = [] - ; - -parallel(source, asyncJob, function(err, result) -{ - assert.deepEqual(result, expectedResult); - assert.deepEqual(target, expectedTarget); -}); - -// async job accepts one element from the array -// and a callback function -function asyncJob(item, cb) -{ - // different delays (in ms) per item - var delay = item * 25; - - // pretend different jobs take different time to finish - // and not in consequential order - var timeoutId = setTimeout(function() { - target.push(item); - cb(null, item * 2); - }, delay); - - // allow to cancel "leftover" jobs upon error - // return function, invoking of which will abort this job - return clearTimeout.bind(null, timeoutId); -} -``` - -More examples could be found in [test/test-parallel-array.js](test/test-parallel-array.js). - -#### Input Object - -Also it supports named jobs, listed via object. 
- -```javascript -var parallel = require('asynckit/parallel') - , assert = require('assert') - ; - -var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 } - , expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 } - , expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ] - , expectedKeys = [ 'first', 'one', 'two', 'four', 'eight', 'sixteen', 'thirtyTwo', 'sixtyFour' ] - , target = [] - , keys = [] - ; - -parallel(source, asyncJob, function(err, result) -{ - assert.deepEqual(result, expectedResult); - assert.deepEqual(target, expectedTarget); - assert.deepEqual(keys, expectedKeys); -}); - -// supports full value, key, callback (shortcut) interface -function asyncJob(item, key, cb) -{ - // different delays (in ms) per item - var delay = item * 25; - - // pretend different jobs take different time to finish - // and not in consequential order - var timeoutId = setTimeout(function() { - keys.push(key); - target.push(item); - cb(null, item * 2); - }, delay); - - // allow to cancel "leftover" jobs upon error - // return function, invoking of which will abort this job - return clearTimeout.bind(null, timeoutId); -} -``` - -More examples could be found in [test/test-parallel-object.js](test/test-parallel-object.js). - -### Serial Jobs - -Runs iterator over provided array sequentially. Stores output in the `result` array, -on the matching positions. In unlikely event of an error from one of the jobs, -will not proceed to the rest of the items in the list -and return error along with salvaged data to the main callback function. - -#### Input Array - -```javascript -var serial = require('asynckit/serial') - , assert = require('assert') - ; - -var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] - , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] - , expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ] - , target = [] - ; - -serial(source, asyncJob, function(err, result) -{ - assert.deepEqual(result, expectedResult); - assert.deepEqual(target, expectedTarget); -}); - -// extended interface (item, key, callback) -// also supported for arrays -function asyncJob(item, key, cb) -{ - target.push(key); - - // it will be automatically made async - // even it iterator "returns" in the same event loop - cb(null, item * 2); -} -``` - -More examples could be found in [test/test-serial-array.js](test/test-serial-array.js). - -#### Input Object - -Also it supports named jobs, listed via object. - -```javascript -var serial = require('asynckit').serial - , assert = require('assert') - ; - -var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] - , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] - , expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ] - , target = [] - ; - -var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 } - , expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 } - , expectedTarget = [ 1, 1, 4, 16, 64, 32, 8, 2 ] - , target = [] - ; - - -serial(source, asyncJob, function(err, result) -{ - assert.deepEqual(result, expectedResult); - assert.deepEqual(target, expectedTarget); -}); - -// shortcut interface (item, callback) -// works for object as well as for the arrays -function asyncJob(item, cb) -{ - target.push(item); - - // it will be automatically made async - // even it iterator "returns" in the same event loop - cb(null, item * 2); -} -``` - -More examples could be found in [test/test-serial-object.js](test/test-serial-object.js). 
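The "Ordered Serial Iterations" section below is still marked TBD. Based on the `serialOrdered.js` and `state.js` sources removed later in this diff, a usage sketch might look like the following; note that for an object input the sort method receives the keys, and the built-in `ascending`/`descending` helpers are exposed on the `serialOrdered` module itself:

```javascript
var serialOrdered = require('asynckit/serialOrdered')
  , assert        = require('assert')
  ;

// illustrative named list (not from the deleted files)
var source = { c: 3, a: 1, b: 2 }
  , keys   = []
  ;

// keys are visited one at a time, in ascending key order
serialOrdered(source, function (item, key, cb) {
  keys.push(key);
  cb(null, item * 2);
}, serialOrdered.ascending, function (err, result) {
  assert.deepEqual(keys, [ 'a', 'b', 'c' ]);
  assert.deepEqual(result, { a: 2, b: 4, c: 6 });
});
```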
- -_Note: Since _object_ is an _unordered_ collection of properties, -it may produce unexpected results with sequential iterations. -Whenever order of the jobs' execution is important please use `serialOrdered` method._ - -### Ordered Serial Iterations - -TBD - -For example [compare-property](compare-property) package. - -### Streaming interface - -TBD - -## Want to Know More? - -More examples can be found in [test folder](test/). - -Or open an [issue](https://github.com/alexindigo/asynckit/issues) with questions and/or suggestions. - -## License - -AsyncKit is licensed under the MIT license. diff --git a/node_modules/asynckit/bench.js b/node_modules/asynckit/bench.js deleted file mode 100644 index c612f1a..0000000 --- a/node_modules/asynckit/bench.js +++ /dev/null @@ -1,76 +0,0 @@ -/* eslint no-console: "off" */ - -var asynckit = require('./') - , async = require('async') - , assert = require('assert') - , expected = 0 - ; - -var Benchmark = require('benchmark'); -var suite = new Benchmark.Suite; - -var source = []; -for (var z = 1; z < 100; z++) -{ - source.push(z); - expected += z; -} - -suite -// add tests - -.add('async.map', function(deferred) -{ - var total = 0; - - async.map(source, - function(i, cb) - { - setImmediate(function() - { - total += i; - cb(null, total); - }); - }, - function(err, result) - { - assert.ifError(err); - assert.equal(result[result.length - 1], expected); - deferred.resolve(); - }); -}, {'defer': true}) - - -.add('asynckit.parallel', function(deferred) -{ - var total = 0; - - asynckit.parallel(source, - function(i, cb) - { - setImmediate(function() - { - total += i; - cb(null, total); - }); - }, - function(err, result) - { - assert.ifError(err); - assert.equal(result[result.length - 1], expected); - deferred.resolve(); - }); -}, {'defer': true}) - - -// add listeners -.on('cycle', function(ev) -{ - console.log(String(ev.target)); -}) -.on('complete', function() -{ - console.log('Fastest is ' + this.filter('fastest').map('name')); -}) -// run async -.run({ 'async': true }); diff --git a/node_modules/asynckit/index.js b/node_modules/asynckit/index.js deleted file mode 100644 index 455f945..0000000 --- a/node_modules/asynckit/index.js +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = -{ - parallel : require('./parallel.js'), - serial : require('./serial.js'), - serialOrdered : require('./serialOrdered.js') -}; diff --git a/node_modules/asynckit/lib/abort.js b/node_modules/asynckit/lib/abort.js deleted file mode 100644 index 114367e..0000000 --- a/node_modules/asynckit/lib/abort.js +++ /dev/null @@ -1,29 +0,0 @@ -// API -module.exports = abort; - -/** - * Aborts leftover active jobs - * - * @param {object} state - current state object - */ -function abort(state) -{ - Object.keys(state.jobs).forEach(clean.bind(state)); - - // reset leftover jobs - state.jobs = {}; -} - -/** - * Cleans up leftover job by invoking abort function for the provided job id - * - * @this state - * @param {string|number} key - job id to abort - */ -function clean(key) -{ - if (typeof this.jobs[key] == 'function') - { - this.jobs[key](); - } -} diff --git a/node_modules/asynckit/lib/async.js b/node_modules/asynckit/lib/async.js deleted file mode 100644 index 7f1288a..0000000 --- a/node_modules/asynckit/lib/async.js +++ /dev/null @@ -1,34 +0,0 @@ -var defer = require('./defer.js'); - -// API -module.exports = async; - -/** - * Runs provided callback asynchronously - * even if callback itself is not - * - * @param {function} callback - callback to invoke - * @returns {function} - augmented 
callback - */ -function async(callback) -{ - var isAsync = false; - - // check if async happened - defer(function() { isAsync = true; }); - - return function async_callback(err, result) - { - if (isAsync) - { - callback(err, result); - } - else - { - defer(function nextTick_callback() - { - callback(err, result); - }); - } - }; -} diff --git a/node_modules/asynckit/lib/defer.js b/node_modules/asynckit/lib/defer.js deleted file mode 100644 index b67110c..0000000 --- a/node_modules/asynckit/lib/defer.js +++ /dev/null @@ -1,26 +0,0 @@ -module.exports = defer; - -/** - * Runs provided function on next iteration of the event loop - * - * @param {function} fn - function to run - */ -function defer(fn) -{ - var nextTick = typeof setImmediate == 'function' - ? setImmediate - : ( - typeof process == 'object' && typeof process.nextTick == 'function' - ? process.nextTick - : null - ); - - if (nextTick) - { - nextTick(fn); - } - else - { - setTimeout(fn, 0); - } -} diff --git a/node_modules/asynckit/lib/iterate.js b/node_modules/asynckit/lib/iterate.js deleted file mode 100644 index 5d2839a..0000000 --- a/node_modules/asynckit/lib/iterate.js +++ /dev/null @@ -1,75 +0,0 @@ -var async = require('./async.js') - , abort = require('./abort.js') - ; - -// API -module.exports = iterate; - -/** - * Iterates over each job object - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {object} state - current job status - * @param {function} callback - invoked when all elements processed - */ -function iterate(list, iterator, state, callback) -{ - // store current index - var key = state['keyedList'] ? state['keyedList'][state.index] : state.index; - - state.jobs[key] = runJob(iterator, key, list[key], function(error, output) - { - // don't repeat yourself - // skip secondary callbacks - if (!(key in state.jobs)) - { - return; - } - - // clean up jobs - delete state.jobs[key]; - - if (error) - { - // don't process rest of the results - // stop still active jobs - // and reset the list - abort(state); - } - else - { - state.results[key] = output; - } - - // return salvaged results - callback(error, state.results); - }); -} - -/** - * Runs iterator over provided job element - * - * @param {function} iterator - iterator to invoke - * @param {string|number} key - key/index of the element in the list of jobs - * @param {mixed} item - job description - * @param {function} callback - invoked after iterator is done with the job - * @returns {function|mixed} - job abort function or something else - */ -function runJob(iterator, key, item, callback) -{ - var aborter; - - // allow shortcut if iterator expects only two arguments - if (iterator.length == 2) - { - aborter = iterator(item, async(callback)); - } - // otherwise go with full three arguments - else - { - aborter = iterator(item, key, async(callback)); - } - - return aborter; -} diff --git a/node_modules/asynckit/lib/readable_asynckit.js b/node_modules/asynckit/lib/readable_asynckit.js deleted file mode 100644 index 78ad240..0000000 --- a/node_modules/asynckit/lib/readable_asynckit.js +++ /dev/null @@ -1,91 +0,0 @@ -var streamify = require('./streamify.js') - , defer = require('./defer.js') - ; - -// API -module.exports = ReadableAsyncKit; - -/** - * Base constructor for all streams - * used to hold properties/methods - */ -function ReadableAsyncKit() -{ - ReadableAsyncKit.super_.apply(this, arguments); - - // list of active jobs - this.jobs = {}; - - // add stream methods - 
this.destroy = destroy; - this._start = _start; - this._read = _read; -} - -/** - * Destroys readable stream, - * by aborting outstanding jobs - * - * @returns {void} - */ -function destroy() -{ - if (this.destroyed) - { - return; - } - - this.destroyed = true; - - if (typeof this.terminator == 'function') - { - this.terminator(); - } -} - -/** - * Starts provided jobs in async manner - * - * @private - */ -function _start() -{ - // first argument – runner function - var runner = arguments[0] - // take away first argument - , args = Array.prototype.slice.call(arguments, 1) - // second argument - input data - , input = args[0] - // last argument - result callback - , endCb = streamify.callback.call(this, args[args.length - 1]) - ; - - args[args.length - 1] = endCb; - // third argument - iterator - args[1] = streamify.iterator.call(this, args[1]); - - // allow time for proper setup - defer(function() - { - if (!this.destroyed) - { - this.terminator = runner.apply(null, args); - } - else - { - endCb(null, Array.isArray(input) ? [] : {}); - } - }.bind(this)); -} - - -/** - * Implement _read to comply with Readable streams - * Doesn't really make sense for flowing object mode - * - * @private - */ -function _read() -{ - -} diff --git a/node_modules/asynckit/lib/readable_parallel.js b/node_modules/asynckit/lib/readable_parallel.js deleted file mode 100644 index 5d2929f..0000000 --- a/node_modules/asynckit/lib/readable_parallel.js +++ /dev/null @@ -1,25 +0,0 @@ -var parallel = require('../parallel.js'); - -// API -module.exports = ReadableParallel; - -/** - * Streaming wrapper to `asynckit.parallel` - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {stream.Readable#} - */ -function ReadableParallel(list, iterator, callback) -{ - if (!(this instanceof ReadableParallel)) - { - return new ReadableParallel(list, iterator, callback); - } - - // turn on object mode - ReadableParallel.super_.call(this, {objectMode: true}); - - this._start(parallel, list, iterator, callback); -} diff --git a/node_modules/asynckit/lib/readable_serial.js b/node_modules/asynckit/lib/readable_serial.js deleted file mode 100644 index 7822698..0000000 --- a/node_modules/asynckit/lib/readable_serial.js +++ /dev/null @@ -1,25 +0,0 @@ -var serial = require('../serial.js'); - -// API -module.exports = ReadableSerial; - -/** - * Streaming wrapper to `asynckit.serial` - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {stream.Readable#} - */ -function ReadableSerial(list, iterator, callback) -{ - if (!(this instanceof ReadableSerial)) - { - return new ReadableSerial(list, iterator, callback); - } - - // turn on object mode - ReadableSerial.super_.call(this, {objectMode: true}); - - this._start(serial, list, iterator, callback); -} diff --git a/node_modules/asynckit/lib/readable_serial_ordered.js b/node_modules/asynckit/lib/readable_serial_ordered.js deleted file mode 100644 index 3de89c4..0000000 --- a/node_modules/asynckit/lib/readable_serial_ordered.js +++ /dev/null @@ -1,29 +0,0 @@ -var serialOrdered = require('../serialOrdered.js'); - -// API -module.exports = ReadableSerialOrdered; -// expose sort helpers -module.exports.ascending = serialOrdered.ascending; -module.exports.descending = 
serialOrdered.descending; - -/** - * Streaming wrapper to `asynckit.serialOrdered` - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} sortMethod - custom sort function - * @param {function} callback - invoked when all elements processed - * @returns {stream.Readable#} - */ -function ReadableSerialOrdered(list, iterator, sortMethod, callback) -{ - if (!(this instanceof ReadableSerialOrdered)) - { - return new ReadableSerialOrdered(list, iterator, sortMethod, callback); - } - - // turn on object mode - ReadableSerialOrdered.super_.call(this, {objectMode: true}); - - this._start(serialOrdered, list, iterator, sortMethod, callback); -} diff --git a/node_modules/asynckit/lib/state.js b/node_modules/asynckit/lib/state.js deleted file mode 100644 index cbea7ad..0000000 --- a/node_modules/asynckit/lib/state.js +++ /dev/null @@ -1,37 +0,0 @@ -// API -module.exports = state; - -/** - * Creates initial state object - * for iteration over list - * - * @param {array|object} list - list to iterate over - * @param {function|null} sortMethod - function to use for keys sort, - * or `null` to keep them as is - * @returns {object} - initial state object - */ -function state(list, sortMethod) -{ - var isNamedList = !Array.isArray(list) - , initState = - { - index : 0, - keyedList: isNamedList || sortMethod ? Object.keys(list) : null, - jobs : {}, - results : isNamedList ? {} : [], - size : isNamedList ? Object.keys(list).length : list.length - } - ; - - if (sortMethod) - { - // sort array keys based on it's values - // sort object's keys just on own merit - initState.keyedList.sort(isNamedList ? sortMethod : function(a, b) - { - return sortMethod(list[a], list[b]); - }); - } - - return initState; -} diff --git a/node_modules/asynckit/lib/streamify.js b/node_modules/asynckit/lib/streamify.js deleted file mode 100644 index f56a1c9..0000000 --- a/node_modules/asynckit/lib/streamify.js +++ /dev/null @@ -1,141 +0,0 @@ -var async = require('./async.js'); - -// API -module.exports = { - iterator: wrapIterator, - callback: wrapCallback -}; - -/** - * Wraps iterators with long signature - * - * @this ReadableAsyncKit# - * @param {function} iterator - function to wrap - * @returns {function} - wrapped function - */ -function wrapIterator(iterator) -{ - var stream = this; - - return function(item, key, cb) - { - var aborter - , wrappedCb = async(wrapIteratorCallback.call(stream, cb, key)) - ; - - stream.jobs[key] = wrappedCb; - - // it's either shortcut (item, cb) - if (iterator.length == 2) - { - aborter = iterator(item, wrappedCb); - } - // or long format (item, key, cb) - else - { - aborter = iterator(item, key, wrappedCb); - } - - return aborter; - }; -} - -/** - * Wraps provided callback function - * allowing to execute snitch function before - * real callback - * - * @this ReadableAsyncKit# - * @param {function} callback - function to wrap - * @returns {function} - wrapped function - */ -function wrapCallback(callback) -{ - var stream = this; - - var wrapped = function(error, result) - { - return finisher.call(stream, error, result, callback); - }; - - return wrapped; -} - -/** - * Wraps provided iterator callback function - * makes sure snitch only called once, - * but passes secondary calls to the original callback - * - * @this ReadableAsyncKit# - * @param {function} callback - callback to wrap - * @param {number|string} key - iteration key - * @returns {function} wrapped callback - */ -function 
wrapIteratorCallback(callback, key) -{ - var stream = this; - - return function(error, output) - { - // don't repeat yourself - if (!(key in stream.jobs)) - { - callback(error, output); - return; - } - - // clean up jobs - delete stream.jobs[key]; - - return streamer.call(stream, error, {key: key, value: output}, callback); - }; -} - -/** - * Stream wrapper for iterator callback - * - * @this ReadableAsyncKit# - * @param {mixed} error - error response - * @param {mixed} output - iterator output - * @param {function} callback - callback that expects iterator results - */ -function streamer(error, output, callback) -{ - if (error && !this.error) - { - this.error = error; - this.pause(); - this.emit('error', error); - // send back value only, as expected - callback(error, output && output.value); - return; - } - - // stream stuff - this.push(output); - - // back to original track - // send back value only, as expected - callback(error, output && output.value); -} - -/** - * Stream wrapper for finishing callback - * - * @this ReadableAsyncKit# - * @param {mixed} error - error response - * @param {mixed} output - iterator output - * @param {function} callback - callback that expects final results - */ -function finisher(error, output, callback) -{ - // signal end of the stream - // only for successfully finished streams - if (!error) - { - this.push(null); - } - - // back to original track - callback(error, output); -} diff --git a/node_modules/asynckit/lib/terminator.js b/node_modules/asynckit/lib/terminator.js deleted file mode 100644 index d6eb992..0000000 --- a/node_modules/asynckit/lib/terminator.js +++ /dev/null @@ -1,29 +0,0 @@ -var abort = require('./abort.js') - , async = require('./async.js') - ; - -// API -module.exports = terminator; - -/** - * Terminates jobs in the attached state context - * - * @this AsyncKitState# - * @param {function} callback - final callback to invoke after termination - */ -function terminator(callback) -{ - if (!Object.keys(this.jobs).length) - { - return; - } - - // fast forward iteration index - this.index = this.size; - - // abort jobs - abort(this); - - // send back results we have so far - async(callback)(null, this.results); -} diff --git a/node_modules/asynckit/package.json b/node_modules/asynckit/package.json deleted file mode 100644 index 51147d6..0000000 --- a/node_modules/asynckit/package.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "name": "asynckit", - "version": "0.4.0", - "description": "Minimal async jobs utility library, with streams support", - "main": "index.js", - "scripts": { - "clean": "rimraf coverage", - "lint": "eslint *.js lib/*.js test/*.js", - "test": "istanbul cover --reporter=json tape -- 'test/test-*.js' | tap-spec", - "win-test": "tape test/test-*.js", - "browser": "browserify -t browserify-istanbul test/lib/browserify_adjustment.js test/test-*.js | obake --coverage | tap-spec", - "report": "istanbul report", - "size": "browserify index.js | size-table asynckit", - "debug": "tape test/test-*.js" - }, - "pre-commit": [ - "clean", - "lint", - "test", - "browser", - "report", - "size" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/alexindigo/asynckit.git" - }, - "keywords": [ - "async", - "jobs", - "parallel", - "serial", - "iterator", - "array", - "object", - "stream", - "destroy", - "terminate", - "abort" - ], - "author": "Alex Indigo ", - "license": "MIT", - "bugs": { - "url": "https://github.com/alexindigo/asynckit/issues" - }, - "homepage": "https://github.com/alexindigo/asynckit#readme", - 
"devDependencies": { - "browserify": "^13.0.0", - "browserify-istanbul": "^2.0.0", - "coveralls": "^2.11.9", - "eslint": "^2.9.0", - "istanbul": "^0.4.3", - "obake": "^0.1.2", - "phantomjs-prebuilt": "^2.1.7", - "pre-commit": "^1.1.3", - "reamde": "^1.1.0", - "rimraf": "^2.5.2", - "size-table": "^0.2.0", - "tap-spec": "^4.1.1", - "tape": "^4.5.1" - }, - "dependencies": {} -} diff --git a/node_modules/asynckit/parallel.js b/node_modules/asynckit/parallel.js deleted file mode 100644 index 3c50344..0000000 --- a/node_modules/asynckit/parallel.js +++ /dev/null @@ -1,43 +0,0 @@ -var iterate = require('./lib/iterate.js') - , initState = require('./lib/state.js') - , terminator = require('./lib/terminator.js') - ; - -// Public API -module.exports = parallel; - -/** - * Runs iterator over provided array elements in parallel - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function parallel(list, iterator, callback) -{ - var state = initState(list); - - while (state.index < (state['keyedList'] || list).length) - { - iterate(list, iterator, state, function(error, result) - { - if (error) - { - callback(error, result); - return; - } - - // looks like it's the last one - if (Object.keys(state.jobs).length === 0) - { - callback(null, state.results); - return; - } - }); - - state.index++; - } - - return terminator.bind(state, callback); -} diff --git a/node_modules/asynckit/serial.js b/node_modules/asynckit/serial.js deleted file mode 100644 index 6cd949a..0000000 --- a/node_modules/asynckit/serial.js +++ /dev/null @@ -1,17 +0,0 @@ -var serialOrdered = require('./serialOrdered.js'); - -// Public API -module.exports = serial; - -/** - * Runs iterator over provided array elements in series - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function serial(list, iterator, callback) -{ - return serialOrdered(list, iterator, null, callback); -} diff --git a/node_modules/asynckit/serialOrdered.js b/node_modules/asynckit/serialOrdered.js deleted file mode 100644 index 607eafe..0000000 --- a/node_modules/asynckit/serialOrdered.js +++ /dev/null @@ -1,75 +0,0 @@ -var iterate = require('./lib/iterate.js') - , initState = require('./lib/state.js') - , terminator = require('./lib/terminator.js') - ; - -// Public API -module.exports = serialOrdered; -// sorting helpers -module.exports.ascending = ascending; -module.exports.descending = descending; - -/** - * Runs iterator over provided sorted array elements in series - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} sortMethod - custom sort function - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function serialOrdered(list, iterator, sortMethod, callback) -{ - var state = initState(list, sortMethod); - - iterate(list, iterator, state, function iteratorHandler(error, result) - { - if (error) - { - callback(error, result); - return; - } - - state.index++; - - // are we there yet? 
- if (state.index < (state['keyedList'] || list).length) - { - iterate(list, iterator, state, iteratorHandler); - return; - } - - // done here - callback(null, state.results); - }); - - return terminator.bind(state, callback); -} - -/* - * -- Sort methods - */ - -/** - * sort helper to sort array elements in ascending order - * - * @param {mixed} a - an item to compare - * @param {mixed} b - an item to compare - * @returns {number} - comparison result - */ -function ascending(a, b) -{ - return a < b ? -1 : a > b ? 1 : 0; -} - -/** - * sort helper to sort array elements in descending order - * - * @param {mixed} a - an item to compare - * @param {mixed} b - an item to compare - * @returns {number} - comparison result - */ -function descending(a, b) -{ - return -1 * ascending(a, b); -} diff --git a/node_modules/asynckit/stream.js b/node_modules/asynckit/stream.js deleted file mode 100644 index d43465f..0000000 --- a/node_modules/asynckit/stream.js +++ /dev/null @@ -1,21 +0,0 @@ -var inherits = require('util').inherits - , Readable = require('stream').Readable - , ReadableAsyncKit = require('./lib/readable_asynckit.js') - , ReadableParallel = require('./lib/readable_parallel.js') - , ReadableSerial = require('./lib/readable_serial.js') - , ReadableSerialOrdered = require('./lib/readable_serial_ordered.js') - ; - -// API -module.exports = -{ - parallel : ReadableParallel, - serial : ReadableSerial, - serialOrdered : ReadableSerialOrdered, -}; - -inherits(ReadableAsyncKit, Readable); - -inherits(ReadableParallel, ReadableAsyncKit); -inherits(ReadableSerial, ReadableAsyncKit); -inherits(ReadableSerialOrdered, ReadableAsyncKit); diff --git a/node_modules/balanced-match/.github/FUNDING.yml b/node_modules/balanced-match/.github/FUNDING.yml deleted file mode 100644 index cea8b16..0000000 --- a/node_modules/balanced-match/.github/FUNDING.yml +++ /dev/null @@ -1,2 +0,0 @@ -tidelift: "npm/balanced-match" -patreon: juliangruber diff --git a/node_modules/balanced-match/LICENSE.md b/node_modules/balanced-match/LICENSE.md deleted file mode 100644 index 2cdc8e4..0000000 --- a/node_modules/balanced-match/LICENSE.md +++ /dev/null @@ -1,21 +0,0 @@ -(MIT) - -Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
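Note: the asynckit entry points removed above (`parallel.js`, `serial.js`, `serialOrdered.js`, `stream.js` and the `lib/` helpers) are deleted from the tree rather than modified, so the snippet below is only an illustrative sketch of the API those files expose. It assumes `asynckit@0.4.0` (the version in the deleted package.json) is installed from the npm registry instead of being vendored; it is not taken from the diff itself.

```js
// Illustrative sketch only -- not part of the diff. Assumes asynckit@0.4.0
// is installed from npm rather than vendored in node_modules.
const parallel = require('asynckit/parallel.js')
const serialOrdered = require('asynckit/serialOrdered.js')

// Iterators may take (item, cb) or (item, key, cb); cb is (error, result).
parallel([1, 2, 3], (item, cb) => {
  setTimeout(() => cb(null, item * 2), 10)
}, (err, results) => {
  if (err) throw err
  console.log(results) // [ 2, 4, 6 ]
})

// serialOrdered sorts the keys first, then runs jobs one at a time;
// the ascending/descending helpers defined above are exported on the function.
serialOrdered({ a: 3, b: 1 }, (item, key, cb) => {
  cb(null, item + 1)
}, serialOrdered.ascending, (err, results) => {
  if (err) throw err
  console.log(results) // { a: 4, b: 2 }
})
```

Both entry points also return the terminator shown in `terminator.js` above; invoking it aborts the remaining jobs and hands back whatever results have been collected so far.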
diff --git a/node_modules/balanced-match/README.md b/node_modules/balanced-match/README.md deleted file mode 100644 index d2a48b6..0000000 --- a/node_modules/balanced-match/README.md +++ /dev/null @@ -1,97 +0,0 @@ -# balanced-match - -Match balanced string pairs, like `{` and `}` or `<b>` and `</b>`. Supports regular expressions as well! - -[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match) -[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match) - -[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match) - -## Example - -Get the first matching pair of braces: - -```js -var balanced = require('balanced-match'); - -console.log(balanced('{', '}', 'pre{in{nested}}post')); -console.log(balanced('{', '}', 'pre{first}between{second}post')); -console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post')); -``` - -The matches are: - -```bash -$ node example.js -{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' } -{ start: 3, - end: 9, - pre: 'pre', - body: 'first', - post: 'between{second}post' } -{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' } -``` - -## API - -### var m = balanced(a, b, str) - -For the first non-nested matching pair of `a` and `b` in `str`, return an -object with those keys: - -* **start** the index of the first match of `a` -* **end** the index of the matching `b` -* **pre** the preamble, `a` and `b` not included -* **body** the match, `a` and `b` not included -* **post** the postscript, `a` and `b` not included - -If there's no match, `undefined` will be returned. - -If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`. - -### var r = balanced.range(a, b, str) - -For the first non-nested matching pair of `a` and `b` in `str`, return an -array with indexes: `[ <start>, <end> ]`. - -If there's no match, `undefined` will be returned. - -If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`. - -## Installation - -With [npm](https://npmjs.org) do: - -```bash -npm install balanced-match -``` - -## Security contact information - -To report a security vulnerability, please use the -[Tidelift security contact](https://tidelift.com/security). -Tidelift will coordinate the fix and disclosure. - -## License - -(MIT) - -Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/balanced-match/index.js b/node_modules/balanced-match/index.js deleted file mode 100644 index c67a646..0000000 --- a/node_modules/balanced-match/index.js +++ /dev/null @@ -1,62 +0,0 @@ -'use strict'; -module.exports = balanced; -function balanced(a, b, str) { - if (a instanceof RegExp) a = maybeMatch(a, str); - if (b instanceof RegExp) b = maybeMatch(b, str); - - var r = range(a, b, str); - - return r && { - start: r[0], - end: r[1], - pre: str.slice(0, r[0]), - body: str.slice(r[0] + a.length, r[1]), - post: str.slice(r[1] + b.length) - }; -} - -function maybeMatch(reg, str) { - var m = str.match(reg); - return m ? m[0] : null; -} - -balanced.range = range; -function range(a, b, str) { - var begs, beg, left, right, result; - var ai = str.indexOf(a); - var bi = str.indexOf(b, ai + 1); - var i = ai; - - if (ai >= 0 && bi > 0) { - if(a===b) { - return [ai, bi]; - } - begs = []; - left = str.length; - - while (i >= 0 && !result) { - if (i == ai) { - begs.push(i); - ai = str.indexOf(a, i + 1); - } else if (begs.length == 1) { - result = [ begs.pop(), bi ]; - } else { - beg = begs.pop(); - if (beg < left) { - left = beg; - right = bi; - } - - bi = str.indexOf(b, i + 1); - } - - i = ai < bi && ai >= 0 ? ai : bi; - } - - if (begs.length) { - result = [ left, right ]; - } - } - - return result; -} diff --git a/node_modules/balanced-match/package.json b/node_modules/balanced-match/package.json deleted file mode 100644 index ce6073e..0000000 --- a/node_modules/balanced-match/package.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "name": "balanced-match", - "description": "Match balanced character pairs, like \"{\" and \"}\"", - "version": "1.0.2", - "repository": { - "type": "git", - "url": "git://github.com/juliangruber/balanced-match.git" - }, - "homepage": "https://github.com/juliangruber/balanced-match", - "main": "index.js", - "scripts": { - "test": "tape test/test.js", - "bench": "matcha test/bench.js" - }, - "devDependencies": { - "matcha": "^0.7.0", - "tape": "^4.6.0" - }, - "keywords": [ - "match", - "regexp", - "test", - "balanced", - "parse" - ], - "author": { - "name": "Julian Gruber", - "email": "mail@juliangruber.com", - "url": "http://juliangruber.com" - }, - "license": "MIT", - "testling": { - "files": "test/*.js", - "browsers": [ - "ie/8..latest", - "firefox/20..latest", - "firefox/nightly", - "chrome/25..latest", - "chrome/canary", - "opera/12..latest", - "opera/next", - "safari/5.1..latest", - "ipad/6.0..latest", - "iphone/6.0..latest", - "android-browser/4.2..latest" - ] - } -} diff --git a/node_modules/base64-js/LICENSE b/node_modules/base64-js/LICENSE deleted file mode 100644 index 6d52b8a..0000000 --- a/node_modules/base64-js/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Jameson Little - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this 
permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/base64-js/README.md b/node_modules/base64-js/README.md deleted file mode 100644 index b42a48f..0000000 --- a/node_modules/base64-js/README.md +++ /dev/null @@ -1,34 +0,0 @@ -base64-js -========= - -`base64-js` does basic base64 encoding/decoding in pure JS. - -[![build status](https://secure.travis-ci.org/beatgammit/base64-js.png)](http://travis-ci.org/beatgammit/base64-js) - -Many browsers already have base64 encoding/decoding functionality, but it is for text data, not all-purpose binary data. - -Sometimes encoding/decoding binary data in the browser is useful, and that is what this module does. - -## install - -With [npm](https://npmjs.org) do: - -`npm install base64-js` and `var base64js = require('base64-js')` - -For use in web browsers do: - -`` - -[Get supported base64-js with the Tidelift Subscription](https://tidelift.com/subscription/pkg/npm-base64-js?utm_source=npm-base64-js&utm_medium=referral&utm_campaign=readme) - -## methods - -`base64js` has three exposed functions, `byteLength`, `toByteArray` and `fromByteArray`, which both take a single argument. - -* `byteLength` - Takes a base64 string and returns length of byte array -* `toByteArray` - Takes a base64 string and returns a byte array -* `fromByteArray` - Takes a byte array and returns a base64 string - -## license - -MIT diff --git a/node_modules/base64-js/base64js.min.js b/node_modules/base64-js/base64js.min.js deleted file mode 100644 index 908ac83..0000000 --- a/node_modules/base64-js/base64js.min.js +++ /dev/null @@ -1 +0,0 @@ -(function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"==typeof window?"undefined"==typeof global?"undefined"==typeof self?this:self:global:window,b.base64js=a()}})(function(){return function(){function b(d,e,g){function a(j,i){if(!e[j]){if(!d[j]){var f="function"==typeof require&&require;if(!i&&f)return f(j,!0);if(h)return h(j,!0);var c=new Error("Cannot find module '"+j+"'");throw c.code="MODULE_NOT_FOUND",c}var k=e[j]={exports:{}};d[j][0].call(k.exports,function(b){var c=d[j][1][b];return a(c||b)},k,k.exports,b,d,e,g)}return e[j].exports}for(var h="function"==typeof require&&require,c=0;c>16,j[k++]=255&b>>8,j[k++]=255&b;return 2===h&&(b=l[a.charCodeAt(c)]<<2|l[a.charCodeAt(c+1)]>>4,j[k++]=255&b),1===h&&(b=l[a.charCodeAt(c)]<<10|l[a.charCodeAt(c+1)]<<4|l[a.charCodeAt(c+2)]>>2,j[k++]=255&b>>8,j[k++]=255&b),j}function g(a){return k[63&a>>18]+k[63&a>>12]+k[63&a>>6]+k[63&a]}function h(a,b,c){for(var d,e=[],f=b;fj?j:g+f));return 1===d?(b=a[c-1],e.push(k[b>>2]+k[63&b<<4]+"==")):2===d&&(b=(a[c-2]<<8)+a[c-1],e.push(k[b>>10]+k[63&b>>4]+k[63&b<<2]+"=")),e.join("")}c.byteLength=function(a){var b=d(a),c=b[0],e=b[1];return 3*(c+e)/4-e},c.toByteArray=f,c.fromByteArray=j;for(var k=[],l=[],m="undefined"==typeof 
Uint8Array?Array:Uint8Array,n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",o=0,p=n.length;o 0) { - throw new Error('Invalid string. Length must be a multiple of 4') - } - - // Trim off extra bytes after placeholder bytes are found - // See: https://github.com/beatgammit/base64-js/issues/42 - var validLen = b64.indexOf('=') - if (validLen === -1) validLen = len - - var placeHoldersLen = validLen === len - ? 0 - : 4 - (validLen % 4) - - return [validLen, placeHoldersLen] -} - -// base64 is 4/3 + up to two characters of the original data -function byteLength (b64) { - var lens = getLens(b64) - var validLen = lens[0] - var placeHoldersLen = lens[1] - return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen -} - -function _byteLength (b64, validLen, placeHoldersLen) { - return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen -} - -function toByteArray (b64) { - var tmp - var lens = getLens(b64) - var validLen = lens[0] - var placeHoldersLen = lens[1] - - var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) - - var curByte = 0 - - // if there are placeholders, only get up to the last complete 4 chars - var len = placeHoldersLen > 0 - ? validLen - 4 - : validLen - - var i - for (i = 0; i < len; i += 4) { - tmp = - (revLookup[b64.charCodeAt(i)] << 18) | - (revLookup[b64.charCodeAt(i + 1)] << 12) | - (revLookup[b64.charCodeAt(i + 2)] << 6) | - revLookup[b64.charCodeAt(i + 3)] - arr[curByte++] = (tmp >> 16) & 0xFF - arr[curByte++] = (tmp >> 8) & 0xFF - arr[curByte++] = tmp & 0xFF - } - - if (placeHoldersLen === 2) { - tmp = - (revLookup[b64.charCodeAt(i)] << 2) | - (revLookup[b64.charCodeAt(i + 1)] >> 4) - arr[curByte++] = tmp & 0xFF - } - - if (placeHoldersLen === 1) { - tmp = - (revLookup[b64.charCodeAt(i)] << 10) | - (revLookup[b64.charCodeAt(i + 1)] << 4) | - (revLookup[b64.charCodeAt(i + 2)] >> 2) - arr[curByte++] = (tmp >> 8) & 0xFF - arr[curByte++] = tmp & 0xFF - } - - return arr -} - -function tripletToBase64 (num) { - return lookup[num >> 18 & 0x3F] + - lookup[num >> 12 & 0x3F] + - lookup[num >> 6 & 0x3F] + - lookup[num & 0x3F] -} - -function encodeChunk (uint8, start, end) { - var tmp - var output = [] - for (var i = start; i < end; i += 3) { - tmp = - ((uint8[i] << 16) & 0xFF0000) + - ((uint8[i + 1] << 8) & 0xFF00) + - (uint8[i + 2] & 0xFF) - output.push(tripletToBase64(tmp)) - } - return output.join('') -} - -function fromByteArray (uint8) { - var tmp - var len = uint8.length - var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes - var parts = [] - var maxChunkLength = 16383 // must be multiple of 3 - - // go through the array every three bytes, we'll deal with trailing stuff later - for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { - parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? 
len2 : (i + maxChunkLength))) - } - - // pad the end with zeros, but make sure to not forget the extra bytes - if (extraBytes === 1) { - tmp = uint8[len - 1] - parts.push( - lookup[tmp >> 2] + - lookup[(tmp << 4) & 0x3F] + - '==' - ) - } else if (extraBytes === 2) { - tmp = (uint8[len - 2] << 8) + uint8[len - 1] - parts.push( - lookup[tmp >> 10] + - lookup[(tmp >> 4) & 0x3F] + - lookup[(tmp << 2) & 0x3F] + - '=' - ) - } - - return parts.join('') -} diff --git a/node_modules/base64-js/package.json b/node_modules/base64-js/package.json deleted file mode 100644 index c3972e3..0000000 --- a/node_modules/base64-js/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "base64-js", - "description": "Base64 encoding/decoding in pure JS", - "version": "1.5.1", - "author": "T. Jameson Little ", - "typings": "index.d.ts", - "bugs": { - "url": "https://github.com/beatgammit/base64-js/issues" - }, - "devDependencies": { - "babel-minify": "^0.5.1", - "benchmark": "^2.1.4", - "browserify": "^16.3.0", - "standard": "*", - "tape": "4.x" - }, - "homepage": "https://github.com/beatgammit/base64-js", - "keywords": [ - "base64" - ], - "license": "MIT", - "main": "index.js", - "repository": { - "type": "git", - "url": "git://github.com/beatgammit/base64-js.git" - }, - "scripts": { - "build": "browserify -s base64js -r ./ | minify > base64js.min.js", - "lint": "standard", - "test": "npm run lint && npm run unit", - "unit": "tape test/*.js" - }, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] -} diff --git a/node_modules/bl/.travis.yml b/node_modules/bl/.travis.yml deleted file mode 100644 index 016eaf5..0000000 --- a/node_modules/bl/.travis.yml +++ /dev/null @@ -1,17 +0,0 @@ -sudo: false -arch: - - amd64 - - ppc64le -language: node_js -node_js: - - '6' - - '8' - - '10' - - '12' - - '14' - - '15' - - lts/* -notifications: - email: - - rod@vagg.org - - matteo.collina@gmail.com diff --git a/node_modules/bl/BufferList.js b/node_modules/bl/BufferList.js deleted file mode 100644 index 471ee77..0000000 --- a/node_modules/bl/BufferList.js +++ /dev/null @@ -1,396 +0,0 @@ -'use strict' - -const { Buffer } = require('buffer') -const symbol = Symbol.for('BufferList') - -function BufferList (buf) { - if (!(this instanceof BufferList)) { - return new BufferList(buf) - } - - BufferList._init.call(this, buf) -} - -BufferList._init = function _init (buf) { - Object.defineProperty(this, symbol, { value: true }) - - this._bufs = [] - this.length = 0 - - if (buf) { - this.append(buf) - } -} - -BufferList.prototype._new = function _new (buf) { - return new BufferList(buf) -} - -BufferList.prototype._offset = function _offset (offset) { - if (offset === 0) { - return [0, 0] - } - - let tot = 0 - - for (let i = 0; i < this._bufs.length; i++) { - const _t = tot + this._bufs[i].length - if (offset < _t || i === this._bufs.length - 1) { - return [i, offset - tot] - } - tot = _t - } -} - -BufferList.prototype._reverseOffset = function (blOffset) { - const bufferId = blOffset[0] - let offset = blOffset[1] - - for (let i = 0; i < bufferId; i++) { - offset += this._bufs[i].length - } - - return offset -} - -BufferList.prototype.get = function get (index) { - if (index > this.length || index < 0) { - return undefined - } - - const offset = this._offset(index) - - return this._bufs[offset[0]][offset[1]] -} - -BufferList.prototype.slice = function slice 
(start, end) { - if (typeof start === 'number' && start < 0) { - start += this.length - } - - if (typeof end === 'number' && end < 0) { - end += this.length - } - - return this.copy(null, 0, start, end) -} - -BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) { - if (typeof srcStart !== 'number' || srcStart < 0) { - srcStart = 0 - } - - if (typeof srcEnd !== 'number' || srcEnd > this.length) { - srcEnd = this.length - } - - if (srcStart >= this.length) { - return dst || Buffer.alloc(0) - } - - if (srcEnd <= 0) { - return dst || Buffer.alloc(0) - } - - const copy = !!dst - const off = this._offset(srcStart) - const len = srcEnd - srcStart - let bytes = len - let bufoff = (copy && dstStart) || 0 - let start = off[1] - - // copy/slice everything - if (srcStart === 0 && srcEnd === this.length) { - if (!copy) { - // slice, but full concat if multiple buffers - return this._bufs.length === 1 - ? this._bufs[0] - : Buffer.concat(this._bufs, this.length) - } - - // copy, need to copy individual buffers - for (let i = 0; i < this._bufs.length; i++) { - this._bufs[i].copy(dst, bufoff) - bufoff += this._bufs[i].length - } - - return dst - } - - // easy, cheap case where it's a subset of one of the buffers - if (bytes <= this._bufs[off[0]].length - start) { - return copy - ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes) - : this._bufs[off[0]].slice(start, start + bytes) - } - - if (!copy) { - // a slice, we need something to copy in to - dst = Buffer.allocUnsafe(len) - } - - for (let i = off[0]; i < this._bufs.length; i++) { - const l = this._bufs[i].length - start - - if (bytes > l) { - this._bufs[i].copy(dst, bufoff, start) - bufoff += l - } else { - this._bufs[i].copy(dst, bufoff, start, start + bytes) - bufoff += l - break - } - - bytes -= l - - if (start) { - start = 0 - } - } - - // safeguard so that we don't return uninitialized memory - if (dst.length > bufoff) return dst.slice(0, bufoff) - - return dst -} - -BufferList.prototype.shallowSlice = function shallowSlice (start, end) { - start = start || 0 - end = typeof end !== 'number' ? 
this.length : end - - if (start < 0) { - start += this.length - } - - if (end < 0) { - end += this.length - } - - if (start === end) { - return this._new() - } - - const startOffset = this._offset(start) - const endOffset = this._offset(end) - const buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1) - - if (endOffset[1] === 0) { - buffers.pop() - } else { - buffers[buffers.length - 1] = buffers[buffers.length - 1].slice(0, endOffset[1]) - } - - if (startOffset[1] !== 0) { - buffers[0] = buffers[0].slice(startOffset[1]) - } - - return this._new(buffers) -} - -BufferList.prototype.toString = function toString (encoding, start, end) { - return this.slice(start, end).toString(encoding) -} - -BufferList.prototype.consume = function consume (bytes) { - // first, normalize the argument, in accordance with how Buffer does it - bytes = Math.trunc(bytes) - // do nothing if not a positive number - if (Number.isNaN(bytes) || bytes <= 0) return this - - while (this._bufs.length) { - if (bytes >= this._bufs[0].length) { - bytes -= this._bufs[0].length - this.length -= this._bufs[0].length - this._bufs.shift() - } else { - this._bufs[0] = this._bufs[0].slice(bytes) - this.length -= bytes - break - } - } - - return this -} - -BufferList.prototype.duplicate = function duplicate () { - const copy = this._new() - - for (let i = 0; i < this._bufs.length; i++) { - copy.append(this._bufs[i]) - } - - return copy -} - -BufferList.prototype.append = function append (buf) { - if (buf == null) { - return this - } - - if (buf.buffer) { - // append a view of the underlying ArrayBuffer - this._appendBuffer(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength)) - } else if (Array.isArray(buf)) { - for (let i = 0; i < buf.length; i++) { - this.append(buf[i]) - } - } else if (this._isBufferList(buf)) { - // unwrap argument into individual BufferLists - for (let i = 0; i < buf._bufs.length; i++) { - this.append(buf._bufs[i]) - } - } else { - // coerce number arguments to strings, since Buffer(number) does - // uninitialized memory allocation - if (typeof buf === 'number') { - buf = buf.toString() - } - - this._appendBuffer(Buffer.from(buf)) - } - - return this -} - -BufferList.prototype._appendBuffer = function appendBuffer (buf) { - this._bufs.push(buf) - this.length += buf.length -} - -BufferList.prototype.indexOf = function (search, offset, encoding) { - if (encoding === undefined && typeof offset === 'string') { - encoding = offset - offset = undefined - } - - if (typeof search === 'function' || Array.isArray(search)) { - throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.') - } else if (typeof search === 'number') { - search = Buffer.from([search]) - } else if (typeof search === 'string') { - search = Buffer.from(search, encoding) - } else if (this._isBufferList(search)) { - search = search.slice() - } else if (Array.isArray(search.buffer)) { - search = Buffer.from(search.buffer, search.byteOffset, search.byteLength) - } else if (!Buffer.isBuffer(search)) { - search = Buffer.from(search) - } - - offset = Number(offset || 0) - - if (isNaN(offset)) { - offset = 0 - } - - if (offset < 0) { - offset = this.length + offset - } - - if (offset < 0) { - offset = 0 - } - - if (search.length === 0) { - return offset > this.length ? 
this.length : offset - } - - const blOffset = this._offset(offset) - let blIndex = blOffset[0] // index of which internal buffer we're working on - let buffOffset = blOffset[1] // offset of the internal buffer we're working on - - // scan over each buffer - for (; blIndex < this._bufs.length; blIndex++) { - const buff = this._bufs[blIndex] - - while (buffOffset < buff.length) { - const availableWindow = buff.length - buffOffset - - if (availableWindow >= search.length) { - const nativeSearchResult = buff.indexOf(search, buffOffset) - - if (nativeSearchResult !== -1) { - return this._reverseOffset([blIndex, nativeSearchResult]) - } - - buffOffset = buff.length - search.length + 1 // end of native search window - } else { - const revOffset = this._reverseOffset([blIndex, buffOffset]) - - if (this._match(revOffset, search)) { - return revOffset - } - - buffOffset++ - } - } - - buffOffset = 0 - } - - return -1 -} - -BufferList.prototype._match = function (offset, search) { - if (this.length - offset < search.length) { - return false - } - - for (let searchOffset = 0; searchOffset < search.length; searchOffset++) { - if (this.get(offset + searchOffset) !== search[searchOffset]) { - return false - } - } - return true -} - -;(function () { - const methods = { - readDoubleBE: 8, - readDoubleLE: 8, - readFloatBE: 4, - readFloatLE: 4, - readInt32BE: 4, - readInt32LE: 4, - readUInt32BE: 4, - readUInt32LE: 4, - readInt16BE: 2, - readInt16LE: 2, - readUInt16BE: 2, - readUInt16LE: 2, - readInt8: 1, - readUInt8: 1, - readIntBE: null, - readIntLE: null, - readUIntBE: null, - readUIntLE: null - } - - for (const m in methods) { - (function (m) { - if (methods[m] === null) { - BufferList.prototype[m] = function (offset, byteLength) { - return this.slice(offset, offset + byteLength)[m](0, byteLength) - } - } else { - BufferList.prototype[m] = function (offset = 0) { - return this.slice(offset, offset + methods[m])[m](0) - } - } - }(m)) - } -}()) - -// Used internally by the class and also as an indicator of this object being -// a `BufferList`. It's not possible to use `instanceof BufferList` in a browser -// environment because there could be multiple different copies of the -// BufferList class and some `BufferList`s might be `BufferList`s. -BufferList.prototype._isBufferList = function _isBufferList (b) { - return b instanceof BufferList || BufferList.isBufferList(b) -} - -BufferList.isBufferList = function isBufferList (b) { - return b != null && b[symbol] -} - -module.exports = BufferList diff --git a/node_modules/bl/LICENSE.md b/node_modules/bl/LICENSE.md deleted file mode 100644 index ecbe516..0000000 --- a/node_modules/bl/LICENSE.md +++ /dev/null @@ -1,13 +0,0 @@ -The MIT License (MIT) -===================== - -Copyright (c) 2013-2019 bl contributors ----------------------------------- - -*bl contributors listed at * - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/bl/README.md b/node_modules/bl/README.md deleted file mode 100644 index 9680b1d..0000000 --- a/node_modules/bl/README.md +++ /dev/null @@ -1,247 +0,0 @@ -# bl *(BufferList)* - -[![Build Status](https://api.travis-ci.com/rvagg/bl.svg?branch=master)](https://travis-ci.com/rvagg/bl/) - -**A Node.js Buffer list collector, reader and streamer thingy.** - -[![NPM](https://nodei.co/npm/bl.svg)](https://nodei.co/npm/bl/) - -**bl** is a storage object for collections of Node Buffers, exposing them with the main Buffer readable API. Also works as a duplex stream so you can collect buffers from a stream that emits them and emit buffers to a stream that consumes them! - -The original buffers are kept intact and copies are only done as necessary. Any reads that require the use of a single original buffer will return a slice of that buffer only (which references the same memory as the original buffer). Reads that span buffers perform concatenation as required and return the results transparently. - -```js -const { BufferList } = require('bl') - -const bl = new BufferList() -bl.append(Buffer.from('abcd')) -bl.append(Buffer.from('efg')) -bl.append('hi') // bl will also accept & convert Strings -bl.append(Buffer.from('j')) -bl.append(Buffer.from([ 0x3, 0x4 ])) - -console.log(bl.length) // 12 - -console.log(bl.slice(0, 10).toString('ascii')) // 'abcdefghij' -console.log(bl.slice(3, 10).toString('ascii')) // 'defghij' -console.log(bl.slice(3, 6).toString('ascii')) // 'def' -console.log(bl.slice(3, 8).toString('ascii')) // 'defgh' -console.log(bl.slice(5, 10).toString('ascii')) // 'fghij' - -console.log(bl.indexOf('def')) // 3 -console.log(bl.indexOf('asdf')) // -1 - -// or just use toString! -console.log(bl.toString()) // 'abcdefghij\u0003\u0004' -console.log(bl.toString('ascii', 3, 8)) // 'defgh' -console.log(bl.toString('ascii', 5, 10)) // 'fghij' - -// other standard Buffer readables -console.log(bl.readUInt16BE(10)) // 0x0304 -console.log(bl.readUInt16LE(10)) // 0x0403 -``` - -Give it a callback in the constructor and use it just like **[concat-stream](https://github.com/maxogden/node-concat-stream)**: - -```js -const { BufferListStream } = require('bl') -const fs = require('fs') - -fs.createReadStream('README.md') - .pipe(BufferListStream((err, data) => { // note 'new' isn't strictly required - // `data` is a complete Buffer object containing the full data - console.log(data.toString()) - })) -``` - -Note that when you use the *callback* method like this, the resulting `data` parameter is a concatenation of all `Buffer` objects in the list. If you want to avoid the overhead of this concatenation (in cases of extreme performance consciousness), then avoid the *callback* method and just listen to `'end'` instead, like a standard Stream. 
- -Or to fetch a URL using [hyperquest](https://github.com/substack/hyperquest) (should work with [request](http://github.com/mikeal/request) and even plain Node http too!): - -```js -const hyperquest = require('hyperquest') -const { BufferListStream } = require('bl') - -const url = 'https://raw.github.com/rvagg/bl/master/README.md' - -hyperquest(url).pipe(BufferListStream((err, data) => { - console.log(data.toString()) -})) -``` - -Or, use it as a readable stream to recompose a list of Buffers to an output source: - -```js -const { BufferListStream } = require('bl') -const fs = require('fs') - -var bl = new BufferListStream() -bl.append(Buffer.from('abcd')) -bl.append(Buffer.from('efg')) -bl.append(Buffer.from('hi')) -bl.append(Buffer.from('j')) - -bl.pipe(fs.createWriteStream('gibberish.txt')) -``` - -## API - - * new BufferList([ buf ]) - * BufferList.isBufferList(obj) - * bl.length - * bl.append(buffer) - * bl.get(index) - * bl.indexOf(value[, byteOffset][, encoding]) - * bl.slice([ start[, end ] ]) - * bl.shallowSlice([ start[, end ] ]) - * bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ]) - * bl.duplicate() - * bl.consume(bytes) - * bl.toString([encoding, [ start, [ end ]]]) - * bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8() - * new BufferListStream([ callback ]) - --------------------------------------------------------- - -### new BufferList([ Buffer | Buffer array | BufferList | BufferList array | String ]) -No arguments are _required_ for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` objects. - -`new` is not strictly required, if you don't instantiate a new object, it will be done automatically for you so you can create a new instance simply with: - -```js -const { BufferList } = require('bl') -const bl = BufferList() - -// equivalent to: - -const { BufferList } = require('bl') -const bl = new BufferList() -``` - --------------------------------------------------------- - -### BufferList.isBufferList(obj) -Determines if the passed object is a `BufferList`. It will return `true` if the passed object is an instance of `BufferList` **or** `BufferListStream` and `false` otherwise. - -N.B. this won't return `true` for `BufferList` or `BufferListStream` instances created by versions of this library before this static method was added. - --------------------------------------------------------- - -### bl.length -Get the length of the list in bytes. This is the sum of the lengths of all of the buffers contained in the list, minus any initial offset for a semi-consumed buffer at the beginning. Should accurately represent the total number of bytes that can be read from the list. - --------------------------------------------------------- - -### bl.append(Buffer | Buffer array | BufferList | BufferList array | String) -`append(buffer)` adds an additional buffer or BufferList to the internal list. `this` is returned so it can be chained. - --------------------------------------------------------- - -### bl.get(index) -`get()` will return the byte at the specified index. - --------------------------------------------------------- - -### bl.indexOf(value[, byteOffset][, encoding]) -`get()` will return the byte at the specified index. 
-`indexOf()` method returns the first index at which a given element can be found in the BufferList, or -1 if it is not present. - --------------------------------------------------------- - -### bl.slice([ start, [ end ] ]) -`slice()` returns a new `Buffer` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively. - -If the requested range spans a single internal buffer then a slice of that buffer will be returned which shares the original memory range of that Buffer. If the range spans multiple buffers then copy operations will likely occur to give you a uniform Buffer. - --------------------------------------------------------- - -### bl.shallowSlice([ start, [ end ] ]) -`shallowSlice()` returns a new `BufferList` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively. - -No copies will be performed. All buffers in the result share memory with the original list. - --------------------------------------------------------- - -### bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ]) -`copy()` copies the content of the list in the `dest` buffer, starting from `destStart` and containing the bytes within the range specified with `srcStart` to `srcEnd`. `destStart`, `start` and `end` are optional and will default to the beginning of the `dest` buffer, and the beginning and end of the list respectively. - --------------------------------------------------------- - -### bl.duplicate() -`duplicate()` performs a **shallow-copy** of the list. The internal Buffers remains the same, so if you change the underlying Buffers, the change will be reflected in both the original and the duplicate. This method is needed if you want to call `consume()` or `pipe()` and still keep the original list.Example: - -```js -var bl = new BufferListStream() - -bl.append('hello') -bl.append(' world') -bl.append('\n') - -bl.duplicate().pipe(process.stdout, { end: false }) - -console.log(bl.toString()) -``` - --------------------------------------------------------- - -### bl.consume(bytes) -`consume()` will shift bytes *off the start of the list*. The number of bytes consumed don't need to line up with the sizes of the internal Buffers—initial offsets will be calculated accordingly in order to give you a consistent view of the data. - --------------------------------------------------------- - -### bl.toString([encoding, [ start, [ end ]]]) -`toString()` will return a string representation of the buffer. The optional `start` and `end` arguments are passed on to `slice()`, while the `encoding` is passed on to `toString()` of the resulting Buffer. See the [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end) documentation for more information. - --------------------------------------------------------- - -### bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8() - -All of the standard byte-reading methods of the `Buffer` interface are implemented and will operate across internal Buffer boundaries transparently. - -See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work. 
- --------------------------------------------------------- - -### new BufferListStream([ callback | Buffer | Buffer array | BufferList | BufferList array | String ]) -**BufferListStream** is a Node **[Duplex Stream](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_duplex)**, so it can be read from and written to like a standard Node stream. You can also `pipe()` to and from a **BufferListStream** instance. - -The constructor takes an optional callback, if supplied, the callback will be called with an error argument followed by a reference to the **bl** instance, when `bl.end()` is called (i.e. from a piped stream). This is a convenient method of collecting the entire contents of a stream, particularly when the stream is *chunky*, such as a network stream. - -Normally, no arguments are required for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` object. - -`new` is not strictly required, if you don't instantiate a new object, it will be done automatically for you so you can create a new instance simply with: - -```js -const { BufferListStream } = require('bl') -const bl = BufferListStream() - -// equivalent to: - -const { BufferListStream } = require('bl') -const bl = new BufferListStream() -``` - -N.B. For backwards compatibility reasons, `BufferListStream` is the **default** export when you `require('bl')`: - -```js -const { BufferListStream } = require('bl') -// equivalent to: -const BufferListStream = require('bl') -``` - --------------------------------------------------------- - -## Contributors - -**bl** is brought to you by the following hackers: - - * [Rod Vagg](https://github.com/rvagg) - * [Matteo Collina](https://github.com/mcollina) - * [Jarett Cruger](https://github.com/jcrugzz) - - -## License & copyright - -Copyright (c) 2013-2019 bl contributors (listed above). - -bl is licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details. 
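Note: as with asynckit, the bl sources are removed here rather than changed, so the snippet below is an illustrative sketch, not content from the package. It demonstrates the behaviour the deleted README above describes: byte-reading methods operate transparently across internal buffer boundaries, `slice()` copies only when a range spans buffers, and `shallowSlice()` never copies. It assumes `bl@4.1.0` (the version in the deleted package.json) is installed from npm.

```js
// Illustrative sketch only -- not part of the diff. Assumes bl@4.1.0
// is installed from npm rather than vendored in node_modules.
const { BufferList } = require('bl')
const { Buffer } = require('buffer')

const bl = new BufferList()
bl.append(Buffer.from([0x01]))       // first internal buffer
bl.append(Buffer.from([0x02, 0x03])) // second internal buffer

// 0x0102 spans both internal buffers, yet reads back as a single value.
console.log(bl.readUInt16BE(0)) // 258 (0x0102)

// shallowSlice() returns a BufferList that shares memory with the original;
// slice() returns a Buffer and copies when the range spans internal buffers.
console.log(bl.shallowSlice(1, 3).toString('hex')) // '0203'
console.log(bl.slice(0, 3).toString('hex'))        // '010203'
```

This mirrors the README's point above that the original buffers are kept intact and copies are made only when a read actually spans more than one of them.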
diff --git a/node_modules/bl/bl.js b/node_modules/bl/bl.js deleted file mode 100644 index 40228f8..0000000 --- a/node_modules/bl/bl.js +++ /dev/null @@ -1,84 +0,0 @@ -'use strict' - -const DuplexStream = require('readable-stream').Duplex -const inherits = require('inherits') -const BufferList = require('./BufferList') - -function BufferListStream (callback) { - if (!(this instanceof BufferListStream)) { - return new BufferListStream(callback) - } - - if (typeof callback === 'function') { - this._callback = callback - - const piper = function piper (err) { - if (this._callback) { - this._callback(err) - this._callback = null - } - }.bind(this) - - this.on('pipe', function onPipe (src) { - src.on('error', piper) - }) - this.on('unpipe', function onUnpipe (src) { - src.removeListener('error', piper) - }) - - callback = null - } - - BufferList._init.call(this, callback) - DuplexStream.call(this) -} - -inherits(BufferListStream, DuplexStream) -Object.assign(BufferListStream.prototype, BufferList.prototype) - -BufferListStream.prototype._new = function _new (callback) { - return new BufferListStream(callback) -} - -BufferListStream.prototype._write = function _write (buf, encoding, callback) { - this._appendBuffer(buf) - - if (typeof callback === 'function') { - callback() - } -} - -BufferListStream.prototype._read = function _read (size) { - if (!this.length) { - return this.push(null) - } - - size = Math.min(size, this.length) - this.push(this.slice(0, size)) - this.consume(size) -} - -BufferListStream.prototype.end = function end (chunk) { - DuplexStream.prototype.end.call(this, chunk) - - if (this._callback) { - this._callback(null, this.slice()) - this._callback = null - } -} - -BufferListStream.prototype._destroy = function _destroy (err, cb) { - this._bufs.length = 0 - this.length = 0 - cb(err) -} - -BufferListStream.prototype._isBufferList = function _isBufferList (b) { - return b instanceof BufferListStream || b instanceof BufferList || BufferListStream.isBufferList(b) -} - -BufferListStream.isBufferList = BufferList.isBufferList - -module.exports = BufferListStream -module.exports.BufferListStream = BufferListStream -module.exports.BufferList = BufferList diff --git a/node_modules/bl/package.json b/node_modules/bl/package.json deleted file mode 100644 index 3b2be3f..0000000 --- a/node_modules/bl/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "bl", - "version": "4.1.0", - "description": "Buffer List: collect buffers and access with a standard readable Buffer interface, streamable too!", - "license": "MIT", - "main": "bl.js", - "scripts": { - "lint": "standard *.js test/*.js", - "test": "npm run lint && node test/test.js | faucet" - }, - "repository": { - "type": "git", - "url": "https://github.com/rvagg/bl.git" - }, - "homepage": "https://github.com/rvagg/bl", - "authors": [ - "Rod Vagg (https://github.com/rvagg)", - "Matteo Collina (https://github.com/mcollina)", - "Jarett Cruger (https://github.com/jcrugzz)" - ], - "keywords": [ - "buffer", - "buffers", - "stream", - "awesomesauce" - ], - "dependencies": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - }, - "devDependencies": { - "faucet": "~0.0.1", - "standard": "^14.3.0", - "tape": "^4.11.0" - } -} diff --git a/node_modules/bl/test/convert.js b/node_modules/bl/test/convert.js deleted file mode 100644 index 9f3e235..0000000 --- a/node_modules/bl/test/convert.js +++ /dev/null @@ -1,21 +0,0 @@ -'use strict' - -const tape = require('tape') -const { BufferList, BufferListStream } = require('../') 
-const { Buffer } = require('buffer') - -tape('convert from BufferList to BufferListStream', (t) => { - const data = Buffer.from(`TEST-${Date.now()}`) - const bl = new BufferList(data) - const bls = new BufferListStream(bl) - t.ok(bl.slice().equals(bls.slice())) - t.end() -}) - -tape('convert from BufferListStream to BufferList', (t) => { - const data = Buffer.from(`TEST-${Date.now()}`) - const bls = new BufferListStream(data) - const bl = new BufferList(bls) - t.ok(bl.slice().equals(bls.slice())) - t.end() -}) diff --git a/node_modules/bl/test/indexOf.js b/node_modules/bl/test/indexOf.js deleted file mode 100644 index 62dcb01..0000000 --- a/node_modules/bl/test/indexOf.js +++ /dev/null @@ -1,492 +0,0 @@ -'use strict' - -const tape = require('tape') -const BufferList = require('../') -const { Buffer } = require('buffer') - -tape('indexOf single byte needle', (t) => { - const bl = new BufferList(['abcdefg', 'abcdefg', '12345']) - - t.equal(bl.indexOf('e'), 4) - t.equal(bl.indexOf('e', 5), 11) - t.equal(bl.indexOf('e', 12), -1) - t.equal(bl.indexOf('5'), 18) - - t.end() -}) - -tape('indexOf multiple byte needle', (t) => { - const bl = new BufferList(['abcdefg', 'abcdefg']) - - t.equal(bl.indexOf('ef'), 4) - t.equal(bl.indexOf('ef', 5), 11) - - t.end() -}) - -tape('indexOf multiple byte needles across buffer boundaries', (t) => { - const bl = new BufferList(['abcdefg', 'abcdefg']) - - t.equal(bl.indexOf('fgabc'), 5) - - t.end() -}) - -tape('indexOf takes a Uint8Array search', (t) => { - const bl = new BufferList(['abcdefg', 'abcdefg']) - const search = new Uint8Array([102, 103, 97, 98, 99]) // fgabc - - t.equal(bl.indexOf(search), 5) - - t.end() -}) - -tape('indexOf takes a buffer list search', (t) => { - const bl = new BufferList(['abcdefg', 'abcdefg']) - const search = new BufferList('fgabc') - - t.equal(bl.indexOf(search), 5) - - t.end() -}) - -tape('indexOf a zero byte needle', (t) => { - const b = new BufferList('abcdef') - const bufEmpty = Buffer.from('') - - t.equal(b.indexOf(''), 0) - t.equal(b.indexOf('', 1), 1) - t.equal(b.indexOf('', b.length + 1), b.length) - t.equal(b.indexOf('', Infinity), b.length) - t.equal(b.indexOf(bufEmpty), 0) - t.equal(b.indexOf(bufEmpty, 1), 1) - t.equal(b.indexOf(bufEmpty, b.length + 1), b.length) - t.equal(b.indexOf(bufEmpty, Infinity), b.length) - - t.end() -}) - -tape('indexOf buffers smaller and larger than the needle', (t) => { - const bl = new BufferList(['abcdefg', 'a', 'bcdefg', 'a', 'bcfgab']) - - t.equal(bl.indexOf('fgabc'), 5) - t.equal(bl.indexOf('fgabc', 6), 12) - t.equal(bl.indexOf('fgabc', 13), -1) - - t.end() -}) - -// only present in node 6+ -;(process.version.substr(1).split('.')[0] >= 6) && tape('indexOf latin1 and binary encoding', (t) => { - const b = new BufferList('abcdef') - - // test latin1 encoding - t.equal( - new BufferList(Buffer.from(b.toString('latin1'), 'latin1')) - .indexOf('d', 0, 'latin1'), - 3 - ) - t.equal( - new BufferList(Buffer.from(b.toString('latin1'), 'latin1')) - .indexOf(Buffer.from('d', 'latin1'), 0, 'latin1'), - 3 - ) - t.equal( - new BufferList(Buffer.from('aa\u00e8aa', 'latin1')) - .indexOf('\u00e8', 'latin1'), - 2 - ) - t.equal( - new BufferList(Buffer.from('\u00e8', 'latin1')) - .indexOf('\u00e8', 'latin1'), - 0 - ) - t.equal( - new BufferList(Buffer.from('\u00e8', 'latin1')) - .indexOf(Buffer.from('\u00e8', 'latin1'), 'latin1'), - 0 - ) - - // test binary encoding - t.equal( - new BufferList(Buffer.from(b.toString('binary'), 'binary')) - .indexOf('d', 0, 'binary'), - 3 - ) - t.equal( - new 
BufferList(Buffer.from(b.toString('binary'), 'binary')) - .indexOf(Buffer.from('d', 'binary'), 0, 'binary'), - 3 - ) - t.equal( - new BufferList(Buffer.from('aa\u00e8aa', 'binary')) - .indexOf('\u00e8', 'binary'), - 2 - ) - t.equal( - new BufferList(Buffer.from('\u00e8', 'binary')) - .indexOf('\u00e8', 'binary'), - 0 - ) - t.equal( - new BufferList(Buffer.from('\u00e8', 'binary')) - .indexOf(Buffer.from('\u00e8', 'binary'), 'binary'), - 0 - ) - - t.end() -}) - -tape('indexOf the entire nodejs10 buffer test suite', (t) => { - const b = new BufferList('abcdef') - const bufA = Buffer.from('a') - const bufBc = Buffer.from('bc') - const bufF = Buffer.from('f') - const bufZ = Buffer.from('z') - - const stringComparison = 'abcdef' - - t.equal(b.indexOf('a'), 0) - t.equal(b.indexOf('a', 1), -1) - t.equal(b.indexOf('a', -1), -1) - t.equal(b.indexOf('a', -4), -1) - t.equal(b.indexOf('a', -b.length), 0) - t.equal(b.indexOf('a', NaN), 0) - t.equal(b.indexOf('a', -Infinity), 0) - t.equal(b.indexOf('a', Infinity), -1) - t.equal(b.indexOf('bc'), 1) - t.equal(b.indexOf('bc', 2), -1) - t.equal(b.indexOf('bc', -1), -1) - t.equal(b.indexOf('bc', -3), -1) - t.equal(b.indexOf('bc', -5), 1) - t.equal(b.indexOf('bc', NaN), 1) - t.equal(b.indexOf('bc', -Infinity), 1) - t.equal(b.indexOf('bc', Infinity), -1) - t.equal(b.indexOf('f'), b.length - 1) - t.equal(b.indexOf('z'), -1) - - // empty search tests - t.equal(b.indexOf(bufA), 0) - t.equal(b.indexOf(bufA, 1), -1) - t.equal(b.indexOf(bufA, -1), -1) - t.equal(b.indexOf(bufA, -4), -1) - t.equal(b.indexOf(bufA, -b.length), 0) - t.equal(b.indexOf(bufA, NaN), 0) - t.equal(b.indexOf(bufA, -Infinity), 0) - t.equal(b.indexOf(bufA, Infinity), -1) - t.equal(b.indexOf(bufBc), 1) - t.equal(b.indexOf(bufBc, 2), -1) - t.equal(b.indexOf(bufBc, -1), -1) - t.equal(b.indexOf(bufBc, -3), -1) - t.equal(b.indexOf(bufBc, -5), 1) - t.equal(b.indexOf(bufBc, NaN), 1) - t.equal(b.indexOf(bufBc, -Infinity), 1) - t.equal(b.indexOf(bufBc, Infinity), -1) - t.equal(b.indexOf(bufF), b.length - 1) - t.equal(b.indexOf(bufZ), -1) - t.equal(b.indexOf(0x61), 0) - t.equal(b.indexOf(0x61, 1), -1) - t.equal(b.indexOf(0x61, -1), -1) - t.equal(b.indexOf(0x61, -4), -1) - t.equal(b.indexOf(0x61, -b.length), 0) - t.equal(b.indexOf(0x61, NaN), 0) - t.equal(b.indexOf(0x61, -Infinity), 0) - t.equal(b.indexOf(0x61, Infinity), -1) - t.equal(b.indexOf(0x0), -1) - - // test offsets - t.equal(b.indexOf('d', 2), 3) - t.equal(b.indexOf('f', 5), 5) - t.equal(b.indexOf('f', -1), 5) - t.equal(b.indexOf('f', 6), -1) - - t.equal(b.indexOf(Buffer.from('d'), 2), 3) - t.equal(b.indexOf(Buffer.from('f'), 5), 5) - t.equal(b.indexOf(Buffer.from('f'), -1), 5) - t.equal(b.indexOf(Buffer.from('f'), 6), -1) - - t.equal(Buffer.from('ff').indexOf(Buffer.from('f'), 1, 'ucs2'), -1) - - // test invalid and uppercase encoding - t.equal(b.indexOf('b', 'utf8'), 1) - t.equal(b.indexOf('b', 'UTF8'), 1) - t.equal(b.indexOf('62', 'HEX'), 1) - t.throws(() => b.indexOf('bad', 'enc'), TypeError) - - // test hex encoding - t.equal( - Buffer.from(b.toString('hex'), 'hex') - .indexOf('64', 0, 'hex'), - 3 - ) - t.equal( - Buffer.from(b.toString('hex'), 'hex') - .indexOf(Buffer.from('64', 'hex'), 0, 'hex'), - 3 - ) - - // test base64 encoding - t.equal( - Buffer.from(b.toString('base64'), 'base64') - .indexOf('ZA==', 0, 'base64'), - 3 - ) - t.equal( - Buffer.from(b.toString('base64'), 'base64') - .indexOf(Buffer.from('ZA==', 'base64'), 0, 'base64'), - 3 - ) - - // test ascii encoding - t.equal( - Buffer.from(b.toString('ascii'), 'ascii') - 
.indexOf('d', 0, 'ascii'), - 3 - ) - t.equal( - Buffer.from(b.toString('ascii'), 'ascii') - .indexOf(Buffer.from('d', 'ascii'), 0, 'ascii'), - 3 - ) - - // test optional offset with passed encoding - t.equal(Buffer.from('aaaa0').indexOf('30', 'hex'), 4) - t.equal(Buffer.from('aaaa00a').indexOf('3030', 'hex'), 4) - - { - // test usc2 encoding - const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2') - - t.equal(8, twoByteString.indexOf('\u0395', 4, 'ucs2')) - t.equal(6, twoByteString.indexOf('\u03a3', -4, 'ucs2')) - t.equal(4, twoByteString.indexOf('\u03a3', -6, 'ucs2')) - t.equal(4, twoByteString.indexOf( - Buffer.from('\u03a3', 'ucs2'), -6, 'ucs2')) - t.equal(-1, twoByteString.indexOf('\u03a3', -2, 'ucs2')) - } - - const mixedByteStringUcs2 = - Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395', 'ucs2') - - t.equal(6, mixedByteStringUcs2.indexOf('bc', 0, 'ucs2')) - t.equal(10, mixedByteStringUcs2.indexOf('\u03a3', 0, 'ucs2')) - t.equal(-1, mixedByteStringUcs2.indexOf('\u0396', 0, 'ucs2')) - - t.equal( - 6, mixedByteStringUcs2.indexOf(Buffer.from('bc', 'ucs2'), 0, 'ucs2')) - t.equal( - 10, mixedByteStringUcs2.indexOf(Buffer.from('\u03a3', 'ucs2'), 0, 'ucs2')) - t.equal( - -1, mixedByteStringUcs2.indexOf(Buffer.from('\u0396', 'ucs2'), 0, 'ucs2')) - - { - const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2') - - // Test single char pattern - t.equal(0, twoByteString.indexOf('\u039a', 0, 'ucs2')) - let index = twoByteString.indexOf('\u0391', 0, 'ucs2') - t.equal(2, index, `Alpha - at index ${index}`) - index = twoByteString.indexOf('\u03a3', 0, 'ucs2') - t.equal(4, index, `First Sigma - at index ${index}`) - index = twoByteString.indexOf('\u03a3', 6, 'ucs2') - t.equal(6, index, `Second Sigma - at index ${index}`) - index = twoByteString.indexOf('\u0395', 0, 'ucs2') - t.equal(8, index, `Epsilon - at index ${index}`) - index = twoByteString.indexOf('\u0392', 0, 'ucs2') - t.equal(-1, index, `Not beta - at index ${index}`) - - // Test multi-char pattern - index = twoByteString.indexOf('\u039a\u0391', 0, 'ucs2') - t.equal(0, index, `Lambda Alpha - at index ${index}`) - index = twoByteString.indexOf('\u0391\u03a3', 0, 'ucs2') - t.equal(2, index, `Alpha Sigma - at index ${index}`) - index = twoByteString.indexOf('\u03a3\u03a3', 0, 'ucs2') - t.equal(4, index, `Sigma Sigma - at index ${index}`) - index = twoByteString.indexOf('\u03a3\u0395', 0, 'ucs2') - t.equal(6, index, `Sigma Epsilon - at index ${index}`) - } - - const mixedByteStringUtf8 = Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395') - - t.equal(5, mixedByteStringUtf8.indexOf('bc')) - t.equal(5, mixedByteStringUtf8.indexOf('bc', 5)) - t.equal(5, mixedByteStringUtf8.indexOf('bc', -8)) - t.equal(7, mixedByteStringUtf8.indexOf('\u03a3')) - t.equal(-1, mixedByteStringUtf8.indexOf('\u0396')) - - // Test complex string indexOf algorithms. Only trigger for long strings. - // Long string that isn't a simple repeat of a shorter string. 
- let longString = 'A' - for (let i = 66; i < 76; i++) { // from 'B' to 'K' - longString = longString + String.fromCharCode(i) + longString - } - - const longBufferString = Buffer.from(longString) - - // pattern of 15 chars, repeated every 16 chars in long - let pattern = 'ABACABADABACABA' - for (let i = 0; i < longBufferString.length - pattern.length; i += 7) { - const index = longBufferString.indexOf(pattern, i) - t.equal((i + 15) & ~0xf, index, - `Long ABACABA...-string at index ${i}`) - } - - let index = longBufferString.indexOf('AJABACA') - t.equal(510, index, `Long AJABACA, First J - at index ${index}`) - index = longBufferString.indexOf('AJABACA', 511) - t.equal(1534, index, `Long AJABACA, Second J - at index ${index}`) - - pattern = 'JABACABADABACABA' - index = longBufferString.indexOf(pattern) - t.equal(511, index, `Long JABACABA..., First J - at index ${index}`) - index = longBufferString.indexOf(pattern, 512) - t.equal( - 1535, index, `Long JABACABA..., Second J - at index ${index}`) - - // Search for a non-ASCII string in a pure ASCII string. - const asciiString = Buffer.from( - 'somethingnotatallsinisterwhichalsoworks') - t.equal(-1, asciiString.indexOf('\x2061')) - t.equal(3, asciiString.indexOf('eth', 0)) - - // Search in string containing many non-ASCII chars. - const allCodePoints = [] - for (let i = 0; i < 65536; i++) { - allCodePoints[i] = i - } - - const allCharsString = String.fromCharCode.apply(String, allCodePoints) - const allCharsBufferUtf8 = Buffer.from(allCharsString) - const allCharsBufferUcs2 = Buffer.from(allCharsString, 'ucs2') - - // Search for string long enough to trigger complex search with ASCII pattern - // and UC16 subject. - t.equal(-1, allCharsBufferUtf8.indexOf('notfound')) - t.equal(-1, allCharsBufferUcs2.indexOf('notfound')) - - // Needle is longer than haystack, but only because it's encoded as UTF-16 - t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'ucs2'), -1) - - t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'utf8'), 0) - t.equal(Buffer.from('aaaa').indexOf('你好', 'ucs2'), -1) - - // Haystack has odd length, but the needle is UCS2. - t.equal(Buffer.from('aaaaa').indexOf('b', 'ucs2'), -1) - - { - // Find substrings in Utf8. - const lengths = [1, 3, 15] // Single char, simple and complex. - const indices = [0x5, 0x60, 0x400, 0x680, 0x7ee, 0xFF02, 0x16610, 0x2f77b] - for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) { - for (let i = 0; i < indices.length; i++) { - const index = indices[i] - let length = lengths[lengthIndex] - - if (index + length > 0x7F) { - length = 2 * length - } - - if (index + length > 0x7FF) { - length = 3 * length - } - - if (index + length > 0xFFFF) { - length = 4 * length - } - - const patternBufferUtf8 = allCharsBufferUtf8.slice(index, index + length) - t.equal(index, allCharsBufferUtf8.indexOf(patternBufferUtf8)) - - const patternStringUtf8 = patternBufferUtf8.toString() - t.equal(index, allCharsBufferUtf8.indexOf(patternStringUtf8)) - } - } - } - - { - // Find substrings in Usc2. - const lengths = [2, 4, 16] // Single char, simple and complex. 
- const indices = [0x5, 0x65, 0x105, 0x205, 0x285, 0x2005, 0x2085, 0xfff0] - - for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) { - for (let i = 0; i < indices.length; i++) { - const index = indices[i] * 2 - const length = lengths[lengthIndex] - - const patternBufferUcs2 = - allCharsBufferUcs2.slice(index, index + length) - t.equal( - index, allCharsBufferUcs2.indexOf(patternBufferUcs2, 0, 'ucs2')) - - const patternStringUcs2 = patternBufferUcs2.toString('ucs2') - t.equal( - index, allCharsBufferUcs2.indexOf(patternStringUcs2, 0, 'ucs2')) - } - } - } - - [ - () => {}, - {}, - [] - ].forEach((val) => { - t.throws(() => b.indexOf(val), TypeError, `"${JSON.stringify(val)}" should throw`) - }) - - // Test weird offset arguments. - // The following offsets coerce to NaN or 0, searching the whole Buffer - t.equal(b.indexOf('b', undefined), 1) - t.equal(b.indexOf('b', {}), 1) - t.equal(b.indexOf('b', 0), 1) - t.equal(b.indexOf('b', null), 1) - t.equal(b.indexOf('b', []), 1) - - // The following offset coerces to 2, in other words +[2] === 2 - t.equal(b.indexOf('b', [2]), -1) - - // Behavior should match String.indexOf() - t.equal( - b.indexOf('b', undefined), - stringComparison.indexOf('b', undefined)) - t.equal( - b.indexOf('b', {}), - stringComparison.indexOf('b', {})) - t.equal( - b.indexOf('b', 0), - stringComparison.indexOf('b', 0)) - t.equal( - b.indexOf('b', null), - stringComparison.indexOf('b', null)) - t.equal( - b.indexOf('b', []), - stringComparison.indexOf('b', [])) - t.equal( - b.indexOf('b', [2]), - stringComparison.indexOf('b', [2])) - - // test truncation of Number arguments to uint8 - { - const buf = Buffer.from('this is a test') - - t.equal(buf.indexOf(0x6973), 3) - t.equal(buf.indexOf(0x697320), 4) - t.equal(buf.indexOf(0x69732069), 2) - t.equal(buf.indexOf(0x697374657374), 0) - t.equal(buf.indexOf(0x69737374), 0) - t.equal(buf.indexOf(0x69737465), 11) - t.equal(buf.indexOf(0x69737465), 11) - t.equal(buf.indexOf(-140), 0) - t.equal(buf.indexOf(-152), 1) - t.equal(buf.indexOf(0xff), -1) - t.equal(buf.indexOf(0xffff), -1) - } - - // Test that Uint8Array arguments are okay. 
- { - const needle = new Uint8Array([0x66, 0x6f, 0x6f]) - const haystack = new BufferList(Buffer.from('a foo b foo')) - t.equal(haystack.indexOf(needle), 2) - } - - t.end() -}) diff --git a/node_modules/bl/test/isBufferList.js b/node_modules/bl/test/isBufferList.js deleted file mode 100644 index 9d895d5..0000000 --- a/node_modules/bl/test/isBufferList.js +++ /dev/null @@ -1,32 +0,0 @@ -'use strict' - -const tape = require('tape') -const { BufferList, BufferListStream } = require('../') -const { Buffer } = require('buffer') - -tape('isBufferList positives', (t) => { - t.ok(BufferList.isBufferList(new BufferList())) - t.ok(BufferList.isBufferList(new BufferListStream())) - - t.end() -}) - -tape('isBufferList negatives', (t) => { - const types = [ - null, - undefined, - NaN, - true, - false, - {}, - [], - Buffer.alloc(0), - [Buffer.alloc(0)] - ] - - for (const obj of types) { - t.notOk(BufferList.isBufferList(obj)) - } - - t.end() -}) diff --git a/node_modules/bl/test/test.js b/node_modules/bl/test/test.js deleted file mode 100644 index e523d0c..0000000 --- a/node_modules/bl/test/test.js +++ /dev/null @@ -1,869 +0,0 @@ -'use strict' - -const tape = require('tape') -const crypto = require('crypto') -const fs = require('fs') -const path = require('path') -const BufferList = require('../') -const { Buffer } = require('buffer') - -const encodings = - ('hex utf8 utf-8 ascii binary base64' + - (process.browser ? '' : ' ucs2 ucs-2 utf16le utf-16le')).split(' ') - -require('./indexOf') -require('./isBufferList') -require('./convert') - -tape('single bytes from single buffer', function (t) { - const bl = new BufferList() - - bl.append(Buffer.from('abcd')) - - t.equal(bl.length, 4) - t.equal(bl.get(-1), undefined) - t.equal(bl.get(0), 97) - t.equal(bl.get(1), 98) - t.equal(bl.get(2), 99) - t.equal(bl.get(3), 100) - t.equal(bl.get(4), undefined) - - t.end() -}) - -tape('single bytes from multiple buffers', function (t) { - const bl = new BufferList() - - bl.append(Buffer.from('abcd')) - bl.append(Buffer.from('efg')) - bl.append(Buffer.from('hi')) - bl.append(Buffer.from('j')) - - t.equal(bl.length, 10) - - t.equal(bl.get(0), 97) - t.equal(bl.get(1), 98) - t.equal(bl.get(2), 99) - t.equal(bl.get(3), 100) - t.equal(bl.get(4), 101) - t.equal(bl.get(5), 102) - t.equal(bl.get(6), 103) - t.equal(bl.get(7), 104) - t.equal(bl.get(8), 105) - t.equal(bl.get(9), 106) - - t.end() -}) - -tape('multi bytes from single buffer', function (t) { - const bl = new BufferList() - - bl.append(Buffer.from('abcd')) - - t.equal(bl.length, 4) - - t.equal(bl.slice(0, 4).toString('ascii'), 'abcd') - t.equal(bl.slice(0, 3).toString('ascii'), 'abc') - t.equal(bl.slice(1, 4).toString('ascii'), 'bcd') - t.equal(bl.slice(-4, -1).toString('ascii'), 'abc') - - t.end() -}) - -tape('multi bytes from single buffer (negative indexes)', function (t) { - const bl = new BufferList() - - bl.append(Buffer.from('buffer')) - - t.equal(bl.length, 6) - - t.equal(bl.slice(-6, -1).toString('ascii'), 'buffe') - t.equal(bl.slice(-6, -2).toString('ascii'), 'buff') - t.equal(bl.slice(-5, -2).toString('ascii'), 'uff') - - t.end() -}) - -tape('multiple bytes from multiple buffers', function (t) { - const bl = new BufferList() - - bl.append(Buffer.from('abcd')) - bl.append(Buffer.from('efg')) - bl.append(Buffer.from('hi')) - bl.append(Buffer.from('j')) - - t.equal(bl.length, 10) - - t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') - t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') - t.equal(bl.slice(3, 6).toString('ascii'), 'def') - 
t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') - t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') - t.equal(bl.slice(-7, -4).toString('ascii'), 'def') - - t.end() -}) - -tape('multiple bytes from multiple buffer lists', function (t) { - const bl = new BufferList() - - bl.append(new BufferList([Buffer.from('abcd'), Buffer.from('efg')])) - bl.append(new BufferList([Buffer.from('hi'), Buffer.from('j')])) - - t.equal(bl.length, 10) - - t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') - - t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') - t.equal(bl.slice(3, 6).toString('ascii'), 'def') - t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') - t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') - - t.end() -}) - -// same data as previous test, just using nested constructors -tape('multiple bytes from crazy nested buffer lists', function (t) { - const bl = new BufferList() - - bl.append(new BufferList([ - new BufferList([ - new BufferList(Buffer.from('abc')), - Buffer.from('d'), - new BufferList(Buffer.from('efg')) - ]), - new BufferList([Buffer.from('hi')]), - new BufferList(Buffer.from('j')) - ])) - - t.equal(bl.length, 10) - - t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') - - t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') - t.equal(bl.slice(3, 6).toString('ascii'), 'def') - t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') - t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') - - t.end() -}) - -tape('append accepts arrays of Buffers', function (t) { - const bl = new BufferList() - - bl.append(Buffer.from('abc')) - bl.append([Buffer.from('def')]) - bl.append([Buffer.from('ghi'), Buffer.from('jkl')]) - bl.append([Buffer.from('mnop'), Buffer.from('qrstu'), Buffer.from('vwxyz')]) - t.equal(bl.length, 26) - t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') - - t.end() -}) - -tape('append accepts arrays of Uint8Arrays', function (t) { - const bl = new BufferList() - - bl.append(new Uint8Array([97, 98, 99])) - bl.append([Uint8Array.from([100, 101, 102])]) - bl.append([new Uint8Array([103, 104, 105]), new Uint8Array([106, 107, 108])]) - bl.append([new Uint8Array([109, 110, 111, 112]), new Uint8Array([113, 114, 115, 116, 117]), new Uint8Array([118, 119, 120, 121, 122])]) - t.equal(bl.length, 26) - t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') - - t.end() -}) - -tape('append accepts arrays of BufferLists', function (t) { - const bl = new BufferList() - - bl.append(Buffer.from('abc')) - bl.append([new BufferList('def')]) - bl.append(new BufferList([Buffer.from('ghi'), new BufferList('jkl')])) - bl.append([Buffer.from('mnop'), new BufferList([Buffer.from('qrstu'), Buffer.from('vwxyz')])]) - t.equal(bl.length, 26) - t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') - - t.end() -}) - -tape('append chainable', function (t) { - const bl = new BufferList() - - t.ok(bl.append(Buffer.from('abcd')) === bl) - t.ok(bl.append([Buffer.from('abcd')]) === bl) - t.ok(bl.append(new BufferList(Buffer.from('abcd'))) === bl) - t.ok(bl.append([new BufferList(Buffer.from('abcd'))]) === bl) - - t.end() -}) - -tape('append chainable (test results)', function (t) { - const bl = new BufferList('abc') - .append([new BufferList('def')]) - .append(new BufferList([Buffer.from('ghi'), new BufferList('jkl')])) - .append([Buffer.from('mnop'), new BufferList([Buffer.from('qrstu'), Buffer.from('vwxyz')])]) - - t.equal(bl.length, 26) - t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') - - t.end() -}) - -tape('consuming from 
multiple buffers', function (t) { - const bl = new BufferList() - - bl.append(Buffer.from('abcd')) - bl.append(Buffer.from('efg')) - bl.append(Buffer.from('hi')) - bl.append(Buffer.from('j')) - - t.equal(bl.length, 10) - - t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') - - bl.consume(3) - t.equal(bl.length, 7) - t.equal(bl.slice(0, 7).toString('ascii'), 'defghij') - - bl.consume(2) - t.equal(bl.length, 5) - t.equal(bl.slice(0, 5).toString('ascii'), 'fghij') - - bl.consume(1) - t.equal(bl.length, 4) - t.equal(bl.slice(0, 4).toString('ascii'), 'ghij') - - bl.consume(1) - t.equal(bl.length, 3) - t.equal(bl.slice(0, 3).toString('ascii'), 'hij') - - bl.consume(2) - t.equal(bl.length, 1) - t.equal(bl.slice(0, 1).toString('ascii'), 'j') - - t.end() -}) - -tape('complete consumption', function (t) { - const bl = new BufferList() - - bl.append(Buffer.from('a')) - bl.append(Buffer.from('b')) - - bl.consume(2) - - t.equal(bl.length, 0) - t.equal(bl._bufs.length, 0) - - t.end() -}) - -tape('test readUInt8 / readInt8', function (t) { - const buf1 = Buffer.alloc(1) - const buf2 = Buffer.alloc(3) - const buf3 = Buffer.alloc(3) - const bl = new BufferList() - - buf1[0] = 0x1 - buf2[1] = 0x3 - buf2[2] = 0x4 - buf3[0] = 0x23 - buf3[1] = 0x42 - - bl.append(buf1) - bl.append(buf2) - bl.append(buf3) - - t.equal(bl.readUInt8(), 0x1) - t.equal(bl.readUInt8(2), 0x3) - t.equal(bl.readInt8(2), 0x3) - t.equal(bl.readUInt8(3), 0x4) - t.equal(bl.readInt8(3), 0x4) - t.equal(bl.readUInt8(4), 0x23) - t.equal(bl.readInt8(4), 0x23) - t.equal(bl.readUInt8(5), 0x42) - t.equal(bl.readInt8(5), 0x42) - - t.end() -}) - -tape('test readUInt16LE / readUInt16BE / readInt16LE / readInt16BE', function (t) { - const buf1 = Buffer.alloc(1) - const buf2 = Buffer.alloc(3) - const buf3 = Buffer.alloc(3) - const bl = new BufferList() - - buf1[0] = 0x1 - buf2[1] = 0x3 - buf2[2] = 0x4 - buf3[0] = 0x23 - buf3[1] = 0x42 - - bl.append(buf1) - bl.append(buf2) - bl.append(buf3) - - t.equal(bl.readUInt16BE(), 0x0100) - t.equal(bl.readUInt16LE(), 0x0001) - t.equal(bl.readUInt16BE(2), 0x0304) - t.equal(bl.readUInt16LE(2), 0x0403) - t.equal(bl.readInt16BE(2), 0x0304) - t.equal(bl.readInt16LE(2), 0x0403) - t.equal(bl.readUInt16BE(3), 0x0423) - t.equal(bl.readUInt16LE(3), 0x2304) - t.equal(bl.readInt16BE(3), 0x0423) - t.equal(bl.readInt16LE(3), 0x2304) - t.equal(bl.readUInt16BE(4), 0x2342) - t.equal(bl.readUInt16LE(4), 0x4223) - t.equal(bl.readInt16BE(4), 0x2342) - t.equal(bl.readInt16LE(4), 0x4223) - - t.end() -}) - -tape('test readUInt32LE / readUInt32BE / readInt32LE / readInt32BE', function (t) { - const buf1 = Buffer.alloc(1) - const buf2 = Buffer.alloc(3) - const buf3 = Buffer.alloc(3) - const bl = new BufferList() - - buf1[0] = 0x1 - buf2[1] = 0x3 - buf2[2] = 0x4 - buf3[0] = 0x23 - buf3[1] = 0x42 - - bl.append(buf1) - bl.append(buf2) - bl.append(buf3) - - t.equal(bl.readUInt32BE(), 0x01000304) - t.equal(bl.readUInt32LE(), 0x04030001) - t.equal(bl.readUInt32BE(2), 0x03042342) - t.equal(bl.readUInt32LE(2), 0x42230403) - t.equal(bl.readInt32BE(2), 0x03042342) - t.equal(bl.readInt32LE(2), 0x42230403) - - t.end() -}) - -tape('test readUIntLE / readUIntBE / readIntLE / readIntBE', function (t) { - const buf1 = Buffer.alloc(1) - const buf2 = Buffer.alloc(3) - const buf3 = Buffer.alloc(3) - const bl = new BufferList() - - buf2[0] = 0x2 - buf2[1] = 0x3 - buf2[2] = 0x4 - buf3[0] = 0x23 - buf3[1] = 0x42 - buf3[2] = 0x61 - - bl.append(buf1) - bl.append(buf2) - bl.append(buf3) - - t.equal(bl.readUIntBE(1, 1), 0x02) - t.equal(bl.readUIntBE(1, 2), 
0x0203) - t.equal(bl.readUIntBE(1, 3), 0x020304) - t.equal(bl.readUIntBE(1, 4), 0x02030423) - t.equal(bl.readUIntBE(1, 5), 0x0203042342) - t.equal(bl.readUIntBE(1, 6), 0x020304234261) - t.equal(bl.readUIntLE(1, 1), 0x02) - t.equal(bl.readUIntLE(1, 2), 0x0302) - t.equal(bl.readUIntLE(1, 3), 0x040302) - t.equal(bl.readUIntLE(1, 4), 0x23040302) - t.equal(bl.readUIntLE(1, 5), 0x4223040302) - t.equal(bl.readUIntLE(1, 6), 0x614223040302) - t.equal(bl.readIntBE(1, 1), 0x02) - t.equal(bl.readIntBE(1, 2), 0x0203) - t.equal(bl.readIntBE(1, 3), 0x020304) - t.equal(bl.readIntBE(1, 4), 0x02030423) - t.equal(bl.readIntBE(1, 5), 0x0203042342) - t.equal(bl.readIntBE(1, 6), 0x020304234261) - t.equal(bl.readIntLE(1, 1), 0x02) - t.equal(bl.readIntLE(1, 2), 0x0302) - t.equal(bl.readIntLE(1, 3), 0x040302) - t.equal(bl.readIntLE(1, 4), 0x23040302) - t.equal(bl.readIntLE(1, 5), 0x4223040302) - t.equal(bl.readIntLE(1, 6), 0x614223040302) - - t.end() -}) - -tape('test readFloatLE / readFloatBE', function (t) { - const buf1 = Buffer.alloc(1) - const buf2 = Buffer.alloc(3) - const buf3 = Buffer.alloc(3) - const bl = new BufferList() - - buf1[0] = 0x01 - buf2[1] = 0x00 - buf2[2] = 0x00 - buf3[0] = 0x80 - buf3[1] = 0x3f - - bl.append(buf1) - bl.append(buf2) - bl.append(buf3) - - const canonical = Buffer.concat([buf1, buf2, buf3]) - t.equal(bl.readFloatLE(), canonical.readFloatLE()) - t.equal(bl.readFloatBE(), canonical.readFloatBE()) - t.equal(bl.readFloatLE(2), canonical.readFloatLE(2)) - t.equal(bl.readFloatBE(2), canonical.readFloatBE(2)) - - t.end() -}) - -tape('test readDoubleLE / readDoubleBE', function (t) { - const buf1 = Buffer.alloc(1) - const buf2 = Buffer.alloc(3) - const buf3 = Buffer.alloc(10) - const bl = new BufferList() - - buf1[0] = 0x01 - buf2[1] = 0x55 - buf2[2] = 0x55 - buf3[0] = 0x55 - buf3[1] = 0x55 - buf3[2] = 0x55 - buf3[3] = 0x55 - buf3[4] = 0xd5 - buf3[5] = 0x3f - - bl.append(buf1) - bl.append(buf2) - bl.append(buf3) - - const canonical = Buffer.concat([buf1, buf2, buf3]) - t.equal(bl.readDoubleBE(), canonical.readDoubleBE()) - t.equal(bl.readDoubleLE(), canonical.readDoubleLE()) - t.equal(bl.readDoubleBE(2), canonical.readDoubleBE(2)) - t.equal(bl.readDoubleLE(2), canonical.readDoubleLE(2)) - - t.end() -}) - -tape('test toString', function (t) { - const bl = new BufferList() - - bl.append(Buffer.from('abcd')) - bl.append(Buffer.from('efg')) - bl.append(Buffer.from('hi')) - bl.append(Buffer.from('j')) - - t.equal(bl.toString('ascii', 0, 10), 'abcdefghij') - t.equal(bl.toString('ascii', 3, 10), 'defghij') - t.equal(bl.toString('ascii', 3, 6), 'def') - t.equal(bl.toString('ascii', 3, 8), 'defgh') - t.equal(bl.toString('ascii', 5, 10), 'fghij') - - t.end() -}) - -tape('test toString encoding', function (t) { - const bl = new BufferList() - const b = Buffer.from('abcdefghij\xff\x00') - - bl.append(Buffer.from('abcd')) - bl.append(Buffer.from('efg')) - bl.append(Buffer.from('hi')) - bl.append(Buffer.from('j')) - bl.append(Buffer.from('\xff\x00')) - - encodings.forEach(function (enc) { - t.equal(bl.toString(enc), b.toString(enc), enc) - }) - - t.end() -}) - -tape('uninitialized memory', function (t) { - const secret = crypto.randomBytes(256) - for (let i = 0; i < 1e6; i++) { - const clone = Buffer.from(secret) - const bl = new BufferList() - bl.append(Buffer.from('a')) - bl.consume(-1024) - const buf = bl.slice(1) - if (buf.indexOf(clone) !== -1) { - t.fail(`Match (at ${i})`) - break - } - } - t.end() -}) - -!process.browser && tape('test stream', function (t) { - const random = 
crypto.randomBytes(65534) - - const bl = new BufferList((err, buf) => { - t.ok(Buffer.isBuffer(buf)) - t.ok(err === null) - t.ok(random.equals(bl.slice())) - t.ok(random.equals(buf.slice())) - - bl.pipe(fs.createWriteStream('/tmp/bl_test_rnd_out.dat')) - .on('close', function () { - const rndhash = crypto.createHash('md5').update(random).digest('hex') - const md5sum = crypto.createHash('md5') - const s = fs.createReadStream('/tmp/bl_test_rnd_out.dat') - - s.on('data', md5sum.update.bind(md5sum)) - s.on('end', function () { - t.equal(rndhash, md5sum.digest('hex'), 'woohoo! correct hash!') - t.end() - }) - }) - }) - - fs.writeFileSync('/tmp/bl_test_rnd.dat', random) - fs.createReadStream('/tmp/bl_test_rnd.dat').pipe(bl) -}) - -tape('instantiation with Buffer', function (t) { - const buf = crypto.randomBytes(1024) - const buf2 = crypto.randomBytes(1024) - let b = BufferList(buf) - - t.equal(buf.toString('hex'), b.slice().toString('hex'), 'same buffer') - b = BufferList([buf, buf2]) - t.equal(b.slice().toString('hex'), Buffer.concat([buf, buf2]).toString('hex'), 'same buffer') - - t.end() -}) - -tape('test String appendage', function (t) { - const bl = new BufferList() - const b = Buffer.from('abcdefghij\xff\x00') - - bl.append('abcd') - bl.append('efg') - bl.append('hi') - bl.append('j') - bl.append('\xff\x00') - - encodings.forEach(function (enc) { - t.equal(bl.toString(enc), b.toString(enc)) - }) - - t.end() -}) - -tape('test Number appendage', function (t) { - const bl = new BufferList() - const b = Buffer.from('1234567890') - - bl.append(1234) - bl.append(567) - bl.append(89) - bl.append(0) - - encodings.forEach(function (enc) { - t.equal(bl.toString(enc), b.toString(enc)) - }) - - t.end() -}) - -tape('write nothing, should get empty buffer', function (t) { - t.plan(3) - BufferList(function (err, data) { - t.notOk(err, 'no error') - t.ok(Buffer.isBuffer(data), 'got a buffer') - t.equal(0, data.length, 'got a zero-length buffer') - t.end() - }).end() -}) - -tape('unicode string', function (t) { - t.plan(2) - - const inp1 = '\u2600' - const inp2 = '\u2603' - const exp = inp1 + ' and ' + inp2 - const bl = BufferList() - - bl.write(inp1) - bl.write(' and ') - bl.write(inp2) - t.equal(exp, bl.toString()) - t.equal(Buffer.from(exp).toString('hex'), bl.toString('hex')) -}) - -tape('should emit finish', function (t) { - const source = BufferList() - const dest = BufferList() - - source.write('hello') - source.pipe(dest) - - dest.on('finish', function () { - t.equal(dest.toString('utf8'), 'hello') - t.end() - }) -}) - -tape('basic copy', function (t) { - const buf = crypto.randomBytes(1024) - const buf2 = Buffer.alloc(1024) - const b = BufferList(buf) - - b.copy(buf2) - t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer') - - t.end() -}) - -tape('copy after many appends', function (t) { - const buf = crypto.randomBytes(512) - const buf2 = Buffer.alloc(1024) - const b = BufferList(buf) - - b.append(buf) - b.copy(buf2) - t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer') - - t.end() -}) - -tape('copy at a precise position', function (t) { - const buf = crypto.randomBytes(1004) - const buf2 = Buffer.alloc(1024) - const b = BufferList(buf) - - b.copy(buf2, 20) - t.equal(b.slice().toString('hex'), buf2.slice(20).toString('hex'), 'same buffer') - - t.end() -}) - -tape('copy starting from a precise location', function (t) { - const buf = crypto.randomBytes(10) - const buf2 = Buffer.alloc(5) - const b = BufferList(buf) - - b.copy(buf2, 0, 5) - 
t.equal(b.slice(5).toString('hex'), buf2.toString('hex'), 'same buffer') - - t.end() -}) - -tape('copy in an interval', function (t) { - const rnd = crypto.randomBytes(10) - const b = BufferList(rnd) // put the random bytes there - const actual = Buffer.alloc(3) - const expected = Buffer.alloc(3) - - rnd.copy(expected, 0, 5, 8) - b.copy(actual, 0, 5, 8) - - t.equal(actual.toString('hex'), expected.toString('hex'), 'same buffer') - - t.end() -}) - -tape('copy an interval between two buffers', function (t) { - const buf = crypto.randomBytes(10) - const buf2 = Buffer.alloc(10) - const b = BufferList(buf) - - b.append(buf) - b.copy(buf2, 0, 5, 15) - - t.equal(b.slice(5, 15).toString('hex'), buf2.toString('hex'), 'same buffer') - - t.end() -}) - -tape('shallow slice across buffer boundaries', function (t) { - const bl = new BufferList(['First', 'Second', 'Third']) - - t.equal(bl.shallowSlice(3, 13).toString(), 'stSecondTh') - - t.end() -}) - -tape('shallow slice within single buffer', function (t) { - t.plan(2) - - const bl = new BufferList(['First', 'Second', 'Third']) - - t.equal(bl.shallowSlice(5, 10).toString(), 'Secon') - t.equal(bl.shallowSlice(7, 10).toString(), 'con') - - t.end() -}) - -tape('shallow slice single buffer', function (t) { - t.plan(3) - - const bl = new BufferList(['First', 'Second', 'Third']) - - t.equal(bl.shallowSlice(0, 5).toString(), 'First') - t.equal(bl.shallowSlice(5, 11).toString(), 'Second') - t.equal(bl.shallowSlice(11, 16).toString(), 'Third') -}) - -tape('shallow slice with negative or omitted indices', function (t) { - t.plan(4) - - const bl = new BufferList(['First', 'Second', 'Third']) - - t.equal(bl.shallowSlice().toString(), 'FirstSecondThird') - t.equal(bl.shallowSlice(5).toString(), 'SecondThird') - t.equal(bl.shallowSlice(5, -3).toString(), 'SecondTh') - t.equal(bl.shallowSlice(-8).toString(), 'ondThird') -}) - -tape('shallow slice does not make a copy', function (t) { - t.plan(1) - - const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')] - const bl = (new BufferList(buffers)).shallowSlice(5, -3) - - buffers[1].fill('h') - buffers[2].fill('h') - - t.equal(bl.toString(), 'hhhhhhhh') -}) - -tape('shallow slice with 0 length', function (t) { - t.plan(1) - - const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')] - const bl = (new BufferList(buffers)).shallowSlice(0, 0) - - t.equal(bl.length, 0) -}) - -tape('shallow slice with 0 length from middle', function (t) { - t.plan(1) - - const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')] - const bl = (new BufferList(buffers)).shallowSlice(10, 10) - - t.equal(bl.length, 0) -}) - -tape('duplicate', function (t) { - t.plan(2) - - const bl = new BufferList('abcdefghij\xff\x00') - const dup = bl.duplicate() - - t.equal(bl.prototype, dup.prototype) - t.equal(bl.toString('hex'), dup.toString('hex')) -}) - -tape('destroy no pipe', function (t) { - t.plan(2) - - const bl = new BufferList('alsdkfja;lsdkfja;lsdk') - - bl.destroy() - - t.equal(bl._bufs.length, 0) - t.equal(bl.length, 0) -}) - -tape('destroy with error', function (t) { - t.plan(3) - - const bl = new BufferList('alsdkfja;lsdkfja;lsdk') - const err = new Error('kaboom') - - bl.destroy(err) - bl.on('error', function (_err) { - t.equal(_err, err) - }) - - t.equal(bl._bufs.length, 0) - t.equal(bl.length, 0) -}) - -!process.browser && tape('destroy with pipe before read end', function (t) { - t.plan(2) - - const bl = new BufferList() - fs.createReadStream(path.join(__dirname, 
'/test.js')) - .pipe(bl) - - bl.destroy() - - t.equal(bl._bufs.length, 0) - t.equal(bl.length, 0) -}) - -!process.browser && tape('destroy with pipe before read end with race', function (t) { - t.plan(2) - - const bl = new BufferList() - - fs.createReadStream(path.join(__dirname, '/test.js')) - .pipe(bl) - - setTimeout(function () { - bl.destroy() - setTimeout(function () { - t.equal(bl._bufs.length, 0) - t.equal(bl.length, 0) - }, 500) - }, 500) -}) - -!process.browser && tape('destroy with pipe after read end', function (t) { - t.plan(2) - - const bl = new BufferList() - - fs.createReadStream(path.join(__dirname, '/test.js')) - .on('end', onEnd) - .pipe(bl) - - function onEnd () { - bl.destroy() - - t.equal(bl._bufs.length, 0) - t.equal(bl.length, 0) - } -}) - -!process.browser && tape('destroy with pipe while writing to a destination', function (t) { - t.plan(4) - - const bl = new BufferList() - const ds = new BufferList() - - fs.createReadStream(path.join(__dirname, '/test.js')) - .on('end', onEnd) - .pipe(bl) - - function onEnd () { - bl.pipe(ds) - - setTimeout(function () { - bl.destroy() - - t.equals(bl._bufs.length, 0) - t.equals(bl.length, 0) - - ds.destroy() - - t.equals(bl._bufs.length, 0) - t.equals(bl.length, 0) - }, 100) - } -}) - -!process.browser && tape('handle error', function (t) { - t.plan(2) - - fs.createReadStream('/does/not/exist').pipe(BufferList(function (err, data) { - t.ok(err instanceof Error, 'has error') - t.notOk(data, 'no data') - })) -}) diff --git a/node_modules/boolbase/README.md b/node_modules/boolbase/README.md deleted file mode 100644 index 85eefa5..0000000 --- a/node_modules/boolbase/README.md +++ /dev/null @@ -1,10 +0,0 @@ -#boolbase -This very simple module provides two basic functions, one that always returns true (`trueFunc`) and one that always returns false (`falseFunc`). - -###WTF? - -By having only a single instance of these functions around, it's possible to do some nice optimizations. Eg. [`CSSselect`](https://github.com/fb55/CSSselect) uses these functions to determine whether a selector won't match any elements. If that's the case, the DOM doesn't even have to be touched. - -###And why is this a separate module? - -I'm trying to modularize `CSSselect` and most modules depend on these functions. IMHO, having a separate module is the easiest solution to this problem. 
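
The boolbase README above explains that exporting one shared `trueFunc`/`falseFunc` instance lets consumers short-circuit with a cheap identity check. The following is a minimal sketch of that pattern, assuming a hypothetical `compileSelector` helper (it is not part of boolbase or CSSselect); the only thing boolbase itself guarantees is that the two functions are singletons.

```js
// Hypothetical consumer of boolbase: a toy selector engine that returns the
// shared falseFunc for selectors that can never match, so callers can skip
// work entirely with a reference-equality check.
var boolbase = require('boolbase');

function compileSelector(selector) {
  // ':not(*)' matches nothing, so hand back the shared "always false" instance.
  if (selector === ':not(*)') return boolbase.falseFunc;
  // Otherwise build a trivial matcher (a real engine would do far more here).
  return function (elem) { return elem.name === selector; };
}

function selectAll(selector, elems) {
  var test = compileSelector(selector);
  // Because there is exactly one falseFunc instance, === is enough:
  if (test === boolbase.falseFunc) return []; // never touch the element tree
  return elems.filter(test);
}

console.log(selectAll(':not(*)', [{ name: 'div' }]));            // []
console.log(selectAll('div', [{ name: 'div' }, { name: 'p' }])); // [ { name: 'div' } ]
```
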
\ No newline at end of file diff --git a/node_modules/boolbase/index.js b/node_modules/boolbase/index.js deleted file mode 100644 index 8799fd9..0000000 --- a/node_modules/boolbase/index.js +++ /dev/null @@ -1,8 +0,0 @@ -module.exports = { - trueFunc: function trueFunc(){ - return true; - }, - falseFunc: function falseFunc(){ - return false; - } -}; \ No newline at end of file diff --git a/node_modules/boolbase/package.json b/node_modules/boolbase/package.json deleted file mode 100644 index 78330a8..0000000 --- a/node_modules/boolbase/package.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "name": "boolbase", - "version": "1.0.0", - "description": "two functions: One that returns true, one that returns false", - "main": "index.js", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "repository": { - "type": "git", - "url": "https://github.com/fb55/boolbase" - }, - "keywords": [ - "boolean", - "function" - ], - "author": "Felix Boehm ", - "license": "ISC", - "bugs": { - "url": "https://github.com/fb55/boolbase/issues" - }, - "homepage": "https://github.com/fb55/boolbase" -} diff --git a/node_modules/brace-expansion/LICENSE b/node_modules/brace-expansion/LICENSE deleted file mode 100644 index de32266..0000000 --- a/node_modules/brace-expansion/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2013 Julian Gruber - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/brace-expansion/README.md b/node_modules/brace-expansion/README.md deleted file mode 100644 index 6b4e0e1..0000000 --- a/node_modules/brace-expansion/README.md +++ /dev/null @@ -1,129 +0,0 @@ -# brace-expansion - -[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html), -as known from sh/bash, in JavaScript. 
- -[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion) -[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion) -[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/) - -[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion) - -## Example - -```js -var expand = require('brace-expansion'); - -expand('file-{a,b,c}.jpg') -// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] - -expand('-v{,,}') -// => ['-v', '-v', '-v'] - -expand('file{0..2}.jpg') -// => ['file0.jpg', 'file1.jpg', 'file2.jpg'] - -expand('file-{a..c}.jpg') -// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] - -expand('file{2..0}.jpg') -// => ['file2.jpg', 'file1.jpg', 'file0.jpg'] - -expand('file{0..4..2}.jpg') -// => ['file0.jpg', 'file2.jpg', 'file4.jpg'] - -expand('file-{a..e..2}.jpg') -// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg'] - -expand('file{00..10..5}.jpg') -// => ['file00.jpg', 'file05.jpg', 'file10.jpg'] - -expand('{{A..C},{a..c}}') -// => ['A', 'B', 'C', 'a', 'b', 'c'] - -expand('ppp{,config,oe{,conf}}') -// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf'] -``` - -## API - -```js -var expand = require('brace-expansion'); -``` - -### var expanded = expand(str) - -Return an array of all possible and valid expansions of `str`. If none are -found, `[str]` is returned. - -Valid expansions are: - -```js -/^(.*,)+(.+)?$/ -// {a,b,...} -``` - -A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`. - -```js -/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ -// {x..y[..incr]} -``` - -A numeric sequence from `x` to `y` inclusive, with optional increment. -If `x` or `y` start with a leading `0`, all the numbers will be padded -to have equal length. Negative numbers and backwards iteration work too. - -```js -/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ -// {x..y[..incr]} -``` - -An alphabetic sequence from `x` to `y` inclusive, with optional increment. -`x` and `y` must be exactly one character, and if given, `incr` must be a -number. - -For compatibility reasons, the string `${` is not eligible for brace expansion. - -## Installation - -With [npm](https://npmjs.org) do: - -```bash -npm install brace-expansion -``` - -## Contributors - -- [Julian Gruber](https://github.com/juliangruber) -- [Isaac Z. Schlueter](https://github.com/isaacs) - -## Sponsors - -This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)! - -Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)! 
- -## License - -(MIT) - -Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/brace-expansion/index.js b/node_modules/brace-expansion/index.js deleted file mode 100644 index 0478be8..0000000 --- a/node_modules/brace-expansion/index.js +++ /dev/null @@ -1,201 +0,0 @@ -var concatMap = require('concat-map'); -var balanced = require('balanced-match'); - -module.exports = expandTop; - -var escSlash = '\0SLASH'+Math.random()+'\0'; -var escOpen = '\0OPEN'+Math.random()+'\0'; -var escClose = '\0CLOSE'+Math.random()+'\0'; -var escComma = '\0COMMA'+Math.random()+'\0'; -var escPeriod = '\0PERIOD'+Math.random()+'\0'; - -function numeric(str) { - return parseInt(str, 10) == str - ? parseInt(str, 10) - : str.charCodeAt(0); -} - -function escapeBraces(str) { - return str.split('\\\\').join(escSlash) - .split('\\{').join(escOpen) - .split('\\}').join(escClose) - .split('\\,').join(escComma) - .split('\\.').join(escPeriod); -} - -function unescapeBraces(str) { - return str.split(escSlash).join('\\') - .split(escOpen).join('{') - .split(escClose).join('}') - .split(escComma).join(',') - .split(escPeriod).join('.'); -} - - -// Basically just str.split(","), but handling cases -// where we have nested braced sections, which should be -// treated as individual members, like {a,{b,c},d} -function parseCommaParts(str) { - if (!str) - return ['']; - - var parts = []; - var m = balanced('{', '}', str); - - if (!m) - return str.split(','); - - var pre = m.pre; - var body = m.body; - var post = m.post; - var p = pre.split(','); - - p[p.length-1] += '{' + body + '}'; - var postParts = parseCommaParts(post); - if (post.length) { - p[p.length-1] += postParts.shift(); - p.push.apply(p, postParts); - } - - parts.push.apply(parts, p); - - return parts; -} - -function expandTop(str) { - if (!str) - return []; - - // I don't know why Bash 4.3 does this, but it does. - // Anything starting with {} will have the first two bytes preserved - // but *only* at the top level, so {},a}b will not expand to anything, - // but a{},b}c will be expanded to [a}c,abc]. 
- // One could argue that this is a bug in Bash, but since the goal of - // this module is to match Bash's rules, we escape a leading {} - if (str.substr(0, 2) === '{}') { - str = '\\{\\}' + str.substr(2); - } - - return expand(escapeBraces(str), true).map(unescapeBraces); -} - -function identity(e) { - return e; -} - -function embrace(str) { - return '{' + str + '}'; -} -function isPadded(el) { - return /^-?0\d/.test(el); -} - -function lte(i, y) { - return i <= y; -} -function gte(i, y) { - return i >= y; -} - -function expand(str, isTop) { - var expansions = []; - - var m = balanced('{', '}', str); - if (!m || /\$$/.test(m.pre)) return [str]; - - var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); - var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); - var isSequence = isNumericSequence || isAlphaSequence; - var isOptions = m.body.indexOf(',') >= 0; - if (!isSequence && !isOptions) { - // {a},b} - if (m.post.match(/,.*\}/)) { - str = m.pre + '{' + m.body + escClose + m.post; - return expand(str); - } - return [str]; - } - - var n; - if (isSequence) { - n = m.body.split(/\.\./); - } else { - n = parseCommaParts(m.body); - if (n.length === 1) { - // x{{a,b}}y ==> x{a}y x{b}y - n = expand(n[0], false).map(embrace); - if (n.length === 1) { - var post = m.post.length - ? expand(m.post, false) - : ['']; - return post.map(function(p) { - return m.pre + n[0] + p; - }); - } - } - } - - // at this point, n is the parts, and we know it's not a comma set - // with a single entry. - - // no need to expand pre, since it is guaranteed to be free of brace-sets - var pre = m.pre; - var post = m.post.length - ? expand(m.post, false) - : ['']; - - var N; - - if (isSequence) { - var x = numeric(n[0]); - var y = numeric(n[1]); - var width = Math.max(n[0].length, n[1].length) - var incr = n.length == 3 - ? 
Math.abs(numeric(n[2])) - : 1; - var test = lte; - var reverse = y < x; - if (reverse) { - incr *= -1; - test = gte; - } - var pad = n.some(isPadded); - - N = []; - - for (var i = x; test(i, y); i += incr) { - var c; - if (isAlphaSequence) { - c = String.fromCharCode(i); - if (c === '\\') - c = ''; - } else { - c = String(i); - if (pad) { - var need = width - c.length; - if (need > 0) { - var z = new Array(need + 1).join('0'); - if (i < 0) - c = '-' + z + c.slice(1); - else - c = z + c; - } - } - } - N.push(c); - } - } else { - N = concatMap(n, function(el) { return expand(el, false) }); - } - - for (var j = 0; j < N.length; j++) { - for (var k = 0; k < post.length; k++) { - var expansion = pre + N[j] + post[k]; - if (!isTop || isSequence || expansion) - expansions.push(expansion); - } - } - - return expansions; -} - diff --git a/node_modules/brace-expansion/package.json b/node_modules/brace-expansion/package.json deleted file mode 100644 index a18faa8..0000000 --- a/node_modules/brace-expansion/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "brace-expansion", - "description": "Brace expansion as known from sh/bash", - "version": "1.1.11", - "repository": { - "type": "git", - "url": "git://github.com/juliangruber/brace-expansion.git" - }, - "homepage": "https://github.com/juliangruber/brace-expansion", - "main": "index.js", - "scripts": { - "test": "tape test/*.js", - "gentest": "bash test/generate.sh", - "bench": "matcha test/perf/bench.js" - }, - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - }, - "devDependencies": { - "matcha": "^0.7.0", - "tape": "^4.6.0" - }, - "keywords": [], - "author": { - "name": "Julian Gruber", - "email": "mail@juliangruber.com", - "url": "http://juliangruber.com" - }, - "license": "MIT", - "testling": { - "files": "test/*.js", - "browsers": [ - "ie/8..latest", - "firefox/20..latest", - "firefox/nightly", - "chrome/25..latest", - "chrome/canary", - "opera/12..latest", - "opera/next", - "safari/5.1..latest", - "ipad/6.0..latest", - "iphone/6.0..latest", - "android-browser/4.2..latest" - ] - } -} diff --git a/node_modules/buffer-crc32/LICENSE b/node_modules/buffer-crc32/LICENSE deleted file mode 100644 index 4cef10e..0000000 --- a/node_modules/buffer-crc32/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -The MIT License - -Copyright (c) 2013 Brian J. Brennan - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the -Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, -INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR -PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE -FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, -ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/buffer-crc32/README.md b/node_modules/buffer-crc32/README.md deleted file mode 100644 index 0d9d8b8..0000000 --- a/node_modules/buffer-crc32/README.md +++ /dev/null @@ -1,47 +0,0 @@ -# buffer-crc32 - -[![Build Status](https://secure.travis-ci.org/brianloveswords/buffer-crc32.png?branch=master)](http://travis-ci.org/brianloveswords/buffer-crc32) - -crc32 that works with binary data and fancy character sets, outputs -buffer, signed or unsigned data and has tests. - -Derived from the sample CRC implementation in the PNG specification: http://www.w3.org/TR/PNG/#D-CRCAppendix - -# install -``` -npm install buffer-crc32 -``` - -# example -```js -var crc32 = require('buffer-crc32'); -// works with buffers -var buf = Buffer([0x00, 0x73, 0x75, 0x70, 0x20, 0x62, 0x72, 0x6f, 0x00]) -crc32(buf) // -> - -// has convenience methods for getting signed or unsigned ints -crc32.signed(buf) // -> -1805997238 -crc32.unsigned(buf) // -> 2488970058 - -// will cast to buffer if given a string, so you can -// directly use foreign characters safely -crc32('自動販売機') // -> - -// and works in append mode too -var partialCrc = crc32('hey'); -var partialCrc = crc32(' ', partialCrc); -var partialCrc = crc32('sup', partialCrc); -var partialCrc = crc32(' ', partialCrc); -var finalCrc = crc32('bros', partialCrc); // -> -``` - -# tests -This was tested against the output of zlib's crc32 method. You can run -the tests with`npm test` (requires tap) - -# see also -https://github.com/alexgorbatchev/node-crc, `crc.buffer.crc32` also -supports buffer inputs and return unsigned ints (thanks @tjholowaychuk). - -# license -MIT/X11 diff --git a/node_modules/buffer-crc32/index.js b/node_modules/buffer-crc32/index.js deleted file mode 100644 index 6727dd3..0000000 --- a/node_modules/buffer-crc32/index.js +++ /dev/null @@ -1,111 +0,0 @@ -var Buffer = require('buffer').Buffer; - -var CRC_TABLE = [ - 0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, - 0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4, - 0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, - 0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de, - 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856, - 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9, - 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4, - 0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, - 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3, - 0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac, 0x51de003a, - 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599, - 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924, - 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190, - 0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f, - 0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e, - 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01, - 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed, - 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950, - 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, - 0xfbd44c65, 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, - 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a, - 0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5, - 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010, - 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f, - 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17, - 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6, - 0x03b6e20c, 0x74b1d29a, 
0xead54739, 0x9dd277af, 0x04db2615, - 0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8, - 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344, - 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb, - 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a, - 0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, - 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1, - 0xa6bc5767, 0x3fb506dd, 0x48b2364b, 0xd80d2bda, 0xaf0a1b4c, - 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef, - 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236, - 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe, - 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31, - 0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c, - 0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713, - 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b, - 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242, - 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1, - 0x18b74777, 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, - 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278, - 0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7, - 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66, - 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9, - 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605, - 0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8, - 0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, - 0x2d02ef8d -]; - -if (typeof Int32Array !== 'undefined') { - CRC_TABLE = new Int32Array(CRC_TABLE); -} - -function ensureBuffer(input) { - if (Buffer.isBuffer(input)) { - return input; - } - - var hasNewBufferAPI = - typeof Buffer.alloc === "function" && - typeof Buffer.from === "function"; - - if (typeof input === "number") { - return hasNewBufferAPI ? Buffer.alloc(input) : new Buffer(input); - } - else if (typeof input === "string") { - return hasNewBufferAPI ? Buffer.from(input) : new Buffer(input); - } - else { - throw new Error("input must be buffer, number, or string, received " + - typeof input); - } -} - -function bufferizeInt(num) { - var tmp = ensureBuffer(4); - tmp.writeInt32BE(num, 0); - return tmp; -} - -function _crc32(buf, previous) { - buf = ensureBuffer(buf); - if (Buffer.isBuffer(previous)) { - previous = previous.readUInt32BE(0); - } - var crc = ~~previous ^ -1; - for (var n = 0; n < buf.length; n++) { - crc = CRC_TABLE[(crc ^ buf[n]) & 0xff] ^ (crc >>> 8); - } - return (crc ^ -1); -} - -function crc32() { - return bufferizeInt(_crc32.apply(null, arguments)); -} -crc32.signed = function () { - return _crc32.apply(null, arguments); -}; -crc32.unsigned = function () { - return _crc32.apply(null, arguments) >>> 0; -}; - -module.exports = crc32; diff --git a/node_modules/buffer-crc32/package.json b/node_modules/buffer-crc32/package.json deleted file mode 100644 index e896bec..0000000 --- a/node_modules/buffer-crc32/package.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "author": "Brian J. 
Brennan ", - "name": "buffer-crc32", - "description": "A pure javascript CRC32 algorithm that plays nice with binary data", - "version": "0.2.13", - "licenses": [ - { - "type": "MIT", - "url": "https://github.com/brianloveswords/buffer-crc32/raw/master/LICENSE" - } - ], - "contributors": [ - { - "name": "Vladimir Kuznetsov", - "github": "mistakster" - } - ], - "homepage": "https://github.com/brianloveswords/buffer-crc32", - "repository": { - "type": "git", - "url": "git://github.com/brianloveswords/buffer-crc32.git" - }, - "main": "index.js", - "scripts": { - "test": "./node_modules/.bin/tap tests/*.test.js" - }, - "dependencies": {}, - "devDependencies": { - "tap": "~0.2.5" - }, - "optionalDependencies": {}, - "engines": { - "node": "*" - }, - "license": "MIT", - "files": [ - "index.js" - ] -} diff --git a/node_modules/buffer/AUTHORS.md b/node_modules/buffer/AUTHORS.md deleted file mode 100644 index 22eb171..0000000 --- a/node_modules/buffer/AUTHORS.md +++ /dev/null @@ -1,70 +0,0 @@ -# Authors - -#### Ordered by first contribution. - -- Romain Beauxis (toots@rastageeks.org) -- Tobias Koppers (tobias.koppers@googlemail.com) -- Janus (ysangkok@gmail.com) -- Rainer Dreyer (rdrey1@gmail.com) -- Tõnis Tiigi (tonistiigi@gmail.com) -- James Halliday (mail@substack.net) -- Michael Williamson (mike@zwobble.org) -- elliottcable (github@elliottcable.name) -- rafael (rvalle@livelens.net) -- Andrew Kelley (superjoe30@gmail.com) -- Andreas Madsen (amwebdk@gmail.com) -- Mike Brevoort (mike.brevoort@pearson.com) -- Brian White (mscdex@mscdex.net) -- Feross Aboukhadijeh (feross@feross.org) -- Ruben Verborgh (ruben@verborgh.org) -- eliang (eliang.cs@gmail.com) -- Jesse Tane (jesse.tane@gmail.com) -- Alfonso Boza (alfonso@cloud.com) -- Mathias Buus (mathiasbuus@gmail.com) -- Devon Govett (devongovett@gmail.com) -- Daniel Cousens (github@dcousens.com) -- Joseph Dykstra (josephdykstra@gmail.com) -- Parsha Pourkhomami (parshap+git@gmail.com) -- Damjan Košir (damjan.kosir@gmail.com) -- daverayment (dave.rayment@gmail.com) -- kawanet (u-suke@kawa.net) -- Linus Unnebäck (linus@folkdatorn.se) -- Nolan Lawson (nolan.lawson@gmail.com) -- Calvin Metcalf (calvin.metcalf@gmail.com) -- Koki Takahashi (hakatasiloving@gmail.com) -- Guy Bedford (guybedford@gmail.com) -- Jan Schär (jscissr@gmail.com) -- RaulTsc (tomescu.raul@gmail.com) -- Matthieu Monsch (monsch@alum.mit.edu) -- Dan Ehrenberg (littledan@chromium.org) -- Kirill Fomichev (fanatid@ya.ru) -- Yusuke Kawasaki (u-suke@kawa.net) -- DC (dcposch@dcpos.ch) -- John-David Dalton (john.david.dalton@gmail.com) -- adventure-yunfei (adventure030@gmail.com) -- Emil Bay (github@tixz.dk) -- Sam Sudar (sudar.sam@gmail.com) -- Volker Mische (volker.mische@gmail.com) -- David Walton (support@geekstocks.com) -- Сковорода Никита Андреевич (chalkerx@gmail.com) -- greenkeeper[bot] (greenkeeper[bot]@users.noreply.github.com) -- ukstv (sergey.ukustov@machinomy.com) -- Renée Kooi (renee@kooi.me) -- ranbochen (ranbochen@qq.com) -- Vladimir Borovik (bobahbdb@gmail.com) -- greenkeeper[bot] (23040076+greenkeeper[bot]@users.noreply.github.com) -- kumavis (aaron@kumavis.me) -- Sergey Ukustov (sergey.ukustov@machinomy.com) -- Fei Liu (liu.feiwood@gmail.com) -- Blaine Bublitz (blaine.bublitz@gmail.com) -- clement (clement@seald.io) -- Koushik Dutta (koushd@gmail.com) -- Jordan Harband (ljharb@gmail.com) -- Niklas Mischkulnig (mischnic@users.noreply.github.com) -- Nikolai Vavilov (vvnicholas@gmail.com) -- Fedor Nezhivoi (gyzerok@users.noreply.github.com) -- Peter Newman 
(peternewman@users.noreply.github.com) -- mathmakgakpak (44949126+mathmakgakpak@users.noreply.github.com) -- jkkang (jkkang@smartauth.kr) - -#### Generated by bin/update-authors.sh. diff --git a/node_modules/buffer/LICENSE b/node_modules/buffer/LICENSE deleted file mode 100644 index d6bf75d..0000000 --- a/node_modules/buffer/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Feross Aboukhadijeh, and other contributors. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/buffer/README.md b/node_modules/buffer/README.md deleted file mode 100644 index 9a23d7c..0000000 --- a/node_modules/buffer/README.md +++ /dev/null @@ -1,410 +0,0 @@ -# buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] - -[travis-image]: https://img.shields.io/travis/feross/buffer/master.svg -[travis-url]: https://travis-ci.org/feross/buffer -[npm-image]: https://img.shields.io/npm/v/buffer.svg -[npm-url]: https://npmjs.org/package/buffer -[downloads-image]: https://img.shields.io/npm/dm/buffer.svg -[downloads-url]: https://npmjs.org/package/buffer -[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg -[standard-url]: https://standardjs.com - -#### The buffer module from [node.js](https://nodejs.org/), for the browser. - -[![saucelabs][saucelabs-image]][saucelabs-url] - -[saucelabs-image]: https://saucelabs.com/browser-matrix/buffer.svg -[saucelabs-url]: https://saucelabs.com/u/buffer - -With [browserify](http://browserify.org), simply `require('buffer')` or use the `Buffer` global and you will get this module. - -The goal is to provide an API that is 100% identical to -[node's Buffer API](https://nodejs.org/api/buffer.html). Read the -[official docs](https://nodejs.org/api/buffer.html) for the full list of properties, -instance methods, and class methods that are supported. - -## features - -- Manipulate binary data like a boss, in all browsers! -- Super fast. Backed by Typed Arrays (`Uint8Array`/`ArrayBuffer`, not `Object`) -- Extremely small bundle size (**6.75KB minified + gzipped**, 51.9KB with comments) -- Excellent browser support (Chrome, Firefox, Edge, Safari 9+, IE 11, iOS 9+, Android, etc.) -- Preserves Node API exactly, with one minor difference (see below) -- Square-bracket `buf[4]` notation works! 
-- Does not modify any browser prototypes or put anything on `window` -- Comprehensive test suite (including all buffer tests from node.js core) - -## install - -To use this module directly (without browserify), install it: - -```bash -npm install buffer -``` - -This module was previously called **native-buffer-browserify**, but please use **buffer** -from now on. - -If you do not use a bundler, you can use the [standalone script](https://bundle.run/buffer). - -## usage - -The module's API is identical to node's `Buffer` API. Read the -[official docs](https://nodejs.org/api/buffer.html) for the full list of properties, -instance methods, and class methods that are supported. - -As mentioned above, `require('buffer')` or use the `Buffer` global with -[browserify](http://browserify.org) and this module will automatically be included -in your bundle. Almost any npm module will work in the browser, even if it assumes that -the node `Buffer` API will be available. - -To depend on this module explicitly (without browserify), require it like this: - -```js -var Buffer = require('buffer/').Buffer // note: the trailing slash is important! -``` - -To require this module explicitly, use `require('buffer/')` which tells the node.js module -lookup algorithm (also used by browserify) to use the **npm module** named `buffer` -instead of the **node.js core** module named `buffer`! - - -## how does it work? - -The Buffer constructor returns instances of `Uint8Array` that have their prototype -changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of `Uint8Array`, -so the returned instances will have all the node `Buffer` methods and the -`Uint8Array` methods. Square bracket notation works as expected -- it returns a -single octet. - -The `Uint8Array` prototype remains unmodified. - - -## tracking the latest node api - -This module tracks the Buffer API in the latest (unstable) version of node.js. The Buffer -API is considered **stable** in the -[node stability index](https://nodejs.org/docs/latest/api/documentation.html#documentation_stability_index), -so it is unlikely that there will ever be breaking changes. -Nonetheless, when/if the Buffer API changes in node, this module's API will change -accordingly. - -## related packages - -- [`buffer-reverse`](https://www.npmjs.com/package/buffer-reverse) - Reverse a buffer -- [`buffer-xor`](https://www.npmjs.com/package/buffer-xor) - Bitwise xor a buffer -- [`is-buffer`](https://www.npmjs.com/package/is-buffer) - Determine if an object is a Buffer without including the whole `Buffer` package - -## conversion packages - -### convert typed array to buffer - -Use [`typedarray-to-buffer`](https://www.npmjs.com/package/typedarray-to-buffer) to convert any kind of typed array to a `Buffer`. Does not perform a copy, so it's super fast. - -### convert buffer to typed array - -`Buffer` is a subclass of `Uint8Array` (which is a typed array). So there is no need to explicitly convert to typed array. Just use the buffer as a `Uint8Array`. - -### convert blob to buffer - -Use [`blob-to-buffer`](https://www.npmjs.com/package/blob-to-buffer) to convert a `Blob` to a `Buffer`. - -### convert buffer to blob - -To convert a `Buffer` to a `Blob`, use the `Blob` constructor: - -```js -var blob = new Blob([ buffer ]) -``` - -Optionally, specify a mimetype: - -```js -var blob = new Blob([ buffer ], { type: 'text/html' }) -``` - -### convert arraybuffer to buffer - -To convert an `ArrayBuffer` to a `Buffer`, use the `Buffer.from` function. 
Does not perform a copy, so it's super fast. - -```js -var buffer = Buffer.from(arrayBuffer) -``` - -### convert buffer to arraybuffer - -To convert a `Buffer` to an `ArrayBuffer`, use the `.buffer` property (which is present on all `Uint8Array` objects): - -```js -var arrayBuffer = buffer.buffer.slice( - buffer.byteOffset, buffer.byteOffset + buffer.byteLength -) -``` - -Alternatively, use the [`to-arraybuffer`](https://www.npmjs.com/package/to-arraybuffer) module. - -## performance - -See perf tests in `/perf`. - -`BrowserBuffer` is the browser `buffer` module (this repo). `Uint8Array` is included as a -sanity check (since `BrowserBuffer` uses `Uint8Array` under the hood, `Uint8Array` will -always be at least a bit faster). Finally, `NodeBuffer` is the node.js buffer module, -which is included to compare against. - -NOTE: Performance has improved since these benchmarks were taken. PR welcome to update the README. - -### Chrome 38 - -| Method | Operations | Accuracy | Sampled | Fastest | -|:-------|:-----------|:---------|:--------|:-------:| -| BrowserBuffer#bracket-notation | 11,457,464 ops/sec | ±0.86% | 66 | ✓ | -| Uint8Array#bracket-notation | 10,824,332 ops/sec | ±0.74% | 65 | | -| | | | | -| BrowserBuffer#concat | 450,532 ops/sec | ±0.76% | 68 | | -| Uint8Array#concat | 1,368,911 ops/sec | ±1.50% | 62 | ✓ | -| | | | | -| BrowserBuffer#copy(16000) | 903,001 ops/sec | ±0.96% | 67 | | -| Uint8Array#copy(16000) | 1,422,441 ops/sec | ±1.04% | 66 | ✓ | -| | | | | -| BrowserBuffer#copy(16) | 11,431,358 ops/sec | ±0.46% | 69 | | -| Uint8Array#copy(16) | 13,944,163 ops/sec | ±1.12% | 68 | ✓ | -| | | | | -| BrowserBuffer#new(16000) | 106,329 ops/sec | ±6.70% | 44 | | -| Uint8Array#new(16000) | 131,001 ops/sec | ±2.85% | 31 | ✓ | -| | | | | -| BrowserBuffer#new(16) | 1,554,491 ops/sec | ±1.60% | 65 | | -| Uint8Array#new(16) | 6,623,930 ops/sec | ±1.66% | 65 | ✓ | -| | | | | -| BrowserBuffer#readDoubleBE | 112,830 ops/sec | ±0.51% | 69 | ✓ | -| DataView#getFloat64 | 93,500 ops/sec | ±0.57% | 68 | | -| | | | | -| BrowserBuffer#readFloatBE | 146,678 ops/sec | ±0.95% | 68 | ✓ | -| DataView#getFloat32 | 99,311 ops/sec | ±0.41% | 67 | | -| | | | | -| BrowserBuffer#readUInt32LE | 843,214 ops/sec | ±0.70% | 69 | ✓ | -| DataView#getUint32 | 103,024 ops/sec | ±0.64% | 67 | | -| | | | | -| BrowserBuffer#slice | 1,013,941 ops/sec | ±0.75% | 67 | | -| Uint8Array#subarray | 1,903,928 ops/sec | ±0.53% | 67 | ✓ | -| | | | | -| BrowserBuffer#writeFloatBE | 61,387 ops/sec | ±0.90% | 67 | | -| DataView#setFloat32 | 141,249 ops/sec | ±0.40% | 66 | ✓ | - - -### Firefox 33 - -| Method | Operations | Accuracy | Sampled | Fastest | -|:-------|:-----------|:---------|:--------|:-------:| -| BrowserBuffer#bracket-notation | 20,800,421 ops/sec | ±1.84% | 60 | | -| Uint8Array#bracket-notation | 20,826,235 ops/sec | ±2.02% | 61 | ✓ | -| | | | | -| BrowserBuffer#concat | 153,076 ops/sec | ±2.32% | 61 | | -| Uint8Array#concat | 1,255,674 ops/sec | ±8.65% | 52 | ✓ | -| | | | | -| BrowserBuffer#copy(16000) | 1,105,312 ops/sec | ±1.16% | 63 | | -| Uint8Array#copy(16000) | 1,615,911 ops/sec | ±0.55% | 66 | ✓ | -| | | | | -| BrowserBuffer#copy(16) | 16,357,599 ops/sec | ±0.73% | 68 | | -| Uint8Array#copy(16) | 31,436,281 ops/sec | ±1.05% | 68 | ✓ | -| | | | | -| BrowserBuffer#new(16000) | 52,995 ops/sec | ±6.01% | 35 | | -| Uint8Array#new(16000) | 87,686 ops/sec | ±5.68% | 45 | ✓ | -| | | | | -| BrowserBuffer#new(16) | 252,031 ops/sec | ±1.61% | 66 | | -| Uint8Array#new(16) | 8,477,026 ops/sec | ±0.49% | 68 | ✓ | -| | | | | -| 
BrowserBuffer#readDoubleBE | 99,871 ops/sec | ±0.41% | 69 | | -| DataView#getFloat64 | 285,663 ops/sec | ±0.70% | 68 | ✓ | -| | | | | -| BrowserBuffer#readFloatBE | 115,540 ops/sec | ±0.42% | 69 | | -| DataView#getFloat32 | 288,722 ops/sec | ±0.82% | 68 | ✓ | -| | | | | -| BrowserBuffer#readUInt32LE | 633,926 ops/sec | ±1.08% | 67 | ✓ | -| DataView#getUint32 | 294,808 ops/sec | ±0.79% | 64 | | -| | | | | -| BrowserBuffer#slice | 349,425 ops/sec | ±0.46% | 69 | | -| Uint8Array#subarray | 5,965,819 ops/sec | ±0.60% | 65 | ✓ | -| | | | | -| BrowserBuffer#writeFloatBE | 59,980 ops/sec | ±0.41% | 67 | | -| DataView#setFloat32 | 317,634 ops/sec | ±0.63% | 68 | ✓ | - -### Safari 8 - -| Method | Operations | Accuracy | Sampled | Fastest | -|:-------|:-----------|:---------|:--------|:-------:| -| BrowserBuffer#bracket-notation | 10,279,729 ops/sec | ±2.25% | 56 | ✓ | -| Uint8Array#bracket-notation | 10,030,767 ops/sec | ±2.23% | 59 | | -| | | | | -| BrowserBuffer#concat | 144,138 ops/sec | ±1.38% | 65 | | -| Uint8Array#concat | 4,950,764 ops/sec | ±1.70% | 63 | ✓ | -| | | | | -| BrowserBuffer#copy(16000) | 1,058,548 ops/sec | ±1.51% | 64 | | -| Uint8Array#copy(16000) | 1,409,666 ops/sec | ±1.17% | 65 | ✓ | -| | | | | -| BrowserBuffer#copy(16) | 6,282,529 ops/sec | ±1.88% | 58 | | -| Uint8Array#copy(16) | 11,907,128 ops/sec | ±2.87% | 58 | ✓ | -| | | | | -| BrowserBuffer#new(16000) | 101,663 ops/sec | ±3.89% | 57 | | -| Uint8Array#new(16000) | 22,050,818 ops/sec | ±6.51% | 46 | ✓ | -| | | | | -| BrowserBuffer#new(16) | 176,072 ops/sec | ±2.13% | 64 | | -| Uint8Array#new(16) | 24,385,731 ops/sec | ±5.01% | 51 | ✓ | -| | | | | -| BrowserBuffer#readDoubleBE | 41,341 ops/sec | ±1.06% | 67 | | -| DataView#getFloat64 | 322,280 ops/sec | ±0.84% | 68 | ✓ | -| | | | | -| BrowserBuffer#readFloatBE | 46,141 ops/sec | ±1.06% | 65 | | -| DataView#getFloat32 | 337,025 ops/sec | ±0.43% | 69 | ✓ | -| | | | | -| BrowserBuffer#readUInt32LE | 151,551 ops/sec | ±1.02% | 66 | | -| DataView#getUint32 | 308,278 ops/sec | ±0.94% | 67 | ✓ | -| | | | | -| BrowserBuffer#slice | 197,365 ops/sec | ±0.95% | 66 | | -| Uint8Array#subarray | 9,558,024 ops/sec | ±3.08% | 58 | ✓ | -| | | | | -| BrowserBuffer#writeFloatBE | 17,518 ops/sec | ±1.03% | 63 | | -| DataView#setFloat32 | 319,751 ops/sec | ±0.48% | 68 | ✓ | - - -### Node 0.11.14 - -| Method | Operations | Accuracy | Sampled | Fastest | -|:-------|:-----------|:---------|:--------|:-------:| -| BrowserBuffer#bracket-notation | 10,489,828 ops/sec | ±3.25% | 90 | | -| Uint8Array#bracket-notation | 10,534,884 ops/sec | ±0.81% | 92 | ✓ | -| NodeBuffer#bracket-notation | 10,389,910 ops/sec | ±0.97% | 87 | | -| | | | | -| BrowserBuffer#concat | 487,830 ops/sec | ±2.58% | 88 | | -| Uint8Array#concat | 1,814,327 ops/sec | ±1.28% | 88 | ✓ | -| NodeBuffer#concat | 1,636,523 ops/sec | ±1.88% | 73 | | -| | | | | -| BrowserBuffer#copy(16000) | 1,073,665 ops/sec | ±0.77% | 90 | | -| Uint8Array#copy(16000) | 1,348,517 ops/sec | ±0.84% | 89 | ✓ | -| NodeBuffer#copy(16000) | 1,289,533 ops/sec | ±0.82% | 93 | | -| | | | | -| BrowserBuffer#copy(16) | 12,782,706 ops/sec | ±0.74% | 85 | | -| Uint8Array#copy(16) | 14,180,427 ops/sec | ±0.93% | 92 | ✓ | -| NodeBuffer#copy(16) | 11,083,134 ops/sec | ±1.06% | 89 | | -| | | | | -| BrowserBuffer#new(16000) | 141,678 ops/sec | ±3.30% | 67 | | -| Uint8Array#new(16000) | 161,491 ops/sec | ±2.96% | 60 | | -| NodeBuffer#new(16000) | 292,699 ops/sec | ±3.20% | 55 | ✓ | -| | | | | -| BrowserBuffer#new(16) | 1,655,466 ops/sec | ±2.41% | 82 | | -| 
Uint8Array#new(16) | 14,399,926 ops/sec | ±0.91% | 94 | ✓ | -| NodeBuffer#new(16) | 3,894,696 ops/sec | ±0.88% | 92 | | -| | | | | -| BrowserBuffer#readDoubleBE | 109,582 ops/sec | ±0.75% | 93 | ✓ | -| DataView#getFloat64 | 91,235 ops/sec | ±0.81% | 90 | | -| NodeBuffer#readDoubleBE | 88,593 ops/sec | ±0.96% | 81 | | -| | | | | -| BrowserBuffer#readFloatBE | 139,854 ops/sec | ±1.03% | 85 | ✓ | -| DataView#getFloat32 | 98,744 ops/sec | ±0.80% | 89 | | -| NodeBuffer#readFloatBE | 92,769 ops/sec | ±0.94% | 93 | | -| | | | | -| BrowserBuffer#readUInt32LE | 710,861 ops/sec | ±0.82% | 92 | | -| DataView#getUint32 | 117,893 ops/sec | ±0.84% | 91 | | -| NodeBuffer#readUInt32LE | 851,412 ops/sec | ±0.72% | 93 | ✓ | -| | | | | -| BrowserBuffer#slice | 1,673,877 ops/sec | ±0.73% | 94 | | -| Uint8Array#subarray | 6,919,243 ops/sec | ±0.67% | 90 | ✓ | -| NodeBuffer#slice | 4,617,604 ops/sec | ±0.79% | 93 | | -| | | | | -| BrowserBuffer#writeFloatBE | 66,011 ops/sec | ±0.75% | 93 | | -| DataView#setFloat32 | 127,760 ops/sec | ±0.72% | 93 | ✓ | -| NodeBuffer#writeFloatBE | 103,352 ops/sec | ±0.83% | 93 | | - -### iojs 1.8.1 - -| Method | Operations | Accuracy | Sampled | Fastest | -|:-------|:-----------|:---------|:--------|:-------:| -| BrowserBuffer#bracket-notation | 10,990,488 ops/sec | ±1.11% | 91 | | -| Uint8Array#bracket-notation | 11,268,757 ops/sec | ±0.65% | 97 | | -| NodeBuffer#bracket-notation | 11,353,260 ops/sec | ±0.83% | 94 | ✓ | -| | | | | -| BrowserBuffer#concat | 378,954 ops/sec | ±0.74% | 94 | | -| Uint8Array#concat | 1,358,288 ops/sec | ±0.97% | 87 | | -| NodeBuffer#concat | 1,934,050 ops/sec | ±1.11% | 78 | ✓ | -| | | | | -| BrowserBuffer#copy(16000) | 894,538 ops/sec | ±0.56% | 84 | | -| Uint8Array#copy(16000) | 1,442,656 ops/sec | ±0.71% | 96 | | -| NodeBuffer#copy(16000) | 1,457,898 ops/sec | ±0.53% | 92 | ✓ | -| | | | | -| BrowserBuffer#copy(16) | 12,870,457 ops/sec | ±0.67% | 95 | | -| Uint8Array#copy(16) | 16,643,989 ops/sec | ±0.61% | 93 | ✓ | -| NodeBuffer#copy(16) | 14,885,848 ops/sec | ±0.74% | 94 | | -| | | | | -| BrowserBuffer#new(16000) | 109,264 ops/sec | ±4.21% | 63 | | -| Uint8Array#new(16000) | 138,916 ops/sec | ±1.87% | 61 | | -| NodeBuffer#new(16000) | 281,449 ops/sec | ±3.58% | 51 | ✓ | -| | | | | -| BrowserBuffer#new(16) | 1,362,935 ops/sec | ±0.56% | 99 | | -| Uint8Array#new(16) | 6,193,090 ops/sec | ±0.64% | 95 | ✓ | -| NodeBuffer#new(16) | 4,745,425 ops/sec | ±1.56% | 90 | | -| | | | | -| BrowserBuffer#readDoubleBE | 118,127 ops/sec | ±0.59% | 93 | ✓ | -| DataView#getFloat64 | 107,332 ops/sec | ±0.65% | 91 | | -| NodeBuffer#readDoubleBE | 116,274 ops/sec | ±0.94% | 95 | | -| | | | | -| BrowserBuffer#readFloatBE | 150,326 ops/sec | ±0.58% | 95 | ✓ | -| DataView#getFloat32 | 110,541 ops/sec | ±0.57% | 98 | | -| NodeBuffer#readFloatBE | 121,599 ops/sec | ±0.60% | 87 | | -| | | | | -| BrowserBuffer#readUInt32LE | 814,147 ops/sec | ±0.62% | 93 | | -| DataView#getUint32 | 137,592 ops/sec | ±0.64% | 90 | | -| NodeBuffer#readUInt32LE | 931,650 ops/sec | ±0.71% | 96 | ✓ | -| | | | | -| BrowserBuffer#slice | 878,590 ops/sec | ±0.68% | 93 | | -| Uint8Array#subarray | 2,843,308 ops/sec | ±1.02% | 90 | | -| NodeBuffer#slice | 4,998,316 ops/sec | ±0.68% | 90 | ✓ | -| | | | | -| BrowserBuffer#writeFloatBE | 65,927 ops/sec | ±0.74% | 93 | | -| DataView#setFloat32 | 139,823 ops/sec | ±0.97% | 89 | ✓ | -| NodeBuffer#writeFloatBE | 135,763 ops/sec | ±0.65% | 96 | | -| | | | | - -## Testing the project - -First, install the project: - - npm install - -Then, to run tests in 
Node.js, run: - - npm run test-node - -To test locally in a browser, you can run: - - npm run test-browser-es5-local # For ES5 browsers that don't support ES6 - npm run test-browser-es6-local # For ES6 compliant browsers - -This will print out a URL that you can then open in a browser to run the tests, using [airtap](https://www.npmjs.com/package/airtap). - -To run automated browser tests using Saucelabs, ensure that your `SAUCE_USERNAME` and `SAUCE_ACCESS_KEY` environment variables are set, then run: - - npm test - -This is what's run in Travis, to check against various browsers. The list of browsers is kept in the `bin/airtap-es5.yml` and `bin/airtap-es6.yml` files. - -## JavaScript Standard Style - -This module uses [JavaScript Standard Style](https://github.com/feross/standard). - -[![JavaScript Style Guide](https://cdn.rawgit.com/feross/standard/master/badge.svg)](https://github.com/feross/standard) - -To test that the code conforms to the style, `npm install` and run: - - ./node_modules/.bin/standard - -## credit - -This was originally forked from [buffer-browserify](https://github.com/toots/buffer-browserify). - -## Security Policies and Procedures - -The `buffer` team and community take all security bugs in `buffer` seriously. Please see our [security policies and procedures](https://github.com/feross/security) document to learn how to report issues. - -## license - -MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org), and other contributors. Originally forked from an MIT-licensed module by Romain Beauxis. diff --git a/node_modules/buffer/index.d.ts b/node_modules/buffer/index.d.ts deleted file mode 100644 index 5d1a804..0000000 --- a/node_modules/buffer/index.d.ts +++ /dev/null @@ -1,186 +0,0 @@ -export class Buffer extends Uint8Array { - length: number - write(string: string, offset?: number, length?: number, encoding?: string): number; - toString(encoding?: string, start?: number, end?: number): string; - toJSON(): { type: 'Buffer', data: any[] }; - equals(otherBuffer: Buffer): boolean; - compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; - copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; - slice(start?: number, end?: number): Buffer; - writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; - writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; - writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; - writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; - readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; - readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; - readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; - readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; - readUInt8(offset: number, noAssert?: boolean): number; - readUInt16LE(offset: number, noAssert?: boolean): number; - readUInt16BE(offset: number, noAssert?: boolean): number; - readUInt32LE(offset: number, noAssert?: boolean): number; - readUInt32BE(offset: number, noAssert?: boolean): number; - readInt8(offset: number, noAssert?: boolean): number; - readInt16LE(offset: number, noAssert?: boolean): number; - readInt16BE(offset: number, noAssert?: boolean): number; - readInt32LE(offset: number, noAssert?: boolean): number; - readInt32BE(offset: 
number, noAssert?: boolean): number; - readFloatLE(offset: number, noAssert?: boolean): number; - readFloatBE(offset: number, noAssert?: boolean): number; - readDoubleLE(offset: number, noAssert?: boolean): number; - readDoubleBE(offset: number, noAssert?: boolean): number; - reverse(): this; - swap16(): Buffer; - swap32(): Buffer; - swap64(): Buffer; - writeUInt8(value: number, offset: number, noAssert?: boolean): number; - writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; - writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; - writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; - writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; - writeInt8(value: number, offset: number, noAssert?: boolean): number; - writeInt16LE(value: number, offset: number, noAssert?: boolean): number; - writeInt16BE(value: number, offset: number, noAssert?: boolean): number; - writeInt32LE(value: number, offset: number, noAssert?: boolean): number; - writeInt32BE(value: number, offset: number, noAssert?: boolean): number; - writeFloatLE(value: number, offset: number, noAssert?: boolean): number; - writeFloatBE(value: number, offset: number, noAssert?: boolean): number; - writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; - writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; - fill(value: any, offset?: number, end?: number): this; - indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; - lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; - includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; - - /** - * Allocates a new buffer containing the given {str}. - * - * @param str String to store in buffer. - * @param encoding encoding to use, optional. Default is 'utf8' - */ - constructor (str: string, encoding?: string); - /** - * Allocates a new buffer of {size} octets. - * - * @param size count of octets to allocate. - */ - constructor (size: number); - /** - * Allocates a new buffer containing the given {array} of octets. - * - * @param array The octets to store. - */ - constructor (array: Uint8Array); - /** - * Produces a Buffer backed by the same allocated memory as - * the given {ArrayBuffer}. - * - * - * @param arrayBuffer The ArrayBuffer with which to share memory. - */ - constructor (arrayBuffer: ArrayBuffer); - /** - * Allocates a new buffer containing the given {array} of octets. - * - * @param array The octets to store. - */ - constructor (array: any[]); - /** - * Copies the passed {buffer} data onto a new {Buffer} instance. - * - * @param buffer The buffer to copy. - */ - constructor (buffer: Buffer); - prototype: Buffer; - /** - * Allocates a new Buffer using an {array} of octets. - * - * @param array - */ - static from(array: any[]): Buffer; - /** - * When passed a reference to the .buffer property of a TypedArray instance, - * the newly created Buffer will share the same allocated memory as the TypedArray. - * The optional {byteOffset} and {length} arguments specify a memory range - * within the {arrayBuffer} that will be shared by the Buffer. - * - * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() - * @param byteOffset - * @param length - */ - static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; - /** - * Copies the passed {buffer} data onto a new Buffer instance. 
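 * The copy is made at creation time, so later changes to the source buffer are not reflected in the new instance.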
- * - * @param buffer - */ - static from(buffer: Buffer | Uint8Array): Buffer; - /** - * Creates a new Buffer containing the given JavaScript string {str}. - * If provided, the {encoding} parameter identifies the character encoding. - * If not provided, {encoding} defaults to 'utf8'. - * - * @param str - */ - static from(str: string, encoding?: string): Buffer; - /** - * Returns true if {obj} is a Buffer - * - * @param obj object to test. - */ - static isBuffer(obj: any): obj is Buffer; - /** - * Returns true if {encoding} is a valid encoding argument. - * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' - * - * @param encoding string to test. - */ - static isEncoding(encoding: string): boolean; - /** - * Gives the actual byte length of a string. encoding defaults to 'utf8'. - * This is not the same as String.prototype.length since that returns the number of characters in a string. - * - * @param string string to test. - * @param encoding encoding used to evaluate (defaults to 'utf8') - */ - static byteLength(string: string, encoding?: string): number; - /** - * Returns a buffer which is the result of concatenating all the buffers in the list together. - * - * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. - * If the list has exactly one item, then the first item of the list is returned. - * If the list has more than one item, then a new Buffer is created. - * - * @param list An array of Buffer objects to concatenate - * @param totalLength Total length of the buffers when concatenated. - * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. - */ - static concat(list: Buffer[], totalLength?: number): Buffer; - /** - * The same as buf1.compare(buf2). - */ - static compare(buf1: Buffer, buf2: Buffer): number; - /** - * Allocates a new buffer of {size} octets. - * - * @param size count of octets to allocate. - * @param fill if specified, buffer will be initialized by calling buf.fill(fill). - * If parameter is omitted, buffer will be filled with zeros. - * @param encoding encoding used for call to buf.fill while initializing - */ - static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; - /** - * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents - * of the newly created Buffer are unknown and may contain sensitive data. - * - * @param size count of octets to allocate - */ - static allocUnsafe(size: number): Buffer; - /** - * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents - * of the newly created Buffer are unknown and may contain sensitive data. - * - * @param size count of octets to allocate - */ - static allocUnsafeSlow(size: number): Buffer; -} diff --git a/node_modules/buffer/index.js b/node_modules/buffer/index.js deleted file mode 100644 index 609cf31..0000000 --- a/node_modules/buffer/index.js +++ /dev/null @@ -1,1817 +0,0 @@ -/*! - * The buffer module from node.js, for the browser. - * - * @author Feross Aboukhadijeh - * @license MIT - */ -/* eslint-disable no-proto */ - -'use strict' - -var base64 = require('base64-js') -var ieee754 = require('ieee754') -var customInspectSymbol = - (typeof Symbol === 'function' && typeof Symbol['for'] === 'function') // eslint-disable-line dot-notation - ? 
Symbol['for']('nodejs.util.inspect.custom') // eslint-disable-line dot-notation - : null - -exports.Buffer = Buffer -exports.SlowBuffer = SlowBuffer -exports.INSPECT_MAX_BYTES = 50 - -var K_MAX_LENGTH = 0x7fffffff -exports.kMaxLength = K_MAX_LENGTH - -/** - * If `Buffer.TYPED_ARRAY_SUPPORT`: - * === true Use Uint8Array implementation (fastest) - * === false Print warning and recommend using `buffer` v4.x which has an Object - * implementation (most compatible, even IE6) - * - * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+, - * Opera 11.6+, iOS 4.2+. - * - * We report that the browser does not support typed arrays if the are not subclassable - * using __proto__. Firefox 4-29 lacks support for adding new properties to `Uint8Array` - * (See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438). IE 10 lacks support - * for __proto__ and has a buggy typed array implementation. - */ -Buffer.TYPED_ARRAY_SUPPORT = typedArraySupport() - -if (!Buffer.TYPED_ARRAY_SUPPORT && typeof console !== 'undefined' && - typeof console.error === 'function') { - console.error( - 'This browser lacks typed array (Uint8Array) support which is required by ' + - '`buffer` v5.x. Use `buffer` v4.x if you require old browser support.' - ) -} - -function typedArraySupport () { - // Can typed array instances can be augmented? - try { - var arr = new Uint8Array(1) - var proto = { foo: function () { return 42 } } - Object.setPrototypeOf(proto, Uint8Array.prototype) - Object.setPrototypeOf(arr, proto) - return arr.foo() === 42 - } catch (e) { - return false - } -} - -Object.defineProperty(Buffer.prototype, 'parent', { - enumerable: true, - get: function () { - if (!Buffer.isBuffer(this)) return undefined - return this.buffer - } -}) - -Object.defineProperty(Buffer.prototype, 'offset', { - enumerable: true, - get: function () { - if (!Buffer.isBuffer(this)) return undefined - return this.byteOffset - } -}) - -function createBuffer (length) { - if (length > K_MAX_LENGTH) { - throw new RangeError('The value "' + length + '" is invalid for option "size"') - } - // Return an augmented `Uint8Array` instance - var buf = new Uint8Array(length) - Object.setPrototypeOf(buf, Buffer.prototype) - return buf -} - -/** - * The Buffer constructor returns instances of `Uint8Array` that have their - * prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of - * `Uint8Array`, so the returned instances will have all the node `Buffer` methods - * and the `Uint8Array` methods. Square bracket notation works as expected -- it - * returns a single octet. - * - * The `Uint8Array` prototype remains unmodified. - */ - -function Buffer (arg, encodingOrOffset, length) { - // Common case. - if (typeof arg === 'number') { - if (typeof encodingOrOffset === 'string') { - throw new TypeError( - 'The "string" argument must be of type string. Received type number' - ) - } - return allocUnsafe(arg) - } - return from(arg, encodingOrOffset, length) -} - -Buffer.poolSize = 8192 // not used by this implementation - -function from (value, encodingOrOffset, length) { - if (typeof value === 'string') { - return fromString(value, encodingOrOffset) - } - - if (ArrayBuffer.isView(value)) { - return fromArrayView(value) - } - - if (value == null) { - throw new TypeError( - 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' + - 'or Array-like Object. 
Received type ' + (typeof value) - ) - } - - if (isInstance(value, ArrayBuffer) || - (value && isInstance(value.buffer, ArrayBuffer))) { - return fromArrayBuffer(value, encodingOrOffset, length) - } - - if (typeof SharedArrayBuffer !== 'undefined' && - (isInstance(value, SharedArrayBuffer) || - (value && isInstance(value.buffer, SharedArrayBuffer)))) { - return fromArrayBuffer(value, encodingOrOffset, length) - } - - if (typeof value === 'number') { - throw new TypeError( - 'The "value" argument must not be of type number. Received type number' - ) - } - - var valueOf = value.valueOf && value.valueOf() - if (valueOf != null && valueOf !== value) { - return Buffer.from(valueOf, encodingOrOffset, length) - } - - var b = fromObject(value) - if (b) return b - - if (typeof Symbol !== 'undefined' && Symbol.toPrimitive != null && - typeof value[Symbol.toPrimitive] === 'function') { - return Buffer.from( - value[Symbol.toPrimitive]('string'), encodingOrOffset, length - ) - } - - throw new TypeError( - 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' + - 'or Array-like Object. Received type ' + (typeof value) - ) -} - -/** - * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError - * if value is a number. - * Buffer.from(str[, encoding]) - * Buffer.from(array) - * Buffer.from(buffer) - * Buffer.from(arrayBuffer[, byteOffset[, length]]) - **/ -Buffer.from = function (value, encodingOrOffset, length) { - return from(value, encodingOrOffset, length) -} - -// Note: Change prototype *after* Buffer.from is defined to workaround Chrome bug: -// https://github.com/feross/buffer/pull/148 -Object.setPrototypeOf(Buffer.prototype, Uint8Array.prototype) -Object.setPrototypeOf(Buffer, Uint8Array) - -function assertSize (size) { - if (typeof size !== 'number') { - throw new TypeError('"size" argument must be of type number') - } else if (size < 0) { - throw new RangeError('The value "' + size + '" is invalid for option "size"') - } -} - -function alloc (size, fill, encoding) { - assertSize(size) - if (size <= 0) { - return createBuffer(size) - } - if (fill !== undefined) { - // Only pay attention to encoding if it's a string. This - // prevents accidentally sending in a number that would - // be interpreted as a start offset. - return typeof encoding === 'string' - ? createBuffer(size).fill(fill, encoding) - : createBuffer(size).fill(fill) - } - return createBuffer(size) -} - -/** - * Creates a new filled Buffer instance. - * alloc(size[, fill[, encoding]]) - **/ -Buffer.alloc = function (size, fill, encoding) { - return alloc(size, fill, encoding) -} - -function allocUnsafe (size) { - assertSize(size) - return createBuffer(size < 0 ? 0 : checked(size) | 0) -} - -/** - * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance. - * */ -Buffer.allocUnsafe = function (size) { - return allocUnsafe(size) -} -/** - * Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance. 
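 * In this browser implementation there is no allocation pool (`Buffer.poolSize` above is unused), so this behaves the same as `allocUnsafe`.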
- */ -Buffer.allocUnsafeSlow = function (size) { - return allocUnsafe(size) -} - -function fromString (string, encoding) { - if (typeof encoding !== 'string' || encoding === '') { - encoding = 'utf8' - } - - if (!Buffer.isEncoding(encoding)) { - throw new TypeError('Unknown encoding: ' + encoding) - } - - var length = byteLength(string, encoding) | 0 - var buf = createBuffer(length) - - var actual = buf.write(string, encoding) - - if (actual !== length) { - // Writing a hex string, for example, that contains invalid characters will - // cause everything after the first invalid character to be ignored. (e.g. - // 'abxxcd' will be treated as 'ab') - buf = buf.slice(0, actual) - } - - return buf -} - -function fromArrayLike (array) { - var length = array.length < 0 ? 0 : checked(array.length) | 0 - var buf = createBuffer(length) - for (var i = 0; i < length; i += 1) { - buf[i] = array[i] & 255 - } - return buf -} - -function fromArrayView (arrayView) { - if (isInstance(arrayView, Uint8Array)) { - var copy = new Uint8Array(arrayView) - return fromArrayBuffer(copy.buffer, copy.byteOffset, copy.byteLength) - } - return fromArrayLike(arrayView) -} - -function fromArrayBuffer (array, byteOffset, length) { - if (byteOffset < 0 || array.byteLength < byteOffset) { - throw new RangeError('"offset" is outside of buffer bounds') - } - - if (array.byteLength < byteOffset + (length || 0)) { - throw new RangeError('"length" is outside of buffer bounds') - } - - var buf - if (byteOffset === undefined && length === undefined) { - buf = new Uint8Array(array) - } else if (length === undefined) { - buf = new Uint8Array(array, byteOffset) - } else { - buf = new Uint8Array(array, byteOffset, length) - } - - // Return an augmented `Uint8Array` instance - Object.setPrototypeOf(buf, Buffer.prototype) - - return buf -} - -function fromObject (obj) { - if (Buffer.isBuffer(obj)) { - var len = checked(obj.length) | 0 - var buf = createBuffer(len) - - if (buf.length === 0) { - return buf - } - - obj.copy(buf, 0, 0, len) - return buf - } - - if (obj.length !== undefined) { - if (typeof obj.length !== 'number' || numberIsNaN(obj.length)) { - return createBuffer(0) - } - return fromArrayLike(obj) - } - - if (obj.type === 'Buffer' && Array.isArray(obj.data)) { - return fromArrayLike(obj.data) - } -} - -function checked (length) { - // Note: cannot use `length < K_MAX_LENGTH` here because that fails when - // length is NaN (which is otherwise coerced to zero.) 
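 // (`NaN >= K_MAX_LENGTH` evaluates to false, so a NaN length skips the throw and is coerced to 0 by the `length | 0` below.)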
- if (length >= K_MAX_LENGTH) { - throw new RangeError('Attempt to allocate Buffer larger than maximum ' + - 'size: 0x' + K_MAX_LENGTH.toString(16) + ' bytes') - } - return length | 0 -} - -function SlowBuffer (length) { - if (+length != length) { // eslint-disable-line eqeqeq - length = 0 - } - return Buffer.alloc(+length) -} - -Buffer.isBuffer = function isBuffer (b) { - return b != null && b._isBuffer === true && - b !== Buffer.prototype // so Buffer.isBuffer(Buffer.prototype) will be false -} - -Buffer.compare = function compare (a, b) { - if (isInstance(a, Uint8Array)) a = Buffer.from(a, a.offset, a.byteLength) - if (isInstance(b, Uint8Array)) b = Buffer.from(b, b.offset, b.byteLength) - if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { - throw new TypeError( - 'The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array' - ) - } - - if (a === b) return 0 - - var x = a.length - var y = b.length - - for (var i = 0, len = Math.min(x, y); i < len; ++i) { - if (a[i] !== b[i]) { - x = a[i] - y = b[i] - break - } - } - - if (x < y) return -1 - if (y < x) return 1 - return 0 -} - -Buffer.isEncoding = function isEncoding (encoding) { - switch (String(encoding).toLowerCase()) { - case 'hex': - case 'utf8': - case 'utf-8': - case 'ascii': - case 'latin1': - case 'binary': - case 'base64': - case 'ucs2': - case 'ucs-2': - case 'utf16le': - case 'utf-16le': - return true - default: - return false - } -} - -Buffer.concat = function concat (list, length) { - if (!Array.isArray(list)) { - throw new TypeError('"list" argument must be an Array of Buffers') - } - - if (list.length === 0) { - return Buffer.alloc(0) - } - - var i - if (length === undefined) { - length = 0 - for (i = 0; i < list.length; ++i) { - length += list[i].length - } - } - - var buffer = Buffer.allocUnsafe(length) - var pos = 0 - for (i = 0; i < list.length; ++i) { - var buf = list[i] - if (isInstance(buf, Uint8Array)) { - if (pos + buf.length > buffer.length) { - Buffer.from(buf).copy(buffer, pos) - } else { - Uint8Array.prototype.set.call( - buffer, - buf, - pos - ) - } - } else if (!Buffer.isBuffer(buf)) { - throw new TypeError('"list" argument must be an Array of Buffers') - } else { - buf.copy(buffer, pos) - } - pos += buf.length - } - return buffer -} - -function byteLength (string, encoding) { - if (Buffer.isBuffer(string)) { - return string.length - } - if (ArrayBuffer.isView(string) || isInstance(string, ArrayBuffer)) { - return string.byteLength - } - if (typeof string !== 'string') { - throw new TypeError( - 'The "string" argument must be one of type string, Buffer, or ArrayBuffer. ' + - 'Received type ' + typeof string - ) - } - - var len = string.length - var mustMatch = (arguments.length > 2 && arguments[2] === true) - if (!mustMatch && len === 0) return 0 - - // Use a for loop to avoid recursion - var loweredCase = false - for (;;) { - switch (encoding) { - case 'ascii': - case 'latin1': - case 'binary': - return len - case 'utf8': - case 'utf-8': - return utf8ToBytes(string).length - case 'ucs2': - case 'ucs-2': - case 'utf16le': - case 'utf-16le': - return len * 2 - case 'hex': - return len >>> 1 - case 'base64': - return base64ToBytes(string).length - default: - if (loweredCase) { - return mustMatch ? 
-1 : utf8ToBytes(string).length // assume utf8 - } - encoding = ('' + encoding).toLowerCase() - loweredCase = true - } - } -} -Buffer.byteLength = byteLength - -function slowToString (encoding, start, end) { - var loweredCase = false - - // No need to verify that "this.length <= MAX_UINT32" since it's a read-only - // property of a typed array. - - // This behaves neither like String nor Uint8Array in that we set start/end - // to their upper/lower bounds if the value passed is out of range. - // undefined is handled specially as per ECMA-262 6th Edition, - // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization. - if (start === undefined || start < 0) { - start = 0 - } - // Return early if start > this.length. Done here to prevent potential uint32 - // coercion fail below. - if (start > this.length) { - return '' - } - - if (end === undefined || end > this.length) { - end = this.length - } - - if (end <= 0) { - return '' - } - - // Force coercion to uint32. This will also coerce falsey/NaN values to 0. - end >>>= 0 - start >>>= 0 - - if (end <= start) { - return '' - } - - if (!encoding) encoding = 'utf8' - - while (true) { - switch (encoding) { - case 'hex': - return hexSlice(this, start, end) - - case 'utf8': - case 'utf-8': - return utf8Slice(this, start, end) - - case 'ascii': - return asciiSlice(this, start, end) - - case 'latin1': - case 'binary': - return latin1Slice(this, start, end) - - case 'base64': - return base64Slice(this, start, end) - - case 'ucs2': - case 'ucs-2': - case 'utf16le': - case 'utf-16le': - return utf16leSlice(this, start, end) - - default: - if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) - encoding = (encoding + '').toLowerCase() - loweredCase = true - } - } -} - -// This property is used by `Buffer.isBuffer` (and the `is-buffer` npm package) -// to detect a Buffer instance. It's not possible to use `instanceof Buffer` -// reliably in a browserify context because there could be multiple different -// copies of the 'buffer' package in use. This method works even for Buffer -// instances that were created from another copy of the `buffer` package. 
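// (Each bundled copy of this package has its own `Buffer` constructor, so `instanceof` only matches instances created by that same copy; the `_isBuffer` flag is a plain property and is visible to every copy.)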
-// See: https://github.com/feross/buffer/issues/154 -Buffer.prototype._isBuffer = true - -function swap (b, n, m) { - var i = b[n] - b[n] = b[m] - b[m] = i -} - -Buffer.prototype.swap16 = function swap16 () { - var len = this.length - if (len % 2 !== 0) { - throw new RangeError('Buffer size must be a multiple of 16-bits') - } - for (var i = 0; i < len; i += 2) { - swap(this, i, i + 1) - } - return this -} - -Buffer.prototype.swap32 = function swap32 () { - var len = this.length - if (len % 4 !== 0) { - throw new RangeError('Buffer size must be a multiple of 32-bits') - } - for (var i = 0; i < len; i += 4) { - swap(this, i, i + 3) - swap(this, i + 1, i + 2) - } - return this -} - -Buffer.prototype.swap64 = function swap64 () { - var len = this.length - if (len % 8 !== 0) { - throw new RangeError('Buffer size must be a multiple of 64-bits') - } - for (var i = 0; i < len; i += 8) { - swap(this, i, i + 7) - swap(this, i + 1, i + 6) - swap(this, i + 2, i + 5) - swap(this, i + 3, i + 4) - } - return this -} - -Buffer.prototype.toString = function toString () { - var length = this.length - if (length === 0) return '' - if (arguments.length === 0) return utf8Slice(this, 0, length) - return slowToString.apply(this, arguments) -} - -Buffer.prototype.toLocaleString = Buffer.prototype.toString - -Buffer.prototype.equals = function equals (b) { - if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer') - if (this === b) return true - return Buffer.compare(this, b) === 0 -} - -Buffer.prototype.inspect = function inspect () { - var str = '' - var max = exports.INSPECT_MAX_BYTES - str = this.toString('hex', 0, max).replace(/(.{2})/g, '$1 ').trim() - if (this.length > max) str += ' ... ' - return '' -} -if (customInspectSymbol) { - Buffer.prototype[customInspectSymbol] = Buffer.prototype.inspect -} - -Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) { - if (isInstance(target, Uint8Array)) { - target = Buffer.from(target, target.offset, target.byteLength) - } - if (!Buffer.isBuffer(target)) { - throw new TypeError( - 'The "target" argument must be one of type Buffer or Uint8Array. ' + - 'Received type ' + (typeof target) - ) - } - - if (start === undefined) { - start = 0 - } - if (end === undefined) { - end = target ? target.length : 0 - } - if (thisStart === undefined) { - thisStart = 0 - } - if (thisEnd === undefined) { - thisEnd = this.length - } - - if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) { - throw new RangeError('out of range index') - } - - if (thisStart >= thisEnd && start >= end) { - return 0 - } - if (thisStart >= thisEnd) { - return -1 - } - if (start >= end) { - return 1 - } - - start >>>= 0 - end >>>= 0 - thisStart >>>= 0 - thisEnd >>>= 0 - - if (this === target) return 0 - - var x = thisEnd - thisStart - var y = end - start - var len = Math.min(x, y) - - var thisCopy = this.slice(thisStart, thisEnd) - var targetCopy = target.slice(start, end) - - for (var i = 0; i < len; ++i) { - if (thisCopy[i] !== targetCopy[i]) { - x = thisCopy[i] - y = targetCopy[i] - break - } - } - - if (x < y) return -1 - if (y < x) return 1 - return 0 -} - -// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`, -// OR the last index of `val` in `buffer` at offset <= `byteOffset`. 
-// -// Arguments: -// - buffer - a Buffer to search -// - val - a string, Buffer, or number -// - byteOffset - an index into `buffer`; will be clamped to an int32 -// - encoding - an optional encoding, relevant is val is a string -// - dir - true for indexOf, false for lastIndexOf -function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) { - // Empty buffer means no match - if (buffer.length === 0) return -1 - - // Normalize byteOffset - if (typeof byteOffset === 'string') { - encoding = byteOffset - byteOffset = 0 - } else if (byteOffset > 0x7fffffff) { - byteOffset = 0x7fffffff - } else if (byteOffset < -0x80000000) { - byteOffset = -0x80000000 - } - byteOffset = +byteOffset // Coerce to Number. - if (numberIsNaN(byteOffset)) { - // byteOffset: it it's undefined, null, NaN, "foo", etc, search whole buffer - byteOffset = dir ? 0 : (buffer.length - 1) - } - - // Normalize byteOffset: negative offsets start from the end of the buffer - if (byteOffset < 0) byteOffset = buffer.length + byteOffset - if (byteOffset >= buffer.length) { - if (dir) return -1 - else byteOffset = buffer.length - 1 - } else if (byteOffset < 0) { - if (dir) byteOffset = 0 - else return -1 - } - - // Normalize val - if (typeof val === 'string') { - val = Buffer.from(val, encoding) - } - - // Finally, search either indexOf (if dir is true) or lastIndexOf - if (Buffer.isBuffer(val)) { - // Special case: looking for empty string/buffer always fails - if (val.length === 0) { - return -1 - } - return arrayIndexOf(buffer, val, byteOffset, encoding, dir) - } else if (typeof val === 'number') { - val = val & 0xFF // Search for a byte value [0-255] - if (typeof Uint8Array.prototype.indexOf === 'function') { - if (dir) { - return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset) - } else { - return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset) - } - } - return arrayIndexOf(buffer, [val], byteOffset, encoding, dir) - } - - throw new TypeError('val must be string, number or Buffer') -} - -function arrayIndexOf (arr, val, byteOffset, encoding, dir) { - var indexSize = 1 - var arrLength = arr.length - var valLength = val.length - - if (encoding !== undefined) { - encoding = String(encoding).toLowerCase() - if (encoding === 'ucs2' || encoding === 'ucs-2' || - encoding === 'utf16le' || encoding === 'utf-16le') { - if (arr.length < 2 || val.length < 2) { - return -1 - } - indexSize = 2 - arrLength /= 2 - valLength /= 2 - byteOffset /= 2 - } - } - - function read (buf, i) { - if (indexSize === 1) { - return buf[i] - } else { - return buf.readUInt16BE(i * indexSize) - } - } - - var i - if (dir) { - var foundIndex = -1 - for (i = byteOffset; i < arrLength; i++) { - if (read(arr, i) === read(val, foundIndex === -1 ? 
0 : i - foundIndex)) { - if (foundIndex === -1) foundIndex = i - if (i - foundIndex + 1 === valLength) return foundIndex * indexSize - } else { - if (foundIndex !== -1) i -= i - foundIndex - foundIndex = -1 - } - } - } else { - if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength - for (i = byteOffset; i >= 0; i--) { - var found = true - for (var j = 0; j < valLength; j++) { - if (read(arr, i + j) !== read(val, j)) { - found = false - break - } - } - if (found) return i - } - } - - return -1 -} - -Buffer.prototype.includes = function includes (val, byteOffset, encoding) { - return this.indexOf(val, byteOffset, encoding) !== -1 -} - -Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) { - return bidirectionalIndexOf(this, val, byteOffset, encoding, true) -} - -Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) { - return bidirectionalIndexOf(this, val, byteOffset, encoding, false) -} - -function hexWrite (buf, string, offset, length) { - offset = Number(offset) || 0 - var remaining = buf.length - offset - if (!length) { - length = remaining - } else { - length = Number(length) - if (length > remaining) { - length = remaining - } - } - - var strLen = string.length - - if (length > strLen / 2) { - length = strLen / 2 - } - for (var i = 0; i < length; ++i) { - var parsed = parseInt(string.substr(i * 2, 2), 16) - if (numberIsNaN(parsed)) return i - buf[offset + i] = parsed - } - return i -} - -function utf8Write (buf, string, offset, length) { - return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length) -} - -function asciiWrite (buf, string, offset, length) { - return blitBuffer(asciiToBytes(string), buf, offset, length) -} - -function base64Write (buf, string, offset, length) { - return blitBuffer(base64ToBytes(string), buf, offset, length) -} - -function ucs2Write (buf, string, offset, length) { - return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length) -} - -Buffer.prototype.write = function write (string, offset, length, encoding) { - // Buffer#write(string) - if (offset === undefined) { - encoding = 'utf8' - length = this.length - offset = 0 - // Buffer#write(string, encoding) - } else if (length === undefined && typeof offset === 'string') { - encoding = offset - length = this.length - offset = 0 - // Buffer#write(string, offset[, length][, encoding]) - } else if (isFinite(offset)) { - offset = offset >>> 0 - if (isFinite(length)) { - length = length >>> 0 - if (encoding === undefined) encoding = 'utf8' - } else { - encoding = length - length = undefined - } - } else { - throw new Error( - 'Buffer.write(string, encoding, offset[, length]) is no longer supported' - ) - } - - var remaining = this.length - offset - if (length === undefined || length > remaining) length = remaining - - if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) { - throw new RangeError('Attempt to write outside buffer bounds') - } - - if (!encoding) encoding = 'utf8' - - var loweredCase = false - for (;;) { - switch (encoding) { - case 'hex': - return hexWrite(this, string, offset, length) - - case 'utf8': - case 'utf-8': - return utf8Write(this, string, offset, length) - - case 'ascii': - case 'latin1': - case 'binary': - return asciiWrite(this, string, offset, length) - - case 'base64': - // Warning: maxLength not taken into account in base64Write - return base64Write(this, string, offset, length) - - case 'ucs2': - case 'ucs-2': - case 'utf16le': - case 'utf-16le': - 
return ucs2Write(this, string, offset, length) - - default: - if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) - encoding = ('' + encoding).toLowerCase() - loweredCase = true - } - } -} - -Buffer.prototype.toJSON = function toJSON () { - return { - type: 'Buffer', - data: Array.prototype.slice.call(this._arr || this, 0) - } -} - -function base64Slice (buf, start, end) { - if (start === 0 && end === buf.length) { - return base64.fromByteArray(buf) - } else { - return base64.fromByteArray(buf.slice(start, end)) - } -} - -function utf8Slice (buf, start, end) { - end = Math.min(buf.length, end) - var res = [] - - var i = start - while (i < end) { - var firstByte = buf[i] - var codePoint = null - var bytesPerSequence = (firstByte > 0xEF) - ? 4 - : (firstByte > 0xDF) - ? 3 - : (firstByte > 0xBF) - ? 2 - : 1 - - if (i + bytesPerSequence <= end) { - var secondByte, thirdByte, fourthByte, tempCodePoint - - switch (bytesPerSequence) { - case 1: - if (firstByte < 0x80) { - codePoint = firstByte - } - break - case 2: - secondByte = buf[i + 1] - if ((secondByte & 0xC0) === 0x80) { - tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F) - if (tempCodePoint > 0x7F) { - codePoint = tempCodePoint - } - } - break - case 3: - secondByte = buf[i + 1] - thirdByte = buf[i + 2] - if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) { - tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F) - if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) { - codePoint = tempCodePoint - } - } - break - case 4: - secondByte = buf[i + 1] - thirdByte = buf[i + 2] - fourthByte = buf[i + 3] - if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) { - tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F) - if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) { - codePoint = tempCodePoint - } - } - } - } - - if (codePoint === null) { - // we did not generate a valid codePoint so insert a - // replacement char (U+FFFD) and advance only 1 byte - codePoint = 0xFFFD - bytesPerSequence = 1 - } else if (codePoint > 0xFFFF) { - // encode to utf16 (surrogate pair dance) - codePoint -= 0x10000 - res.push(codePoint >>> 10 & 0x3FF | 0xD800) - codePoint = 0xDC00 | codePoint & 0x3FF - } - - res.push(codePoint) - i += bytesPerSequence - } - - return decodeCodePointsArray(res) -} - -// Based on http://stackoverflow.com/a/22747272/680742, the browser with -// the lowest limit is Chrome, with 0x10000 args. -// We go 1 magnitude less, for safety -var MAX_ARGUMENTS_LENGTH = 0x1000 - -function decodeCodePointsArray (codePoints) { - var len = codePoints.length - if (len <= MAX_ARGUMENTS_LENGTH) { - return String.fromCharCode.apply(String, codePoints) // avoid extra slice() - } - - // Decode in chunks to avoid "call stack size exceeded". 
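 // (`String.fromCharCode.apply` passes every code unit as a separate argument, and engines cap the number of arguments per call, hence the MAX_ARGUMENTS_LENGTH-sized slices.)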
- var res = '' - var i = 0 - while (i < len) { - res += String.fromCharCode.apply( - String, - codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH) - ) - } - return res -} - -function asciiSlice (buf, start, end) { - var ret = '' - end = Math.min(buf.length, end) - - for (var i = start; i < end; ++i) { - ret += String.fromCharCode(buf[i] & 0x7F) - } - return ret -} - -function latin1Slice (buf, start, end) { - var ret = '' - end = Math.min(buf.length, end) - - for (var i = start; i < end; ++i) { - ret += String.fromCharCode(buf[i]) - } - return ret -} - -function hexSlice (buf, start, end) { - var len = buf.length - - if (!start || start < 0) start = 0 - if (!end || end < 0 || end > len) end = len - - var out = '' - for (var i = start; i < end; ++i) { - out += hexSliceLookupTable[buf[i]] - } - return out -} - -function utf16leSlice (buf, start, end) { - var bytes = buf.slice(start, end) - var res = '' - // If bytes.length is odd, the last 8 bits must be ignored (same as node.js) - for (var i = 0; i < bytes.length - 1; i += 2) { - res += String.fromCharCode(bytes[i] + (bytes[i + 1] * 256)) - } - return res -} - -Buffer.prototype.slice = function slice (start, end) { - var len = this.length - start = ~~start - end = end === undefined ? len : ~~end - - if (start < 0) { - start += len - if (start < 0) start = 0 - } else if (start > len) { - start = len - } - - if (end < 0) { - end += len - if (end < 0) end = 0 - } else if (end > len) { - end = len - } - - if (end < start) end = start - - var newBuf = this.subarray(start, end) - // Return an augmented `Uint8Array` instance - Object.setPrototypeOf(newBuf, Buffer.prototype) - - return newBuf -} - -/* - * Need to make sure that buffer isn't trying to write out of bounds. - */ -function checkOffset (offset, ext, length) { - if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint') - if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length') -} - -Buffer.prototype.readUintLE = -Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) { - offset = offset >>> 0 - byteLength = byteLength >>> 0 - if (!noAssert) checkOffset(offset, byteLength, this.length) - - var val = this[offset] - var mul = 1 - var i = 0 - while (++i < byteLength && (mul *= 0x100)) { - val += this[offset + i] * mul - } - - return val -} - -Buffer.prototype.readUintBE = -Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) { - offset = offset >>> 0 - byteLength = byteLength >>> 0 - if (!noAssert) { - checkOffset(offset, byteLength, this.length) - } - - var val = this[offset + --byteLength] - var mul = 1 - while (byteLength > 0 && (mul *= 0x100)) { - val += this[offset + --byteLength] * mul - } - - return val -} - -Buffer.prototype.readUint8 = -Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 1, this.length) - return this[offset] -} - -Buffer.prototype.readUint16LE = -Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 2, this.length) - return this[offset] | (this[offset + 1] << 8) -} - -Buffer.prototype.readUint16BE = -Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 2, this.length) - return (this[offset] << 8) | this[offset + 1] -} - -Buffer.prototype.readUint32LE = -Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) 
{ - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 4, this.length) - - return ((this[offset]) | - (this[offset + 1] << 8) | - (this[offset + 2] << 16)) + - (this[offset + 3] * 0x1000000) -} - -Buffer.prototype.readUint32BE = -Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 4, this.length) - - return (this[offset] * 0x1000000) + - ((this[offset + 1] << 16) | - (this[offset + 2] << 8) | - this[offset + 3]) -} - -Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) { - offset = offset >>> 0 - byteLength = byteLength >>> 0 - if (!noAssert) checkOffset(offset, byteLength, this.length) - - var val = this[offset] - var mul = 1 - var i = 0 - while (++i < byteLength && (mul *= 0x100)) { - val += this[offset + i] * mul - } - mul *= 0x80 - - if (val >= mul) val -= Math.pow(2, 8 * byteLength) - - return val -} - -Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) { - offset = offset >>> 0 - byteLength = byteLength >>> 0 - if (!noAssert) checkOffset(offset, byteLength, this.length) - - var i = byteLength - var mul = 1 - var val = this[offset + --i] - while (i > 0 && (mul *= 0x100)) { - val += this[offset + --i] * mul - } - mul *= 0x80 - - if (val >= mul) val -= Math.pow(2, 8 * byteLength) - - return val -} - -Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 1, this.length) - if (!(this[offset] & 0x80)) return (this[offset]) - return ((0xff - this[offset] + 1) * -1) -} - -Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 2, this.length) - var val = this[offset] | (this[offset + 1] << 8) - return (val & 0x8000) ? val | 0xFFFF0000 : val -} - -Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 2, this.length) - var val = this[offset + 1] | (this[offset] << 8) - return (val & 0x8000) ? 
val | 0xFFFF0000 : val -} - -Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 4, this.length) - - return (this[offset]) | - (this[offset + 1] << 8) | - (this[offset + 2] << 16) | - (this[offset + 3] << 24) -} - -Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 4, this.length) - - return (this[offset] << 24) | - (this[offset + 1] << 16) | - (this[offset + 2] << 8) | - (this[offset + 3]) -} - -Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 4, this.length) - return ieee754.read(this, offset, true, 23, 4) -} - -Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 4, this.length) - return ieee754.read(this, offset, false, 23, 4) -} - -Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 8, this.length) - return ieee754.read(this, offset, true, 52, 8) -} - -Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 8, this.length) - return ieee754.read(this, offset, false, 52, 8) -} - -function checkInt (buf, value, offset, ext, max, min) { - if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance') - if (value > max || value < min) throw new RangeError('"value" argument is out of bounds') - if (offset + ext > buf.length) throw new RangeError('Index out of range') -} - -Buffer.prototype.writeUintLE = -Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) { - value = +value - offset = offset >>> 0 - byteLength = byteLength >>> 0 - if (!noAssert) { - var maxBytes = Math.pow(2, 8 * byteLength) - 1 - checkInt(this, value, offset, byteLength, maxBytes, 0) - } - - var mul = 1 - var i = 0 - this[offset] = value & 0xFF - while (++i < byteLength && (mul *= 0x100)) { - this[offset + i] = (value / mul) & 0xFF - } - - return offset + byteLength -} - -Buffer.prototype.writeUintBE = -Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) { - value = +value - offset = offset >>> 0 - byteLength = byteLength >>> 0 - if (!noAssert) { - var maxBytes = Math.pow(2, 8 * byteLength) - 1 - checkInt(this, value, offset, byteLength, maxBytes, 0) - } - - var i = byteLength - 1 - var mul = 1 - this[offset + i] = value & 0xFF - while (--i >= 0 && (mul *= 0x100)) { - this[offset + i] = (value / mul) & 0xFF - } - - return offset + byteLength -} - -Buffer.prototype.writeUint8 = -Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0) - this[offset] = (value & 0xff) - return offset + 1 -} - -Buffer.prototype.writeUint16LE = -Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) - this[offset] = (value & 0xff) - this[offset + 1] = (value >>> 8) - return offset + 2 -} - -Buffer.prototype.writeUint16BE = -Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) 
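 // Big-endian: the most significant byte is written at the lowest offset.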
- this[offset] = (value >>> 8) - this[offset + 1] = (value & 0xff) - return offset + 2 -} - -Buffer.prototype.writeUint32LE = -Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) - this[offset + 3] = (value >>> 24) - this[offset + 2] = (value >>> 16) - this[offset + 1] = (value >>> 8) - this[offset] = (value & 0xff) - return offset + 4 -} - -Buffer.prototype.writeUint32BE = -Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) - this[offset] = (value >>> 24) - this[offset + 1] = (value >>> 16) - this[offset + 2] = (value >>> 8) - this[offset + 3] = (value & 0xff) - return offset + 4 -} - -Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) { - var limit = Math.pow(2, (8 * byteLength) - 1) - - checkInt(this, value, offset, byteLength, limit - 1, -limit) - } - - var i = 0 - var mul = 1 - var sub = 0 - this[offset] = value & 0xFF - while (++i < byteLength && (mul *= 0x100)) { - if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) { - sub = 1 - } - this[offset + i] = ((value / mul) >> 0) - sub & 0xFF - } - - return offset + byteLength -} - -Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) { - var limit = Math.pow(2, (8 * byteLength) - 1) - - checkInt(this, value, offset, byteLength, limit - 1, -limit) - } - - var i = byteLength - 1 - var mul = 1 - var sub = 0 - this[offset + i] = value & 0xFF - while (--i >= 0 && (mul *= 0x100)) { - if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) { - sub = 1 - } - this[offset + i] = ((value / mul) >> 0) - sub & 0xFF - } - - return offset + byteLength -} - -Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80) - if (value < 0) value = 0xff + value + 1 - this[offset] = (value & 0xff) - return offset + 1 -} - -Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) - this[offset] = (value & 0xff) - this[offset + 1] = (value >>> 8) - return offset + 2 -} - -Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) - this[offset] = (value >>> 8) - this[offset + 1] = (value & 0xff) - return offset + 2 -} - -Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) - this[offset] = (value & 0xff) - this[offset + 1] = (value >>> 8) - this[offset + 2] = (value >>> 16) - this[offset + 3] = (value >>> 24) - return offset + 4 -} - -Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) - if (value < 0) value = 0xffffffff + value + 1 - this[offset] = (value >>> 24) - this[offset + 1] = (value >>> 16) - this[offset + 2] = (value >>> 8) 
- this[offset + 3] = (value & 0xff) - return offset + 4 -} - -function checkIEEE754 (buf, value, offset, ext, max, min) { - if (offset + ext > buf.length) throw new RangeError('Index out of range') - if (offset < 0) throw new RangeError('Index out of range') -} - -function writeFloat (buf, value, offset, littleEndian, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) { - checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38) - } - ieee754.write(buf, value, offset, littleEndian, 23, 4) - return offset + 4 -} - -Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) { - return writeFloat(this, value, offset, true, noAssert) -} - -Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) { - return writeFloat(this, value, offset, false, noAssert) -} - -function writeDouble (buf, value, offset, littleEndian, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) { - checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308) - } - ieee754.write(buf, value, offset, littleEndian, 52, 8) - return offset + 8 -} - -Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) { - return writeDouble(this, value, offset, true, noAssert) -} - -Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) { - return writeDouble(this, value, offset, false, noAssert) -} - -// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length) -Buffer.prototype.copy = function copy (target, targetStart, start, end) { - if (!Buffer.isBuffer(target)) throw new TypeError('argument should be a Buffer') - if (!start) start = 0 - if (!end && end !== 0) end = this.length - if (targetStart >= target.length) targetStart = target.length - if (!targetStart) targetStart = 0 - if (end > 0 && end < start) end = start - - // Copy 0 bytes; we're done - if (end === start) return 0 - if (target.length === 0 || this.length === 0) return 0 - - // Fatal error conditions - if (targetStart < 0) { - throw new RangeError('targetStart out of bounds') - } - if (start < 0 || start >= this.length) throw new RangeError('Index out of range') - if (end < 0) throw new RangeError('sourceEnd out of bounds') - - // Are we oob? 
- if (end > this.length) end = this.length - if (target.length - targetStart < end - start) { - end = target.length - targetStart + start - } - - var len = end - start - - if (this === target && typeof Uint8Array.prototype.copyWithin === 'function') { - // Use built-in when available, missing from IE11 - this.copyWithin(targetStart, start, end) - } else { - Uint8Array.prototype.set.call( - target, - this.subarray(start, end), - targetStart - ) - } - - return len -} - -// Usage: -// buffer.fill(number[, offset[, end]]) -// buffer.fill(buffer[, offset[, end]]) -// buffer.fill(string[, offset[, end]][, encoding]) -Buffer.prototype.fill = function fill (val, start, end, encoding) { - // Handle string cases: - if (typeof val === 'string') { - if (typeof start === 'string') { - encoding = start - start = 0 - end = this.length - } else if (typeof end === 'string') { - encoding = end - end = this.length - } - if (encoding !== undefined && typeof encoding !== 'string') { - throw new TypeError('encoding must be a string') - } - if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) { - throw new TypeError('Unknown encoding: ' + encoding) - } - if (val.length === 1) { - var code = val.charCodeAt(0) - if ((encoding === 'utf8' && code < 128) || - encoding === 'latin1') { - // Fast path: If `val` fits into a single byte, use that numeric value. - val = code - } - } - } else if (typeof val === 'number') { - val = val & 255 - } else if (typeof val === 'boolean') { - val = Number(val) - } - - // Invalid ranges are not set to a default, so can range check early. - if (start < 0 || this.length < start || this.length < end) { - throw new RangeError('Out of range index') - } - - if (end <= start) { - return this - } - - start = start >>> 0 - end = end === undefined ? this.length : end >>> 0 - - if (!val) val = 0 - - var i - if (typeof val === 'number') { - for (i = start; i < end; ++i) { - this[i] = val - } - } else { - var bytes = Buffer.isBuffer(val) - ? 
val - : Buffer.from(val, encoding) - var len = bytes.length - if (len === 0) { - throw new TypeError('The value "' + val + - '" is invalid for argument "value"') - } - for (i = 0; i < end - start; ++i) { - this[i + start] = bytes[i % len] - } - } - - return this -} - -// HELPER FUNCTIONS -// ================ - -var INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g - -function base64clean (str) { - // Node takes equal signs as end of the Base64 encoding - str = str.split('=')[0] - // Node strips out invalid characters like \n and \t from the string, base64-js does not - str = str.trim().replace(INVALID_BASE64_RE, '') - // Node converts strings with length < 2 to '' - if (str.length < 2) return '' - // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not - while (str.length % 4 !== 0) { - str = str + '=' - } - return str -} - -function utf8ToBytes (string, units) { - units = units || Infinity - var codePoint - var length = string.length - var leadSurrogate = null - var bytes = [] - - for (var i = 0; i < length; ++i) { - codePoint = string.charCodeAt(i) - - // is surrogate component - if (codePoint > 0xD7FF && codePoint < 0xE000) { - // last char was a lead - if (!leadSurrogate) { - // no lead yet - if (codePoint > 0xDBFF) { - // unexpected trail - if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) - continue - } else if (i + 1 === length) { - // unpaired lead - if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) - continue - } - - // valid lead - leadSurrogate = codePoint - - continue - } - - // 2 leads in a row - if (codePoint < 0xDC00) { - if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) - leadSurrogate = codePoint - continue - } - - // valid surrogate pair - codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000 - } else if (leadSurrogate) { - // valid bmp char, but last char was a lead - if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) - } - - leadSurrogate = null - - // encode utf8 - if (codePoint < 0x80) { - if ((units -= 1) < 0) break - bytes.push(codePoint) - } else if (codePoint < 0x800) { - if ((units -= 2) < 0) break - bytes.push( - codePoint >> 0x6 | 0xC0, - codePoint & 0x3F | 0x80 - ) - } else if (codePoint < 0x10000) { - if ((units -= 3) < 0) break - bytes.push( - codePoint >> 0xC | 0xE0, - codePoint >> 0x6 & 0x3F | 0x80, - codePoint & 0x3F | 0x80 - ) - } else if (codePoint < 0x110000) { - if ((units -= 4) < 0) break - bytes.push( - codePoint >> 0x12 | 0xF0, - codePoint >> 0xC & 0x3F | 0x80, - codePoint >> 0x6 & 0x3F | 0x80, - codePoint & 0x3F | 0x80 - ) - } else { - throw new Error('Invalid code point') - } - } - - return bytes -} - -function asciiToBytes (str) { - var byteArray = [] - for (var i = 0; i < str.length; ++i) { - // Node's code seems to be doing this and not & 0x7F.. - byteArray.push(str.charCodeAt(i) & 0xFF) - } - return byteArray -} - -function utf16leToBytes (str, units) { - var c, hi, lo - var byteArray = [] - for (var i = 0; i < str.length; ++i) { - if ((units -= 2) < 0) break - - c = str.charCodeAt(i) - hi = c >> 8 - lo = c % 256 - byteArray.push(lo) - byteArray.push(hi) - } - - return byteArray -} - -function base64ToBytes (str) { - return base64.toByteArray(base64clean(str)) -} - -function blitBuffer (src, dst, offset, length) { - for (var i = 0; i < length; ++i) { - if ((i + offset >= dst.length) || (i >= src.length)) break - dst[i + offset] = src[i] - } - return i -} - -// ArrayBuffer or Uint8Array objects from other contexts (i.e. 
iframes) do not pass -// the `instanceof` check but they should be treated as of that type. -// See: https://github.com/feross/buffer/issues/166 -function isInstance (obj, type) { - return obj instanceof type || - (obj != null && obj.constructor != null && obj.constructor.name != null && - obj.constructor.name === type.name) -} -function numberIsNaN (obj) { - // For IE11 support - return obj !== obj // eslint-disable-line no-self-compare -} - -// Create lookup table for `toString('hex')` -// See: https://github.com/feross/buffer/issues/219 -var hexSliceLookupTable = (function () { - var alphabet = '0123456789abcdef' - var table = new Array(256) - for (var i = 0; i < 16; ++i) { - var i16 = i * 16 - for (var j = 0; j < 16; ++j) { - table[i16 + j] = alphabet[i] + alphabet[j] - } - } - return table -})() diff --git a/node_modules/buffer/package.json b/node_modules/buffer/package.json deleted file mode 100644 index 3b1b498..0000000 --- a/node_modules/buffer/package.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "name": "buffer", - "description": "Node.js Buffer API, for the browser", - "version": "5.7.1", - "author": { - "name": "Feross Aboukhadijeh", - "email": "feross@feross.org", - "url": "https://feross.org" - }, - "bugs": { - "url": "https://github.com/feross/buffer/issues" - }, - "contributors": [ - "Romain Beauxis ", - "James Halliday " - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - }, - "devDependencies": { - "airtap": "^3.0.0", - "benchmark": "^2.1.4", - "browserify": "^17.0.0", - "concat-stream": "^2.0.0", - "hyperquest": "^2.1.3", - "is-buffer": "^2.0.4", - "is-nan": "^1.3.0", - "split": "^1.0.1", - "standard": "*", - "tape": "^5.0.1", - "through2": "^4.0.2", - "uglify-js": "^3.11.3" - }, - "homepage": "https://github.com/feross/buffer", - "jspm": { - "map": { - "./index.js": { - "node": "@node/buffer" - } - } - }, - "keywords": [ - "arraybuffer", - "browser", - "browserify", - "buffer", - "compatible", - "dataview", - "uint8array" - ], - "license": "MIT", - "main": "index.js", - "types": "index.d.ts", - "repository": { - "type": "git", - "url": "git://github.com/feross/buffer.git" - }, - "scripts": { - "perf": "browserify --debug perf/bracket-notation.js > perf/bundle.js && open perf/index.html", - "perf-node": "node perf/bracket-notation.js && node perf/concat.js && node perf/copy-big.js && node perf/copy.js && node perf/new-big.js && node perf/new.js && node perf/readDoubleBE.js && node perf/readFloatBE.js && node perf/readUInt32LE.js && node perf/slice.js && node perf/writeFloatBE.js", - "size": "browserify -r ./ | uglifyjs -c -m | gzip | wc -c", - "test": "standard && node ./bin/test.js", - "test-browser-es5": "airtap -- test/*.js", - "test-browser-es5-local": "airtap --local -- test/*.js", - "test-browser-es6": "airtap -- test/*.js test/node/*.js", - "test-browser-es6-local": "airtap --local -- test/*.js test/node/*.js", - "test-node": "tape test/*.js test/node/*.js", - "update-authors": "./bin/update-authors.sh" - }, - "standard": { - "ignore": [ - "test/node/**/*.js", - "test/common.js", - "test/_polyfill.js", - "perf/**/*.js" - ], - "globals": [ - "SharedArrayBuffer" - ] - }, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] -} diff --git a/node_modules/call-bind-apply-helpers/.eslintrc b/node_modules/call-bind-apply-helpers/.eslintrc deleted file mode 100644 index 
201e859..0000000 --- a/node_modules/call-bind-apply-helpers/.eslintrc +++ /dev/null @@ -1,17 +0,0 @@ -{ - "root": true, - - "extends": "@ljharb", - - "rules": { - "func-name-matching": 0, - "id-length": 0, - "new-cap": [2, { - "capIsNewExceptions": [ - "GetIntrinsic", - ], - }], - "no-extra-parens": 0, - "no-magic-numbers": 0, - }, -} diff --git a/node_modules/call-bind-apply-helpers/.github/FUNDING.yml b/node_modules/call-bind-apply-helpers/.github/FUNDING.yml deleted file mode 100644 index 0011e9d..0000000 --- a/node_modules/call-bind-apply-helpers/.github/FUNDING.yml +++ /dev/null @@ -1,12 +0,0 @@ -# These are supported funding model platforms - -github: [ljharb] -patreon: # Replace with a single Patreon username -open_collective: # Replace with a single Open Collective username -ko_fi: # Replace with a single Ko-fi username -tidelift: npm/call-bind-apply-helpers -community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry -liberapay: # Replace with a single Liberapay username -issuehunt: # Replace with a single IssueHunt username -otechie: # Replace with a single Otechie username -custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/node_modules/call-bind-apply-helpers/.nycrc b/node_modules/call-bind-apply-helpers/.nycrc deleted file mode 100644 index bdd626c..0000000 --- a/node_modules/call-bind-apply-helpers/.nycrc +++ /dev/null @@ -1,9 +0,0 @@ -{ - "all": true, - "check-coverage": false, - "reporter": ["text-summary", "text", "html", "json"], - "exclude": [ - "coverage", - "test" - ] -} diff --git a/node_modules/call-bind-apply-helpers/CHANGELOG.md b/node_modules/call-bind-apply-helpers/CHANGELOG.md deleted file mode 100644 index 2484942..0000000 --- a/node_modules/call-bind-apply-helpers/CHANGELOG.md +++ /dev/null @@ -1,30 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
- -## [v1.0.2](https://github.com/ljharb/call-bind-apply-helpers/compare/v1.0.1...v1.0.2) - 2025-02-12 - -### Commits - -- [types] improve inferred types [`e6f9586`](https://github.com/ljharb/call-bind-apply-helpers/commit/e6f95860a3c72879cb861a858cdfb8138fbedec1) -- [Dev Deps] update `@arethetypeswrong/cli`, `@ljharb/tsconfig`, `@types/tape`, `es-value-fixtures`, `for-each`, `has-strict-mode`, `object-inspect` [`e43d540`](https://github.com/ljharb/call-bind-apply-helpers/commit/e43d5409f97543bfbb11f345d47d8ce4e066d8c1) - -## [v1.0.1](https://github.com/ljharb/call-bind-apply-helpers/compare/v1.0.0...v1.0.1) - 2024-12-08 - -### Commits - -- [types] `reflectApply`: fix types [`4efc396`](https://github.com/ljharb/call-bind-apply-helpers/commit/4efc3965351a4f02cc55e836fa391d3d11ef2ef8) -- [Fix] `reflectApply`: oops, Reflect is not a function [`83cc739`](https://github.com/ljharb/call-bind-apply-helpers/commit/83cc7395de6b79b7730bdf092f1436f0b1263c75) -- [Dev Deps] update `@arethetypeswrong/cli` [`80bd5d3`](https://github.com/ljharb/call-bind-apply-helpers/commit/80bd5d3ae58b4f6b6995ce439dd5a1bcb178a940) - -## v1.0.0 - 2024-12-05 - -### Commits - -- Initial implementation, tests, readme [`7879629`](https://github.com/ljharb/call-bind-apply-helpers/commit/78796290f9b7430c9934d6f33d94ae9bc89fce04) -- Initial commit [`3f1dc16`](https://github.com/ljharb/call-bind-apply-helpers/commit/3f1dc164afc43285631b114a5f9dd9137b2b952f) -- npm init [`081df04`](https://github.com/ljharb/call-bind-apply-helpers/commit/081df048c312fcee400922026f6e97281200a603) -- Only apps should have lockfiles [`5b9ca0f`](https://github.com/ljharb/call-bind-apply-helpers/commit/5b9ca0fe8101ebfaf309c549caac4e0a017ed930) diff --git a/node_modules/call-bind-apply-helpers/LICENSE b/node_modules/call-bind-apply-helpers/LICENSE deleted file mode 100644 index f82f389..0000000 --- a/node_modules/call-bind-apply-helpers/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2024 Jordan Harband - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
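The removed `call-bind-apply-helpers` files around this point implement the "uncurry this" pattern: `callBindBasic([fn])` returns a function that takes the intended receiver as its first argument, so prototype methods keep working even if `Function.prototype.call`/`bind` are later tampered with. A minimal sketch of that pattern in plain JavaScript (the `uncurryThis` name is illustrative, not part of the removed package):

```js
'use strict';

// Illustrative sketch of the "uncurry this" pattern behind callBindBasic:
// bind.bind(call) yields a function that maps fn -> call.bind(fn), i.e. a
// version of fn whose first positional argument becomes the receiver.
var uncurryThis = Function.prototype.bind.bind(Function.prototype.call);

var slice = uncurryThis(Array.prototype.slice);
var hasOwn = uncurryThis(Object.prototype.hasOwnProperty);

console.log(slice([1, 2, 3, 4], 1, -1)); // [ 2, 3 ]
console.log(hasOwn({ a: 1 }, 'a'));      // true
```

Helpers produced this way keep a direct reference to the original method, which is why the package's own README example continues to work after `Function.prototype.call` and `Function.prototype.bind` are deleted.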
diff --git a/node_modules/call-bind-apply-helpers/README.md b/node_modules/call-bind-apply-helpers/README.md deleted file mode 100644 index 8fc0dae..0000000 --- a/node_modules/call-bind-apply-helpers/README.md +++ /dev/null @@ -1,62 +0,0 @@ -# call-bind-apply-helpers [![Version Badge][npm-version-svg]][package-url] - -[![github actions][actions-image]][actions-url] -[![coverage][codecov-image]][codecov-url] -[![dependency status][deps-svg]][deps-url] -[![dev dependency status][dev-deps-svg]][dev-deps-url] -[![License][license-image]][license-url] -[![Downloads][downloads-image]][downloads-url] - -[![npm badge][npm-badge-png]][package-url] - -Helper functions around Function call/apply/bind, for use in `call-bind`. - -The only packages that should likely ever use this package directly are `call-bind` and `get-intrinsic`. -Please use `call-bind` unless you have a very good reason not to. - -## Getting started - -```sh -npm install --save call-bind-apply-helpers -``` - -## Usage/Examples - -```js -const assert = require('assert'); -const callBindBasic = require('call-bind-apply-helpers'); - -function f(a, b) { - assert.equal(this, 1); - assert.equal(a, 2); - assert.equal(b, 3); - assert.equal(arguments.length, 2); -} - -const fBound = callBindBasic([f, 1]); - -delete Function.prototype.call; -delete Function.prototype.bind; - -fBound(2, 3); -``` - -## Tests - -Clone the repo, `npm install`, and run `npm test` - -[package-url]: https://npmjs.org/package/call-bind-apply-helpers -[npm-version-svg]: https://versionbadg.es/ljharb/call-bind-apply-helpers.svg -[deps-svg]: https://david-dm.org/ljharb/call-bind-apply-helpers.svg -[deps-url]: https://david-dm.org/ljharb/call-bind-apply-helpers -[dev-deps-svg]: https://david-dm.org/ljharb/call-bind-apply-helpers/dev-status.svg -[dev-deps-url]: https://david-dm.org/ljharb/call-bind-apply-helpers#info=devDependencies -[npm-badge-png]: https://nodei.co/npm/call-bind-apply-helpers.png?downloads=true&stars=true -[license-image]: https://img.shields.io/npm/l/call-bind-apply-helpers.svg -[license-url]: LICENSE -[downloads-image]: https://img.shields.io/npm/dm/call-bind-apply-helpers.svg -[downloads-url]: https://npm-stat.com/charts.html?package=call-bind-apply-helpers -[codecov-image]: https://codecov.io/gh/ljharb/call-bind-apply-helpers/branch/main/graphs/badge.svg -[codecov-url]: https://app.codecov.io/gh/ljharb/call-bind-apply-helpers/ -[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/ljharb/call-bind-apply-helpers -[actions-url]: https://github.com/ljharb/call-bind-apply-helpers/actions diff --git a/node_modules/call-bind-apply-helpers/actualApply.d.ts b/node_modules/call-bind-apply-helpers/actualApply.d.ts deleted file mode 100644 index b87286a..0000000 --- a/node_modules/call-bind-apply-helpers/actualApply.d.ts +++ /dev/null @@ -1 +0,0 @@ -export = Reflect.apply; \ No newline at end of file diff --git a/node_modules/call-bind-apply-helpers/actualApply.js b/node_modules/call-bind-apply-helpers/actualApply.js deleted file mode 100644 index ffa5135..0000000 --- a/node_modules/call-bind-apply-helpers/actualApply.js +++ /dev/null @@ -1,10 +0,0 @@ -'use strict'; - -var bind = require('function-bind'); - -var $apply = require('./functionApply'); -var $call = require('./functionCall'); -var $reflectApply = require('./reflectApply'); - -/** @type {import('./actualApply')} */ -module.exports = $reflectApply || bind.call($call, $apply); diff --git a/node_modules/call-bind-apply-helpers/applyBind.d.ts 
b/node_modules/call-bind-apply-helpers/applyBind.d.ts deleted file mode 100644 index d176c1a..0000000 --- a/node_modules/call-bind-apply-helpers/applyBind.d.ts +++ /dev/null @@ -1,19 +0,0 @@ -import actualApply from './actualApply'; - -type TupleSplitHead = T['length'] extends N - ? T - : T extends [...infer R, any] - ? TupleSplitHead - : never - -type TupleSplitTail = O['length'] extends N - ? T - : T extends [infer F, ...infer R] - ? TupleSplitTail<[...R], N, [...O, F]> - : never - -type TupleSplit = [TupleSplitHead, TupleSplitTail] - -declare function applyBind(...args: TupleSplit, 2>[1]): ReturnType; - -export = applyBind; \ No newline at end of file diff --git a/node_modules/call-bind-apply-helpers/applyBind.js b/node_modules/call-bind-apply-helpers/applyBind.js deleted file mode 100644 index d2b7723..0000000 --- a/node_modules/call-bind-apply-helpers/applyBind.js +++ /dev/null @@ -1,10 +0,0 @@ -'use strict'; - -var bind = require('function-bind'); -var $apply = require('./functionApply'); -var actualApply = require('./actualApply'); - -/** @type {import('./applyBind')} */ -module.exports = function applyBind() { - return actualApply(bind, $apply, arguments); -}; diff --git a/node_modules/call-bind-apply-helpers/functionApply.d.ts b/node_modules/call-bind-apply-helpers/functionApply.d.ts deleted file mode 100644 index 1f6e11b..0000000 --- a/node_modules/call-bind-apply-helpers/functionApply.d.ts +++ /dev/null @@ -1 +0,0 @@ -export = Function.prototype.apply; \ No newline at end of file diff --git a/node_modules/call-bind-apply-helpers/functionApply.js b/node_modules/call-bind-apply-helpers/functionApply.js deleted file mode 100644 index c71df9c..0000000 --- a/node_modules/call-bind-apply-helpers/functionApply.js +++ /dev/null @@ -1,4 +0,0 @@ -'use strict'; - -/** @type {import('./functionApply')} */ -module.exports = Function.prototype.apply; diff --git a/node_modules/call-bind-apply-helpers/functionCall.d.ts b/node_modules/call-bind-apply-helpers/functionCall.d.ts deleted file mode 100644 index 15e93df..0000000 --- a/node_modules/call-bind-apply-helpers/functionCall.d.ts +++ /dev/null @@ -1 +0,0 @@ -export = Function.prototype.call; \ No newline at end of file diff --git a/node_modules/call-bind-apply-helpers/functionCall.js b/node_modules/call-bind-apply-helpers/functionCall.js deleted file mode 100644 index 7a8d873..0000000 --- a/node_modules/call-bind-apply-helpers/functionCall.js +++ /dev/null @@ -1,4 +0,0 @@ -'use strict'; - -/** @type {import('./functionCall')} */ -module.exports = Function.prototype.call; diff --git a/node_modules/call-bind-apply-helpers/index.d.ts b/node_modules/call-bind-apply-helpers/index.d.ts deleted file mode 100644 index 541516b..0000000 --- a/node_modules/call-bind-apply-helpers/index.d.ts +++ /dev/null @@ -1,64 +0,0 @@ -type RemoveFromTuple< - Tuple extends readonly unknown[], - RemoveCount extends number, - Index extends 1[] = [] -> = Index["length"] extends RemoveCount - ? Tuple - : Tuple extends [infer First, ...infer Rest] - ? RemoveFromTuple - : Tuple; - -type ConcatTuples< - Prefix extends readonly unknown[], - Suffix extends readonly unknown[] -> = [...Prefix, ...Suffix]; - -type ExtractFunctionParams = T extends (this: infer TThis, ...args: infer P extends readonly unknown[]) => infer R - ? 
{ thisArg: TThis; params: P; returnType: R } - : never; - -type BindFunction< - T extends (this: any, ...args: any[]) => any, - TThis, - TBoundArgs extends readonly unknown[], - ReceiverBound extends boolean -> = ExtractFunctionParams extends { - thisArg: infer OrigThis; - params: infer P extends readonly unknown[]; - returnType: infer R; -} - ? ReceiverBound extends true - ? (...args: RemoveFromTuple>) => R extends [OrigThis, ...infer Rest] - ? [TThis, ...Rest] // Replace `this` with `thisArg` - : R - : >>( - thisArg: U, - ...args: RemainingArgs - ) => R extends [OrigThis, ...infer Rest] - ? [U, ...ConcatTuples] // Preserve bound args in return type - : R - : never; - -declare function callBind< - const T extends (this: any, ...args: any[]) => any, - Extracted extends ExtractFunctionParams, - const TBoundArgs extends Partial & readonly unknown[], - const TThis extends Extracted["thisArg"] ->( - args: [fn: T, thisArg: TThis, ...boundArgs: TBoundArgs] -): BindFunction; - -declare function callBind< - const T extends (this: any, ...args: any[]) => any, - Extracted extends ExtractFunctionParams, - const TBoundArgs extends Partial & readonly unknown[] ->( - args: [fn: T, ...boundArgs: TBoundArgs] -): BindFunction; - -declare function callBind( - args: [fn: Exclude, ...rest: TArgs] -): never; - -// export as namespace callBind; -export = callBind; diff --git a/node_modules/call-bind-apply-helpers/index.js b/node_modules/call-bind-apply-helpers/index.js deleted file mode 100644 index 2f6dab4..0000000 --- a/node_modules/call-bind-apply-helpers/index.js +++ /dev/null @@ -1,15 +0,0 @@ -'use strict'; - -var bind = require('function-bind'); -var $TypeError = require('es-errors/type'); - -var $call = require('./functionCall'); -var $actualApply = require('./actualApply'); - -/** @type {(args: [Function, thisArg?: unknown, ...args: unknown[]]) => Function} TODO FIXME, find a way to use import('.') */ -module.exports = function callBindBasic(args) { - if (args.length < 1 || typeof args[0] !== 'function') { - throw new $TypeError('a function is required'); - } - return $actualApply(bind, $call, args); -}; diff --git a/node_modules/call-bind-apply-helpers/package.json b/node_modules/call-bind-apply-helpers/package.json deleted file mode 100644 index 923b8be..0000000 --- a/node_modules/call-bind-apply-helpers/package.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "name": "call-bind-apply-helpers", - "version": "1.0.2", - "description": "Helper functions around Function call/apply/bind, for use in `call-bind`", - "main": "index.js", - "exports": { - ".": "./index.js", - "./actualApply": "./actualApply.js", - "./applyBind": "./applyBind.js", - "./functionApply": "./functionApply.js", - "./functionCall": "./functionCall.js", - "./reflectApply": "./reflectApply.js", - "./package.json": "./package.json" - }, - "scripts": { - "prepack": "npmignore --auto --commentLines=auto", - "prepublish": "not-in-publish || npm run prepublishOnly", - "prepublishOnly": "safe-publish-latest", - "prelint": "evalmd README.md", - "lint": "eslint --ext=.js,.mjs .", - "postlint": "tsc -p . 
&& attw -P", - "pretest": "npm run lint", - "tests-only": "nyc tape 'test/**/*.js'", - "test": "npm run tests-only", - "posttest": "npx npm@'>=10.2' audit --production", - "version": "auto-changelog && git add CHANGELOG.md", - "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/ljharb/call-bind-apply-helpers.git" - }, - "author": "Jordan Harband ", - "license": "MIT", - "bugs": { - "url": "https://github.com/ljharb/call-bind-apply-helpers/issues" - }, - "homepage": "https://github.com/ljharb/call-bind-apply-helpers#readme", - "dependencies": { - "es-errors": "^1.3.0", - "function-bind": "^1.1.2" - }, - "devDependencies": { - "@arethetypeswrong/cli": "^0.17.3", - "@ljharb/eslint-config": "^21.1.1", - "@ljharb/tsconfig": "^0.2.3", - "@types/for-each": "^0.3.3", - "@types/function-bind": "^1.1.10", - "@types/object-inspect": "^1.13.0", - "@types/tape": "^5.8.1", - "auto-changelog": "^2.5.0", - "encoding": "^0.1.13", - "es-value-fixtures": "^1.7.1", - "eslint": "=8.8.0", - "evalmd": "^0.0.19", - "for-each": "^0.3.5", - "has-strict-mode": "^1.1.0", - "in-publish": "^2.0.1", - "npmignore": "^0.3.1", - "nyc": "^10.3.2", - "object-inspect": "^1.13.4", - "safe-publish-latest": "^2.0.0", - "tape": "^5.9.0", - "typescript": "next" - }, - "testling": { - "files": "test/index.js" - }, - "auto-changelog": { - "output": "CHANGELOG.md", - "template": "keepachangelog", - "unreleased": false, - "commitLimit": false, - "backfillLimit": false, - "hideCredit": true - }, - "publishConfig": { - "ignore": [ - ".github/workflows" - ] - }, - "engines": { - "node": ">= 0.4" - } -} diff --git a/node_modules/call-bind-apply-helpers/reflectApply.d.ts b/node_modules/call-bind-apply-helpers/reflectApply.d.ts deleted file mode 100644 index 6b2ae76..0000000 --- a/node_modules/call-bind-apply-helpers/reflectApply.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -declare const reflectApply: false | typeof Reflect.apply; - -export = reflectApply; diff --git a/node_modules/call-bind-apply-helpers/reflectApply.js b/node_modules/call-bind-apply-helpers/reflectApply.js deleted file mode 100644 index 3d03caa..0000000 --- a/node_modules/call-bind-apply-helpers/reflectApply.js +++ /dev/null @@ -1,4 +0,0 @@ -'use strict'; - -/** @type {import('./reflectApply')} */ -module.exports = typeof Reflect !== 'undefined' && Reflect && Reflect.apply; diff --git a/node_modules/call-bind-apply-helpers/test/index.js b/node_modules/call-bind-apply-helpers/test/index.js deleted file mode 100644 index 1cdc89e..0000000 --- a/node_modules/call-bind-apply-helpers/test/index.js +++ /dev/null @@ -1,63 +0,0 @@ -'use strict'; - -var callBind = require('../'); -var hasStrictMode = require('has-strict-mode')(); -var forEach = require('for-each'); -var inspect = require('object-inspect'); -var v = require('es-value-fixtures'); - -var test = require('tape'); - -test('callBindBasic', function (t) { - forEach(v.nonFunctions, function (nonFunction) { - t['throws']( - // @ts-expect-error - function () { callBind([nonFunction]); }, - TypeError, - inspect(nonFunction) + ' is not a function' - ); - }); - - var sentinel = { sentinel: true }; - /** @type {(this: T, a: A, b: B) => [T | undefined, A, B]} */ - var func = function (a, b) { - // eslint-disable-next-line no-invalid-this - return [!hasStrictMode && this === global ? 
undefined : this, a, b]; - }; - t.equal(func.length, 2, 'original function length is 2'); - - /** type {(thisArg: unknown, a: number, b: number) => [unknown, number, number]} */ - var bound = callBind([func]); - /** type {((a: number, b: number) => [typeof sentinel, typeof a, typeof b])} */ - var boundR = callBind([func, sentinel]); - /** type {((b: number) => [typeof sentinel, number, typeof b])} */ - var boundArg = callBind([func, sentinel, /** @type {const} */ (1)]); - - // @ts-expect-error - t.deepEqual(bound(), [undefined, undefined, undefined], 'bound func with no args'); - - // @ts-expect-error - t.deepEqual(func(), [undefined, undefined, undefined], 'unbound func with too few args'); - // @ts-expect-error - t.deepEqual(bound(1, 2), [hasStrictMode ? 1 : Object(1), 2, undefined], 'bound func too few args'); - // @ts-expect-error - t.deepEqual(boundR(), [sentinel, undefined, undefined], 'bound func with receiver, with too few args'); - // @ts-expect-error - t.deepEqual(boundArg(), [sentinel, 1, undefined], 'bound func with receiver and arg, with too few args'); - - t.deepEqual(func(1, 2), [undefined, 1, 2], 'unbound func with right args'); - t.deepEqual(bound(1, 2, 3), [hasStrictMode ? 1 : Object(1), 2, 3], 'bound func with right args'); - t.deepEqual(boundR(1, 2), [sentinel, 1, 2], 'bound func with receiver, with right args'); - t.deepEqual(boundArg(2), [sentinel, 1, 2], 'bound func with receiver and arg, with right arg'); - - // @ts-expect-error - t.deepEqual(func(1, 2, 3), [undefined, 1, 2], 'unbound func with too many args'); - // @ts-expect-error - t.deepEqual(bound(1, 2, 3, 4), [hasStrictMode ? 1 : Object(1), 2, 3], 'bound func with too many args'); - // @ts-expect-error - t.deepEqual(boundR(1, 2, 3), [sentinel, 1, 2], 'bound func with receiver, with too many args'); - // @ts-expect-error - t.deepEqual(boundArg(2, 3), [sentinel, 1, 2], 'bound func with receiver and arg, with too many args'); - - t.end(); -}); diff --git a/node_modules/call-bind-apply-helpers/tsconfig.json b/node_modules/call-bind-apply-helpers/tsconfig.json deleted file mode 100644 index aef9993..0000000 --- a/node_modules/call-bind-apply-helpers/tsconfig.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "extends": "@ljharb/tsconfig", - "compilerOptions": { - "target": "es2021", - }, - "exclude": [ - "coverage", - ], -} \ No newline at end of file diff --git a/node_modules/call-bound/.eslintrc b/node_modules/call-bound/.eslintrc deleted file mode 100644 index 2612ed8..0000000 --- a/node_modules/call-bound/.eslintrc +++ /dev/null @@ -1,13 +0,0 @@ -{ - "root": true, - - "extends": "@ljharb", - - "rules": { - "new-cap": [2, { - "capIsNewExceptions": [ - "GetIntrinsic", - ], - }], - }, -} diff --git a/node_modules/call-bound/.github/FUNDING.yml b/node_modules/call-bound/.github/FUNDING.yml deleted file mode 100644 index 2a2a135..0000000 --- a/node_modules/call-bound/.github/FUNDING.yml +++ /dev/null @@ -1,12 +0,0 @@ -# These are supported funding model platforms - -github: [ljharb] -patreon: # Replace with a single Patreon username -open_collective: # Replace with a single Open Collective username -ko_fi: # Replace with a single Ko-fi username -tidelift: npm/call-bound -community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry -liberapay: # Replace with a single Liberapay username -issuehunt: # Replace with a single IssueHunt username -otechie: # Replace with a single Otechie username -custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git 
a/node_modules/call-bound/.nycrc b/node_modules/call-bound/.nycrc deleted file mode 100644 index bdd626c..0000000 --- a/node_modules/call-bound/.nycrc +++ /dev/null @@ -1,9 +0,0 @@ -{ - "all": true, - "check-coverage": false, - "reporter": ["text-summary", "text", "html", "json"], - "exclude": [ - "coverage", - "test" - ] -} diff --git a/node_modules/call-bound/CHANGELOG.md b/node_modules/call-bound/CHANGELOG.md deleted file mode 100644 index 8bde4e9..0000000 --- a/node_modules/call-bound/CHANGELOG.md +++ /dev/null @@ -1,42 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - -## [v1.0.4](https://github.com/ljharb/call-bound/compare/v1.0.3...v1.0.4) - 2025-03-03 - -### Commits - -- [types] improve types [`e648922`](https://github.com/ljharb/call-bound/commit/e6489222a9e54f350fbf952ceabe51fd8b6027ff) -- [Dev Deps] update `@arethetypeswrong/cli`, `@ljharb/tsconfig`, `@types/tape`, `es-value-fixtures`, `for-each`, `has-strict-mode`, `object-inspect` [`a42a5eb`](https://github.com/ljharb/call-bound/commit/a42a5ebe6c1b54fcdc7997c7dc64fdca9e936719) -- [Deps] update `call-bind-apply-helpers`, `get-intrinsic` [`f529eac`](https://github.com/ljharb/call-bound/commit/f529eac132404c17156bbc23ab2297a25d0f20b8) - -## [v1.0.3](https://github.com/ljharb/call-bound/compare/v1.0.2...v1.0.3) - 2024-12-15 - -### Commits - -- [Refactor] use `call-bind-apply-helpers` instead of `call-bind` [`5e0b134`](https://github.com/ljharb/call-bound/commit/5e0b13496df14fb7d05dae9412f088da8d3f75be) -- [Deps] update `get-intrinsic` [`41fc967`](https://github.com/ljharb/call-bound/commit/41fc96732a22c7b7e8f381f93ccc54bb6293be2e) -- [readme] fix example [`79a0137`](https://github.com/ljharb/call-bound/commit/79a0137723f7c6d09c9c05452bbf8d5efb5d6e49) -- [meta] add `sideEffects` flag [`08b07be`](https://github.com/ljharb/call-bound/commit/08b07be7f1c03f67dc6f3cdaf0906259771859f7) - -## [v1.0.2](https://github.com/ljharb/call-bound/compare/v1.0.1...v1.0.2) - 2024-12-10 - -### Commits - -- [Dev Deps] update `@arethetypeswrong/cli`, `@ljharb/tsconfig`, `gopd` [`e6a5ffe`](https://github.com/ljharb/call-bound/commit/e6a5ffe849368fe4f74dfd6cdeca1b9baa39e8d5) -- [Deps] update `call-bind`, `get-intrinsic` [`2aeb5b5`](https://github.com/ljharb/call-bound/commit/2aeb5b521dc2b2683d1345c753ea1161de2d1c14) -- [types] improve return type [`1a0c9fe`](https://github.com/ljharb/call-bound/commit/1a0c9fe3114471e7ca1f57d104e2efe713bb4871) - -## v1.0.1 - 2024-12-05 - -### Commits - -- Initial implementation, tests, readme, types [`6d94121`](https://github.com/ljharb/call-bound/commit/6d94121a9243602e506334069f7a03189fe3363d) -- Initial commit [`0eae867`](https://github.com/ljharb/call-bound/commit/0eae867334ea025c33e6e91cdecfc9df96680cf9) -- npm init [`71b2479`](https://github.com/ljharb/call-bound/commit/71b2479c6723e0b7d91a6b663613067e98b7b275) -- Only apps should have lockfiles [`c3754a9`](https://github.com/ljharb/call-bound/commit/c3754a949b7f9132b47e2d18c1729889736741eb) -- [actions] skip `npm ls` in node < 10 [`74275a5`](https://github.com/ljharb/call-bound/commit/74275a5186b8caf6309b6b97472bdcb0df4683a8) -- [Dev Deps] add missing peer dep [`1354de8`](https://github.com/ljharb/call-bound/commit/1354de8679413e4ae9c523d85f76fa7a5e032d97) diff --git a/node_modules/call-bound/LICENSE b/node_modules/call-bound/LICENSE deleted file mode 
100644 index f82f389..0000000 --- a/node_modules/call-bound/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2024 Jordan Harband - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/call-bound/README.md b/node_modules/call-bound/README.md deleted file mode 100644 index a44e43e..0000000 --- a/node_modules/call-bound/README.md +++ /dev/null @@ -1,53 +0,0 @@ -# call-bound [![Version Badge][npm-version-svg]][package-url] - -[![github actions][actions-image]][actions-url] -[![coverage][codecov-image]][codecov-url] -[![dependency status][deps-svg]][deps-url] -[![dev dependency status][dev-deps-svg]][dev-deps-url] -[![License][license-image]][license-url] -[![Downloads][downloads-image]][downloads-url] - -[![npm badge][npm-badge-png]][package-url] - -Robust call-bound JavaScript intrinsics, using `call-bind` and `get-intrinsic`. 
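As the removed `index.js` and tests further down in this diff show, `callBound` only uncurries intrinsic names containing `.prototype.`; static intrinsics are returned unchanged. A small illustrative sketch (not part of the removed README):

```js
'use strict';

// Illustrative only, mirroring the behaviour of the removed call-bound/index.js:
// prototype methods come back receiver-first, static intrinsics as-is.
var callBound = require('call-bound');

var toString = callBound('%Object.prototype.toString%'); // bound: receiver is the first argument
var dateParse = callBound('Date.parse');                 // static: the intrinsic itself

console.log(toString([]));             // '[object Array]'
console.log(dateParse === Date.parse); // true
```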
- -## Getting started - -```sh -npm install --save call-bound -``` - -## Usage/Examples - -```js -const assert = require('assert'); -const callBound = require('call-bound'); - -const slice = callBound('Array.prototype.slice'); - -delete Function.prototype.call; -delete Function.prototype.bind; -delete Array.prototype.slice; - -assert.deepEqual(slice([1, 2, 3, 4], 1, -1), [2, 3]); -``` - -## Tests - -Clone the repo, `npm install`, and run `npm test` - -[package-url]: https://npmjs.org/package/call-bound -[npm-version-svg]: https://versionbadg.es/ljharb/call-bound.svg -[deps-svg]: https://david-dm.org/ljharb/call-bound.svg -[deps-url]: https://david-dm.org/ljharb/call-bound -[dev-deps-svg]: https://david-dm.org/ljharb/call-bound/dev-status.svg -[dev-deps-url]: https://david-dm.org/ljharb/call-bound#info=devDependencies -[npm-badge-png]: https://nodei.co/npm/call-bound.png?downloads=true&stars=true -[license-image]: https://img.shields.io/npm/l/call-bound.svg -[license-url]: LICENSE -[downloads-image]: https://img.shields.io/npm/dm/call-bound.svg -[downloads-url]: https://npm-stat.com/charts.html?package=call-bound -[codecov-image]: https://codecov.io/gh/ljharb/call-bound/branch/main/graphs/badge.svg -[codecov-url]: https://app.codecov.io/gh/ljharb/call-bound/ -[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/ljharb/call-bound -[actions-url]: https://github.com/ljharb/call-bound/actions diff --git a/node_modules/call-bound/index.d.ts b/node_modules/call-bound/index.d.ts deleted file mode 100644 index 5562f00..0000000 --- a/node_modules/call-bound/index.d.ts +++ /dev/null @@ -1,94 +0,0 @@ -type Intrinsic = typeof globalThis; - -type IntrinsicName = keyof Intrinsic | `%${keyof Intrinsic}%`; - -type IntrinsicPath = IntrinsicName | `${StripPercents}.${string}` | `%${StripPercents}.${string}%`; - -type AllowMissing = boolean; - -type StripPercents = T extends `%${infer U}%` ? U : T; - -type BindMethodPrecise = - F extends (this: infer This, ...args: infer Args) => infer R - ? (obj: This, ...args: Args) => R - : F extends { - (this: infer This1, ...args: infer Args1): infer R1; - (this: infer This2, ...args: infer Args2): infer R2 - } - ? { - (obj: This1, ...args: Args1): R1; - (obj: This2, ...args: Args2): R2 - } - : never - -// Extract method type from a prototype -type GetPrototypeMethod = - (typeof globalThis)[T] extends { prototype: any } - ? M extends keyof (typeof globalThis)[T]['prototype'] - ? (typeof globalThis)[T]['prototype'][M] - : never - : never - -// Get static property/method -type GetStaticMember = - P extends keyof (typeof globalThis)[T] ? (typeof globalThis)[T][P] : never - -// Type that maps string path to actual bound function or value with better precision -type BoundIntrinsic = - S extends `${infer Obj}.prototype.${infer Method}` - ? Obj extends keyof typeof globalThis - ? BindMethodPrecise> - : unknown - : S extends `${infer Obj}.${infer Prop}` - ? Obj extends keyof typeof globalThis - ? 
GetStaticMember - : unknown - : unknown - -declare function arraySlice(array: readonly T[], start?: number, end?: number): T[]; -declare function arraySlice(array: ArrayLike, start?: number, end?: number): T[]; -declare function arraySlice(array: IArguments, start?: number, end?: number): T[]; - -// Special cases for methods that need explicit typing -interface SpecialCases { - '%Object.prototype.isPrototypeOf%': (thisArg: {}, obj: unknown) => boolean; - '%String.prototype.replace%': { - (str: string, searchValue: string | RegExp, replaceValue: string): string; - (str: string, searchValue: string | RegExp, replacer: (substring: string, ...args: any[]) => string): string - }; - '%Object.prototype.toString%': (obj: {}) => string; - '%Object.prototype.hasOwnProperty%': (obj: {}, v: PropertyKey) => boolean; - '%Array.prototype.slice%': typeof arraySlice; - '%Array.prototype.map%': (array: readonly T[], callbackfn: (value: T, index: number, array: readonly T[]) => U, thisArg?: any) => U[]; - '%Array.prototype.filter%': (array: readonly T[], predicate: (value: T, index: number, array: readonly T[]) => unknown, thisArg?: any) => T[]; - '%Array.prototype.indexOf%': (array: readonly T[], searchElement: T, fromIndex?: number) => number; - '%Function.prototype.apply%': (fn: (...args: A) => R, thisArg: any, args: A) => R; - '%Function.prototype.call%': (fn: (...args: A) => R, thisArg: any, ...args: A) => R; - '%Function.prototype.bind%': (fn: (...args: A) => R, thisArg: any, ...args: A) => (...remainingArgs: A) => R; - '%Promise.prototype.then%': { - (promise: Promise, onfulfilled: (value: T) => R | PromiseLike): Promise; - (promise: Promise, onfulfilled: ((value: T) => R | PromiseLike) | undefined | null, onrejected: (reason: any) => R | PromiseLike): Promise; - }; - '%RegExp.prototype.test%': (regexp: RegExp, str: string) => boolean; - '%RegExp.prototype.exec%': (regexp: RegExp, str: string) => RegExpExecArray | null; - '%Error.prototype.toString%': (error: Error) => string; - '%TypeError.prototype.toString%': (error: TypeError) => string; - '%String.prototype.split%': ( - obj: unknown, - splitter: string | RegExp | { - [Symbol.split](string: string, limit?: number): string[]; - }, - limit?: number | undefined - ) => string[]; -} - -/** - * Returns a bound function for a prototype method, or a value for a static property. - * - * @param name - The name of the intrinsic (e.g. 
'Array.prototype.slice') - * @param {AllowMissing} [allowMissing] - Whether to allow missing intrinsics (default: false) - */ -declare function callBound, S extends IntrinsicPath>(name: K, allowMissing?: AllowMissing): SpecialCases[`%${StripPercents}%`]; -declare function callBound, S extends IntrinsicPath>(name: S, allowMissing?: AllowMissing): BoundIntrinsic; - -export = callBound; diff --git a/node_modules/call-bound/index.js b/node_modules/call-bound/index.js deleted file mode 100644 index e9ade74..0000000 --- a/node_modules/call-bound/index.js +++ /dev/null @@ -1,19 +0,0 @@ -'use strict'; - -var GetIntrinsic = require('get-intrinsic'); - -var callBindBasic = require('call-bind-apply-helpers'); - -/** @type {(thisArg: string, searchString: string, position?: number) => number} */ -var $indexOf = callBindBasic([GetIntrinsic('%String.prototype.indexOf%')]); - -/** @type {import('.')} */ -module.exports = function callBoundIntrinsic(name, allowMissing) { - /* eslint no-extra-parens: 0 */ - - var intrinsic = /** @type {(this: unknown, ...args: unknown[]) => unknown} */ (GetIntrinsic(name, !!allowMissing)); - if (typeof intrinsic === 'function' && $indexOf(name, '.prototype.') > -1) { - return callBindBasic(/** @type {const} */ ([intrinsic])); - } - return intrinsic; -}; diff --git a/node_modules/call-bound/package.json b/node_modules/call-bound/package.json deleted file mode 100644 index d542db4..0000000 --- a/node_modules/call-bound/package.json +++ /dev/null @@ -1,99 +0,0 @@ -{ - "name": "call-bound", - "version": "1.0.4", - "description": "Robust call-bound JavaScript intrinsics, using `call-bind` and `get-intrinsic`.", - "main": "index.js", - "exports": { - ".": "./index.js", - "./package.json": "./package.json" - }, - "sideEffects": false, - "scripts": { - "prepack": "npmignore --auto --commentLines=auto", - "prepublish": "not-in-publish || npm run prepublishOnly", - "prepublishOnly": "safe-publish-latest", - "prelint": "evalmd README.md", - "lint": "eslint --ext=.js,.mjs .", - "postlint": "tsc -p . 
&& attw -P", - "pretest": "npm run lint", - "tests-only": "nyc tape 'test/**/*.js'", - "test": "npm run tests-only", - "posttest": "npx npm@'>=10.2' audit --production", - "version": "auto-changelog && git add CHANGELOG.md", - "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/ljharb/call-bound.git" - }, - "keywords": [ - "javascript", - "ecmascript", - "es", - "js", - "callbind", - "callbound", - "call", - "bind", - "bound", - "call-bind", - "call-bound", - "function", - "es-abstract" - ], - "author": "Jordan Harband ", - "funding": { - "url": "https://github.com/sponsors/ljharb" - }, - "license": "MIT", - "bugs": { - "url": "https://github.com/ljharb/call-bound/issues" - }, - "homepage": "https://github.com/ljharb/call-bound#readme", - "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "get-intrinsic": "^1.3.0" - }, - "devDependencies": { - "@arethetypeswrong/cli": "^0.17.4", - "@ljharb/eslint-config": "^21.1.1", - "@ljharb/tsconfig": "^0.3.0", - "@types/call-bind": "^1.0.5", - "@types/get-intrinsic": "^1.2.3", - "@types/tape": "^5.8.1", - "auto-changelog": "^2.5.0", - "encoding": "^0.1.13", - "es-value-fixtures": "^1.7.1", - "eslint": "=8.8.0", - "evalmd": "^0.0.19", - "for-each": "^0.3.5", - "gopd": "^1.2.0", - "has-strict-mode": "^1.1.0", - "in-publish": "^2.0.1", - "npmignore": "^0.3.1", - "nyc": "^10.3.2", - "object-inspect": "^1.13.4", - "safe-publish-latest": "^2.0.0", - "tape": "^5.9.0", - "typescript": "next" - }, - "testling": { - "files": "test/index.js" - }, - "auto-changelog": { - "output": "CHANGELOG.md", - "template": "keepachangelog", - "unreleased": false, - "commitLimit": false, - "backfillLimit": false, - "hideCredit": true - }, - "publishConfig": { - "ignore": [ - ".github/workflows" - ] - }, - "engines": { - "node": ">= 0.4" - } -} diff --git a/node_modules/call-bound/test/index.js b/node_modules/call-bound/test/index.js deleted file mode 100644 index a2fc9f0..0000000 --- a/node_modules/call-bound/test/index.js +++ /dev/null @@ -1,61 +0,0 @@ -'use strict'; - -var test = require('tape'); - -var callBound = require('../'); - -/** @template {true} T @template U @typedef {T extends U ? 
T : never} AssertType */ - -test('callBound', function (t) { - // static primitive - t.equal(callBound('Array.length'), Array.length, 'Array.length yields itself'); - t.equal(callBound('%Array.length%'), Array.length, '%Array.length% yields itself'); - - // static non-function object - t.equal(callBound('Array.prototype'), Array.prototype, 'Array.prototype yields itself'); - t.equal(callBound('%Array.prototype%'), Array.prototype, '%Array.prototype% yields itself'); - t.equal(callBound('Array.constructor'), Array.constructor, 'Array.constructor yields itself'); - t.equal(callBound('%Array.constructor%'), Array.constructor, '%Array.constructor% yields itself'); - - // static function - t.equal(callBound('Date.parse'), Date.parse, 'Date.parse yields itself'); - t.equal(callBound('%Date.parse%'), Date.parse, '%Date.parse% yields itself'); - - // prototype primitive - t.equal(callBound('Error.prototype.message'), Error.prototype.message, 'Error.prototype.message yields itself'); - t.equal(callBound('%Error.prototype.message%'), Error.prototype.message, '%Error.prototype.message% yields itself'); - - var x = callBound('Object.prototype.toString'); - var y = callBound('%Object.prototype.toString%'); - - // prototype function - t.notEqual(x, Object.prototype.toString, 'Object.prototype.toString does not yield itself'); - t.notEqual(y, Object.prototype.toString, '%Object.prototype.toString% does not yield itself'); - t.equal(x(true), Object.prototype.toString.call(true), 'call-bound Object.prototype.toString calls into the original'); - t.equal(y(true), Object.prototype.toString.call(true), 'call-bound %Object.prototype.toString% calls into the original'); - - t['throws']( - // @ts-expect-error - function () { callBound('does not exist'); }, - SyntaxError, - 'nonexistent intrinsic throws' - ); - t['throws']( - // @ts-expect-error - function () { callBound('does not exist', true); }, - SyntaxError, - 'allowMissing arg still throws for unknown intrinsic' - ); - - t.test('real but absent intrinsic', { skip: typeof WeakRef !== 'undefined' }, function (st) { - st['throws']( - function () { callBound('WeakRef'); }, - TypeError, - 'real but absent intrinsic throws' - ); - st.equal(callBound('WeakRef', true), undefined, 'allowMissing arg avoids exception'); - st.end(); - }); - - t.end(); -}); diff --git a/node_modules/call-bound/tsconfig.json b/node_modules/call-bound/tsconfig.json deleted file mode 100644 index 8976d98..0000000 --- a/node_modules/call-bound/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "@ljharb/tsconfig", - "compilerOptions": { - "target": "ESNext", - "lib": ["es2024"], - }, - "exclude": [ - "coverage", - ], -} diff --git a/node_modules/cheerio/History.md b/node_modules/cheerio/History.md deleted file mode 100644 index c7e38e6..0000000 --- a/node_modules/cheerio/History.md +++ /dev/null @@ -1,576 +0,0 @@ - -0.22.0 / 2016-08-23 -================== - - * Return undefined in .prop if given an invalid element or tag (#880) - * Merge pull request #884 from cheeriojs/readme-cleanup - * readme updates - * Merge pull request #881 from piamancini/patch-1 - * Added backers and sponsors from OpenCollective - * Use jQuery from the jquery module in benchmarks (#871) - * Document, test, and extend static `$.text` method (#855) - * Fix typo on calling _.extend (#861) - * Update versions (#870) - * Use individual lodash functions (#864) - * Added `.serialize()` support. 
Fixes #69 (#827) - * Update Readme.md (#857) - * add extension for JSON require call - * remove gittask badge - * Merge pull request #672 from underdogio/dev/checkbox.radio.values.sqwished - * Added default value for checkboxes/radios - -0.20.0 / 2016-02-01 -================== - - * Add coveralls badge, remove link to old report (Felix Böhm) - * Update lodash dependeny to 4.1.0 (leif.hanack) - * Fix PR #726 adding 'appendTo()' and 'prependTo()' (Delgan) - * Added appendTo and prependTo with tests #641 (digihaven) - * Fix #780 by changing options context in '.find()' (Felix Böhm) - * Add an unit test checking the query of child (Delgan) - * fix #667: attr({foo: null}) removes attribute foo, like attr('foo', null) (Ray Waldin) - * Include reference to dedicated "Loading" section (Mike Pennisi) - * Added load method to $ (alanev) - * update css-select to 1.2.0 (Felix Böhm) - * Fixing Grammatical Error (Dan Corman) - * Test against node v0.12 --> v4.2 (Jason Kurian) - * Correct output in example (Felix Böhm) - * Fix npm files filter (Bogdan Chadkin) - * Enable setting data on all elements in selection (Mike Pennisi) - * Reinstate `$.fn.toArray` (Mike Pennisi) - * update css-select to 1.1.0 (Thomas Shafer) - * Complete implementation of `wrap` (Mike Pennisi) - * Correct name of unit test (Mike Pennisi) - * Correct grammar in test titles (Mike Pennisi) - * Normalize whitespace (Mike Pennisi) - * Insert omitted assertion (Mike Pennisi) - * Update invocation of `children` (Mike Pennisi) - * Begin implementation of `wrap` method (Dandlezzz) - * Update Readme.md (Sven Slootweg) - * fix document's mistake in Readme.md (exoticknight) - * Add tests for setting text and html as non-strings (Ryc O'Chet) - * Fix for passing non-string values to .html or .text (Ryc O'Chet) - * use a selector to filter form elements (fb55) - * fix README.md typo (Yutian Li) - * README: fix spelling (Chris Rebert) - * Added support for options without a `value` attribute. Fixes #633 (Todd Wolfson) - * responding to pull request feedback - remove item() method and related tests (Ray Waldin) - * add length property and item method to object returned by prop('style'), plus tests (Ray Waldin) - * Added .prop method to readme (Artem Burtsev) - * Added .prop method (Artem Burtsev) - * Added Gitter badge (The Gitter Badger) - -0.19.0 / 2015-03-21 -================== - - * fixed allignment (fb55) - * added test case for malformed json in data attributes (fb55) - * fix: handle some extreme cases like `data-custom="{{templatevar}}"`. There is possibility error while parsing json . (Harish.K) - * Add missing optional selector doc for {prev,next}{All,Until} (Jérémie Astori) - * update to dom-serializer@0.1.0 (Felix Böhm) - * Document `Cheerio#serialzeArray` (Mike Pennisi) - * Fixed up `serializeArray()` and added multiple support (Todd Wolfson) - * Implement serializeArray() (Jarno Leppänen) - * recognize options in $.xml() (fb55) - * lib/static.js: text(): rm errant space before ++ (Chris Rebert) - * Do not expose internal `children` array (Mike Pennisi) - * Change lodash dependencies to ^3.1.0 (Samy Pessé) - * Update lodash@3.1.0 (Samy Pessé) - * Updates Readme.md: .not(function (index, elem)) (Patrick Ward) - * update to css-select@1.0.0 (fb55) - * Allow failures in Node.js v0.11 (Mike Pennisi) - * Added: Gittask badge (Matthew Mueller) - * Isolate prototypes of functions created via `load` (Mike Pennisi) - * Updates Readme.md: adds JS syntax highlighting (frankcash) - * #608 -- Add support for insertBefore/insertAfter syntax. 
Supports target types of: $, [$], selector (both single and multiple results) (Ben Cochran) - * Clone input nodes when inserting over a set (Mike Pennisi) - * Move unit test files (Mike Pennisi) - * remove unnecessarily tricky code (David Chambers) - * pass options to $.html in toString (fb55) - * add license info to package.json (Chris Rebert) - * xyz@~0.5.0 (David Chambers) - * Remove unofficial signature of `children` (Mike Pennisi) - * Fix bug in `css` method (Mike Pennisi) - * Correct bug in implementation of `Cheerio#val` (Mike Pennisi) - -0.18.0 / 2014-11-06 -================== - - * bump htmlparser2 dependency to ~3.8.1 (Chris Rebert) - * Correct unit test titles (Mike Pennisi) - * Correct behavior of `after` and `before` (Mike Pennisi) - * implement jQuery's .has() (Chris Rebert) - * Update repository url (haqii) - * attr() should return undefined or name for booleans (Raoul Millais) - * Update Readme.md (Ryan Breen) - * Implement `Cheerio#not` (Mike Pennisi) - * Clone nodes according to original parsing options (Mike Pennisi) - * fix lint error (David Chambers) - * Add explicit tests for DOM level 1 API (Mike Pennisi) - * Expose DOM level 1 API for Node-like objects (Mike Pennisi) - * Correct error in documentation (Mike Pennisi) - * Return a fully-qualified Function from `$.load` (Mike Pennisi) - * Update tests to avoid duck typing (Mike Pennisi) - * Alter "loaded" functions to produce true instances (Mike Pennisi) - * Organize tests for `cheerio.load` (Mike Pennisi) - * Complete `$.prototype.find` (Mike Pennisi) - * Use JSHint's `extends` option (Mike Pennisi) - * Remove aliases for exported methods (Mike Pennisi) - * Disallow unused variables (Mike Pennisi) - * Remove unused internal variables (Mike Pennisi) - * Remove unused variables from unit tests (Mike Pennisi) - * Remove unused API method references (Mike Pennisi) - * Move tests for `contains` method (Mike Pennisi) - * xyz@0.4.0 (David Chambers) - * Created a wiki for companies using cheerio in production (Matthew Mueller) - * Implement `$.prototype.index` (Mike Pennisi) - * Implement `$.prototype.addBack` (Mike Pennisi) - * Added double quotes to radio attribute name to account for characters such as brackets (akant10) - * Update History.md (Gabriel Falkenberg) - * add 0.17.0 changelog (David Chambers) - * exit prepublish script if tag not found (David Chambers) - * alphabetize devDependencies (fb55) - * ignore coverage dir (fb55) - * submit coverage to coveralls (fb55) - * replace jscoverage with istanbul (fb55) - -0.17.0 / 2014-06-10 -================== - - * Fix bug in internal `uniqueSplice` function (Mike Pennisi) - * accept buffer argument to cheerio.load (David Chambers) - * Respect options on the element level (Alex Indigo) - * Change state definition to more readable (Artem Burtsev) - * added test (0xBADC0FFEE) - * add class only if doesn't exist (Artem Burtsev) - * Made it less insane. (Alex Indigo) - * Implement `Cheerio#add` (Mike Pennisi) - * Use "loaded" instance of Cheerio in unit tests (Mike Pennisi) - * Be more strict with object check. (Alex Indigo) - * Added options argument to .html() static method. (Alex Indigo) - * Fixed encoding mishaps. Adjusted tests. 
(Alex Indigo) - * use dom-serializer module (fb55) - * don't test on 0.8, don't ignore 0.11 (Felix Böhm) - * parse: rm unused variables (coderaiser) - * cheerio: rm unused variable (coderaiser) - * Fixed test (Avi Kohn) - * Added test (Avi Kohn) - * Changed == to === (Avi Kohn) - * Fixed a bug in removing type="hidden" attr (Avi Kohn) - * sorted (Alexey Raspopov) - * add `muted` attr to booleanAttributes (Alexey Raspopov) - * fixed context of `this` in .html (Felix Böhm) - * append new elements for each element in selection (fb55) - -0.16.0 / 2014-05-08 -================== - - * fix `make bench` (David Chambers) - * makefile: add release-* targets (David Chambers) - * alphabetize dependencies (David Chambers) - * Rewrite `data` internals with caching behavior (Mike Pennisi) - * Fence .val example as js (Kevin Sawicki) - * Fixed typos. Deleted trailing whitespace from test/render.js (Nattaphoom Ch) - * Fix manipulation APIs with removed elements (kpdecker) - * Perform manual string parsing for hasClass (kpdecker) - * Fix existing element removal (kpdecker) - * update render tests (Felix Böhm) - * fixed cheerio path (Felix Böhm) - * use `entities.escape` for attribute values (Felix Böhm) - * bump entities version (Felix Böhm) - * remove lowerCaseTags option from readme (Felix Böhm) - * added test case for .html in xmlMode (fb55) - * render xml in `html()` when `xmlMode: true` (fb55) - * use a map for booleanAttributes (fb55) - * update singleTags, use utils.isTag (fb55) - * update travis badge URL (Felix Böhm) - * use typeof instead of _.isString and _.isNumber (fb55) - * use Array.isArray instead of _.isArray (fb55) - * replace _.isFunction with typeof (fb55) - * removed unnecessary error message (fb55) - * decode entities in htmlparser2 (fb55) - * pass options object to CSSselect (fb55) - -0.15.0 / 2014-04-08 -================== - - * Update callbacks to pass element per docs (@kpdecker) - * preserve options (@fb55) - * Use SVG travis badge (@t3chnoboy) - * only use static requires (@fb55) - * Optimize manipulation methods (@kpdecker) - * Optimize add and remove class cases (@kpdecker) - * accept dom of DomHandler to cheerio.load (@nleush) - * added parentsUntil method (@finspin) - * Add performance optimization and bug fix `empty` method (@kpdecker) - -0.14.0 / 2014-04-01 -================== - - * call encodeXML and directly expose decodeHTML (@fb55) - * use latest htmlparser2 and entities versions (@fb55) - * Deprecate `$.fn.toArray` (@jugglinmike) - * Implement `$.fn.get` (@jugglinmike) - * .replaceWith now replaces all selected elements. 
(@xavi-) - * Correct arguments for 'replaceWith' callback (@jugglinmike) - * switch to lodash (@fb55) - * update to entities@0.5.0 (@fb55) - * Fix attr when $ collection contains text modules (@kpdecker) - * Update to latest version of expect.js (@jugglinmike) - * Remove nodes from their previous structures (@jugglinmike) - * Update render.js (@stevenvachon) - * CDATA test (@stevenvachon) - * only ever one child index for cdata (@stevenvachon) - * don't loop through cdata children array (@stevenvachon) - * proper rendering of CDATA (@stevenvachon) - * Add cheerio-only bench option (@kpdecker) - * Avoid delete operations (@kpdecker) - * Add independent html benchmark (@kpdecker) - * Cache tag check in render (@kpdecker) - * Simplify attribute rendering step (@kpdecker) - * Add html rendering bench case (@kpdecker) - * Remove unnecessary check from removeAttr (@kpdecker) - * Remove unnecessary encoding step for attrs (@kpdecker) - * Add test for removeAttr+attr on boolean attributes (@kpdecker) - * Add single element benchmark case (@kpdecker) - * Optimize filter with selector (@kpdecker) - * Fix passing context as dom node (@alfred-nsh) - * Fix bug in `nextUntil` (@jugglinmike) - * Fix bug in `nextAll` (@jugglinmike) - * Implement `selector` argument of `next` method (@jugglinmike) - * Fix bug in `prevUntil` (@jugglinmike) - * Implement `selector` argument of `prev` method (@jugglinmike) - * Fix bug in `prevAll` (@jugglinmike) - * Fix bug in `siblings` (@jugglinmike) - * Avoid unnecessary indexOf from toggleClass (@kpdecker) - * Use strict equality rather than falsy check in eq (@kpdecker) - * Add benchmark coverage for all $ APIs (@kpdecker) - * Optimize filter Cheerio intermediate creation (@kpdecker) - * Optimize siblings cheerio instance creation (@kpdecker) - * Optimize identity cases for first/last/eq (@kpdecker) - * Use domEach for traversal (@kpdecker) - * Inline children lookup in find (@kpdecker) - * Use domEach in data accessor (@kpdecker) - * Avoid cheerio creation in add/remove/toggleClass (@kpdecker) - * Implement getAttr local helper (@kpdecker) - -0.13.1 / 2014-01-07 -================== - - * Fix select with context in Cheerio function (@jugglinmike) - * Remove unecessary DOM maintenance logic (@jugglinmike) - * Deprecate support for node 0.6 - -0.13.0 / 2013-12-30 -================== - - * Remove "root" node (@jugglinmike) - * Fix bug in `prevAll`, `prev`, `nextAll`, `next`, `prevUntil`, `nextUntil` (@jugglinmike) - * Fix `replaceWith` method (@jugglinmike) - * added nextUntil() and prevUntil() (@finspin) - * Remove internal `connect` function (@jugglinmike) - * Rename `Cheerio#make` to document private status (@jugginmike) - * Remove extraneous call to `_.uniq` (@jugglinmike) - * Use CSSselect library directly (@jugglinmike) - * Run CI against Node v0.11 as an allowed failure (@jugginmike) - * Correct bug in `Cheerio#parents` (@jugglinmike) - * Implement `$.fn.end` (@jugginmike) - * Ignore colons inside of url(.*) when parsing css (@Meekohi) - * Introduce rudimentary benchmark suite (@jugglinmike) - * Update HtmlParser2 version (@jugglinmike) - * Correct inconsistency in `$.fn.map` (@jugglinmike) - * fixed traversing tests (@finspin) - * Simplify `make` method (@jugglinmike) - * Avoid shadowing instance methods from arrays (@jugglinmike) - -0.12.4 / 2013-11-12 -================== - - * Coerce JSON values returned by `data` (@jugglinmike) - * issue #284: when rendering HTML, use original data attributes (@Trott) - * Introduce JSHint for automated code linting (@jugglinmike) 
- * Prevent `find` from returning duplicate elements (@jugglinmike) - * Implement function signature of `replaceWith` (@jugglinmike) - * Implement function signature of `before` (@jugglinmike) - * Implement function signature of `after` (@jugglinmike) - * Implement function signature of `append`/`prepend` (@jugglinmike) - * Extend iteration methods to accept nodes (@jugglinmike) - * Improve `removeClass` (@jugglinmike) - * Complete function signature of `addClass` (@jugglinmike) - * Fix bug in `removeClass` (@jugglinmike) - * Improve contributing.md (@jugglinmike) - * Fix and document .css() (@jugglinmike) - -0.12.3 / 2013-10-04 -=================== - - * Add .toggleClass() function (@cyberthom) - * Add contributing guidelines (@jugglinmike) - * Fix bug in `siblings` (@jugglinmike) - * Correct the implementation `filter` and `is` (@jugglinmike) - * add .data() function (@andi-neck) - * add .css() (@yields) - * Implements contents() (@jlep) - -0.12.2 / 2013-09-04 -================== - - * Correct implementation of `$.fn.text` (@jugglinmike) - * Refactor Cheerio array creation (@jugglinmike) - * Extend manipulation methods to accept Arrays (@jugglinmike) - * support .attr(attributeName, function(index, attr)) (@xiaohwan) - -0.12.1 / 2013-07-30 -================== - - * Correct behavior of `Cheerio#parents` (@jugglinmike) - * Double quotes inside attributes kills HTML (@khoomeister) - * Making next({}) and prev({}) return empty object (@absentTelegraph) - * Implement $.parseHTML (@jugglinmike) - * Correct bug in jQuery.fn.closest (@jugglinmike) - * Correct behavior of $.fn.val on 'option' elements (@jugglinmike) - -0.12.0 / 2013-06-09 -=================== - - * Breaking Change: Changed context from parent to the actual passed one (@swissmanu) - * Fixed: jquery checkbox val behavior (@jhubble) - * Added: output xml with $.xml() (@Maciek416) - * Bumped: htmlparser2 to 3.1.1 - * Fixed: bug in attr(key, val) on empty objects (@farhadi) - * Added: prevAll, nextAll (@lessmind) - * Fixed: Safety check in parents and closest (@zero21xxx) - * Added: .is(sel) (@zero21xxx) - -0.11.0 / 2013-04-22 -================== - -* Added: .closest() (@jeremy-dentel) -* Added: .parents() (@zero21xxx) -* Added: .val() (@rschmukler & @leahciMic) -* Added: Travis support for node 0.10.0 (@jeremy-dentel) -* Fixed: .find() if no selector (@davidchambers) -* Fixed: Propagate syntax errors caused by invalid selectors (@davidchambers) - -0.10.8 / 2013-03-11 -================== - -* Add slice method (SBoudrias) - -0.10.7 / 2013-02-10 -================== - -* Code & doc cleanup (davidchambers) -* Fixed bug in filter (jugglinmike) - -0.10.6 / 2013-01-29 -================== - -* Added `$.contains(...)` (jugglinmike) -* formatting cleanup (davidchambers) -* Bug fix for `.children()` (jugglinmike & davidchambers) -* Remove global `render` bug (wvl) - -0.10.5 / 2012-12-18 -=================== - -* Fixed botched publish from 0.10.4 - changes should now be present - -0.10.4 / 2012-12-16 -================== - -* $.find should query descendants only (@jugglinmike) -* Tighter underscore dependency - -0.10.3 / 2012-11-18 -=================== - -* fixed outer html bug -* Updated documentation for $(...).html() and $.html() - -0.10.2 / 2012-11-17 -=================== - -* Added a toString() method (@bensheldon) -* use `_.each` and `_.map` to simplify cheerio namesakes (@davidchambers) -* Added filter() with tests and updated readme (@bensheldon & @davidchambers) -* Added spaces between attributes rewritten by removeClass (@jos3000) -* 
updated docs to remove reference to size method (@ironchefpython) -* removed HTML tidy/pretty print from cheerio - -0.10.1 / 2012-10-04 -=================== - -* Fixed regression, filtering with a context (#106) - -0.10.0 / 2012-09-24 -=================== - -* Greatly simplified and reorganized the library, reducing the loc by 30% -* Now supports mocha's test-coverage -* Deprecated self-closing tags (HTML5 doesn't require them) -* Fixed error thrown in removeClass(...) @robashton - -0.9.2 / 2012-08-10 -================== - -* added $(...).map(fn) -* manipulation: refactor `makeCheerioArray` -* make .removeClass() remove *all* occurrences (#64) - -0.9.1 / 2012-08-03 -================== - -* fixed bug causing options not to make it to the parser - -0.9.0 / 2012-07-24 -================== - -* Added node 8.x support -* Removed node 4.x support -* Add html(dom) support (@wvl) -* fixed xss vulnerabilities on .attr(), .text(), & .html() (@benatkin, @FB55) -* Rewrote tests into javascript, removing coffeescript dependency (@davidchambers) -* Tons of cleanup (@davidchambers) - -0.8.3 / 2012-06-12 -================== - -* Fixed minor package regression (closes #60) - -0.8.2 / 2012-06-11 -================== - -* Now fails gracefully in cases that involve special chars, which is inline with jQuery (closes #59) -* text() now decode special entities (closes #52) -* updated travis.yml to test node 4.x - -0.8.1 / 2012-06-02 -================== - -* fixed regression where if you created an element, it would update the root -* compatible with node 4.x (again) - -0.8.0 / 2012-05-27 -================== - -* Updated CSS parser to use FB55/CSSselect. Cheerio now supports most CSS3 psuedo selectors thanks to @FB55. -* ignoreWhitespace now on by default again. See #55 for context. -* Changed $(':root') to $.root(), cleaned up $.clone() -* Support for .eq(i) thanks to @alexbardas -* Removed support for node 0.4.x -* Fixed memory leak where package.json was continually loaded -* Tons more tests - -0.7.0 / 2012-04-08 -================== - -* Now testing with node v0.7.7 -* Added travis-ci integration -* Replaced should.js with expect.js. Browser testing to come -* Fixed spacing between attributes and their values -* Added HTML tidy/pretty print -* Exposed node-htmlparser2 parsing options -* Revert .replaceWith(...) to be consistent with jQuery - -0.6.2 / 2012-02-12 -================== - -* Fixed .replaceWith(...) regression - -0.6.1 / 2012-02-12 -================== - -* Added .first(), .last(), and .clone() commands. -* Option to parse using whitespace added to `.load`. -* Many bug fixes to make cheerio more aligned with jQuery. -* Added $(':root') to select the highest level element. - -Many thanks to the contributors that made this release happen: @ironchefpython and @siddMahen - -0.6.0 / 2012-02-07 -================== - -* *Important:* `$(...).html()` now returns inner HTML, which is in line with the jQuery spec -* `$.html()` returns the full HTML string. `$.html([cheerioObject])` will return the outer(selected element's tag) and inner HTML of that object -* Fixed bug that prevented HTML strings with depth (eg. `append('
')`) from getting `parent`, `next`, `prev` attributes. -* Halted [htmlparser2](https://github.com/FB55/node-htmlparser) at v2.2.2 until single attributes bug gets fixed. - -0.5.1 / 2012-02-05 -================== - -* Fixed minor regression: $(...).text(fn) would fail - -0.5.1 / 2012-02-05 -================== - -* Fixed regression: HTML pages with comments would fail - -0.5.0 / 2012-02-04 -================== - -* Transitioned from Coffeescript back to Javascript -* Parser now ignores whitespace -* Fixed issue with double slashes on self-enclosing tags -* Added boolean attributes to html rendering - -0.4.2 / 2012-01-16 -================== - -* Multiple selectors support: $('.apple, .orange'). Thanks @siddMahen! -* Update package.json to always use latest cheerio-soupselect -* Fix memory leak in index.js - -0.4.1 / 2011-12-19 -================== -* Minor packaging changes to allow `make test` to work from npm installation - -0.4.0 / 2011-12-19 -================== - -* Rewrote all unit tests as cheerio transitioned from vows -> mocha -* Internally, renderer.render -> render(...), parser.parse -> parse(...) -* Append, prepend, html, before, after all work with only text (no tags) -* Bugfix: Attributes can now be removed from script and style tags -* Added yield as a single tag -* Cheerio now compatible with node >=0.4.7 - -0.3.2 / 2011-12-1 -================= - -* Fixed $(...).text(...) to work with "root" element - -0.3.1 / 2011-11-25 -================== - -* Now relying on cheerio-soupselect instead of node-soupselect -* Removed all lingering htmlparser dependencies -* parser now returns parent "root" element. Root now never needs to be updated when there is multiple roots. This fixes ongoing issues with before(...), after(...) and other manipulation functions -* Added jQuery's $(...).replaceWith(...) - -0.3.0 / 2011-11-19 -================== - -* Now using htmlparser2 for parsing (2x speed increase, cleaner, actively developed) -* Added benchmark directory for future speed tests -* $('...').dom() was funky, so it was removed in favor of $('...').get(). $.dom() still works the same. -* $.root now correctly static across all instances of $ -* Added a screencast - -0.2.2 / 2011-11-9 -================= - -* Traversing will select ` text " -); - -module.exports = makeDom(markup); diff --git a/node_modules/cheerio/node_modules/css-select/node_modules/domutils/test/tests/helpers.js b/node_modules/cheerio/node_modules/css-select/node_modules/domutils/test/tests/helpers.js deleted file mode 100644 index 2e30afb..0000000 --- a/node_modules/cheerio/node_modules/css-select/node_modules/domutils/test/tests/helpers.js +++ /dev/null @@ -1,89 +0,0 @@ -var makeDom = require("../utils").makeDom; -var helpers = require("../.."); -var assert = require("assert"); - -describe("helpers", function() { - describe("removeSubsets", function() { - var removeSubsets = helpers.removeSubsets; - var dom = makeDom("

")[0]; - - it("removes identical trees", function() { - var matches = removeSubsets([dom, dom]); - assert.equal(matches.length, 1); - }); - - it("Removes subsets found first", function() { - var matches = removeSubsets([dom, dom.children[0].children[0]]); - assert.equal(matches.length, 1); - }); - - it("Removes subsets found last", function() { - var matches = removeSubsets([dom.children[0], dom]); - assert.equal(matches.length, 1); - }); - - it("Does not remove unique trees", function() { - var matches = removeSubsets([dom.children[0], dom.children[1]]); - assert.equal(matches.length, 2); - }); - }); - - describe("compareDocumentPosition", function() { - var compareDocumentPosition = helpers.compareDocumentPosition; - var markup = "

"; - var dom = makeDom(markup)[0]; - var p = dom.children[0]; - var span = p.children[0]; - var a = dom.children[1]; - - it("reports when the first node occurs before the second indirectly", function() { - assert.equal(compareDocumentPosition(span, a), 2); - }); - - it("reports when the first node contains the second", function() { - assert.equal(compareDocumentPosition(p, span), 10); - }); - - it("reports when the first node occurs after the second indirectly", function() { - assert.equal(compareDocumentPosition(a, span), 4); - }); - - it("reports when the first node is contained by the second", function() { - assert.equal(compareDocumentPosition(span, p), 20); - }); - - it("reports when the nodes belong to separate documents", function() { - var other = makeDom(markup)[0].children[0].children[0]; - - assert.equal(compareDocumentPosition(span, other), 1); - }); - - it("reports when the nodes are identical", function() { - assert.equal(compareDocumentPosition(span, span), 0); - }); - }); - - describe("uniqueSort", function() { - var uniqueSort = helpers.uniqueSort; - var dom, p, span, a; - - beforeEach(function() { - dom = makeDom("

")[0]; - p = dom.children[0]; - span = p.children[0]; - a = dom.children[1]; - }); - - it("leaves unique elements untouched", function() { - assert.deepEqual(uniqueSort([p, a]), [p, a]); - }); - - it("removes duplicate elements", function() { - assert.deepEqual(uniqueSort([p, a, p]), [p, a]); - }); - - it("sorts nodes in document order", function() { - assert.deepEqual(uniqueSort([a, dom, span, p]), [dom, p, span, a]); - }); - }); -}); diff --git a/node_modules/cheerio/node_modules/css-select/node_modules/domutils/test/tests/legacy.js b/node_modules/cheerio/node_modules/css-select/node_modules/domutils/test/tests/legacy.js deleted file mode 100644 index 87fabfa..0000000 --- a/node_modules/cheerio/node_modules/css-select/node_modules/domutils/test/tests/legacy.js +++ /dev/null @@ -1,119 +0,0 @@ -var DomUtils = require("../.."); -var fixture = require("../fixture"); -var assert = require("assert"); - -// Set up expected structures -var expected = { - idAsdf: fixture[1], - tag2: [], - typeScript: [] -}; -for (var idx = 0; idx < 20; ++idx) { - expected.tag2.push(fixture[idx*2 + 1].children[5]); - expected.typeScript.push(fixture[idx*2 + 1].children[1]); -} - -describe("legacy", function() { - describe("getElements", function() { - var getElements = DomUtils.getElements; - it("returns the node with the specified ID", function() { - assert.deepEqual( - getElements({ id: "asdf" }, fixture, true, 1), - [expected.idAsdf] - ); - }); - it("returns empty array for unknown IDs", function() { - assert.deepEqual(getElements({ id: "asdfs" }, fixture, true), []); - }); - it("returns the nodes with the specified tag name", function() { - assert.deepEqual( - getElements({ tag_name:"tag2" }, fixture, true), - expected.tag2 - ); - }); - it("returns empty array for unknown tag names", function() { - assert.deepEqual( - getElements({ tag_name : "asdfs" }, fixture, true), - [] - ); - }); - it("returns the nodes with the specified tag type", function() { - assert.deepEqual( - getElements({ tag_type: "script" }, fixture, true), - expected.typeScript - ); - }); - it("returns empty array for unknown tag types", function() { - assert.deepEqual( - getElements({ tag_type: "video" }, fixture, true), - [] - ); - }); - }); - - describe("getElementById", function() { - var getElementById = DomUtils.getElementById; - it("returns the specified node", function() { - assert.equal( - expected.idAsdf, - getElementById("asdf", fixture, true) - ); - }); - it("returns `null` for unknown IDs", function() { - assert.equal(null, getElementById("asdfs", fixture, true)); - }); - }); - - describe("getElementsByTagName", function() { - var getElementsByTagName = DomUtils.getElementsByTagName; - it("returns the specified nodes", function() { - assert.deepEqual( - getElementsByTagName("tag2", fixture, true), - expected.tag2 - ); - }); - it("returns empty array for unknown tag names", function() { - assert.deepEqual( - getElementsByTagName("tag23", fixture, true), - [] - ); - }); - }); - - describe("getElementsByTagType", function() { - var getElementsByTagType = DomUtils.getElementsByTagType; - it("returns the specified nodes", function() { - assert.deepEqual( - getElementsByTagType("script", fixture, true), - expected.typeScript - ); - }); - it("returns empty array for unknown tag types", function() { - assert.deepEqual( - getElementsByTagType("video", fixture, true), - [] - ); - }); - }); - - describe("getOuterHTML", function() { - var getOuterHTML = DomUtils.getOuterHTML; - it("Correctly renders the outer HTML", function() { - 
assert.equal( - getOuterHTML(fixture[1]), - " text " - ); - }); - }); - - describe("getInnerHTML", function() { - var getInnerHTML = DomUtils.getInnerHTML; - it("Correctly renders the inner HTML", function() { - assert.equal( - getInnerHTML(fixture[1]), - " text " - ); - }); - }); - -}); diff --git a/node_modules/cheerio/node_modules/css-select/node_modules/domutils/test/tests/traversal.js b/node_modules/cheerio/node_modules/css-select/node_modules/domutils/test/tests/traversal.js deleted file mode 100644 index f500e08..0000000 --- a/node_modules/cheerio/node_modules/css-select/node_modules/domutils/test/tests/traversal.js +++ /dev/null @@ -1,17 +0,0 @@ -var makeDom = require("../utils").makeDom; -var traversal = require("../.."); -var assert = require("assert"); - -describe("traversal", function() { - describe("hasAttrib", function() { - var hasAttrib = traversal.hasAttrib; - - it("doesn't throw on text nodes", function() { - var dom = makeDom("textnode"); - assert.doesNotThrow(function() { - hasAttrib(dom[0], "some-attrib"); - }); - }); - - }); -}); diff --git a/node_modules/cheerio/node_modules/css-select/node_modules/domutils/test/utils.js b/node_modules/cheerio/node_modules/css-select/node_modules/domutils/test/utils.js deleted file mode 100644 index 676e8f6..0000000 --- a/node_modules/cheerio/node_modules/css-select/node_modules/domutils/test/utils.js +++ /dev/null @@ -1,9 +0,0 @@ -var htmlparser = require("htmlparser2"); - -exports.makeDom = function(markup) { - var handler = new htmlparser.DomHandler(), - parser = new htmlparser.Parser(handler); - parser.write(markup); - parser.done(); - return handler.dom; -}; diff --git a/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/LICENSE b/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/LICENSE deleted file mode 100644 index c464f86..0000000 --- a/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/LICENSE +++ /dev/null @@ -1,11 +0,0 @@ -Copyright (c) Felix Böhm -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - -Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - -Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - -THIS IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS, -EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
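The compareDocumentPosition tests in the deleted domutils helpers suite above assert bare numeric codes (2, 10, 4, 20, 1 and 0). A hedged sketch of how those codes decompose, assuming the standard DOM position bitmask values — an assumption the deleted files do not state, but one that every asserted number is consistent with:

```js
// Sketch only: decode the codes asserted in the compareDocumentPosition tests.
// The bit values are the standard DOM ones (assumed, not taken from domutils).
var DISCONNECTED = 1;
var PRECEDING    = 2;
var FOLLOWING    = 4;
var CONTAINS     = 8;
var CONTAINED_BY = 16;

// Fixture shape: a <p> holding a <span>, followed by a sibling <a>.
// span vs a    ->  2 = PRECEDING                ("first node occurs before the second")
// p    vs span -> 10 = CONTAINS | PRECEDING     ("first node contains the second")
// a    vs span ->  4 = FOLLOWING                ("first node occurs after the second")
// span vs p    -> 20 = CONTAINED_BY | FOLLOWING ("first node is contained by the second")
// other doc    ->  1 = DISCONNECTED             ("nodes belong to separate documents")
// same node    ->  0                            ("nodes are identical")
console.log((CONTAINS | PRECEDING) === 10, (CONTAINED_BY | FOLLOWING) === 20); // true true
```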
diff --git a/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/README.md b/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/README.md deleted file mode 100644 index 53c6c22..0000000 --- a/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/README.md +++ /dev/null @@ -1,51 +0,0 @@ -# nth-check [![Build Status](https://travis-ci.org/fb55/nth-check.svg)](https://travis-ci.org/fb55/nth-check) - -A performant nth-check parser & compiler. - -### About - -This module can be used to parse & compile nth-checks, as they are found in CSS 3's `nth-child()` and `nth-last-of-type()`. - -`nth-check` focusses on speed, providing optimized functions for different kinds of nth-child formulas, while still following the [spec](http://www.w3.org/TR/css3-selectors/#nth-child-pseudo). - -### API - -```js -var nthCheck = require("nth-check"); -``` - -##### `nthCheck(formula)` - -First parses, then compiles the formula. - -##### `nthCheck.parse(formula)` - -Parses the expression, throws a `SyntaxError` if it fails, otherwise returns an array containing two elements. - -__Example:__ - -```js -nthCheck.parse("2n+3") //[2, 3] -``` - -##### `nthCheck.compile([a, b])` - -Takes an array with two elements (as returned by `.parse`) and returns a highly optimized function. - -If the formula doesn't match any elements, it returns [`boolbase`](https://github.com/fb55/boolbase)'s `falseFunc`, otherwise, a function accepting an _index_ is returned, which returns whether or not a passed _index_ matches the formula. (Note: The spec starts counting at `1`, the returned function at `0`). - -__Example:__ -```js -var check = nthCheck.compile([2, 3]); - -check(0) //false -check(1) //false -check(2) //true -check(3) //false -check(4) //true -check(5) //false -check(6) //true -``` - ---- -License: BSD diff --git a/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/compile.js b/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/compile.js deleted file mode 100644 index 77f2436..0000000 --- a/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/compile.js +++ /dev/null @@ -1,40 +0,0 @@ -module.exports = compile; - -var BaseFuncs = require("boolbase"), - trueFunc = BaseFuncs.trueFunc, - falseFunc = BaseFuncs.falseFunc; - -/* - returns a function that checks if an elements index matches the given rule - highly optimized to return the fastest solution -*/ -function compile(parsed){ - var a = parsed[0], - b = parsed[1] - 1; - - //when b <= 0, a*n won't be possible for any matches when a < 0 - //besides, the specification says that no element is matched when a and b are 0 - if(b < 0 && a <= 0) return falseFunc; - - //when a is in the range -1..1, it matches any element (so only b is checked) - if(a ===-1) return function(pos){ return pos <= b; }; - if(a === 0) return function(pos){ return pos === b; }; - //when b <= 0 and a === 1, they match any element - if(a === 1) return b < 0 ? 
trueFunc : function(pos){ return pos >= b; }; - - //when a > 0, modulo can be used to check if there is a match - var bMod = b % a; - if(bMod < 0) bMod += a; - - if(a > 1){ - return function(pos){ - return pos >= b && pos % a === bMod; - }; - } - - a *= -1; //make `a` positive - - return function(pos){ - return pos <= b && pos % a === bMod; - }; -} \ No newline at end of file diff --git a/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/index.js b/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/index.js deleted file mode 100644 index 3253bbd..0000000 --- a/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/index.js +++ /dev/null @@ -1,9 +0,0 @@ -var parse = require("./parse.js"), - compile = require("./compile.js"); - -module.exports = function nthCheck(formula){ - return compile(parse(formula)); -}; - -module.exports.parse = parse; -module.exports.compile = compile; \ No newline at end of file diff --git a/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/package.json b/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/package.json deleted file mode 100644 index 1a41b02..0000000 --- a/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/package.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "nth-check", - "version": "1.0.2", - "description": "performant nth-check parser & compiler", - "main": "index.js", - "scripts": { - "test": "node test" - }, - "repository": { - "type": "git", - "url": "https://github.com/fb55/nth-check" - }, - "files": [ - "compile.js", - "index.js", - "parse.js" - ], - "keywords": [ - "nth-child", - "nth", - "css" - ], - "author": "Felix Boehm ", - "license": "BSD-2-Clause", - "bugs": { - "url": "https://github.com/fb55/nth-check/issues" - }, - "homepage": "https://github.com/fb55/nth-check", - "dependencies": { - "boolbase": "~1.0.0" - } -} diff --git a/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/parse.js b/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/parse.js deleted file mode 100644 index 5302951..0000000 --- a/node_modules/cheerio/node_modules/css-select/node_modules/nth-check/parse.js +++ /dev/null @@ -1,40 +0,0 @@ -module.exports = parse; - -//following http://www.w3.org/TR/css3-selectors/#nth-child-pseudo - -//[ ['-'|'+']? INTEGER? {N} [ S* ['-'|'+'] S* INTEGER ]? -var re_nthElement = /^([+\-]?\d*n)?\s*(?:([+\-]?)\s*(\d+))?$/; - -/* - parses a nth-check formula, returns an array of two numbers -*/ -function parse(formula){ - formula = formula.trim().toLowerCase(); - - if(formula === "even"){ - return [2, 0]; - } else if(formula === "odd"){ - return [2, 1]; - } else { - var parsed = formula.match(re_nthElement); - - if(!parsed){ - throw new SyntaxError("n-th rule couldn't be parsed ('" + formula + "')"); - } - - var a; - - if(parsed[1]){ - a = parseInt(parsed[1], 10); - if(isNaN(a)){ - if(parsed[1].charAt(0) === "-") a = -1; - else a = 1; - } - } else a = 0; - - return [ - a, - parsed[3] ? 
parseInt((parsed[2] || "") + parsed[3], 10) : 0 - ]; - } -} diff --git a/node_modules/cheerio/node_modules/css-select/package.json b/node_modules/cheerio/node_modules/css-select/package.json deleted file mode 100644 index dc14b25..0000000 --- a/node_modules/cheerio/node_modules/css-select/package.json +++ /dev/null @@ -1,61 +0,0 @@ -{ - "name": "css-select", - "version": "1.2.0", - "description": "a CSS selector compiler/engine", - "author": "Felix Boehm ", - "keywords": [ - "css", - "selector", - "sizzle" - ], - "repository": { - "type": "git", - "url": "git://github.com/fb55/css-select.git" - }, - "files": [ - "index.js", - "lib" - ], - "dependencies": { - "css-what": "2.1", - "domutils": "1.5.1", - "boolbase": "~1.0.0", - "nth-check": "~1.0.1" - }, - "devDependencies": { - "htmlparser2": "*", - "cheerio-soupselect": "*", - "mocha": "*", - "mocha-lcov-reporter": "*", - "coveralls": "*", - "istanbul": "*", - "expect.js": "*", - "jshint": "2" - }, - "scripts": { - "test": "mocha && npm run lint", - "lint": "jshint index.js lib/*.js test/*.js", - "lcov": "istanbul cover _mocha --report lcovonly -- -R spec", - "coveralls": "npm run lint && npm run lcov && (cat coverage/lcov.info | coveralls || exit 0)" - }, - "license": "BSD-like", - "jshintConfig": { - "eqeqeq": true, - "freeze": true, - "latedef": "nofunc", - "noarg": true, - "nonbsp": true, - "quotmark": "double", - "undef": true, - "unused": true, - "trailing": true, - "eqnull": true, - "proto": true, - "smarttabs": true, - "node": true, - "globals": { - "describe": true, - "it": true - } - } -} diff --git a/node_modules/cheerio/node_modules/htmlparser2/LICENSE b/node_modules/cheerio/node_modules/htmlparser2/LICENSE deleted file mode 100644 index 0a35e02..0000000 --- a/node_modules/cheerio/node_modules/htmlparser2/LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Copyright 2010, 2011, Chris Winberry . All rights reserved. -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. 
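The nth-check README, compile.js and parse.js removed just above document a two-step parse/compile API. A short usage sketch, restating the README's own example values; the only thing added is the `require("nth-check")` entry point, which is the package name given in the deleted package.json:

```js
// Hedged sketch of the deleted nth-check 1.x API: parse an nth-child formula,
// then compile it into a predicate over zero-based element indices.
var nthCheck = require("nth-check");

var parsed = nthCheck.parse("2n+3");   // [2, 3]
var check  = nthCheck.compile(parsed); // equivalent to nthCheck("2n+3")

// The CSS spec counts elements from 1, the compiled check from 0,
// so "2n+3" matches the 3rd, 5th, 7th, ... elements:
console.log(check(2)); // true
console.log(check(3)); // false
console.log(check(4)); // true
```

With the compile.js logic shown above, `[2, 3]` reduces to `b = 2` and `bMod = 0`, so the compiled predicate is simply `pos >= 2 && pos % 2 === 0`.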
\ No newline at end of file diff --git a/node_modules/cheerio/node_modules/htmlparser2/README.md b/node_modules/cheerio/node_modules/htmlparser2/README.md deleted file mode 100644 index 8c1c256..0000000 --- a/node_modules/cheerio/node_modules/htmlparser2/README.md +++ /dev/null @@ -1,91 +0,0 @@ -# htmlparser2 - -[![NPM version](http://img.shields.io/npm/v/htmlparser2.svg?style=flat)](https://npmjs.org/package/htmlparser2) -[![Downloads](https://img.shields.io/npm/dm/htmlparser2.svg?style=flat)](https://npmjs.org/package/htmlparser2) -[![Build Status](http://img.shields.io/travis/fb55/htmlparser2/master.svg?style=flat)](http://travis-ci.org/fb55/htmlparser2) -[![Coverage](http://img.shields.io/coveralls/fb55/htmlparser2.svg?style=flat)](https://coveralls.io/r/fb55/htmlparser2) - -A forgiving HTML/XML/RSS parser. The parser can handle streams and provides a callback interface. - -## Installation - npm install htmlparser2 - -A live demo of htmlparser2 is available [here](https://astexplorer.net/#/2AmVrGuGVJ). - -## Usage - -```javascript -var htmlparser = require("htmlparser2"); -var parser = new htmlparser.Parser({ - onopentag: function(name, attribs){ - if(name === "script" && attribs.type === "text/javascript"){ - console.log("JS! Hooray!"); - } - }, - ontext: function(text){ - console.log("-->", text); - }, - onclosetag: function(tagname){ - if(tagname === "script"){ - console.log("That's it?!"); - } - } -}, {decodeEntities: true}); -parser.write("Xyz ", - "expected": [ - { - "type": "tag", - "name": "head", - "attribs": {}, - "children": [ - { - "type": "script", - "name": "script", - "attribs": { - "language": "Javascript" - }, - "children": [ - { - "data": "var foo = \"\"; alert(2 > foo); var baz = 10 << 2; var zip = 10 >> 1; var yap = \"<<>>>><<\";", - "type": "text" - } - ] - } - ] - } - ] -} \ No newline at end of file diff --git a/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/05-tags_in_comment.json b/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/05-tags_in_comment.json deleted file mode 100644 index 2d22d9e..0000000 --- a/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/05-tags_in_comment.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "name": "Special char in comment", - "options": {}, - "html": "", - "expected": [ - { - "type": "tag", - "name": "head", - "attribs": {}, - "children": [ - { - "data": " commented out tags Test", - "type": "comment" - } - ] - } - ] -} \ No newline at end of file diff --git a/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/06-comment_in_script.json b/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/06-comment_in_script.json deleted file mode 100644 index 9a21cda..0000000 --- a/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/06-comment_in_script.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "name": "Script source in comment", - "options": {}, - "html": "", - "expected": [ - { - "type": "script", - "name": "script", - "attribs": {}, - "children": [ - { - "data": "", - "type": "text" - } - ] - } - ] -} \ No newline at end of file diff --git a/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/07-unescaped_in_style.json b/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/07-unescaped_in_style.json deleted file mode 100644 index 77438fd..0000000 --- 
a/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/07-unescaped_in_style.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "name": "Unescaped chars in style", - "options": {}, - "html": "", - "expected": [ - { - "type": "style", - "name": "style", - "attribs": { - "type": "text/css" - }, - "children": [ - { - "data": "\n body > p\n\t{ font-weight: bold; }", - "type": "text" - } - ] - } - ] -} \ No newline at end of file diff --git a/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/08-extra_spaces_in_tag.json b/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/08-extra_spaces_in_tag.json deleted file mode 100644 index 5c2492e..0000000 --- a/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/08-extra_spaces_in_tag.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "name": "Extra spaces in tag", - "options": {}, - "html": "the text", - "expected": [ - { - "type": "tag", - "name": "font", - "attribs": { - "size": "14" - }, - "children": [ - { - "data": "the text", - "type": "text" - } - ] - } - ] -} \ No newline at end of file diff --git a/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/09-unquoted_attrib.json b/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/09-unquoted_attrib.json deleted file mode 100644 index 543ccee..0000000 --- a/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/09-unquoted_attrib.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "name": "Unquoted attributes", - "options": {}, - "html": "the text", - "expected": [ - { - "type": "tag", - "name": "font", - "attribs": { - "size": "14" - }, - "children": [ - { - "data": "the text", - "type": "text" - } - ] - } - ] -} \ No newline at end of file diff --git a/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/10-singular_attribute.json b/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/10-singular_attribute.json deleted file mode 100644 index 544636e..0000000 --- a/node_modules/cheerio/node_modules/htmlparser2/node_modules/domhandler/test/cases/10-singular_attribute.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "Singular attribute", - "options": {}, - "html": "
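The htmlparser2 README above describes a callback-based Parser (its inline example is truncated here), while the deleted domutils test/utils.js and the domhandler fixture files build and assert whole DOM trees. A hedged sketch tying those pieces together; the sample markup is not preserved in the fixture above and is inferred from the "Unquoted attributes" case's expected output:

```js
// Sketch: build a DOM with htmlparser2 + DomHandler, following the makeDom
// helper from the deleted domutils test/utils.js shown earlier.
var htmlparser = require("htmlparser2");

function makeDom(markup) {
    var handler = new htmlparser.DomHandler();
    var parser = new htmlparser.Parser(handler);
    parser.write(markup);
    parser.done();
    return handler.dom;
}

// Markup inferred (not quoted) from the "Unquoted attributes" expected output.
var dom = makeDom("<font size=14>the text</font>");
console.log(dom[0].name);             // "font"
console.log(dom[0].attribs.size);     // "14"
console.log(dom[0].children[0].data); // "the text"
```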