// Mirror of https://github.com/trympet/nextcloud-artifacts-action.git (synced 2025-04-24 20:16:08 +02:00)
/******/ (() => { // webpackBootstrap
/******/ var __webpack_modules__ = ({

/***/ 2262:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.FileFinder = void 0;
const glob = __importStar(__nccwpck_require__(1269));
const fs_1 = __nccwpck_require__(5747);
const core_1 = __nccwpck_require__(4513);
const path = __importStar(__nccwpck_require__(5622));
const util_1 = __nccwpck_require__(1669);
const stats = util_1.promisify(fs_1.stat);
class FileFinder {
    constructor(searchPath, globOptions) {
        this.searchPath = searchPath;
        this.globOptions = globOptions || FileFinder.DefaultGlobOptions;
    }
    findFiles() {
        return __awaiter(this, void 0, void 0, function* () {
            const searchResults = [];
            const globber = yield glob.create(this.searchPath, this.globOptions);
            const rawSearchResults = yield globber.glob();
            /*
              Files are saved with case insensitivity. Uploading both a.txt and A.txt will cause one file to overwrite the other.
              Detect any files that could be overwritten for user awareness.
            */
            const set = new Set();
            /*
              Directories will be rejected if attempted to be uploaded. This includes empty
              directories, so filter any directories out of the raw search results.
            */
            for (const searchResult of rawSearchResults) {
                const fileStats = yield stats(searchResult);
                // isDirectory() returns false for symlinks if using fs.lstat(), make sure to use fs.stat() instead
                if (!fileStats.isDirectory()) {
                    core_1.debug(`File:${searchResult} was found using the provided searchPath`);
                    searchResults.push(searchResult);
                    // detect any files that would be overwritten because of case insensitivity
                    if (set.has(searchResult.toLowerCase())) {
                        core_1.info(`Uploads are case insensitive: ${searchResult} will be overwritten by another file with the same path`);
                    }
                    else {
                        set.add(searchResult.toLowerCase());
                    }
                }
                else {
                    core_1.debug(`Removing ${searchResult} from rawSearchResults because it is a directory`);
                }
            }
            // Calculate the root directory for the artifact using the search paths that were utilized
            const searchPaths = globber.getSearchPaths();
            if (searchPaths.length > 1) {
                core_1.info(`Multiple search paths detected. Calculating the least common ancestor of all paths`);
                const lcaSearchPath = this.getMultiPathLCA(searchPaths);
                core_1.info(`The least common ancestor is ${lcaSearchPath}. This will be the root directory of the artifact`);
                return {
                    filesToUpload: searchResults,
                    rootDirectory: lcaSearchPath
                };
            }
            /*
              Special case for a single file artifact that is uploaded without a directory or wildcard pattern. The directory structure is
              not preserved and the root directory will be the single file's parent directory.
            */
            if (searchResults.length === 1 && searchPaths[0] === searchResults[0]) {
                return {
                    filesToUpload: searchResults,
                    rootDirectory: path.dirname(searchResults[0])
                };
            }
            return {
                filesToUpload: searchResults,
                rootDirectory: searchPaths[0]
            };
        });
    }
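    // Example (hypothetical paths): given search paths '/home/user/out/bin' and
    // '/home/user/out/docs', the least common ancestor computed below is
    // '/home/user/out', which becomes the root directory of the artifact.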
    getMultiPathLCA(searchPaths) {
        if (searchPaths.length < 2) {
            throw new Error('At least two search paths must be provided');
        }
        const commonPaths = new Array();
        const splitPaths = new Array();
        let smallestPathLength = Number.MAX_SAFE_INTEGER;
        // split each of the search paths using the platform specific separator
        for (const searchPath of searchPaths) {
            core_1.debug(`Using search path ${searchPath}`);
            const splitSearchPath = path.normalize(searchPath).split(path.sep);
            // keep track of the smallest path length so that we don't accidentally later go out of bounds
            smallestPathLength = Math.min(smallestPathLength, splitSearchPath.length);
            splitPaths.push(splitSearchPath);
        }
        // on Unix-like file systems, the file separator exists at the beginning of the file path, make sure to preserve it
        if (searchPaths[0].startsWith(path.sep)) {
            commonPaths.push(path.sep);
        }
        let splitIndex = 0;
        // function to check if the paths are the same at a specific index
        function isPathTheSame() {
            const compare = splitPaths[0][splitIndex];
            for (let i = 1; i < splitPaths.length; i++) {
                if (compare !== splitPaths[i][splitIndex]) {
                    // a non-common index has been reached
                    return false;
                }
            }
            return true;
        }
        // loop over all the search paths until there is a non-common ancestor or we go out of bounds
        while (splitIndex < smallestPathLength) {
            if (!isPathTheSame()) {
                break;
            }
            // if all are the same, add to the end result & increment the index
            commonPaths.push(splitPaths[0][splitIndex]);
            splitIndex++;
        }
        return path.join(...commonPaths);
    }
}
exports.FileFinder = FileFinder;
FileFinder.DefaultGlobOptions = {
    followSymbolicLinks: true,
    implicitDescendants: true,
    omitBrokenSymbolicLinks: true
};
//# sourceMappingURL=FileFinder.js.map

/***/ }),

/***/ 4024:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Inputs = void 0;
const core = __importStar(__nccwpck_require__(4513));
const NoFileOption_1 = __nccwpck_require__(3294);
class Inputs {
    static get ArtifactName() {
        return core.getInput("name");
    }
    static get ArtifactPath() {
        return core.getInput("path");
    }
    static get Retention() {
        return core.getInput("retention-days");
    }
    static get Endpoint() {
        return core.getInput("nextcloud-url");
    }
    static get Username() {
        return core.getInput("nextcloud-username");
    }
    static get Password() {
        return core.getInput("nextcloud-password");
    }
    static get NoFileBehavior() {
        const notFoundAction = core.getInput("if-no-files-found") || NoFileOption_1.NoFileOption.warn;
        const noFileBehavior = NoFileOption_1.NoFileOption[notFoundAction];
        if (!noFileBehavior) {
            core.setFailed(`Unrecognized if-no-files-found input. Provided: ${notFoundAction}. Available options: ${Object.keys(NoFileOption_1.NoFileOption)}`);
        }
        return noFileBehavior;
    }
}
exports.Inputs = Inputs;
//# sourceMappingURL=Inputs.js.map

/***/ }),
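
/*
  Usage sketch (assumed workflow snippet, not taken from the bundled source) showing how
  the inputs read by the Inputs class above map to a step configuration:

    - uses: trympet/nextcloud-artifacts-action@v1
      with:
        name: my-artifact
        path: 'dist/**'
        if-no-files-found: warn
        nextcloud-url: https://cloud.example.com
        nextcloud-username: ${{ secrets.NEXTCLOUD_USERNAME }}
        nextcloud-password: ${{ secrets.NEXTCLOUD_PASSWORD }}

  The version tag, domain, and secret names are illustrative assumptions.
*/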

/***/ 3294:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.NoFileOption = void 0;
var NoFileOption;
(function (NoFileOption) {
    /**
     * Default. Output a warning but do not fail the action
     */
    NoFileOption["warn"] = "warn";
    /**
     * Fail the action with an error message
     */
    NoFileOption["error"] = "error";
    /**
     * Do not output any warnings or errors, the action does not fail
     */
    NoFileOption["ignore"] = "ignore";
})(NoFileOption = exports.NoFileOption || (exports.NoFileOption = {}));
//# sourceMappingURL=NoFileOption.js.map

/***/ }),

/***/ 6762:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const Inputs_1 = __nccwpck_require__(4024);
const NextcloudArtifact_1 = __nccwpck_require__(7643);
const core = __importStar(__nccwpck_require__(4513));
try {
    var artifact = new NextcloudArtifact_1.NextcloudArtifact(Inputs_1.Inputs.ArtifactName, Inputs_1.Inputs.ArtifactPath, Inputs_1.Inputs.NoFileBehavior);
    artifact.run()
        .catch(e => core.setFailed(e));
    core.info("Finished");
}
catch (error) {
    core.setFailed(error.message);
}
//# sourceMappingURL=nextcloud-artifacts.js.map

/***/ }),

/***/ 7643:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.NextcloudArtifact = void 0;
const core = __importStar(__nccwpck_require__(4513));
const FileFinder_1 = __nccwpck_require__(2262);
const Inputs_1 = __nccwpck_require__(4024);
const NextcloudClient_1 = __nccwpck_require__(3199);
const NoFileOption_1 = __nccwpck_require__(3294);
class NextcloudArtifact {
    constructor(name, path, errorBehavior) {
        this.name = name;
        this.path = path;
        this.errorBehavior = errorBehavior;
    }
    run() {
        return __awaiter(this, void 0, void 0, function* () {
            const fileFinder = new FileFinder_1.FileFinder(this.path);
            const files = yield fileFinder.findFiles();
            if (files.filesToUpload.length > 0) {
                yield this.uploadFiles(files);
            }
            else {
                this.logNoFilesFound();
            }
        });
    }
    uploadFiles(files) {
        return __awaiter(this, void 0, void 0, function* () {
            this.logUpload(files.filesToUpload.length, files.rootDirectory);
            const client = new NextcloudClient_1.NextcloudClient(Inputs_1.Inputs.Endpoint, this.name, files.rootDirectory);
            yield client.uploadFiles(files.filesToUpload);
        });
    }
    logUpload(fileCount, rootDirectory) {
        const s = fileCount === 1 ? '' : 's';
        core.info(`With the provided path, there will be ${fileCount} file${s} uploaded`);
        core.debug(`Root artifact directory is ${rootDirectory}`);
        if (fileCount > 10000) {
            core.warning(`There are over 10,000 files in this artifact; consider creating an archive before upload to improve upload performance.`);
        }
    }
    logNoFilesFound() {
        const errorMessage = `No files were found with the provided path: ${this.path}. No artifacts will be uploaded.`;
        switch (this.errorBehavior) {
            case NoFileOption_1.NoFileOption.warn: {
                core.warning(errorMessage);
                break;
            }
            case NoFileOption_1.NoFileOption.error: {
                core.setFailed(errorMessage);
                break;
            }
            case NoFileOption_1.NoFileOption.ignore: {
                core.info(errorMessage);
                break;
            }
        }
    }
}
exports.NextcloudArtifact = NextcloudArtifact;
//# sourceMappingURL=NextcloudArtifact.js.map

/***/ }),
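
/*
  Overview of the flow implemented by NextcloudArtifact above and NextcloudClient below:
  find the files matching the 'path' input, copy and zip them under the artifact name,
  upload the archive over WebDAV to /artifacts/<guid>/<name>.zip, then create a share
  link via the OCS sharing API.
*/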

/***/ 3199:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.NextcloudClient = void 0;
const fsSync = __importStar(__nccwpck_require__(5747));
const path = __importStar(__nccwpck_require__(5622));
const core = __importStar(__nccwpck_require__(4513));
const os = __importStar(__nccwpck_require__(2087));
const archiver = __importStar(__nccwpck_require__(5899));
const node_fetch_1 = __importDefault(__nccwpck_require__(5992));
const Inputs_1 = __nccwpck_require__(4024);
const btoa_1 = __importDefault(__nccwpck_require__(6371));
const uuid_1 = __nccwpck_require__(4043);
const webdav = __importStar(__nccwpck_require__(4758));
const fs = fsSync.promises;
class NextcloudClient {
    constructor(endpoint, artifact, rootDirectory) {
        this.endpoint = endpoint;
        this.artifact = artifact;
        this.rootDirectory = rootDirectory;
        this.guid = uuid_1.v4();
        this.headers = { 'Authorization': 'Basic ' + btoa_1.default(`${Inputs_1.Inputs.Username}:${Inputs_1.Inputs.Password}`) };
        this.davClient = webdav.createClient(`${this.endpoint}/remote.php/dav/files/${Inputs_1.Inputs.Username}`, {
            username: Inputs_1.Inputs.Username,
            password: Inputs_1.Inputs.Password,
        });
    }
    uploadFiles(files) {
        return __awaiter(this, void 0, void 0, function* () {
            core.info("Preparing upload...");
            const spec = this.uploadSpec(files);
            core.info("Zipping files...");
            var zip = yield this.zipFiles(spec);
            core.info("Uploading to Nextcloud...");
            const path = yield this.upload(zip);
            core.info(`File path: ${path}`);
            core.info("Sharing file...");
            yield this.shareFile(path);
        });
    }
    uploadSpec(files) {
        const specifications = [];
        if (!fsSync.existsSync(this.rootDirectory)) {
            throw new Error(`The rootDirectory ${this.rootDirectory} does not exist`);
        }
        if (!fsSync.lstatSync(this.rootDirectory).isDirectory()) {
            throw new Error(`The rootDirectory ${this.rootDirectory} is not a valid directory`);
        }
        // Normalize and resolve, this allows for either absolute or relative paths to be used
        let root = path.normalize(this.rootDirectory);
        root = path.resolve(root);
        /*
          Example to demonstrate behavior

          Input:
            artifactName: my-artifact
            rootDirectory: '/home/user/files/plz-upload'
            artifactFiles: [
              '/home/user/files/plz-upload/file1.txt',
              '/home/user/files/plz-upload/file2.txt',
              '/home/user/files/plz-upload/dir/file3.txt'
            ]

          Output:
            specifications: [
              ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file1.txt'],
              ['/home/user/files/plz-upload/file2.txt', 'my-artifact/file2.txt'],
              ['/home/user/files/plz-upload/dir/file3.txt', 'my-artifact/dir/file3.txt']
            ]
        */
        for (let file of files) {
            if (!fsSync.existsSync(file)) {
                throw new Error(`File ${file} does not exist`);
            }
            if (!fsSync.lstatSync(file).isDirectory()) {
                // Normalize and resolve, this allows for either absolute or relative paths to be used
                file = path.normalize(file);
                file = path.resolve(file);
                if (!file.startsWith(root)) {
                    throw new Error(`The rootDirectory: ${root} is not a parent directory of the file: ${file}`);
                }
                // Check for forbidden characters in file paths that will be rejected during upload
                const uploadPath = file.replace(root, '');
                /*
                  uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all
                  be saved in the same container. The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts.

                  path.join handles all the following cases and would return 'artifact-name/file-to-upload.txt':
                    join('artifact-name/', 'file-to-upload.txt')
                    join('artifact-name/', '/file-to-upload.txt')
                    join('artifact-name', 'file-to-upload.txt')
                    join('artifact-name', '/file-to-upload.txt')
                */
                specifications.push({
                    absolutePath: file,
                    uploadPath: path.join(this.artifact, uploadPath)
                });
            }
            else {
                // Directories are rejected by the server during upload
                core.debug(`Removing ${file} from rawSearchResults because it is a directory`);
            }
        }
        return specifications;
    }
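    /*
      Layout produced by zipFiles() below (names are illustrative): files are copied into
      <os.tmpdir()>/<guid>/artifact-<artifact>/<artifact>/... and that directory is then
      zipped to <os.tmpdir()>/<guid>/artifact-<artifact>/<artifact>.zip, which is the
      archive handed to upload().
    */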
    zipFiles(specs) {
        return __awaiter(this, void 0, void 0, function* () {
            const tempArtifactDir = path.join(os.tmpdir(), this.guid);
            const artifactPath = path.join(tempArtifactDir, `artifact-${this.artifact}`);
            yield fs.mkdir(path.join(artifactPath, this.artifact), { recursive: true });
            const copies = [];
            for (let spec of specs) {
                const dstpath = path.join(artifactPath, spec.uploadPath);
                const dstDir = path.dirname(dstpath);
                if (!fsSync.existsSync(dstDir)) {
                    yield fs.mkdir(dstDir, { recursive: true });
                }
                copies.push(fs.copyFile(spec.absolutePath, dstpath));
            }
            yield Promise.all(copies);
            core.info(`files: ${yield fs.readdir(path.join(artifactPath, this.artifact))}`);
            const archivePath = path.join(artifactPath, `${this.artifact}.zip`);
            yield this.zip(path.join(artifactPath, this.artifact), archivePath);
            core.info(`archive stat: ${(yield fs.stat(archivePath)).size}`);
            return archivePath;
        });
    }
    zip(dirpath, destpath) {
        return __awaiter(this, void 0, void 0, function* () {
            const archive = archiver.create('zip', { zlib: { level: 9 } });
            const stream = archive.directory(dirpath, false)
                .pipe(fsSync.createWriteStream(destpath));
            yield archive.finalize();
            return yield new Promise((resolve, reject) => {
                stream.on('error', e => reject(e))
                    .on('close', () => resolve());
            });
        });
    }
    upload(file) {
        return __awaiter(this, void 0, void 0, function* () {
            const remoteFileDir = `/artifacts/${this.guid}`;
            core.info("Checking directory...");
            if (!(yield this.davClient.exists(remoteFileDir))) {
                core.info("Creating directory...");
                yield this.davClient.createDirectory(remoteFileDir, { recursive: true });
            }
            const remoteFilePath = `${remoteFileDir}/${this.artifact}.zip`;
            core.info(`Transferring file... (${file})`);
            const stream = this.davClient.createWriteStream(remoteFilePath);
            fsSync.createReadStream(file)
                .pipe(stream);
            yield new Promise((resolve, reject) => {
                stream.on('error', () => reject("Failed to upload file"))
                    .on('pipe', () => core.info("pipe"))
                    .on('finish', () => resolve());
            });
            core.info("finish");
            return remoteFilePath;
        });
    }
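    /*
      shareFile() below calls the Nextcloud OCS sharing API. In that API, shareType 3
      requests a public link share and permissions 1 grants read-only access (values as
      documented by Nextcloud; noted here for context).
    */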
    shareFile(remoteFilePath) {
        return __awaiter(this, void 0, void 0, function* () {
            const url = this.endpoint + `/ocs/v2.php/apps/files_sharing/api/v1/shares`;
            const body = {
                path: remoteFilePath,
                shareType: 3,
                publicUpload: "false",
                permissions: 1,
            };
            const res = yield node_fetch_1.default(url, {
                method: 'PUT',
                headers: this.headers,
                body: JSON.stringify(body),
            });
            core.info(`Share API response status: ${res.status}`);
            core.info(yield res.text());
        });
    }
}
exports.NextcloudClient = NextcloudClient;
//# sourceMappingURL=NextcloudClient.js.map

/***/ }),

/***/ 2282:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.issue = exports.issueCommand = void 0;
const os = __importStar(__nccwpck_require__(2087));
const utils_1 = __nccwpck_require__(2084);
/**
 * Commands
 *
 * Command Format:
 *   ::name key=value,key=value::message
 *
 * Examples:
 *   ::warning::This is the message
 *   ::set-env name=MY_VAR::some value
 */
function issueCommand(command, properties, message) {
    const cmd = new Command(command, properties, message);
    process.stdout.write(cmd.toString() + os.EOL);
}
exports.issueCommand = issueCommand;
function issue(name, message = '') {
    issueCommand(name, {}, message);
}
exports.issue = issue;
const CMD_STRING = '::';
class Command {
    constructor(command, properties, message) {
        if (!command) {
            command = 'missing.command';
        }
        this.command = command;
        this.properties = properties;
        this.message = message;
    }
    toString() {
        let cmdStr = CMD_STRING + this.command;
        if (this.properties && Object.keys(this.properties).length > 0) {
            cmdStr += ' ';
            let first = true;
            for (const key in this.properties) {
                if (this.properties.hasOwnProperty(key)) {
                    const val = this.properties[key];
                    if (val) {
                        if (first) {
                            first = false;
                        }
                        else {
                            cmdStr += ',';
                        }
                        cmdStr += `${key}=${escapeProperty(val)}`;
                    }
                }
            }
        }
        cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
        return cmdStr;
    }
}
function escapeData(s) {
    return utils_1.toCommandValue(s)
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A');
}
function escapeProperty(s) {
    return utils_1.toCommandValue(s)
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A')
        .replace(/:/g, '%3A')
        .replace(/,/g, '%2C');
}
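// Escaping note: '%', CR and LF are percent-encoded above because they would otherwise
// break the single-line ::name key=value::message workflow-command format; property
// values additionally escape ':' and ',' since those characters delimit the command
// name and its key=value pairs.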
//# sourceMappingURL=command.js.map

/***/ }),

/***/ 4513:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
const command_1 = __nccwpck_require__(2282);
const file_command_1 = __nccwpck_require__(6065);
const utils_1 = __nccwpck_require__(2084);
const os = __importStar(__nccwpck_require__(2087));
const path = __importStar(__nccwpck_require__(5622));
/**
 * The code to exit an action
 */
var ExitCode;
(function (ExitCode) {
    /**
     * A code indicating that the action was successful
     */
    ExitCode[ExitCode["Success"] = 0] = "Success";
    /**
     * A code indicating that the action was a failure
     */
    ExitCode[ExitCode["Failure"] = 1] = "Failure";
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/**
 * Sets env variable for this action and future actions in the job
 * @param name the name of the variable to set
 * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable(name, val) {
    const convertedVal = utils_1.toCommandValue(val);
    process.env[name] = convertedVal;
    const filePath = process.env['GITHUB_ENV'] || '';
    if (filePath) {
        const delimiter = '_GitHubActionsFileCommandDelimeter_';
        const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
        file_command_1.issueCommand('ENV', commandValue);
    }
    else {
        command_1.issueCommand('set-env', { name }, convertedVal);
    }
}
exports.exportVariable = exportVariable;
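/*
  The GITHUB_ENV branch above writes a heredoc-style record to the env file, e.g.
  (illustrative variable and value):

    MY_VAR<<_GitHubActionsFileCommandDelimeter_
    some value
    _GitHubActionsFileCommandDelimeter_
*/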
/**
 * Registers a secret which will get masked from logs
 * @param secret value of the secret
 */
function setSecret(secret) {
    command_1.issueCommand('add-mask', {}, secret);
}
exports.setSecret = setSecret;
/**
 * Prepends inputPath to the PATH (for this action and future actions)
 * @param inputPath
 */
function addPath(inputPath) {
    const filePath = process.env['GITHUB_PATH'] || '';
    if (filePath) {
        file_command_1.issueCommand('PATH', inputPath);
    }
    else {
        command_1.issueCommand('add-path', {}, inputPath);
    }
    process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
/**
 * Gets the value of an input.
 * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.
 * Returns an empty string if the value is not defined.
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string
 */
function getInput(name, options) {
    const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
    if (options && options.required && !val) {
        throw new Error(`Input required and not supplied: ${name}`);
    }
    if (options && options.trimWhitespace === false) {
        return val;
    }
    return val.trim();
}
exports.getInput = getInput;
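// Input lookup example: getInput('nextcloud-url') reads the INPUT_NEXTCLOUD-URL
// environment variable (spaces become underscores and the name is upper-cased).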
/**
 * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
 * Support boolean input list: `true | True | TRUE | false | False | FALSE` .
 * The return value is also in boolean type.
 * ref: https://yaml.org/spec/1.2/spec.html#id2804923
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns boolean
 */
function getBooleanInput(name, options) {
    const trueValue = ['true', 'True', 'TRUE'];
    const falseValue = ['false', 'False', 'FALSE'];
    const val = getInput(name, options);
    if (trueValue.includes(val))
        return true;
    if (falseValue.includes(val))
        return false;
    throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` +
        `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
}
exports.getBooleanInput = getBooleanInput;
/**
 * Sets the value of an output.
 *
 * @param name name of the output to set
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) {
    process.stdout.write(os.EOL);
    command_1.issueCommand('set-output', { name }, value);
}
exports.setOutput = setOutput;
/**
 * Enables or disables the echoing of commands into stdout for the rest of the step.
 * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
 *
 */
function setCommandEcho(enabled) {
    command_1.issue('echo', enabled ? 'on' : 'off');
}
exports.setCommandEcho = setCommandEcho;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/**
 * Sets the action status to failed.
 * When the action exits it will be with an exit code of 1
 * @param message add error issue message
 */
function setFailed(message) {
    process.exitCode = ExitCode.Failure;
    error(message);
}
exports.setFailed = setFailed;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/**
 * Gets whether Actions Step Debug is on or not
 */
function isDebug() {
    return process.env['RUNNER_DEBUG'] === '1';
}
exports.isDebug = isDebug;
/**
 * Writes debug message to user log
 * @param message debug message
 */
function debug(message) {
    command_1.issueCommand('debug', {}, message);
}
exports.debug = debug;
/**
 * Adds an error issue
 * @param message error issue message. Errors will be converted to string via toString()
 */
function error(message) {
    command_1.issue('error', message instanceof Error ? message.toString() : message);
}
exports.error = error;
/**
 * Adds a warning issue
 * @param message warning issue message. Errors will be converted to string via toString()
 */
function warning(message) {
    command_1.issue('warning', message instanceof Error ? message.toString() : message);
}
exports.warning = warning;
/**
 * Writes info to log with console.log.
 * @param message info message
 */
function info(message) {
    process.stdout.write(message + os.EOL);
}
exports.info = info;
/**
 * Begin an output group.
 *
 * Output until the next `groupEnd` will be foldable in this group
 *
 * @param name The name of the output group
 */
function startGroup(name) {
    command_1.issue('group', name);
}
exports.startGroup = startGroup;
/**
 * End an output group.
 */
function endGroup() {
    command_1.issue('endgroup');
}
exports.endGroup = endGroup;
/**
 * Wrap an asynchronous function call in a group.
 *
 * Returns the same type as the function itself.
 *
 * @param name The name of the group
 * @param fn The function to wrap in the group
 */
function group(name, fn) {
    return __awaiter(this, void 0, void 0, function* () {
        startGroup(name);
        let result;
        try {
            result = yield fn();
        }
        finally {
            endGroup();
        }
        return result;
    });
}
exports.group = group;
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/**
 * Saves state for current action, the state can only be retrieved by this action's post job execution.
 *
 * @param name name of the state to store
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
    command_1.issueCommand('save-state', { name }, value);
}
exports.saveState = saveState;
/**
 * Gets the value of a state set by this action's main execution.
 *
 * @param name name of the state to get
 * @returns string
 */
function getState(name) {
    return process.env[`STATE_${name}`] || '';
}
exports.getState = getState;
//# sourceMappingURL=core.js.map

/***/ }),

/***/ 6065:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

// For internal use, subject to change.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.issueCommand = void 0;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = __importStar(__nccwpck_require__(5747));
const os = __importStar(__nccwpck_require__(2087));
const utils_1 = __nccwpck_require__(2084);
function issueCommand(command, message) {
    const filePath = process.env[`GITHUB_${command}`];
    if (!filePath) {
        throw new Error(`Unable to find environment variable for file command ${command}`);
    }
    if (!fs.existsSync(filePath)) {
        throw new Error(`Missing file at path: ${filePath}`);
    }
    fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
        encoding: 'utf8'
    });
}
exports.issueCommand = issueCommand;
//# sourceMappingURL=file-command.js.map

/***/ }),

/***/ 2084:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.toCommandValue = void 0;
/**
 * Sanitizes an input into a string so it can be passed into issueCommand safely
 * @param input input to sanitize into a string
 */
function toCommandValue(input) {
    if (input === null || input === undefined) {
        return '';
    }
    else if (typeof input === 'string' || input instanceof String) {
        return input;
    }
    return JSON.stringify(input);
}
exports.toCommandValue = toCommandValue;
//# sourceMappingURL=utils.js.map

/***/ }),

/***/ 1269:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.create = void 0;
const internal_globber_1 = __nccwpck_require__(6578);
/**
 * Constructs a globber
 *
 * @param patterns Patterns separated by newlines
 * @param options Glob options
 */
function create(patterns, options) {
    return __awaiter(this, void 0, void 0, function* () {
        return yield internal_globber_1.DefaultGlobber.create(patterns, options);
    });
}
exports.create = create;
//# sourceMappingURL=glob.js.map

/***/ }),
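
/*
  Example of how the FileFinder above feeds this module (illustrative pattern):
  glob.create('dist/**', { followSymbolicLinks: true, implicitDescendants: true,
  omitBrokenSymbolicLinks: true }) returns a globber whose glob() yields the matching
  paths; multiple patterns may be passed separated by newlines.
*/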

/***/ 8629:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getOptions = void 0;
const core = __importStar(__nccwpck_require__(4513));
/**
 * Returns a copy with defaults filled in.
 */
function getOptions(copy) {
    const result = {
        followSymbolicLinks: true,
        implicitDescendants: true,
        omitBrokenSymbolicLinks: true
    };
    if (copy) {
        if (typeof copy.followSymbolicLinks === 'boolean') {
            result.followSymbolicLinks = copy.followSymbolicLinks;
            core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`);
        }
        if (typeof copy.implicitDescendants === 'boolean') {
            result.implicitDescendants = copy.implicitDescendants;
            core.debug(`implicitDescendants '${result.implicitDescendants}'`);
        }
        if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {
            result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
            core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
        }
    }
    return result;
}
exports.getOptions = getOptions;
//# sourceMappingURL=internal-glob-options-helper.js.map

/***/ }),

/***/ 6578:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __asyncValues = (this && this.__asyncValues) || function (o) {
    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
    var m = o[Symbol.asyncIterator], i;
    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
    var g = generator.apply(thisArg, _arguments || []), i, q = [];
    return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
    function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
    function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
    function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
    function fulfill(value) { resume("next", value); }
    function reject(value) { resume("throw", value); }
    function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DefaultGlobber = void 0;
const core = __importStar(__nccwpck_require__(4513));
const fs = __importStar(__nccwpck_require__(5747));
const globOptionsHelper = __importStar(__nccwpck_require__(8629));
const path = __importStar(__nccwpck_require__(5622));
const patternHelper = __importStar(__nccwpck_require__(6289));
const internal_match_kind_1 = __nccwpck_require__(6800);
const internal_pattern_1 = __nccwpck_require__(484);
const internal_search_state_1 = __nccwpck_require__(2853);
const IS_WINDOWS = process.platform === 'win32';
class DefaultGlobber {
    constructor(options) {
        this.patterns = [];
        this.searchPaths = [];
        this.options = globOptionsHelper.getOptions(options);
    }
    getSearchPaths() {
        // Return a copy
        return this.searchPaths.slice();
    }
    glob() {
        var e_1, _a;
        return __awaiter(this, void 0, void 0, function* () {
            const result = [];
            try {
                for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
                    const itemPath = _c.value;
                    result.push(itemPath);
                }
            }
            catch (e_1_1) { e_1 = { error: e_1_1 }; }
            finally {
                try {
                    if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
                }
                finally { if (e_1) throw e_1.error; }
            }
            return result;
        });
    }
    globGenerator() {
        return __asyncGenerator(this, arguments, function* globGenerator_1() {
            // Fill in default options
            const options = globOptionsHelper.getOptions(this.options);
            // Implicit descendants?
            const patterns = [];
            for (const pattern of this.patterns) {
                patterns.push(pattern);
                if (options.implicitDescendants &&
                    (pattern.trailingSeparator ||
                        pattern.segments[pattern.segments.length - 1] !== '**')) {
                    patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**')));
                }
            }
            // Push the search paths
            const stack = [];
            for (const searchPath of patternHelper.getSearchPaths(patterns)) {
                core.debug(`Search path '${searchPath}'`);
                // Exists?
                try {
                    // Intentionally using lstat. Detection for broken symlink
                    // will be performed later (if following symlinks).
                    yield __await(fs.promises.lstat(searchPath));
                }
                catch (err) {
                    if (err.code === 'ENOENT') {
                        continue;
                    }
                    throw err;
                }
                stack.unshift(new internal_search_state_1.SearchState(searchPath, 1));
            }
            // Search
            const traversalChain = []; // used to detect cycles
            while (stack.length) {
                // Pop
                const item = stack.pop();
                // Match?
                const match = patternHelper.match(patterns, item.path);
                const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path);
                if (!match && !partialMatch) {
                    continue;
                }
                // Stat
                const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain)
                // Broken symlink, or symlink cycle detected, or no longer exists
                );
                // Broken symlink, or symlink cycle detected, or no longer exists
                if (!stats) {
                    continue;
                }
                // Directory
                if (stats.isDirectory()) {
                    // Matched
                    if (match & internal_match_kind_1.MatchKind.Directory) {
                        yield yield __await(item.path);
                    }
                    // Descend?
                    else if (!partialMatch) {
                        continue;
                    }
                    // Push the child items in reverse
                    const childLevel = item.level + 1;
                    const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel));
                    stack.push(...childItems.reverse());
                }
                // File
                else if (match & internal_match_kind_1.MatchKind.File) {
                    yield yield __await(item.path);
                }
            }
        });
    }
    /**
     * Constructs a DefaultGlobber
     */
    static create(patterns, options) {
        return __awaiter(this, void 0, void 0, function* () {
            const result = new DefaultGlobber(options);
            if (IS_WINDOWS) {
                patterns = patterns.replace(/\r\n/g, '\n');
                patterns = patterns.replace(/\r/g, '\n');
            }
            const lines = patterns.split('\n').map(x => x.trim());
            for (const line of lines) {
                // Empty or comment
                if (!line || line.startsWith('#')) {
                    continue;
                }
                // Pattern
                else {
                    result.patterns.push(new internal_pattern_1.Pattern(line));
                }
            }
            result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns));
            return result;
        });
    }
    static stat(item, options, traversalChain) {
        return __awaiter(this, void 0, void 0, function* () {
            // Note:
            // `stat` returns info about the target of a symlink (or symlink chain)
            // `lstat` returns info about a symlink itself
            let stats;
            if (options.followSymbolicLinks) {
                try {
                    // Use `stat` (following symlinks)
                    stats = yield fs.promises.stat(item.path);
                }
                catch (err) {
                    if (err.code === 'ENOENT') {
                        if (options.omitBrokenSymbolicLinks) {
                            core.debug(`Broken symlink '${item.path}'`);
                            return undefined;
                        }
                        throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`);
                    }
                    throw err;
                }
            }
            else {
                // Use `lstat` (not following symlinks)
                stats = yield fs.promises.lstat(item.path);
            }
            // Note, isDirectory() returns false for the lstat of a symlink
            if (stats.isDirectory() && options.followSymbolicLinks) {
                // Get the realpath
                const realPath = yield fs.promises.realpath(item.path);
                // Fixup the traversal chain to match the item level
                while (traversalChain.length >= item.level) {
                    traversalChain.pop();
                }
                // Test for a cycle
                if (traversalChain.some((x) => x === realPath)) {
                    core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
                    return undefined;
                }
                // Update the traversal chain
                traversalChain.push(realPath);
            }
            return stats;
        });
    }
}
exports.DefaultGlobber = DefaultGlobber;
//# sourceMappingURL=internal-globber.js.map

/***/ }),

/***/ 6800:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.MatchKind = void 0;
/**
 * Indicates whether a pattern matches a path
 */
var MatchKind;
(function (MatchKind) {
    /** Not matched */
    MatchKind[MatchKind["None"] = 0] = "None";
    /** Matched if the path is a directory */
    MatchKind[MatchKind["Directory"] = 1] = "Directory";
    /** Matched if the path is a regular file */
    MatchKind[MatchKind["File"] = 2] = "File";
    /** Matched */
    MatchKind[MatchKind["All"] = 3] = "All";
})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));
//# sourceMappingURL=internal-match-kind.js.map

/***/ }),

/***/ 7602:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0;
const path = __importStar(__nccwpck_require__(5622));
const assert_1 = __importDefault(__nccwpck_require__(2357));
const IS_WINDOWS = process.platform === 'win32';
/**
 * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
 *
 * For example, on Linux/macOS:
 * - `/ => /`
 * - `/hello => /`
 *
 * For example, on Windows:
 * - `C:\ => C:\`
 * - `C:\hello => C:\`
 * - `C: => C:`
 * - `C:hello => C:`
 * - `\ => \`
 * - `\hello => \`
 * - `\\hello => \\hello`
 * - `\\hello\world => \\hello\world`
 */
function dirname(p) {
    // Normalize slashes and trim unnecessary trailing slash
    p = safeTrimTrailingSeparator(p);
    // Windows UNC root, e.g. \\hello or \\hello\world
    if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) {
        return p;
    }
    // Get dirname
    let result = path.dirname(p);
    // Trim trailing slash for Windows UNC root, e.g. \\hello\world\
    if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) {
        result = safeTrimTrailingSeparator(result);
    }
    return result;
}
exports.dirname = dirname;
/**
 * Roots the path if not already rooted. On Windows, relative roots like `\`
 * or `C:` are expanded based on the current working directory.
 */
function ensureAbsoluteRoot(root, itemPath) {
    assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
    assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
    // Already rooted
    if (hasAbsoluteRoot(itemPath)) {
        return itemPath;
    }
    // Windows
    if (IS_WINDOWS) {
        // Check for itemPath like C: or C:foo
        if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) {
            let cwd = process.cwd();
            assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
            // Drive letter matches cwd? Expand to cwd
            if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {
                // Drive only, e.g. C:
                if (itemPath.length === 2) {
                    // Preserve specified drive letter case (upper or lower)
                    return `${itemPath[0]}:\\${cwd.substr(3)}`;
                }
                // Drive + path, e.g. C:foo
                else {
                    if (!cwd.endsWith('\\')) {
                        cwd += '\\';
                    }
                    // Preserve specified drive letter case (upper or lower)
                    return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`;
                }
            }
            // Different drive
            else {
                return `${itemPath[0]}:\\${itemPath.substr(2)}`;
            }
        }
        // Check for itemPath like \ or \foo
        else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) {
            const cwd = process.cwd();
            assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
            return `${cwd[0]}:\\${itemPath.substr(1)}`;
|
||
}
|
||
}
|
||
assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
|
||
// Otherwise ensure root ends with a separator
|
||
if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) {
|
||
// Intentionally empty
|
||
}
|
||
else {
|
||
// Append separator
|
||
root += path.sep;
|
||
}
|
||
return root + itemPath;
|
||
}
|
||
exports.ensureAbsoluteRoot = ensureAbsoluteRoot;
|
||
/**
|
||
* On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
|
||
* `\\hello\share` and `C:\hello` (and using alternate separator).
|
||
*/
|
||
function hasAbsoluteRoot(itemPath) {
|
||
assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
|
||
// Normalize separators
|
||
itemPath = normalizeSeparators(itemPath);
|
||
// Windows
|
||
if (IS_WINDOWS) {
|
||
// E.g. \\hello\share or C:\hello
|
||
return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath);
|
||
}
|
||
// E.g. /hello
|
||
return itemPath.startsWith('/');
|
||
}
|
||
exports.hasAbsoluteRoot = hasAbsoluteRoot;
|
||
/**
|
||
* On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
|
||
* `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
|
||
*/
|
||
function hasRoot(itemPath) {
|
||
assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`);
|
||
// Normalize separators
|
||
itemPath = normalizeSeparators(itemPath);
|
||
// Windows
|
||
if (IS_WINDOWS) {
|
||
// E.g. \ or \hello or \\hello
|
||
// E.g. C: or C:\hello
|
||
return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath);
|
||
}
|
||
// E.g. /hello
|
||
return itemPath.startsWith('/');
|
||
}
|
||
exports.hasRoot = hasRoot;
|
||
/**
|
||
* Removes redundant slashes and converts `/` to `\` on Windows
|
||
*/
|
||
function normalizeSeparators(p) {
|
||
p = p || '';
|
||
// Windows
|
||
if (IS_WINDOWS) {
|
||
// Convert slashes on Windows
|
||
p = p.replace(/\//g, '\\');
|
||
// Remove redundant slashes
|
||
const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello
|
||
return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC
|
||
}
|
||
// Remove redundant slashes
|
||
return p.replace(/\/\/+/g, '/');
|
||
}
|
||
exports.normalizeSeparators = normalizeSeparators;
|
||
/**
|
||
* Normalizes the path separators and trims the trailing separator (when safe).
|
||
* For example, `/foo/ => /foo` but `/ => /`
|
||
*/
|
||
function safeTrimTrailingSeparator(p) {
|
||
// Short-circuit if empty
|
||
if (!p) {
|
||
return '';
|
||
}
|
||
// Normalize separators
|
||
p = normalizeSeparators(p);
|
||
// No trailing slash
|
||
if (!p.endsWith(path.sep)) {
|
||
return p;
|
||
}
|
||
// Check '/' on Linux/macOS and '\' on Windows
|
||
if (p === path.sep) {
|
||
return p;
|
||
}
|
||
// On Windows check if drive root. E.g. C:\
|
||
if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) {
|
||
return p;
|
||
}
|
||
// Otherwise trim trailing slash
|
||
return p.substr(0, p.length - 1);
|
||
}
|
||
exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
|
||
//# sourceMappingURL=internal-path-helper.js.map
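// Quick reference for the helpers above, with behaviour taken from their doc comments.
// A sketch assuming a Linux runner where path.sep === '/':
//
//   normalizeSeparators('/foo//bar/')         // => '/foo/bar/'
//   safeTrimTrailingSeparator('/foo/bar/')    // => '/foo/bar'
//   safeTrimTrailingSeparator('/')            // => '/'      (root is preserved)
//   dirname('/foo/bar')                       // => '/foo'
//   hasRoot('/foo')                           // => true
//   ensureAbsoluteRoot('/work', 'src/app')    // => '/work/src/app'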
|
||
|
||
/***/ }),
|
||
|
||
/***/ 4114:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||
if (k2 === undefined) k2 = k;
|
||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||
}) : (function(o, m, k, k2) {
|
||
if (k2 === undefined) k2 = k;
|
||
o[k2] = m[k];
|
||
}));
|
||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||
}) : function(o, v) {
|
||
o["default"] = v;
|
||
});
|
||
var __importStar = (this && this.__importStar) || function (mod) {
|
||
if (mod && mod.__esModule) return mod;
|
||
var result = {};
|
||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||
__setModuleDefault(result, mod);
|
||
return result;
|
||
};
|
||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.Path = void 0;
|
||
const path = __importStar(__nccwpck_require__(5622));
|
||
const pathHelper = __importStar(__nccwpck_require__(7602));
|
||
const assert_1 = __importDefault(__nccwpck_require__(2357));
|
||
const IS_WINDOWS = process.platform === 'win32';
|
||
/**
|
||
* Helper class for parsing paths into segments
|
||
*/
|
||
class Path {
|
||
/**
|
||
* Constructs a Path
|
||
* @param itemPath Path or array of segments
|
||
*/
|
||
constructor(itemPath) {
|
||
this.segments = [];
|
||
// String
|
||
if (typeof itemPath === 'string') {
|
||
assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`);
|
||
// Normalize slashes and trim unnecessary trailing slash
|
||
itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
|
||
// Not rooted
|
||
if (!pathHelper.hasRoot(itemPath)) {
|
||
this.segments = itemPath.split(path.sep);
|
||
}
|
||
// Rooted
|
||
else {
|
||
// Add all segments, while not at the root
|
||
let remaining = itemPath;
|
||
let dir = pathHelper.dirname(remaining);
|
||
while (dir !== remaining) {
|
||
// Add the segment
|
||
const basename = path.basename(remaining);
|
||
this.segments.unshift(basename);
|
||
// Truncate the last segment
|
||
remaining = dir;
|
||
dir = pathHelper.dirname(remaining);
|
||
}
|
||
// Remainder is the root
|
||
this.segments.unshift(remaining);
|
||
}
|
||
}
|
||
// Array
|
||
else {
|
||
// Must not be empty
|
||
assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
|
||
// Each segment
|
||
for (let i = 0; i < itemPath.length; i++) {
|
||
let segment = itemPath[i];
|
||
// Must not be empty
|
||
assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`);
|
||
// Normalize slashes
|
||
segment = pathHelper.normalizeSeparators(itemPath[i]);
|
||
// Root segment
|
||
if (i === 0 && pathHelper.hasRoot(segment)) {
|
||
segment = pathHelper.safeTrimTrailingSeparator(segment);
|
||
assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
|
||
this.segments.push(segment);
|
||
}
|
||
// All other segments
|
||
else {
|
||
// Must not contain slash
|
||
assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
|
||
this.segments.push(segment);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Converts the path to it's string representation
|
||
*/
|
||
toString() {
|
||
// First segment
|
||
let result = this.segments[0];
|
||
// All others
|
||
let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result));
|
||
for (let i = 1; i < this.segments.length; i++) {
|
||
if (skipSlash) {
|
||
skipSlash = false;
|
||
}
|
||
else {
|
||
result += path.sep;
|
||
}
|
||
result += this.segments[i];
|
||
}
|
||
return result;
|
||
}
|
||
}
|
||
exports.Path = Path;
|
||
//# sourceMappingURL=internal-path.js.map
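// The Path class above splits a rooted path into a root segment plus one entry per
// component, and toString() reassembles it without doubling separators. A sketch on
// Linux-style paths:
//
//   new Path('/foo/bar/baz').segments          // => ['/', 'foo', 'bar', 'baz']
//   new Path(['/', 'foo', 'bar']).toString()   // => '/foo/bar'
//   new Path('foo/bar').segments               // => ['foo', 'bar']  (not rooted)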
|
||
|
||
/***/ }),
|
||
|
||
/***/ 6289:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||
if (k2 === undefined) k2 = k;
|
||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||
}) : (function(o, m, k, k2) {
|
||
if (k2 === undefined) k2 = k;
|
||
o[k2] = m[k];
|
||
}));
|
||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||
}) : function(o, v) {
|
||
o["default"] = v;
|
||
});
|
||
var __importStar = (this && this.__importStar) || function (mod) {
|
||
if (mod && mod.__esModule) return mod;
|
||
var result = {};
|
||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||
__setModuleDefault(result, mod);
|
||
return result;
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.partialMatch = exports.match = exports.getSearchPaths = void 0;
|
||
const pathHelper = __importStar(__nccwpck_require__(7602));
|
||
const internal_match_kind_1 = __nccwpck_require__(6800);
|
||
const IS_WINDOWS = process.platform === 'win32';
|
||
/**
|
||
* Given an array of patterns, returns an array of paths to search.
|
||
* Duplicates and paths under other included paths are filtered out.
|
||
*/
|
||
function getSearchPaths(patterns) {
|
||
// Ignore negate patterns
|
||
patterns = patterns.filter(x => !x.negate);
|
||
// Create a map of all search paths
|
||
const searchPathMap = {};
|
||
for (const pattern of patterns) {
|
||
const key = IS_WINDOWS
|
||
? pattern.searchPath.toUpperCase()
|
||
: pattern.searchPath;
|
||
searchPathMap[key] = 'candidate';
|
||
}
|
||
const result = [];
|
||
for (const pattern of patterns) {
|
||
// Check if already included
|
||
const key = IS_WINDOWS
|
||
? pattern.searchPath.toUpperCase()
|
||
: pattern.searchPath;
|
||
if (searchPathMap[key] === 'included') {
|
||
continue;
|
||
}
|
||
// Check for an ancestor search path
|
||
let foundAncestor = false;
|
||
let tempKey = key;
|
||
let parent = pathHelper.dirname(tempKey);
|
||
while (parent !== tempKey) {
|
||
if (searchPathMap[parent]) {
|
||
foundAncestor = true;
|
||
break;
|
||
}
|
||
tempKey = parent;
|
||
parent = pathHelper.dirname(tempKey);
|
||
}
|
||
// Include the search pattern in the result
|
||
if (!foundAncestor) {
|
||
result.push(pattern.searchPath);
|
||
searchPathMap[key] = 'included';
|
||
}
|
||
}
|
||
return result;
|
||
}
|
||
exports.getSearchPaths = getSearchPaths;
|
||
/**
|
||
* Matches the patterns against the path
|
||
*/
|
||
function match(patterns, itemPath) {
|
||
let result = internal_match_kind_1.MatchKind.None;
|
||
for (const pattern of patterns) {
|
||
if (pattern.negate) {
|
||
result &= ~pattern.match(itemPath);
|
||
}
|
||
else {
|
||
result |= pattern.match(itemPath);
|
||
}
|
||
}
|
||
return result;
|
||
}
|
||
exports.match = match;
|
||
/**
|
||
* Checks whether to descend further into the directory
|
||
*/
|
||
function partialMatch(patterns, itemPath) {
|
||
return patterns.some(x => !x.negate && x.partialMatch(itemPath));
|
||
}
|
||
exports.partialMatch = partialMatch;
|
||
//# sourceMappingURL=internal-pattern-helper.js.map
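// How the helpers above combine: getSearchPaths() drops duplicate roots and any search
// path nested under another pattern's search path, while match() ORs the MatchKind bits
// of non-negate patterns and clears bits for negate patterns. A sketch assuming the
// Pattern class from module 484 and Linux paths:
//
//   const patterns = [
//     new Pattern('/repo/dist/**'),
//     new Pattern('!/repo/dist/**/*.map')
//   ];
//   getSearchPaths(patterns);                  // => ['/repo/dist']
//   match(patterns, '/repo/dist/a.js');        // => MatchKind.All
//   match(patterns, '/repo/dist/a.js.map');    // => MatchKind.None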
|
||
|
||
/***/ }),
|
||
|
||
/***/ 484:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||
if (k2 === undefined) k2 = k;
|
||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||
}) : (function(o, m, k, k2) {
|
||
if (k2 === undefined) k2 = k;
|
||
o[k2] = m[k];
|
||
}));
|
||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||
}) : function(o, v) {
|
||
o["default"] = v;
|
||
});
|
||
var __importStar = (this && this.__importStar) || function (mod) {
|
||
if (mod && mod.__esModule) return mod;
|
||
var result = {};
|
||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||
__setModuleDefault(result, mod);
|
||
return result;
|
||
};
|
||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.Pattern = void 0;
|
||
const os = __importStar(__nccwpck_require__(2087));
|
||
const path = __importStar(__nccwpck_require__(5622));
|
||
const pathHelper = __importStar(__nccwpck_require__(7602));
|
||
const assert_1 = __importDefault(__nccwpck_require__(2357));
|
||
const minimatch_1 = __nccwpck_require__(7066);
|
||
const internal_match_kind_1 = __nccwpck_require__(6800);
|
||
const internal_path_1 = __nccwpck_require__(4114);
|
||
const IS_WINDOWS = process.platform === 'win32';
|
||
class Pattern {
|
||
constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) {
|
||
/**
|
||
* Indicates whether matches should be excluded from the result set
|
||
*/
|
||
this.negate = false;
|
||
// Pattern overload
|
||
let pattern;
|
||
if (typeof patternOrNegate === 'string') {
|
||
pattern = patternOrNegate.trim();
|
||
}
|
||
// Segments overload
|
||
else {
|
||
// Convert to pattern
|
||
segments = segments || [];
|
||
assert_1.default(segments.length, `Parameter 'segments' must not empty`);
|
||
const root = Pattern.getLiteral(segments[0]);
|
||
assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
|
||
pattern = new internal_path_1.Path(segments).toString().trim();
|
||
if (patternOrNegate) {
|
||
pattern = `!${pattern}`;
|
||
}
|
||
}
|
||
// Negate
|
||
while (pattern.startsWith('!')) {
|
||
this.negate = !this.negate;
|
||
pattern = pattern.substr(1).trim();
|
||
}
|
||
// Normalize slashes and ensures absolute root
|
||
pattern = Pattern.fixupPattern(pattern, homedir);
|
||
// Segments
|
||
this.segments = new internal_path_1.Path(pattern).segments;
|
||
// Trailing slash indicates the pattern should only match directories, not regular files
|
||
this.trailingSeparator = pathHelper
|
||
.normalizeSeparators(pattern)
|
||
.endsWith(path.sep);
|
||
pattern = pathHelper.safeTrimTrailingSeparator(pattern);
|
||
// Search path (literal path prior to the first glob segment)
|
||
let foundGlob = false;
|
||
const searchSegments = this.segments
|
||
.map(x => Pattern.getLiteral(x))
|
||
.filter(x => !foundGlob && !(foundGlob = x === ''));
|
||
this.searchPath = new internal_path_1.Path(searchSegments).toString();
|
||
// Root RegExp (required when determining partial match)
|
||
this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : '');
|
||
this.isImplicitPattern = isImplicitPattern;
|
||
// Create minimatch
|
||
const minimatchOptions = {
|
||
dot: true,
|
||
nobrace: true,
|
||
nocase: IS_WINDOWS,
|
||
nocomment: true,
|
||
noext: true,
|
||
nonegate: true
|
||
};
|
||
pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern;
|
||
this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions);
|
||
}
|
||
/**
|
||
* Matches the pattern against the specified path
|
||
*/
|
||
match(itemPath) {
|
||
// Last segment is globstar?
|
||
if (this.segments[this.segments.length - 1] === '**') {
|
||
// Normalize slashes
|
||
itemPath = pathHelper.normalizeSeparators(itemPath);
|
||
// Append a trailing slash. Otherwise Minimatch will not match the directory immediately
|
||
// preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
|
||
// false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.
|
||
if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) {
|
||
// Note, this is safe because the constructor ensures the pattern has an absolute root.
|
||
// For example, formats like C: and C:foo on Windows are resolved to an absolute root.
|
||
itemPath = `${itemPath}${path.sep}`;
|
||
}
|
||
}
|
||
else {
|
||
// Normalize slashes and trim unnecessary trailing slash
|
||
itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
|
||
}
|
||
// Match
|
||
if (this.minimatch.match(itemPath)) {
|
||
return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All;
|
||
}
|
||
return internal_match_kind_1.MatchKind.None;
|
||
}
|
||
/**
|
||
* Indicates whether the pattern may match descendants of the specified path
|
||
*/
|
||
partialMatch(itemPath) {
|
||
// Normalize slashes and trim unnecessary trailing slash
|
||
itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
|
||
// matchOne does not handle root path correctly
|
||
if (pathHelper.dirname(itemPath) === itemPath) {
|
||
return this.rootRegExp.test(itemPath);
|
||
}
|
||
return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true);
|
||
}
|
||
/**
|
||
* Escapes glob patterns within a path
|
||
*/
|
||
static globEscape(s) {
|
||
return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS
|
||
.replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment
|
||
.replace(/\?/g, '[?]') // escape '?'
|
||
.replace(/\*/g, '[*]'); // escape '*'
|
||
}
|
||
/**
|
||
* Normalizes slashes and ensures absolute root
|
||
*/
|
||
static fixupPattern(pattern, homedir) {
|
||
// Empty
|
||
assert_1.default(pattern, 'pattern cannot be empty');
|
||
// Must not contain `.` segment, unless first segment
|
||
// Must not contain `..` segment
|
||
const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));
|
||
assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
|
||
// Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
|
||
assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
|
||
// Normalize slashes
|
||
pattern = pathHelper.normalizeSeparators(pattern);
|
||
// Replace leading `.` segment
|
||
if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) {
|
||
pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1);
|
||
}
|
||
// Replace leading `~` segment
|
||
else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {
|
||
homedir = homedir || os.homedir();
|
||
assert_1.default(homedir, 'Unable to determine HOME directory');
|
||
assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
|
||
pattern = Pattern.globEscape(homedir) + pattern.substr(1);
|
||
}
|
||
// Replace relative drive root, e.g. pattern is C: or C:foo
|
||
else if (IS_WINDOWS &&
|
||
(pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) {
|
||
let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2));
|
||
if (pattern.length > 2 && !root.endsWith('\\')) {
|
||
root += '\\';
|
||
}
|
||
pattern = Pattern.globEscape(root) + pattern.substr(2);
|
||
}
|
||
// Replace relative root, e.g. pattern is \ or \foo
|
||
else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) {
|
||
let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\');
|
||
if (!root.endsWith('\\')) {
|
||
root += '\\';
|
||
}
|
||
pattern = Pattern.globEscape(root) + pattern.substr(1);
|
||
}
|
||
// Otherwise ensure absolute root
|
||
else {
|
||
pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern);
|
||
}
|
||
return pathHelper.normalizeSeparators(pattern);
|
||
}
|
||
/**
|
||
* Attempts to unescape a pattern segment to create a literal path segment.
|
||
* Otherwise returns empty string.
|
||
*/
|
||
static getLiteral(segment) {
|
||
let literal = '';
|
||
for (let i = 0; i < segment.length; i++) {
|
||
const c = segment[i];
|
||
// Escape
|
||
if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) {
|
||
literal += segment[++i];
|
||
continue;
|
||
}
|
||
// Wildcard
|
||
else if (c === '*' || c === '?') {
|
||
return '';
|
||
}
|
||
// Character set
|
||
else if (c === '[' && i + 1 < segment.length) {
|
||
let set = '';
|
||
let closed = -1;
|
||
for (let i2 = i + 1; i2 < segment.length; i2++) {
|
||
const c2 = segment[i2];
|
||
// Escape
|
||
if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) {
|
||
set += segment[++i2];
|
||
continue;
|
||
}
|
||
// Closed
|
||
else if (c2 === ']') {
|
||
closed = i2;
|
||
break;
|
||
}
|
||
// Otherwise
|
||
else {
|
||
set += c2;
|
||
}
|
||
}
|
||
// Closed?
|
||
if (closed >= 0) {
|
||
// Cannot convert
|
||
if (set.length > 1) {
|
||
return '';
|
||
}
|
||
// Convert to literal
|
||
if (set) {
|
||
literal += set;
|
||
i = closed;
|
||
continue;
|
||
}
|
||
}
|
||
// Otherwise fall thru
|
||
}
|
||
// Append
|
||
literal += c;
|
||
}
|
||
return literal;
|
||
}
|
||
/**
|
||
* Escapes regexp special characters
|
||
* https://javascript.info/regexp-escaping
|
||
*/
|
||
static regExpEscape(s) {
|
||
return s.replace(/[[\\^$.|?*+()]/g, '\\$&');
|
||
}
|
||
}
|
||
exports.Pattern = Pattern;
|
||
//# sourceMappingURL=internal-pattern.js.map
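// Pattern behaviour worth noting from the constructor above: a leading '!' toggles
// `negate`, a leading '~' expands to the home directory, relative patterns are rooted
// against process.cwd(), and a trailing separator restricts matches to directories.
// A sketch assuming Linux paths, cwd '/repo' and HOME '/home/runner':
//
//   new Pattern('dist/**').searchPath                 // => '/repo/dist'
//   new Pattern('~/cache/**').searchPath              // => '/home/runner/cache'
//   new Pattern('!dist/**').negate                    // => true
//   new Pattern('/repo/dist/').match('/repo/dist')    // => MatchKind.Directory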
|
||
|
||
/***/ }),
|
||
|
||
/***/ 2853:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.SearchState = void 0;
class SearchState {
    constructor(path, level) {
        this.path = path;
        this.level = level;
    }
}
exports.SearchState = SearchState;
//# sourceMappingURL=internal-search-state.js.map
|
||
|
||
/***/ }),
|
||
|
||
/***/ 5023:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
/**
|
||
* archiver-utils
|
||
*
|
||
* Copyright (c) 2012-2014 Chris Talkington, contributors.
|
||
* Licensed under the MIT license.
|
||
* https://github.com/archiverjs/node-archiver/blob/master/LICENSE-MIT
|
||
*/
|
||
var fs = __nccwpck_require__(3224);
|
||
var path = __nccwpck_require__(5622);
|
||
|
||
var flatten = __nccwpck_require__(9058);
|
||
var difference = __nccwpck_require__(9765);
|
||
var union = __nccwpck_require__(6969);
|
||
var isPlainObject = __nccwpck_require__(7617);
|
||
|
||
var glob = __nccwpck_require__(4285);
|
||
|
||
var file = module.exports = {};
|
||
|
||
var pathSeparatorRe = /[\/\\]/g;
|
||
|
||
// Process specified wildcard glob patterns or filenames against a
|
||
// callback, excluding and uniquing files in the result set.
|
||
var processPatterns = function(patterns, fn) {
|
||
// Filepaths to return.
|
||
var result = [];
|
||
// Iterate over flattened patterns array.
|
||
flatten(patterns).forEach(function(pattern) {
|
||
// If the first character is ! it should be omitted
|
||
var exclusion = pattern.indexOf('!') === 0;
|
||
// If the pattern is an exclusion, remove the !
|
||
if (exclusion) { pattern = pattern.slice(1); }
|
||
// Find all matching files for this pattern.
|
||
var matches = fn(pattern);
|
||
if (exclusion) {
|
||
// If an exclusion, remove matching files.
|
||
result = difference(result, matches);
|
||
} else {
|
||
// Otherwise add matching files.
|
||
result = union(result, matches);
|
||
}
|
||
});
|
||
return result;
|
||
};
|
||
|
||
// True if the file path exists.
|
||
file.exists = function() {
|
||
var filepath = path.join.apply(path, arguments);
|
||
return fs.existsSync(filepath);
|
||
};
|
||
|
||
// Return an array of all file paths that match the given wildcard patterns.
|
||
file.expand = function(...args) {
|
||
// If the first argument is an options object, save those options to pass
|
||
// into the File.prototype.glob.sync method.
|
||
var options = isPlainObject(args[0]) ? args.shift() : {};
|
||
// Use the first argument if it's an Array, otherwise convert the arguments
|
||
// object to an array and use that.
|
||
var patterns = Array.isArray(args[0]) ? args[0] : args;
|
||
// Return empty set if there are no patterns or filepaths.
|
||
if (patterns.length === 0) { return []; }
|
||
// Return all matching filepaths.
|
||
var matches = processPatterns(patterns, function(pattern) {
|
||
// Find all matching files for this pattern.
|
||
return glob.sync(pattern, options);
|
||
});
|
||
// Filter result set?
|
||
if (options.filter) {
|
||
matches = matches.filter(function(filepath) {
|
||
filepath = path.join(options.cwd || '', filepath);
|
||
try {
|
||
if (typeof options.filter === 'function') {
|
||
return options.filter(filepath);
|
||
} else {
|
||
// If the file is of the right type and exists, this should work.
|
||
return fs.statSync(filepath)[options.filter]();
|
||
}
|
||
} catch(e) {
|
||
// Otherwise, it's probably not the right type.
|
||
return false;
|
||
}
|
||
});
|
||
}
|
||
return matches;
|
||
};
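// file.expand() above runs every pattern through glob.sync(), unions the matches and
// subtracts '!'-prefixed exclusions in order. A sketch with a hypothetical layout:
//
//   var paths = file.expand({ filter: 'isFile' }, ['src/**', '!src/**/*.test.js']);
//
// The `filter` option accepts either a predicate function or the name of an fs.Stats
// method such as 'isFile' or 'isDirectory', as handled in the try/catch above.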
|
||
|
||
// Build a multi task "files" object dynamically.
|
||
file.expandMapping = function(patterns, destBase, options) {
|
||
options = Object.assign({
|
||
rename: function(destBase, destPath) {
|
||
return path.join(destBase || '', destPath);
|
||
}
|
||
}, options);
|
||
var files = [];
|
||
var fileByDest = {};
|
||
// Find all files matching pattern, using passed-in options.
|
||
file.expand(options, patterns).forEach(function(src) {
|
||
var destPath = src;
|
||
// Flatten?
|
||
if (options.flatten) {
|
||
destPath = path.basename(destPath);
|
||
}
|
||
// Change the extension?
|
||
if (options.ext) {
|
||
destPath = destPath.replace(/(\.[^\/]*)?$/, options.ext);
|
||
}
|
||
// Generate destination filename.
|
||
var dest = options.rename(destBase, destPath, options);
|
||
// Prepend cwd to src path if necessary.
|
||
if (options.cwd) { src = path.join(options.cwd, src); }
|
||
// Normalize filepaths to be unix-style.
|
||
dest = dest.replace(pathSeparatorRe, '/');
|
||
src = src.replace(pathSeparatorRe, '/');
|
||
// Map correct src path to dest path.
|
||
if (fileByDest[dest]) {
|
||
// If dest already exists, push this src onto that dest's src array.
|
||
fileByDest[dest].src.push(src);
|
||
} else {
|
||
// Otherwise create a new src-dest file mapping object.
|
||
files.push({
|
||
src: [src],
|
||
dest: dest,
|
||
});
|
||
// And store a reference for later use.
|
||
fileByDest[dest] = files[files.length - 1];
|
||
}
|
||
});
|
||
return files;
|
||
};
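// file.expandMapping() above turns matched sources into { src: [...], dest } pairs,
// applying the `flatten`, `ext` and `rename` options before grouping by destination.
// A sketch with assumed inputs and an illustrative result:
//
//   var mapping = file.expandMapping(['assets/**/*.png'], 'build/img', {
//     flatten: true,
//     rename: function(destBase, destPath) {
//       return path.join(destBase, destPath.toLowerCase());
//     }
//   });
//   // => e.g. [{ src: ['assets/icons/Logo.png'], dest: 'build/img/logo.png' }]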
|
||
|
||
// reusing bits of grunt's multi-task source normalization
|
||
file.normalizeFilesArray = function(data) {
|
||
var files = [];
|
||
|
||
data.forEach(function(obj) {
|
||
var prop;
|
||
if ('src' in obj || 'dest' in obj) {
|
||
files.push(obj);
|
||
}
|
||
});
|
||
|
||
if (files.length === 0) {
|
||
return [];
|
||
}
|
||
|
||
  files.forEach(function(obj) {
|
||
if (!('src' in obj) || !obj.src) { return; }
|
||
// Normalize .src properties to flattened array.
|
||
if (Array.isArray(obj.src)) {
|
||
obj.src = flatten(obj.src);
|
||
} else {
|
||
obj.src = [obj.src];
|
||
}
|
||
  });

  // expandMapping can return an array of mappings per entry, so flatten one level
  files = flatten(files.map(function(obj) {
|
||
// Build options object, removing unwanted properties.
|
||
var expandOptions = Object.assign({}, obj);
|
||
delete expandOptions.src;
|
||
delete expandOptions.dest;
|
||
|
||
// Expand file mappings.
|
||
if (obj.expand) {
|
||
return file.expandMapping(obj.src, obj.dest, expandOptions).map(function(mapObj) {
|
||
// Copy obj properties to result.
|
||
var result = Object.assign({}, obj);
|
||
// Make a clone of the orig obj available.
|
||
result.orig = Object.assign({}, obj);
|
||
// Set .src and .dest, processing both as templates.
|
||
result.src = mapObj.src;
|
||
result.dest = mapObj.dest;
|
||
// Remove unwanted properties.
|
||
['expand', 'cwd', 'flatten', 'rename', 'ext'].forEach(function(prop) {
|
||
delete result[prop];
|
||
});
|
||
return result;
|
||
});
|
||
}
|
||
|
||
// Copy obj properties to result, adding an .orig property.
|
||
var result = Object.assign({}, obj);
|
||
// Make a clone of the orig obj available.
|
||
result.orig = Object.assign({}, obj);
|
||
|
||
if ('src' in result) {
|
||
// Expose an expand-on-demand getter method as .src.
|
||
Object.defineProperty(result, 'src', {
|
||
enumerable: true,
|
||
get: function fn() {
|
||
var src;
|
||
if (!('result' in fn)) {
|
||
src = obj.src;
|
||
// If src is an array, flatten it. Otherwise, make it into an array.
|
||
src = Array.isArray(src) ? flatten(src) : [src];
|
||
// Expand src files, memoizing result.
|
||
fn.result = file.expand(expandOptions, src);
|
||
}
|
||
return fn.result;
|
||
}
|
||
});
|
||
}
|
||
|
||
if ('dest' in result) {
|
||
result.dest = obj.dest;
|
||
}
|
||
|
||
return result;
|
||
  }));
|
||
|
||
return files;
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 9451:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
/**
|
||
* archiver-utils
|
||
*
|
||
* Copyright (c) 2015 Chris Talkington.
|
||
* Licensed under the MIT license.
|
||
* https://github.com/archiverjs/archiver-utils/blob/master/LICENSE
|
||
*/
|
||
var fs = __nccwpck_require__(3224);
|
||
var path = __nccwpck_require__(5622);
|
||
var nutil = __nccwpck_require__(1669);
|
||
var lazystream = __nccwpck_require__(4556);
|
||
var normalizePath = __nccwpck_require__(1980);
|
||
var defaults = __nccwpck_require__(1301);
|
||
|
||
var Stream = __nccwpck_require__(2413).Stream;
|
||
var PassThrough = __nccwpck_require__(3484).PassThrough;
|
||
|
||
var utils = module.exports = {};
|
||
utils.file = __nccwpck_require__(5023);
|
||
|
||
function assertPath(path) {
  if (typeof path !== 'string') {
    // `nutil` is the node `util` module required above
    throw new TypeError('Path must be a string. Received ' + nutil.inspect(path));
  }
}
|
||
|
||
utils.collectStream = function(source, callback) {
|
||
var collection = [];
|
||
var size = 0;
|
||
|
||
source.on('error', callback);
|
||
|
||
source.on('data', function(chunk) {
|
||
collection.push(chunk);
|
||
size += chunk.length;
|
||
});
|
||
|
||
  source.on('end', function() {
    // allocate a zero-filled buffer large enough for all collected chunks
    var buf = Buffer.alloc(size);
    var offset = 0;

    collection.forEach(function(data) {
      data.copy(buf, offset);
      offset += data.length;
    });

    callback(null, buf);
  });
|
||
};
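// utils.collectStream() above buffers an entire readable stream into a single Buffer
// and hands it to a node-style callback. A usage sketch with an assumed file path:
//
//   utils.collectStream(fs.createReadStream('entry.bin'), function(err, buf) {
//     if (err) throw err;
//     console.log('collected %d bytes', buf.length);
//   });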
|
||
|
||
utils.dateify = function(dateish) {
|
||
dateish = dateish || new Date();
|
||
|
||
if (dateish instanceof Date) {
|
||
dateish = dateish;
|
||
} else if (typeof dateish === 'string') {
|
||
dateish = new Date(dateish);
|
||
} else {
|
||
dateish = new Date();
|
||
}
|
||
|
||
return dateish;
|
||
};
|
||
|
||
// this is slightly different from lodash version
|
||
utils.defaults = function(object, source, guard) {
|
||
var args = arguments;
|
||
args[0] = args[0] || {};
|
||
|
||
return defaults(...args);
|
||
};
|
||
|
||
utils.isStream = function(source) {
|
||
return source instanceof Stream;
|
||
};
|
||
|
||
utils.lazyReadStream = function(filepath) {
|
||
return new lazystream.Readable(function() {
|
||
return fs.createReadStream(filepath);
|
||
});
|
||
};
|
||
|
||
utils.normalizeInputSource = function(source) {
  if (source === null) {
    return Buffer.alloc(0);
  } else if (typeof source === 'string') {
    return Buffer.from(source);
  } else if (utils.isStream(source) && !source._readableState) {
    var normalized = new PassThrough();
    source.pipe(normalized);

    return normalized;
  }

  return source;
};
|
||
|
||
utils.sanitizePath = function(filepath) {
|
||
return normalizePath(filepath, false).replace(/^\w+:/, '').replace(/^(\.\.\/|\/)+/, '');
|
||
};
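// utils.sanitizePath() above normalizes to forward slashes, strips a Windows drive
// prefix and removes leading '../' or '/' segments so archive entries cannot escape
// the archive root:
//
//   utils.sanitizePath('../../etc/passwd')    // => 'etc/passwd'
//   utils.sanitizePath('C:\\logs\\app.log')   // => 'logs/app.log'
//   utils.sanitizePath('/var/tmp/x')          // => 'var/tmp/x'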
|
||
|
||
utils.trailingSlashIt = function(str) {
|
||
return str.slice(-1) !== '/' ? str + '/' : str;
|
||
};
|
||
|
||
utils.unixifyPath = function(filepath) {
|
||
return normalizePath(filepath, false).replace(/^\w+:/, '');
|
||
};
|
||
|
||
utils.walkdir = function(dirpath, base, callback) {
|
||
var results = [];
|
||
|
||
if (typeof base === 'function') {
|
||
callback = base;
|
||
base = dirpath;
|
||
}
|
||
|
||
fs.readdir(dirpath, function(err, list) {
|
||
var i = 0;
|
||
var file;
|
||
var filepath;
|
||
|
||
if (err) {
|
||
return callback(err);
|
||
}
|
||
|
||
(function next() {
|
||
file = list[i++];
|
||
|
||
if (!file) {
|
||
return callback(null, results);
|
||
}
|
||
|
||
filepath = path.join(dirpath, file);
|
||
|
||
fs.stat(filepath, function(err, stats) {
|
||
results.push({
|
||
path: filepath,
|
||
relative: path.relative(base, filepath).replace(/\\/g, '/'),
|
||
stats: stats
|
||
});
|
||
|
||
if (stats && stats.isDirectory()) {
|
||
utils.walkdir(filepath, base, function(err, res) {
|
||
res.forEach(function(dirEntry) {
|
||
results.push(dirEntry);
|
||
});
|
||
next();
|
||
});
|
||
} else {
|
||
next();
|
||
}
|
||
});
|
||
})();
|
||
});
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 9994:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
||
// a duplex stream is just a stream that is both readable and writable.
|
||
// Since JS doesn't have multiple prototypal inheritance, this class
|
||
// prototypally inherits from Readable, and then parasitically from
|
||
// Writable.
|
||
|
||
|
||
|
||
/*<replacement>*/
|
||
|
||
var pna = __nccwpck_require__(9029);
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
var objectKeys = Object.keys || function (obj) {
|
||
var keys = [];
|
||
for (var key in obj) {
|
||
keys.push(key);
|
||
}return keys;
|
||
};
|
||
/*</replacement>*/
|
||
|
||
module.exports = Duplex;
|
||
|
||
/*<replacement>*/
|
||
var util = Object.create(__nccwpck_require__(9300));
|
||
util.inherits = __nccwpck_require__(3753);
|
||
/*</replacement>*/
|
||
|
||
var Readable = __nccwpck_require__(7913);
|
||
var Writable = __nccwpck_require__(2072);
|
||
|
||
util.inherits(Duplex, Readable);
|
||
|
||
{
|
||
// avoid scope creep, the keys array can then be collected
|
||
var keys = objectKeys(Writable.prototype);
|
||
for (var v = 0; v < keys.length; v++) {
|
||
var method = keys[v];
|
||
if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
|
||
}
|
||
}
|
||
|
||
function Duplex(options) {
|
||
if (!(this instanceof Duplex)) return new Duplex(options);
|
||
|
||
Readable.call(this, options);
|
||
Writable.call(this, options);
|
||
|
||
if (options && options.readable === false) this.readable = false;
|
||
|
||
if (options && options.writable === false) this.writable = false;
|
||
|
||
this.allowHalfOpen = true;
|
||
if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;
|
||
|
||
this.once('end', onend);
|
||
}
|
||
|
||
Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function () {
|
||
return this._writableState.highWaterMark;
|
||
}
|
||
});
|
||
|
||
// the no-half-open enforcer
|
||
function onend() {
|
||
// if we allow half-open state, or if the writable side ended,
|
||
// then we're ok.
|
||
if (this.allowHalfOpen || this._writableState.ended) return;
|
||
|
||
// no more data can be written.
|
||
// But allow more writes to happen in this tick.
|
||
pna.nextTick(onEndNT, this);
|
||
}
|
||
|
||
function onEndNT(self) {
|
||
self.end();
|
||
}
|
||
|
||
Object.defineProperty(Duplex.prototype, 'destroyed', {
|
||
get: function () {
|
||
if (this._readableState === undefined || this._writableState === undefined) {
|
||
return false;
|
||
}
|
||
return this._readableState.destroyed && this._writableState.destroyed;
|
||
},
|
||
set: function (value) {
|
||
// we ignore the value if the stream
|
||
// has not been initialized yet
|
||
if (this._readableState === undefined || this._writableState === undefined) {
|
||
return;
|
||
}
|
||
|
||
// backward compatibility, the user is explicitly
|
||
// managing destroyed
|
||
this._readableState.destroyed = value;
|
||
this._writableState.destroyed = value;
|
||
}
|
||
});
|
||
|
||
Duplex.prototype._destroy = function (err, cb) {
|
||
this.push(null);
|
||
this.end();
|
||
|
||
pna.nextTick(cb, err);
|
||
};
|
||
|
||
/***/ }),
|
||
|
||
/***/ 685:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
||
// a passthrough stream.
|
||
// basically just the most minimal sort of Transform stream.
|
||
// Every written chunk gets output as-is.
|
||
|
||
|
||
|
||
module.exports = PassThrough;
|
||
|
||
var Transform = __nccwpck_require__(2555);
|
||
|
||
/*<replacement>*/
|
||
var util = Object.create(__nccwpck_require__(9300));
|
||
util.inherits = __nccwpck_require__(3753);
|
||
/*</replacement>*/
|
||
|
||
util.inherits(PassThrough, Transform);
|
||
|
||
function PassThrough(options) {
|
||
if (!(this instanceof PassThrough)) return new PassThrough(options);
|
||
|
||
Transform.call(this, options);
|
||
}
|
||
|
||
PassThrough.prototype._transform = function (chunk, encoding, cb) {
|
||
cb(null, chunk);
|
||
};
|
||
|
||
/***/ }),
|
||
|
||
/***/ 7913:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
||
|
||
|
||
/*<replacement>*/
|
||
|
||
var pna = __nccwpck_require__(9029);
|
||
/*</replacement>*/
|
||
|
||
module.exports = Readable;
|
||
|
||
/*<replacement>*/
|
||
var isArray = __nccwpck_require__(6426);
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
var Duplex;
|
||
/*</replacement>*/
|
||
|
||
Readable.ReadableState = ReadableState;
|
||
|
||
/*<replacement>*/
|
||
var EE = __nccwpck_require__(8614).EventEmitter;
|
||
|
||
var EElistenerCount = function (emitter, type) {
|
||
return emitter.listeners(type).length;
|
||
};
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
var Stream = __nccwpck_require__(2342);
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
|
||
var Buffer = __nccwpck_require__(1534).Buffer;
|
||
var OurUint8Array = global.Uint8Array || function () {};
|
||
function _uint8ArrayToBuffer(chunk) {
|
||
return Buffer.from(chunk);
|
||
}
|
||
function _isUint8Array(obj) {
|
||
return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
|
||
}
|
||
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
var util = Object.create(__nccwpck_require__(9300));
|
||
util.inherits = __nccwpck_require__(3753);
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
var debugUtil = __nccwpck_require__(1669);
|
||
var debug = void 0;
|
||
if (debugUtil && debugUtil.debuglog) {
|
||
debug = debugUtil.debuglog('stream');
|
||
} else {
|
||
debug = function () {};
|
||
}
|
||
/*</replacement>*/
|
||
|
||
var BufferList = __nccwpck_require__(5582);
|
||
var destroyImpl = __nccwpck_require__(6599);
|
||
var StringDecoder;
|
||
|
||
util.inherits(Readable, Stream);
|
||
|
||
var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
|
||
|
||
function prependListener(emitter, event, fn) {
|
||
// Sadly this is not cacheable as some libraries bundle their own
|
||
// event emitter implementation with them.
|
||
if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);
|
||
|
||
// This is a hack to make sure that our error handler is attached before any
|
||
// userland ones. NEVER DO THIS. This is here only because this code needs
|
||
// to continue to work with older versions of Node.js that do not include
|
||
// the prependListener() method. The goal is to eventually remove this hack.
|
||
if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];
|
||
}
|
||
|
||
function ReadableState(options, stream) {
|
||
Duplex = Duplex || __nccwpck_require__(9994);
|
||
|
||
options = options || {};
|
||
|
||
// Duplex streams are both readable and writable, but share
|
||
// the same options object.
|
||
// However, some cases require setting options to different
|
||
// values for the readable and the writable sides of the duplex stream.
|
||
// These options can be provided separately as readableXXX and writableXXX.
|
||
var isDuplex = stream instanceof Duplex;
|
||
|
||
// object stream flag. Used to make read(n) ignore n and to
|
||
// make all the buffer merging and length checks go away
|
||
this.objectMode = !!options.objectMode;
|
||
|
||
if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;
|
||
|
||
// the point at which it stops calling _read() to fill the buffer
|
||
// Note: 0 is a valid value, means "don't call _read preemptively ever"
|
||
var hwm = options.highWaterMark;
|
||
var readableHwm = options.readableHighWaterMark;
|
||
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
|
||
|
||
if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm;
|
||
|
||
// cast to ints.
|
||
this.highWaterMark = Math.floor(this.highWaterMark);
|
||
|
||
// A linked list is used to store data chunks instead of an array because the
|
||
// linked list can remove elements from the beginning faster than
|
||
// array.shift()
|
||
this.buffer = new BufferList();
|
||
this.length = 0;
|
||
this.pipes = null;
|
||
this.pipesCount = 0;
|
||
this.flowing = null;
|
||
this.ended = false;
|
||
this.endEmitted = false;
|
||
this.reading = false;
|
||
|
||
// a flag to be able to tell if the event 'readable'/'data' is emitted
|
||
// immediately, or on a later tick. We set this to true at first, because
|
||
// any actions that shouldn't happen until "later" should generally also
|
||
// not happen before the first read call.
|
||
this.sync = true;
|
||
|
||
// whenever we return null, then we set a flag to say
|
||
// that we're awaiting a 'readable' event emission.
|
||
this.needReadable = false;
|
||
this.emittedReadable = false;
|
||
this.readableListening = false;
|
||
this.resumeScheduled = false;
|
||
|
||
// has it been destroyed
|
||
this.destroyed = false;
|
||
|
||
// Crypto is kind of old and crusty. Historically, its default string
|
||
// encoding is 'binary' so we have to make this configurable.
|
||
// Everything else in the universe uses 'utf8', though.
|
||
this.defaultEncoding = options.defaultEncoding || 'utf8';
|
||
|
||
// the number of writers that are awaiting a drain event in .pipe()s
|
||
this.awaitDrain = 0;
|
||
|
||
// if true, a maybeReadMore has been scheduled
|
||
this.readingMore = false;
|
||
|
||
this.decoder = null;
|
||
this.encoding = null;
|
||
if (options.encoding) {
|
||
if (!StringDecoder) StringDecoder = __nccwpck_require__(781)/* .StringDecoder */ .s;
|
||
this.decoder = new StringDecoder(options.encoding);
|
||
this.encoding = options.encoding;
|
||
}
|
||
}
|
||
|
||
function Readable(options) {
|
||
Duplex = Duplex || __nccwpck_require__(9994);
|
||
|
||
if (!(this instanceof Readable)) return new Readable(options);
|
||
|
||
this._readableState = new ReadableState(options, this);
|
||
|
||
// legacy
|
||
this.readable = true;
|
||
|
||
if (options) {
|
||
if (typeof options.read === 'function') this._read = options.read;
|
||
|
||
if (typeof options.destroy === 'function') this._destroy = options.destroy;
|
||
}
|
||
|
||
Stream.call(this);
|
||
}
|
||
|
||
Object.defineProperty(Readable.prototype, 'destroyed', {
|
||
get: function () {
|
||
if (this._readableState === undefined) {
|
||
return false;
|
||
}
|
||
return this._readableState.destroyed;
|
||
},
|
||
set: function (value) {
|
||
// we ignore the value if the stream
|
||
// has not been initialized yet
|
||
if (!this._readableState) {
|
||
return;
|
||
}
|
||
|
||
// backward compatibility, the user is explicitly
|
||
// managing destroyed
|
||
this._readableState.destroyed = value;
|
||
}
|
||
});
|
||
|
||
Readable.prototype.destroy = destroyImpl.destroy;
|
||
Readable.prototype._undestroy = destroyImpl.undestroy;
|
||
Readable.prototype._destroy = function (err, cb) {
|
||
this.push(null);
|
||
cb(err);
|
||
};
|
||
|
||
// Manually shove something into the read() buffer.
|
||
// This returns true if the highWaterMark has not been hit yet,
|
||
// similar to how Writable.write() returns true if you should
|
||
// write() some more.
|
||
Readable.prototype.push = function (chunk, encoding) {
|
||
var state = this._readableState;
|
||
var skipChunkCheck;
|
||
|
||
if (!state.objectMode) {
|
||
if (typeof chunk === 'string') {
|
||
encoding = encoding || state.defaultEncoding;
|
||
if (encoding !== state.encoding) {
|
||
chunk = Buffer.from(chunk, encoding);
|
||
encoding = '';
|
||
}
|
||
skipChunkCheck = true;
|
||
}
|
||
} else {
|
||
skipChunkCheck = true;
|
||
}
|
||
|
||
return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
|
||
};
|
||
|
||
// Unshift should *always* be something directly out of read()
|
||
Readable.prototype.unshift = function (chunk) {
|
||
return readableAddChunk(this, chunk, null, true, false);
|
||
};
|
||
|
||
function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
|
||
var state = stream._readableState;
|
||
if (chunk === null) {
|
||
state.reading = false;
|
||
onEofChunk(stream, state);
|
||
} else {
|
||
var er;
|
||
if (!skipChunkCheck) er = chunkInvalid(state, chunk);
|
||
if (er) {
|
||
stream.emit('error', er);
|
||
} else if (state.objectMode || chunk && chunk.length > 0) {
|
||
if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {
|
||
chunk = _uint8ArrayToBuffer(chunk);
|
||
}
|
||
|
||
if (addToFront) {
|
||
if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true);
|
||
} else if (state.ended) {
|
||
stream.emit('error', new Error('stream.push() after EOF'));
|
||
} else {
|
||
state.reading = false;
|
||
if (state.decoder && !encoding) {
|
||
chunk = state.decoder.write(chunk);
|
||
if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);
|
||
} else {
|
||
addChunk(stream, state, chunk, false);
|
||
}
|
||
}
|
||
} else if (!addToFront) {
|
||
state.reading = false;
|
||
}
|
||
}
|
||
|
||
return needMoreData(state);
|
||
}
|
||
|
||
function addChunk(stream, state, chunk, addToFront) {
|
||
if (state.flowing && state.length === 0 && !state.sync) {
|
||
stream.emit('data', chunk);
|
||
stream.read(0);
|
||
} else {
|
||
// update the buffer info.
|
||
state.length += state.objectMode ? 1 : chunk.length;
|
||
if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
|
||
|
||
if (state.needReadable) emitReadable(stream);
|
||
}
|
||
maybeReadMore(stream, state);
|
||
}
|
||
|
||
function chunkInvalid(state, chunk) {
|
||
var er;
|
||
if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
|
||
er = new TypeError('Invalid non-string/buffer chunk');
|
||
}
|
||
return er;
|
||
}
|
||
|
||
// if it's past the high water mark, we can push in some more.
|
||
// Also, if we have no data yet, we can stand some
|
||
// more bytes. This is to work around cases where hwm=0,
|
||
// such as the repl. Also, if the push() triggered a
|
||
// readable event, and the user called read(largeNumber) such that
|
||
// needReadable was set, then we ought to push more, so that another
|
||
// 'readable' event will be triggered.
|
||
function needMoreData(state) {
|
||
return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);
|
||
}
|
||
|
||
Readable.prototype.isPaused = function () {
|
||
return this._readableState.flowing === false;
|
||
};
|
||
|
||
// backwards compatibility.
|
||
Readable.prototype.setEncoding = function (enc) {
|
||
if (!StringDecoder) StringDecoder = __nccwpck_require__(781)/* .StringDecoder */ .s;
|
||
this._readableState.decoder = new StringDecoder(enc);
|
||
this._readableState.encoding = enc;
|
||
return this;
|
||
};
|
||
|
||
// Don't raise the hwm > 8MB
|
||
var MAX_HWM = 0x800000;
|
||
function computeNewHighWaterMark(n) {
|
||
if (n >= MAX_HWM) {
|
||
n = MAX_HWM;
|
||
} else {
|
||
// Get the next highest power of 2 to prevent increasing hwm excessively in
|
||
// tiny amounts
|
||
n--;
|
||
n |= n >>> 1;
|
||
n |= n >>> 2;
|
||
n |= n >>> 4;
|
||
n |= n >>> 8;
|
||
n |= n >>> 16;
|
||
n++;
|
||
}
|
||
return n;
|
||
}
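// computeNewHighWaterMark() above rounds the requested read size up to the next power
// of two (capped at MAX_HWM, 8 MiB) using the classic bit-smearing trick:
//
//   computeNewHighWaterMark(17000)     // => 32768    (next power of two)
//   computeNewHighWaterMark(65536)     // => 65536    (already a power of two)
//   computeNewHighWaterMark(9000000)   // => 8388608  (clamped to MAX_HWM)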
|
||
|
||
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
function howMuchToRead(n, state) {
  if (n <= 0 || state.length === 0 && state.ended) return 0;
  if (state.objectMode) return 1;
  if (n !== n) {
    // Only flow one buffer at a time
    if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;
  }
  // If we're asking for more than the current hwm, then raise the hwm.
  if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
  if (n <= state.length) return n;
  // Don't have enough
  if (!state.ended) {
    state.needReadable = true;
    return 0;
  }
  return state.length;
}

// you can override either this method, or the async _read(n) below.
Readable.prototype.read = function (n) {
  debug('read', n);
  n = parseInt(n, 10);
  var state = this._readableState;
  var nOrig = n;

  if (n !== 0) state.emittedReadable = false;

  // if we're doing read(0) to trigger a readable event, but we
  // already have a bunch of data in the buffer, then just trigger
  // the 'readable' event and move on.
  if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {
    debug('read: emitReadable', state.length, state.ended);
    if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
    return null;
  }

  n = howMuchToRead(n, state);

  // if we've ended, and we're now clear, then finish it up.
  if (n === 0 && state.ended) {
    if (state.length === 0) endReadable(this);
    return null;
  }

  // All the actual chunk generation logic needs to be
  // *below* the call to _read. The reason is that in certain
  // synthetic stream cases, such as passthrough streams, _read
  // may be a completely synchronous operation which may change
  // the state of the read buffer, providing enough data when
  // before there was *not* enough.
  //
  // So, the steps are:
  // 1. Figure out what the state of things will be after we do
  // a read from the buffer.
  //
  // 2. If that resulting state will trigger a _read, then call _read.
  // Note that this may be asynchronous, or synchronous. Yes, it is
  // deeply ugly to write APIs this way, but that still doesn't mean
  // that the Readable class should behave improperly, as streams are
  // designed to be sync/async agnostic.
  // Take note if the _read call is sync or async (ie, if the read call
  // has returned yet), so that we know whether or not it's safe to emit
  // 'readable' etc.
  //
  // 3. Actually pull the requested chunks out of the buffer and return.

  // if we need a readable event, then we need to do some reading.
  var doRead = state.needReadable;
  debug('need readable', doRead);

  // if we currently have less than the highWaterMark, then also read some
  if (state.length === 0 || state.length - n < state.highWaterMark) {
    doRead = true;
    debug('length less than watermark', doRead);
  }

  // however, if we've ended, then there's no point, and if we're already
  // reading, then it's unnecessary.
  if (state.ended || state.reading) {
    doRead = false;
    debug('reading or ended', doRead);
  } else if (doRead) {
    debug('do read');
    state.reading = true;
    state.sync = true;
    // if the length is currently zero, then we *need* a readable event.
    if (state.length === 0) state.needReadable = true;
    // call internal read method
    this._read(state.highWaterMark);
    state.sync = false;
    // If _read pushed data synchronously, then `reading` will be false,
    // and we need to re-evaluate how much data we can return to the user.
    if (!state.reading) n = howMuchToRead(nOrig, state);
  }

  var ret;
  if (n > 0) ret = fromList(n, state);else ret = null;

  if (ret === null) {
    state.needReadable = true;
    n = 0;
  } else {
    state.length -= n;
  }

  if (state.length === 0) {
    // If we have nothing in the buffer, then we want to know
    // as soon as we *do* get something into the buffer.
    if (!state.ended) state.needReadable = true;

    // If we tried to read() past the EOF, then emit end on the next tick.
    if (nOrig !== n && state.ended) endReadable(this);
  }

  if (ret !== null) this.emit('data', ret);

  return ret;
};
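/*
 * Illustrative sketch (not executed here): the typical consumer loop for the
 * read() path above, using the public 'stream' API; the file name is hypothetical.
 *
 *   const fs = require('fs');
 *   const rs = fs.createReadStream('artifact.zip'); // hypothetical file
 *   rs.on('readable', function () {
 *     let chunk;
 *     // read() returns null once the internal buffer is drained, which
 *     // re-arms needReadable so another 'readable' event can fire later.
 *     while ((chunk = rs.read()) !== null) {
 *       console.log('got %d bytes', chunk.length);
 *     }
 *   });
 *   rs.on('end', function () { console.log('done'); });
 */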
function onEofChunk(stream, state) {
  if (state.ended) return;
  if (state.decoder) {
    var chunk = state.decoder.end();
    if (chunk && chunk.length) {
      state.buffer.push(chunk);
      state.length += state.objectMode ? 1 : chunk.length;
    }
  }
  state.ended = true;

  // emit 'readable' now to make sure it gets picked up.
  emitReadable(stream);
}

// Don't emit readable right away in sync mode, because this can trigger
// another read() call => stack overflow. This way, it might trigger
// a nextTick recursion warning, but that's not so bad.
function emitReadable(stream) {
  var state = stream._readableState;
  state.needReadable = false;
  if (!state.emittedReadable) {
    debug('emitReadable', state.flowing);
    state.emittedReadable = true;
    if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream);
  }
}

function emitReadable_(stream) {
  debug('emit readable');
  stream.emit('readable');
  flow(stream);
}

// at this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data. that may have triggered
// in turn another _read(n) call, in which case reading = true if
// it's in progress.
// However, if we're not ended, or reading, and the length < hwm,
// then go ahead and try to read some more preemptively.
function maybeReadMore(stream, state) {
  if (!state.readingMore) {
    state.readingMore = true;
    pna.nextTick(maybeReadMore_, stream, state);
  }
}

function maybeReadMore_(stream, state) {
  var len = state.length;
  while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {
    debug('maybeReadMore read 0');
    stream.read(0);
    if (len === state.length)
      // didn't get any data, stop spinning.
      break;else len = state.length;
  }
  state.readingMore = false;
}

// abstract method. to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
Readable.prototype._read = function (n) {
  this.emit('error', new Error('_read() is not implemented'));
};
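/*
 * Illustrative sketch (not executed here): a minimal Readable that overrides
 * the abstract _read(n) hook above, built on the public 'stream' module.
 *
 *   const { Readable } = require('stream');
 *   class Counter extends Readable {
 *     constructor(limit) {
 *       super();
 *       this.current = 0;
 *       this.limit = limit;
 *     }
 *     _read() {
 *       // push() returns false once the buffer hits the highWaterMark;
 *       // _read() is called again when the consumer catches up.
 *       if (this.current < this.limit) {
 *         this.push(String(this.current++));
 *       } else {
 *         this.push(null); // signal EOF
 *       }
 *     }
 *   }
 *   new Counter(3).pipe(process.stdout); // prints 012
 */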
Readable.prototype.pipe = function (dest, pipeOpts) {
  var src = this;
  var state = this._readableState;

  switch (state.pipesCount) {
    case 0:
      state.pipes = dest;
      break;
    case 1:
      state.pipes = [state.pipes, dest];
      break;
    default:
      state.pipes.push(dest);
      break;
  }
  state.pipesCount += 1;
  debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);

  var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;

  var endFn = doEnd ? onend : unpipe;
  if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn);

  dest.on('unpipe', onunpipe);
  function onunpipe(readable, unpipeInfo) {
    debug('onunpipe');
    if (readable === src) {
      if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
        unpipeInfo.hasUnpiped = true;
        cleanup();
      }
    }
  }

  function onend() {
    debug('onend');
    dest.end();
  }

  // when the dest drains, it reduces the awaitDrain counter
  // on the source. This would be more elegant with a .once()
  // handler in flow(), but adding and removing repeatedly is
  // too slow.
  var ondrain = pipeOnDrain(src);
  dest.on('drain', ondrain);

  var cleanedUp = false;
  function cleanup() {
    debug('cleanup');
    // cleanup event handlers once the pipe is broken
    dest.removeListener('close', onclose);
    dest.removeListener('finish', onfinish);
    dest.removeListener('drain', ondrain);
    dest.removeListener('error', onerror);
    dest.removeListener('unpipe', onunpipe);
    src.removeListener('end', onend);
    src.removeListener('end', unpipe);
    src.removeListener('data', ondata);

    cleanedUp = true;

    // if the reader is waiting for a drain event from this
    // specific writer, then it would cause it to never start
    // flowing again.
    // So, if this is awaiting a drain, then we just call it now.
    // If we don't know, then assume that we are waiting for one.
    if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
  }

  // If the user pushes more data while we're writing to dest then we'll end up
  // in ondata again. However, we only want to increase awaitDrain once because
  // dest will only emit one 'drain' event for the multiple writes.
  // => Introduce a guard on increasing awaitDrain.
  var increasedAwaitDrain = false;
  src.on('data', ondata);
  function ondata(chunk) {
    debug('ondata');
    increasedAwaitDrain = false;
    var ret = dest.write(chunk);
    if (false === ret && !increasedAwaitDrain) {
      // If the user unpiped during `dest.write()`, it is possible
      // to get stuck in a permanently paused state if that write
      // also returned false.
      // => Check whether `dest` is still a piping destination.
      if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
        debug('false write response, pause', src._readableState.awaitDrain);
        src._readableState.awaitDrain++;
        increasedAwaitDrain = true;
      }
      src.pause();
    }
  }

  // if the dest has an error, then stop piping into it.
  // however, don't suppress the throwing behavior for this.
  function onerror(er) {
    debug('onerror', er);
    unpipe();
    dest.removeListener('error', onerror);
    if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);
  }

  // Make sure our error handler is attached before userland ones.
  prependListener(dest, 'error', onerror);

  // Both close and finish should trigger unpipe, but only once.
  function onclose() {
    dest.removeListener('finish', onfinish);
    unpipe();
  }
  dest.once('close', onclose);
  function onfinish() {
    debug('onfinish');
    dest.removeListener('close', onclose);
    unpipe();
  }
  dest.once('finish', onfinish);

  function unpipe() {
    debug('unpipe');
    src.unpipe(dest);
  }

  // tell the dest that it's being piped to
  dest.emit('pipe', src);

  // start the flow if it hasn't been started already.
  if (!state.flowing) {
    debug('pipe resume');
    src.resume();
  }

  return dest;
};
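/*
 * Illustrative sketch (not executed here): what the pipe() wiring above gives
 * a consumer, using the public 'stream' API; the paths are hypothetical.
 *
 *   const fs = require('fs');
 *   const src = fs.createReadStream('big.input');    // hypothetical path
 *   const dest = fs.createWriteStream('big.output'); // hypothetical path
 *   // pipe() pauses src whenever dest.write() returns false and resumes it
 *   // on 'drain', so memory use stays near the destination's highWaterMark.
 *   src.pipe(dest);
 *   dest.on('finish', function () { console.log('copy complete'); });
 */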
function pipeOnDrain(src) {
  return function () {
    var state = src._readableState;
    debug('pipeOnDrain', state.awaitDrain);
    if (state.awaitDrain) state.awaitDrain--;
    if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
      state.flowing = true;
      flow(src);
    }
  };
}

Readable.prototype.unpipe = function (dest) {
  var state = this._readableState;
  var unpipeInfo = { hasUnpiped: false };

  // if we're not piping anywhere, then do nothing.
  if (state.pipesCount === 0) return this;

  // just one destination. most common case.
  if (state.pipesCount === 1) {
    // passed in one, but it's not the right one.
    if (dest && dest !== state.pipes) return this;

    if (!dest) dest = state.pipes;

    // got a match.
    state.pipes = null;
    state.pipesCount = 0;
    state.flowing = false;
    if (dest) dest.emit('unpipe', this, unpipeInfo);
    return this;
  }

  // slow case. multiple pipe destinations.

  if (!dest) {
    // remove all.
    var dests = state.pipes;
    var len = state.pipesCount;
    state.pipes = null;
    state.pipesCount = 0;
    state.flowing = false;

    for (var i = 0; i < len; i++) {
      dests[i].emit('unpipe', this, unpipeInfo);
    }return this;
  }

  // try to find the right one.
  var index = indexOf(state.pipes, dest);
  if (index === -1) return this;

  state.pipes.splice(index, 1);
  state.pipesCount -= 1;
  if (state.pipesCount === 1) state.pipes = state.pipes[0];

  dest.emit('unpipe', this, unpipeInfo);

  return this;
};

// set up data events if they are asked for
// Ensure readable listeners eventually get something
Readable.prototype.on = function (ev, fn) {
  var res = Stream.prototype.on.call(this, ev, fn);

  if (ev === 'data') {
    // Start flowing on next tick if stream isn't explicitly paused
    if (this._readableState.flowing !== false) this.resume();
  } else if (ev === 'readable') {
    var state = this._readableState;
    if (!state.endEmitted && !state.readableListening) {
      state.readableListening = state.needReadable = true;
      state.emittedReadable = false;
      if (!state.reading) {
        pna.nextTick(nReadingNextTick, this);
      } else if (state.length) {
        emitReadable(this);
      }
    }
  }

  return res;
};
Readable.prototype.addListener = Readable.prototype.on;

function nReadingNextTick(self) {
  debug('readable nexttick read 0');
  self.read(0);
}

// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable.prototype.resume = function () {
  var state = this._readableState;
  if (!state.flowing) {
    debug('resume');
    state.flowing = true;
    resume(this, state);
  }
  return this;
};

function resume(stream, state) {
  if (!state.resumeScheduled) {
    state.resumeScheduled = true;
    pna.nextTick(resume_, stream, state);
  }
}

function resume_(stream, state) {
  if (!state.reading) {
    debug('resume read 0');
    stream.read(0);
  }

  state.resumeScheduled = false;
  state.awaitDrain = 0;
  stream.emit('resume');
  flow(stream);
  if (state.flowing && !state.reading) stream.read(0);
}

Readable.prototype.pause = function () {
  debug('call pause flowing=%j', this._readableState.flowing);
  if (false !== this._readableState.flowing) {
    debug('pause');
    this._readableState.flowing = false;
    this.emit('pause');
  }
  return this;
};

function flow(stream) {
  var state = stream._readableState;
  debug('flow', state.flowing);
  while (state.flowing && stream.read() !== null) {}
}
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
Readable.prototype.wrap = function (stream) {
  var _this = this;

  var state = this._readableState;
  var paused = false;

  stream.on('end', function () {
    debug('wrapped end');
    if (state.decoder && !state.ended) {
      var chunk = state.decoder.end();
      if (chunk && chunk.length) _this.push(chunk);
    }

    _this.push(null);
  });

  stream.on('data', function (chunk) {
    debug('wrapped data');
    if (state.decoder) chunk = state.decoder.write(chunk);

    // don't skip over falsy values in objectMode
    if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;

    var ret = _this.push(chunk);
    if (!ret) {
      paused = true;
      stream.pause();
    }
  });

  // proxy all the other methods.
  // important when wrapping filters and duplexes.
  for (var i in stream) {
    if (this[i] === undefined && typeof stream[i] === 'function') {
      this[i] = function (method) {
        return function () {
          return stream[method].apply(stream, arguments);
        };
      }(i);
    }
  }

  // proxy certain important events.
  for (var n = 0; n < kProxyEvents.length; n++) {
    stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
  }

  // when we try to consume some more bytes, simply unpause the
  // underlying stream.
  this._read = function (n) {
    debug('wrapped _read', n);
    if (paused) {
      paused = false;
      stream.resume();
    }
  };

  return this;
};

Object.defineProperty(Readable.prototype, 'readableHighWaterMark', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function () {
    return this._readableState.highWaterMark;
  }
});

// exposed for testing purposes only.
Readable._fromList = fromList;

// Pluck off n bytes from an array of buffers.
// Length is the combined lengths of all the buffers in the list.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
function fromList(n, state) {
  // nothing buffered
  if (state.length === 0) return null;

  var ret;
  if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
    // read it all, truncate the list
    if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length);
    state.buffer.clear();
  } else {
    // read part of list
    ret = fromListPartial(n, state.buffer, state.decoder);
  }

  return ret;
}

// Extracts only enough buffered data to satisfy the amount requested.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
function fromListPartial(n, list, hasStrings) {
  var ret;
  if (n < list.head.data.length) {
    // slice is the same for buffers and strings
    ret = list.head.data.slice(0, n);
    list.head.data = list.head.data.slice(n);
  } else if (n === list.head.data.length) {
    // first chunk is a perfect match
    ret = list.shift();
  } else {
    // result spans more than one buffer
    ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list);
  }
  return ret;
}

// Copies a specified amount of characters from the list of buffered data
// chunks.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
function copyFromBufferString(n, list) {
  var p = list.head;
  var c = 1;
  var ret = p.data;
  n -= ret.length;
  while (p = p.next) {
    var str = p.data;
    var nb = n > str.length ? str.length : n;
    if (nb === str.length) ret += str;else ret += str.slice(0, n);
    n -= nb;
    if (n === 0) {
      if (nb === str.length) {
        ++c;
        if (p.next) list.head = p.next;else list.head = list.tail = null;
      } else {
        list.head = p;
        p.data = str.slice(nb);
      }
      break;
    }
    ++c;
  }
  list.length -= c;
  return ret;
}

// Copies a specified amount of bytes from the list of buffered data chunks.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
function copyFromBuffer(n, list) {
  var ret = Buffer.allocUnsafe(n);
  var p = list.head;
  var c = 1;
  p.data.copy(ret);
  n -= p.data.length;
  while (p = p.next) {
    var buf = p.data;
    var nb = n > buf.length ? buf.length : n;
    buf.copy(ret, ret.length - n, 0, nb);
    n -= nb;
    if (n === 0) {
      if (nb === buf.length) {
        ++c;
        if (p.next) list.head = p.next;else list.head = list.tail = null;
      } else {
        list.head = p;
        p.data = buf.slice(nb);
      }
      break;
    }
    ++c;
  }
  list.length -= c;
  return ret;
}
function endReadable(stream) {
  var state = stream._readableState;

  // If we get here before consuming all the bytes, then that is a
  // bug in node. Should never happen.
  if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream');

  if (!state.endEmitted) {
    state.ended = true;
    pna.nextTick(endReadableNT, state, stream);
  }
}

function endReadableNT(state, stream) {
  // Check that we didn't get one last unshift.
  if (!state.endEmitted && state.length === 0) {
    state.endEmitted = true;
    stream.readable = false;
    stream.emit('end');
  }
}

function indexOf(xs, x) {
  for (var i = 0, l = xs.length; i < l; i++) {
    if (xs[i] === x) return i;
  }
  return -1;
}

/***/ }),

/***/ 2555:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.

// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.


module.exports = Transform;

var Duplex = __nccwpck_require__(9994);

/*<replacement>*/
var util = Object.create(__nccwpck_require__(9300));
util.inherits = __nccwpck_require__(3753);
/*</replacement>*/

util.inherits(Transform, Duplex);

function afterTransform(er, data) {
  var ts = this._transformState;
  ts.transforming = false;

  var cb = ts.writecb;

  if (!cb) {
    return this.emit('error', new Error('write callback called multiple times'));
  }

  ts.writechunk = null;
  ts.writecb = null;

  if (data != null) // single equals check for both `null` and `undefined`
    this.push(data);

  cb(er);

  var rs = this._readableState;
  rs.reading = false;
  if (rs.needReadable || rs.length < rs.highWaterMark) {
    this._read(rs.highWaterMark);
  }
}

function Transform(options) {
  if (!(this instanceof Transform)) return new Transform(options);

  Duplex.call(this, options);

  this._transformState = {
    afterTransform: afterTransform.bind(this),
    needTransform: false,
    transforming: false,
    writecb: null,
    writechunk: null,
    writeencoding: null
  };

  // start out asking for a readable event once data is transformed.
  this._readableState.needReadable = true;

  // we have implemented the _read method, and done the other things
  // that Readable wants before the first _read call, so unset the
  // sync guard flag.
  this._readableState.sync = false;

  if (options) {
    if (typeof options.transform === 'function') this._transform = options.transform;

    if (typeof options.flush === 'function') this._flush = options.flush;
  }

  // When the writable side finishes, then flush out anything remaining.
  this.on('prefinish', prefinish);
}

function prefinish() {
  var _this = this;

  if (typeof this._flush === 'function') {
    this._flush(function (er, data) {
      done(_this, er, data);
    });
  } else {
    done(this, null, null);
  }
}

Transform.prototype.push = function (chunk, encoding) {
  this._transformState.needTransform = false;
  return Duplex.prototype.push.call(this, chunk, encoding);
};

// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
Transform.prototype._transform = function (chunk, encoding, cb) {
  throw new Error('_transform() is not implemented');
};
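/*
 * Illustrative sketch (not executed here): a minimal Transform that supplies
 * the _transform() hook above, using the public 'stream' module.
 *
 *   const { Transform } = require('stream');
 *   const upperCase = new Transform({
 *     transform(chunk, encoding, cb) {
 *       // push zero or more output chunks, then call cb() exactly once
 *       this.push(chunk.toString().toUpperCase());
 *       cb();
 *     }
 *   });
 *   process.stdin.pipe(upperCase).pipe(process.stdout);
 */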
Transform.prototype._write = function (chunk, encoding, cb) {
  var ts = this._transformState;
  ts.writecb = cb;
  ts.writechunk = chunk;
  ts.writeencoding = encoding;
  if (!ts.transforming) {
    var rs = this._readableState;
    if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
  }
};

// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform.prototype._read = function (n) {
  var ts = this._transformState;

  if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
    ts.transforming = true;
    this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
  } else {
    // mark that we need a transform, so that any data that comes in
    // will get processed, now that we've asked for it.
    ts.needTransform = true;
  }
};

Transform.prototype._destroy = function (err, cb) {
  var _this2 = this;

  Duplex.prototype._destroy.call(this, err, function (err2) {
    cb(err2);
    _this2.emit('close');
  });
};

function done(stream, er, data) {
  if (er) return stream.emit('error', er);

  if (data != null) // single equals check for both `null` and `undefined`
    stream.push(data);

  // if there's nothing in the write buffer, then that means
  // that nothing more will ever be provided
  if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0');

  if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming');

  return stream.push(null);
}

/***/ }),

/***/ 2072:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.

// A bit simpler than readable streams.
// Implement an async ._write(chunk, encoding, cb), and it'll handle all
// the drain event emission and buffering.


/*<replacement>*/

var pna = __nccwpck_require__(9029);
/*</replacement>*/

module.exports = Writable;

/* <replacement> */
function WriteReq(chunk, encoding, cb) {
  this.chunk = chunk;
  this.encoding = encoding;
  this.callback = cb;
  this.next = null;
}

// It seems a linked list but it is not
// there will be only 2 of these for each stream
function CorkedRequest(state) {
  var _this = this;

  this.next = null;
  this.entry = null;
  this.finish = function () {
    onCorkedFinish(_this, state);
  };
}
/* </replacement> */

/*<replacement>*/
var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick;
/*</replacement>*/

/*<replacement>*/
var Duplex;
/*</replacement>*/

Writable.WritableState = WritableState;

/*<replacement>*/
var util = Object.create(__nccwpck_require__(9300));
util.inherits = __nccwpck_require__(3753);
/*</replacement>*/

/*<replacement>*/
var internalUtil = {
  deprecate: __nccwpck_require__(2053)
};
/*</replacement>*/

/*<replacement>*/
var Stream = __nccwpck_require__(2342);
/*</replacement>*/

/*<replacement>*/

var Buffer = __nccwpck_require__(1534).Buffer;
var OurUint8Array = global.Uint8Array || function () {};
function _uint8ArrayToBuffer(chunk) {
  return Buffer.from(chunk);
}
function _isUint8Array(obj) {
  return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
}

/*</replacement>*/

var destroyImpl = __nccwpck_require__(6599);

util.inherits(Writable, Stream);

function nop() {}

function WritableState(options, stream) {
  Duplex = Duplex || __nccwpck_require__(9994);

  options = options || {};

  // Duplex streams are both readable and writable, but share
  // the same options object.
  // However, some cases require setting options to different
  // values for the readable and the writable sides of the duplex stream.
  // These options can be provided separately as readableXXX and writableXXX.
  var isDuplex = stream instanceof Duplex;

  // object stream flag to indicate whether or not this stream
  // contains buffers or objects.
  this.objectMode = !!options.objectMode;

  if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;

  // the point at which write() starts returning false
  // Note: 0 is a valid value, means that we always return false if
  // the entire buffer is not flushed immediately on write()
  var hwm = options.highWaterMark;
  var writableHwm = options.writableHighWaterMark;
  var defaultHwm = this.objectMode ? 16 : 16 * 1024;

  if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm;

  // cast to ints.
  this.highWaterMark = Math.floor(this.highWaterMark);

  // if _final has been called
  this.finalCalled = false;

  // drain event flag.
  this.needDrain = false;
  // at the start of calling end()
  this.ending = false;
  // when end() has been called, and returned
  this.ended = false;
  // when 'finish' is emitted
  this.finished = false;

  // has it been destroyed
  this.destroyed = false;

  // should we decode strings into buffers before passing to _write?
  // this is here so that some node-core streams can optimize string
  // handling at a lower level.
  var noDecode = options.decodeStrings === false;
  this.decodeStrings = !noDecode;

  // Crypto is kind of old and crusty. Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.
  this.defaultEncoding = options.defaultEncoding || 'utf8';

  // not an actual buffer we keep track of, but a measurement
  // of how much we're waiting to get pushed to some underlying
  // socket or file.
  this.length = 0;

  // a flag to see when we're in the middle of a write.
  this.writing = false;

  // when true all writes will be buffered until .uncork() call
  this.corked = 0;

  // a flag to be able to tell if the onwrite cb is called immediately,
  // or on a later tick. We set this to true at first, because any
  // actions that shouldn't happen until "later" should generally also
  // not happen before the first write call.
  this.sync = true;

  // a flag to know if we're processing previously buffered items, which
  // may call the _write() callback in the same tick, so that we don't
  // end up in an overlapped onwrite situation.
  this.bufferProcessing = false;

  // the callback that's passed to _write(chunk,cb)
  this.onwrite = function (er) {
    onwrite(stream, er);
  };

  // the callback that the user supplies to write(chunk,encoding,cb)
  this.writecb = null;

  // the amount that is being written when _write is called.
  this.writelen = 0;

  this.bufferedRequest = null;
  this.lastBufferedRequest = null;

  // number of pending user-supplied write callbacks
  // this must be 0 before 'finish' can be emitted
  this.pendingcb = 0;

  // emit prefinish if the only thing we're waiting for is _write cbs
  // This is relevant for synchronous Transform streams
  this.prefinished = false;

  // True if the error was already emitted and should not be thrown again
  this.errorEmitted = false;

  // count buffered requests
  this.bufferedRequestCount = 0;

  // allocate the first CorkedRequest, there is always
  // one allocated and free to use, and we maintain at most two
  this.corkedRequestsFree = new CorkedRequest(this);
}
WritableState.prototype.getBuffer = function getBuffer() {
  var current = this.bufferedRequest;
  var out = [];
  while (current) {
    out.push(current);
    current = current.next;
  }
  return out;
};

(function () {
  try {
    Object.defineProperty(WritableState.prototype, 'buffer', {
      get: internalUtil.deprecate(function () {
        return this.getBuffer();
      }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
    });
  } catch (_) {}
})();

// Test _writableState for inheritance to account for Duplex streams,
// whose prototype chain only points to Readable.
var realHasInstance;
if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
  realHasInstance = Function.prototype[Symbol.hasInstance];
  Object.defineProperty(Writable, Symbol.hasInstance, {
    value: function (object) {
      if (realHasInstance.call(this, object)) return true;
      if (this !== Writable) return false;

      return object && object._writableState instanceof WritableState;
    }
  });
} else {
  realHasInstance = function (object) {
    return object instanceof this;
  };
}

function Writable(options) {
  Duplex = Duplex || __nccwpck_require__(9994);

  // Writable ctor is applied to Duplexes, too.
  // `realHasInstance` is necessary because using plain `instanceof`
  // would return false, as no `_writableState` property is attached.

  // Trying to use the custom `instanceof` for Writable here will also break the
  // Node.js LazyTransform implementation, which has a non-trivial getter for
  // `_writableState` that would lead to infinite recursion.
  if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) {
    return new Writable(options);
  }

  this._writableState = new WritableState(options, this);

  // legacy.
  this.writable = true;

  if (options) {
    if (typeof options.write === 'function') this._write = options.write;

    if (typeof options.writev === 'function') this._writev = options.writev;

    if (typeof options.destroy === 'function') this._destroy = options.destroy;

    if (typeof options.final === 'function') this._final = options.final;
  }

  Stream.call(this);
}

// Otherwise people can pipe Writable streams, which is just wrong.
Writable.prototype.pipe = function () {
  this.emit('error', new Error('Cannot pipe, not readable'));
};

function writeAfterEnd(stream, cb) {
  var er = new Error('write after end');
  // TODO: defer error events consistently everywhere, not just the cb
  stream.emit('error', er);
  pna.nextTick(cb, er);
}

// Checks that a user-supplied chunk is valid, especially for the particular
// mode the stream is in. Currently this means that `null` is never accepted
// and undefined/non-string values are only allowed in object mode.
function validChunk(stream, state, chunk, cb) {
  var valid = true;
  var er = false;

  if (chunk === null) {
    er = new TypeError('May not write null values to stream');
  } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
    er = new TypeError('Invalid non-string/buffer chunk');
  }
  if (er) {
    stream.emit('error', er);
    pna.nextTick(cb, er);
    valid = false;
  }
  return valid;
}

Writable.prototype.write = function (chunk, encoding, cb) {
  var state = this._writableState;
  var ret = false;
  var isBuf = !state.objectMode && _isUint8Array(chunk);

  if (isBuf && !Buffer.isBuffer(chunk)) {
    chunk = _uint8ArrayToBuffer(chunk);
  }

  if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }

  if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;

  if (typeof cb !== 'function') cb = nop;

  if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
    state.pendingcb++;
    ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
  }

  return ret;
};

Writable.prototype.cork = function () {
  var state = this._writableState;

  state.corked++;
};

Writable.prototype.uncork = function () {
  var state = this._writableState;

  if (state.corked) {
    state.corked--;

    if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
  }
};
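/*
 * Illustrative sketch (not executed here): cork() above buffers writes until a
 * matching uncork(), so an implementation with _writev() can flush them in one
 * batch. Public-API usage, with a hypothetical writable stream:
 *
 *   const socket = getSocketSomehow(); // hypothetical writable stream
 *   socket.cork();
 *   socket.write('HTTP/1.1 200 OK\r\n');
 *   socket.write('Content-Length: 2\r\n\r\n');
 *   socket.write('ok');
 *   // defer the flush until the current tick's writes are all queued
 *   process.nextTick(function () { socket.uncork(); });
 */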
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
  // node::ParseEncoding() requires lower case.
  if (typeof encoding === 'string') encoding = encoding.toLowerCase();
  if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding);
  this._writableState.defaultEncoding = encoding;
  return this;
};

function decodeChunk(state, chunk, encoding) {
  if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
    chunk = Buffer.from(chunk, encoding);
  }
  return chunk;
}

Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function () {
    return this._writableState.highWaterMark;
  }
});

// if we're already writing something, then just put this
// in the queue, and wait our turn. Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
  if (!isBuf) {
    var newChunk = decodeChunk(state, chunk, encoding);
    if (chunk !== newChunk) {
      isBuf = true;
      encoding = 'buffer';
      chunk = newChunk;
    }
  }
  var len = state.objectMode ? 1 : chunk.length;

  state.length += len;

  var ret = state.length < state.highWaterMark;
  // we must ensure that previous needDrain will not be reset to false.
  if (!ret) state.needDrain = true;

  if (state.writing || state.corked) {
    var last = state.lastBufferedRequest;
    state.lastBufferedRequest = {
      chunk: chunk,
      encoding: encoding,
      isBuf: isBuf,
      callback: cb,
      next: null
    };
    if (last) {
      last.next = state.lastBufferedRequest;
    } else {
      state.bufferedRequest = state.lastBufferedRequest;
    }
    state.bufferedRequestCount += 1;
  } else {
    doWrite(stream, state, false, len, chunk, encoding, cb);
  }

  return ret;
}

function doWrite(stream, state, writev, len, chunk, encoding, cb) {
  state.writelen = len;
  state.writecb = cb;
  state.writing = true;
  state.sync = true;
  if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
  state.sync = false;
}

function onwriteError(stream, state, sync, er, cb) {
  --state.pendingcb;

  if (sync) {
    // defer the callback if we are being called synchronously
    // to avoid piling up things on the stack
    pna.nextTick(cb, er);
    // this can emit finish, and it will always happen
    // after error
    pna.nextTick(finishMaybe, stream, state);
    stream._writableState.errorEmitted = true;
    stream.emit('error', er);
  } else {
    // the caller expect this to happen before if
    // it is async
    cb(er);
    stream._writableState.errorEmitted = true;
    stream.emit('error', er);
    // this can emit finish, but finish must
    // always follow error
    finishMaybe(stream, state);
  }
}

function onwriteStateUpdate(state) {
  state.writing = false;
  state.writecb = null;
  state.length -= state.writelen;
  state.writelen = 0;
}

function onwrite(stream, er) {
  var state = stream._writableState;
  var sync = state.sync;
  var cb = state.writecb;

  onwriteStateUpdate(state);

  if (er) onwriteError(stream, state, sync, er, cb);else {
    // Check if we're actually ready to finish, but don't emit yet
    var finished = needFinish(state);

    if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
      clearBuffer(stream, state);
    }

    if (sync) {
      /*<replacement>*/
      asyncWrite(afterWrite, stream, state, finished, cb);
      /*</replacement>*/
    } else {
      afterWrite(stream, state, finished, cb);
    }
  }
}

function afterWrite(stream, state, finished, cb) {
  if (!finished) onwriteDrain(stream, state);
  state.pendingcb--;
  cb();
  finishMaybe(stream, state);
}

// Must force callback to be called on nextTick, so that we don't
// emit 'drain' before the write() consumer gets the 'false' return
// value, and has a chance to attach a 'drain' listener.
function onwriteDrain(stream, state) {
  if (state.length === 0 && state.needDrain) {
    state.needDrain = false;
    stream.emit('drain');
  }
}

// if there's something in the buffer waiting, then process it
function clearBuffer(stream, state) {
  state.bufferProcessing = true;
  var entry = state.bufferedRequest;

  if (stream._writev && entry && entry.next) {
    // Fast case, write everything using _writev()
    var l = state.bufferedRequestCount;
    var buffer = new Array(l);
    var holder = state.corkedRequestsFree;
    holder.entry = entry;

    var count = 0;
    var allBuffers = true;
    while (entry) {
      buffer[count] = entry;
      if (!entry.isBuf) allBuffers = false;
      entry = entry.next;
      count += 1;
    }
    buffer.allBuffers = allBuffers;

    doWrite(stream, state, true, state.length, buffer, '', holder.finish);

    // doWrite is almost always async, defer these to save a bit of time
    // as the hot path ends with doWrite
    state.pendingcb++;
    state.lastBufferedRequest = null;
    if (holder.next) {
      state.corkedRequestsFree = holder.next;
      holder.next = null;
    } else {
      state.corkedRequestsFree = new CorkedRequest(state);
    }
    state.bufferedRequestCount = 0;
  } else {
    // Slow case, write chunks one-by-one
    while (entry) {
      var chunk = entry.chunk;
      var encoding = entry.encoding;
      var cb = entry.callback;
      var len = state.objectMode ? 1 : chunk.length;

      doWrite(stream, state, false, len, chunk, encoding, cb);
      entry = entry.next;
      state.bufferedRequestCount--;
      // if we didn't call the onwrite immediately, then
      // it means that we need to wait until it does.
      // also, that means that the chunk and cb are currently
      // being processed, so move the buffer counter past them.
      if (state.writing) {
        break;
      }
    }

    if (entry === null) state.lastBufferedRequest = null;
  }

  state.bufferedRequest = entry;
  state.bufferProcessing = false;
}

Writable.prototype._write = function (chunk, encoding, cb) {
  cb(new Error('_write() is not implemented'));
};

Writable.prototype._writev = null;
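/*
 * Illustrative sketch (not executed here): a minimal Writable that supplies
 * the _write() hook above via the options form of the public 'stream' API.
 *
 *   const { Writable } = require('stream');
 *   const lineLogger = new Writable({
 *     write(chunk, encoding, cb) {
 *       // further writes are buffered until cb() is called
 *       console.log('received: %s', chunk.toString().trim());
 *       cb(); // or cb(err) to emit 'error'
 *     }
 *   });
 *   lineLogger.write('hello\n');
 *   lineLogger.end('bye\n');
 */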
Writable.prototype.end = function (chunk, encoding, cb) {
  var state = this._writableState;

  if (typeof chunk === 'function') {
    cb = chunk;
    chunk = null;
    encoding = null;
  } else if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }

  if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);

  // .end() fully uncorks
  if (state.corked) {
    state.corked = 1;
    this.uncork();
  }

  // ignore unnecessary end() calls.
  if (!state.ending && !state.finished) endWritable(this, state, cb);
};

function needFinish(state) {
  return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
}
function callFinal(stream, state) {
  stream._final(function (err) {
    state.pendingcb--;
    if (err) {
      stream.emit('error', err);
    }
    state.prefinished = true;
    stream.emit('prefinish');
    finishMaybe(stream, state);
  });
}
function prefinish(stream, state) {
  if (!state.prefinished && !state.finalCalled) {
    if (typeof stream._final === 'function') {
      state.pendingcb++;
      state.finalCalled = true;
      pna.nextTick(callFinal, stream, state);
    } else {
      state.prefinished = true;
      stream.emit('prefinish');
    }
  }
}

function finishMaybe(stream, state) {
  var need = needFinish(state);
  if (need) {
    prefinish(stream, state);
    if (state.pendingcb === 0) {
      state.finished = true;
      stream.emit('finish');
    }
  }
  return need;
}

function endWritable(stream, state, cb) {
  state.ending = true;
  finishMaybe(stream, state);
  if (cb) {
    if (state.finished) pna.nextTick(cb);else stream.once('finish', cb);
  }
  state.ended = true;
  stream.writable = false;
}

function onCorkedFinish(corkReq, state, err) {
  var entry = corkReq.entry;
  corkReq.entry = null;
  while (entry) {
    var cb = entry.callback;
    state.pendingcb--;
    cb(err);
    entry = entry.next;
  }
  if (state.corkedRequestsFree) {
    state.corkedRequestsFree.next = corkReq;
  } else {
    state.corkedRequestsFree = corkReq;
  }
}

Object.defineProperty(Writable.prototype, 'destroyed', {
  get: function () {
    if (this._writableState === undefined) {
      return false;
    }
    return this._writableState.destroyed;
  },
  set: function (value) {
    // we ignore the value if the stream
    // has not been initialized yet
    if (!this._writableState) {
      return;
    }

    // backward compatibility, the user is explicitly
    // managing destroyed
    this._writableState.destroyed = value;
  }
});

Writable.prototype.destroy = destroyImpl.destroy;
Writable.prototype._undestroy = destroyImpl.undestroy;
Writable.prototype._destroy = function (err, cb) {
  this.end();
  cb(err);
};

/***/ }),

/***/ 5582:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";


function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }

var Buffer = __nccwpck_require__(1534).Buffer;
var util = __nccwpck_require__(1669);

function copyBuffer(src, target, offset) {
  src.copy(target, offset);
}

module.exports = function () {
  function BufferList() {
    _classCallCheck(this, BufferList);

    this.head = null;
    this.tail = null;
    this.length = 0;
  }

  BufferList.prototype.push = function push(v) {
    var entry = { data: v, next: null };
    if (this.length > 0) this.tail.next = entry;else this.head = entry;
    this.tail = entry;
    ++this.length;
  };

  BufferList.prototype.unshift = function unshift(v) {
    var entry = { data: v, next: this.head };
    if (this.length === 0) this.tail = entry;
    this.head = entry;
    ++this.length;
  };

  BufferList.prototype.shift = function shift() {
    if (this.length === 0) return;
    var ret = this.head.data;
    if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
    --this.length;
    return ret;
  };

  BufferList.prototype.clear = function clear() {
    this.head = this.tail = null;
    this.length = 0;
  };

  BufferList.prototype.join = function join(s) {
    if (this.length === 0) return '';
    var p = this.head;
    var ret = '' + p.data;
    while (p = p.next) {
      ret += s + p.data;
    }return ret;
  };

  BufferList.prototype.concat = function concat(n) {
    if (this.length === 0) return Buffer.alloc(0);
    if (this.length === 1) return this.head.data;
    var ret = Buffer.allocUnsafe(n >>> 0);
    var p = this.head;
    var i = 0;
    while (p) {
      copyBuffer(p.data, ret, i);
      i += p.data.length;
      p = p.next;
    }
    return ret;
  };

  return BufferList;
}();

if (util && util.inspect && util.inspect.custom) {
  module.exports.prototype[util.inspect.custom] = function () {
    var obj = util.inspect({ length: this.length });
    return this.constructor.name + ' ' + obj;
  };
}
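/*
 * Illustrative sketch (not executed here): how the readable-stream internals
 * use this BufferList to accumulate chunks and join them on demand.
 *
 *   const BufferList = module.exports; // the class defined above
 *   const list = new BufferList();
 *   list.push(Buffer.from('hel'));
 *   list.push(Buffer.from('lo'));
 *   // concat(n) copies n bytes from the head of the list into one Buffer
 *   const joined = list.concat(5); // <Buffer 68 65 6c 6c 6f>
 */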
/***/ }),

/***/ 6599:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";


/*<replacement>*/

var pna = __nccwpck_require__(9029);
/*</replacement>*/

// undocumented cb() API, needed for core, not for public API
function destroy(err, cb) {
  var _this = this;

  var readableDestroyed = this._readableState && this._readableState.destroyed;
  var writableDestroyed = this._writableState && this._writableState.destroyed;

  if (readableDestroyed || writableDestroyed) {
    if (cb) {
      cb(err);
    } else if (err && (!this._writableState || !this._writableState.errorEmitted)) {
      pna.nextTick(emitErrorNT, this, err);
    }
    return this;
  }

  // we set destroyed to true before firing error callbacks in order
  // to make it re-entrance safe in case destroy() is called within callbacks

  if (this._readableState) {
    this._readableState.destroyed = true;
  }

  // if this is a duplex stream mark the writable part as destroyed as well
  if (this._writableState) {
    this._writableState.destroyed = true;
  }

  this._destroy(err || null, function (err) {
    if (!cb && err) {
      pna.nextTick(emitErrorNT, _this, err);
      if (_this._writableState) {
        _this._writableState.errorEmitted = true;
      }
    } else if (cb) {
      cb(err);
    }
  });

  return this;
}

function undestroy() {
  if (this._readableState) {
    this._readableState.destroyed = false;
    this._readableState.reading = false;
    this._readableState.ended = false;
    this._readableState.endEmitted = false;
  }

  if (this._writableState) {
    this._writableState.destroyed = false;
    this._writableState.ended = false;
    this._writableState.ending = false;
    this._writableState.finished = false;
    this._writableState.errorEmitted = false;
  }
}

function emitErrorNT(self, err) {
  self.emit('error', err);
}

module.exports = {
  destroy: destroy,
  undestroy: undestroy
};

/***/ }),

/***/ 2342:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

module.exports = __nccwpck_require__(2413);


/***/ }),

/***/ 3484:
/***/ ((module, exports, __nccwpck_require__) => {

var Stream = __nccwpck_require__(2413);
if (process.env.READABLE_STREAM === 'disable' && Stream) {
  module.exports = Stream;
  exports = module.exports = Stream.Readable;
  exports.Readable = Stream.Readable;
  exports.Writable = Stream.Writable;
  exports.Duplex = Stream.Duplex;
  exports.Transform = Stream.Transform;
  exports.PassThrough = Stream.PassThrough;
  exports.Stream = Stream;
} else {
  exports = module.exports = __nccwpck_require__(7913);
  exports.Stream = Stream || exports;
  exports.Readable = exports;
  exports.Writable = __nccwpck_require__(2072);
  exports.Duplex = __nccwpck_require__(9994);
  exports.Transform = __nccwpck_require__(2555);
  exports.PassThrough = __nccwpck_require__(685);
}


/***/ }),

/***/ 5899:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

/**
 * Archiver Vending
 *
 * @ignore
 * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE}
 * @copyright (c) 2012-2014 Chris Talkington, contributors.
 */
var Archiver = __nccwpck_require__(856);

var formats = {};

/**
 * Dispenses a new Archiver instance.
 *
 * @constructor
 * @param {String} format The archive format to use.
 * @param {Object} options See [Archiver]{@link Archiver}
 * @return {Archiver}
 */
var vending = function(format, options) {
  return vending.create(format, options);
};

/**
 * Creates a new Archiver instance.
 *
 * @param {String} format The archive format to use.
 * @param {Object} options See [Archiver]{@link Archiver}
 * @return {Archiver}
 */
vending.create = function(format, options) {
  if (formats[format]) {
    var instance = new Archiver(format, options);
    instance.setFormat(format);
    instance.setModule(new formats[format](options));

    return instance;
  } else {
    throw new Error('create(' + format + '): format not registered');
  }
};

/**
 * Registers a format for use with archiver.
 *
 * @param {String} format The name of the format.
 * @param {Function} module The function for archiver to interact with.
 * @return void
 */
vending.registerFormat = function(format, module) {
  if (formats[format]) {
    throw new Error('register(' + format + '): format already registered');
  }

  if (typeof module !== 'function') {
    throw new Error('register(' + format + '): format module invalid');
  }

  if (typeof module.prototype.append !== 'function' || typeof module.prototype.finalize !== 'function') {
    throw new Error('register(' + format + '): format module missing methods');
  }

  formats[format] = module;
};

/**
 * Check if the format is already registered.
 *
 * @param {String} format the name of the format.
 * @return boolean
 */
vending.isRegisteredFormat = function (format) {
  if (formats[format]) {
    return true;
  }

  return false;
};

vending.registerFormat('zip', __nccwpck_require__(577));
vending.registerFormat('tar', __nccwpck_require__(6608));
vending.registerFormat('json', __nccwpck_require__(7313));

module.exports = vending;
/***/ }),
|
||
|
||
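// Illustrative usage of the vending function exported by module 5899 above
// (stream, file names and option values are examples only):
//
//   var archiver = __nccwpck_require__(5899);
//   var archive = archiver('zip', { zlib: { level: 9 } });
//   archive.pipe(outputStream);
//   archive.file('file1.txt', { name: 'file1.txt' });
//   archive.finalize();
//
// 'zip', 'tar' and 'json' are registered by default; other formats can be
// added via archiver.registerFormat(name, module) as defined above.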
/***/ 856:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
/**
|
||
* Archiver Core
|
||
*
|
||
* @ignore
|
||
* @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE}
|
||
* @copyright (c) 2012-2014 Chris Talkington, contributors.
|
||
*/
|
||
var fs = __nccwpck_require__(5747);
|
||
var glob = __nccwpck_require__(951);
|
||
var async = __nccwpck_require__(9003);
|
||
var path = __nccwpck_require__(5622);
|
||
var util = __nccwpck_require__(9451);
|
||
|
||
var inherits = __nccwpck_require__(1669).inherits;
|
||
var ArchiverError = __nccwpck_require__(8730);
|
||
var Transform = __nccwpck_require__(2498).Transform;
|
||
|
||
var win32 = process.platform === 'win32';
|
||
|
||
/**
|
||
* @constructor
|
||
* @param {String} format The archive format to use.
|
||
* @param {(CoreOptions|TransformOptions)} options See also {@link ZipOptions} and {@link TarOptions}.
|
||
*/
|
||
var Archiver = function(format, options) {
|
||
if (!(this instanceof Archiver)) {
|
||
return new Archiver(format, options);
|
||
}
|
||
|
||
if (typeof format !== 'string') {
|
||
options = format;
|
||
format = 'zip';
|
||
}
|
||
|
||
options = this.options = util.defaults(options, {
|
||
highWaterMark: 1024 * 1024,
|
||
statConcurrency: 4
|
||
});
|
||
|
||
Transform.call(this, options);
|
||
|
||
this._format = false;
|
||
this._module = false;
|
||
this._pending = 0;
|
||
this._pointer = 0;
|
||
|
||
this._entriesCount = 0;
|
||
this._entriesProcessedCount = 0;
|
||
this._fsEntriesTotalBytes = 0;
|
||
this._fsEntriesProcessedBytes = 0;
|
||
|
||
this._queue = async.queue(this._onQueueTask.bind(this), 1);
|
||
this._queue.drain(this._onQueueDrain.bind(this));
|
||
|
||
this._statQueue = async.queue(this._onStatQueueTask.bind(this), options.statConcurrency);
|
||
this._statQueue.drain(this._onQueueDrain.bind(this));
|
||
|
||
this._state = {
|
||
aborted: false,
|
||
finalize: false,
|
||
finalizing: false,
|
||
finalized: false,
|
||
modulePiped: false
|
||
};
|
||
|
||
this._streams = [];
|
||
};
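// Note on the two queues set up above: the entry queue runs with concurrency 1
// so entries are handed to the format module strictly in order, while the stat
// queue runs `statConcurrency` workers (4 by default) so fs.lstat calls for
// queued file paths can overlap.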
|
||
|
||
inherits(Archiver, Transform);
|
||
|
||
/**
|
||
* Internal logic for `abort`.
|
||
*
|
||
* @private
|
||
* @return void
|
||
*/
|
||
Archiver.prototype._abort = function() {
|
||
this._state.aborted = true;
|
||
this._queue.kill();
|
||
this._statQueue.kill();
|
||
|
||
if (this._queue.idle()) {
|
||
this._shutdown();
|
||
}
|
||
};
|
||
|
||
/**
|
||
* Internal helper for appending files.
|
||
*
|
||
* @private
|
||
* @param {String} filepath The source filepath.
|
||
* @param {EntryData} data The entry data.
|
||
* @return void
|
||
*/
|
||
Archiver.prototype._append = function(filepath, data) {
|
||
data = data || {};
|
||
|
||
var task = {
|
||
source: null,
|
||
filepath: filepath
|
||
};
|
||
|
||
if (!data.name) {
|
||
data.name = filepath;
|
||
}
|
||
|
||
data.sourcePath = filepath;
|
||
task.data = data;
|
||
this._entriesCount++;
|
||
|
||
if (data.stats && data.stats instanceof fs.Stats) {
|
||
task = this._updateQueueTaskWithStats(task, data.stats);
|
||
if (task) {
|
||
if (data.stats.size) {
|
||
this._fsEntriesTotalBytes += data.stats.size;
|
||
}
|
||
|
||
this._queue.push(task);
|
||
}
|
||
} else {
|
||
this._statQueue.push(task);
|
||
}
|
||
};
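// If the caller already provided an fs.Stats instance, the task is queued for
// appending right away; otherwise it goes through the stat queue first, which
// lstat()s the path and re-injects the task (see _onStatQueueTask below).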
|
||
|
||
/**
|
||
* Internal logic for `finalize`.
|
||
*
|
||
* @private
|
||
* @return void
|
||
*/
|
||
Archiver.prototype._finalize = function() {
|
||
if (this._state.finalizing || this._state.finalized || this._state.aborted) {
|
||
return;
|
||
}
|
||
|
||
this._state.finalizing = true;
|
||
|
||
this._moduleFinalize();
|
||
|
||
this._state.finalizing = false;
|
||
this._state.finalized = true;
|
||
};
|
||
|
||
/**
|
||
* Checks the various state variables to determine if we can `finalize`.
|
||
*
|
||
* @private
|
||
* @return {Boolean}
|
||
*/
|
||
Archiver.prototype._maybeFinalize = function() {
|
||
if (this._state.finalizing || this._state.finalized || this._state.aborted) {
|
||
return false;
|
||
}
|
||
|
||
if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) {
|
||
this._finalize();
|
||
return true;
|
||
}
|
||
|
||
return false;
|
||
};
|
||
|
||
/**
|
||
* Appends an entry to the module.
|
||
*
|
||
* @private
|
||
* @fires Archiver#entry
|
||
* @param {(Buffer|Stream)} source
|
||
* @param {EntryData} data
|
||
* @param {Function} callback
|
||
* @return void
|
||
*/
|
||
Archiver.prototype._moduleAppend = function(source, data, callback) {
|
||
if (this._state.aborted) {
|
||
callback();
|
||
return;
|
||
}
|
||
|
||
this._module.append(source, data, function(err) {
|
||
this._task = null;
|
||
|
||
if (this._state.aborted) {
|
||
this._shutdown();
|
||
return;
|
||
}
|
||
|
||
if (err) {
|
||
this.emit('error', err);
|
||
setImmediate(callback);
|
||
return;
|
||
}
|
||
|
||
/**
|
||
* Fires when the entry's input has been processed and appended to the archive.
|
||
*
|
||
* @event Archiver#entry
|
||
* @type {EntryData}
|
||
*/
|
||
this.emit('entry', data);
|
||
this._entriesProcessedCount++;
|
||
|
||
if (data.stats && data.stats.size) {
|
||
this._fsEntriesProcessedBytes += data.stats.size;
|
||
}
|
||
|
||
/**
|
||
* @event Archiver#progress
|
||
* @type {ProgressData}
|
||
*/
|
||
this.emit('progress', {
|
||
entries: {
|
||
total: this._entriesCount,
|
||
processed: this._entriesProcessedCount
|
||
},
|
||
fs: {
|
||
totalBytes: this._fsEntriesTotalBytes,
|
||
processedBytes: this._fsEntriesProcessedBytes
|
||
}
|
||
});
|
||
|
||
setImmediate(callback);
|
||
}.bind(this));
|
||
};
|
||
|
||
/**
|
||
* Finalizes the module.
|
||
*
|
||
* @private
|
||
* @return void
|
||
*/
|
||
Archiver.prototype._moduleFinalize = function() {
|
||
if (typeof this._module.finalize === 'function') {
|
||
this._module.finalize();
|
||
} else if (typeof this._module.end === 'function') {
|
||
this._module.end();
|
||
} else {
|
||
this.emit('error', new ArchiverError('NOENDMETHOD'));
|
||
}
|
||
};
|
||
|
||
/**
|
||
* Pipes the module to our internal stream with error bubbling.
|
||
*
|
||
* @private
|
||
* @return void
|
||
*/
|
||
Archiver.prototype._modulePipe = function() {
|
||
this._module.on('error', this._onModuleError.bind(this));
|
||
this._module.pipe(this);
|
||
this._state.modulePiped = true;
|
||
};
|
||
|
||
/**
|
||
* Determines if the current module supports a defined feature.
|
||
*
|
||
* @private
|
||
* @param {String} key
|
||
* @return {Boolean}
|
||
*/
|
||
Archiver.prototype._moduleSupports = function(key) {
|
||
if (!this._module.supports || !this._module.supports[key]) {
|
||
return false;
|
||
}
|
||
|
||
return this._module.supports[key];
|
||
};
|
||
|
||
/**
|
||
* Unpipes the module from our internal stream.
|
||
*
|
||
* @private
|
||
* @return void
|
||
*/
|
||
Archiver.prototype._moduleUnpipe = function() {
|
||
this._module.unpipe(this);
|
||
this._state.modulePiped = false;
|
||
};
|
||
|
||
/**
|
||
* Normalizes entry data with fallbacks for key properties.
|
||
*
|
||
* @private
|
||
* @param {Object} data
|
||
* @param {fs.Stats} stats
|
||
* @return {Object}
|
||
*/
|
||
Archiver.prototype._normalizeEntryData = function(data, stats) {
|
||
data = util.defaults(data, {
|
||
type: 'file',
|
||
name: null,
|
||
date: null,
|
||
mode: null,
|
||
prefix: null,
|
||
sourcePath: null,
|
||
stats: false
|
||
});
|
||
|
||
if (stats && data.stats === false) {
|
||
data.stats = stats;
|
||
}
|
||
|
||
var isDir = data.type === 'directory';
|
||
|
||
if (data.name) {
|
||
if (typeof data.prefix === 'string' && '' !== data.prefix) {
|
||
data.name = data.prefix + '/' + data.name;
|
||
data.prefix = null;
|
||
}
|
||
|
||
data.name = util.sanitizePath(data.name);
|
||
|
||
if (data.type !== 'symlink' && data.name.slice(-1) === '/') {
|
||
isDir = true;
|
||
data.type = 'directory';
|
||
} else if (isDir) {
|
||
data.name += '/';
|
||
}
|
||
}
|
||
|
||
// 511 === 0777; 493 === 0755; 438 === 0666; 420 === 0644
|
||
if (typeof data.mode === 'number') {
|
||
if (win32) {
|
||
data.mode &= 511;
|
||
} else {
|
||
data.mode &= 4095
|
||
}
|
||
} else if (data.stats && data.mode === null) {
|
||
if (win32) {
|
||
data.mode = data.stats.mode & 511;
|
||
} else {
|
||
data.mode = data.stats.mode & 4095;
|
||
}
|
||
|
||
// stat isn't reliable on windows; force 0755 for dir
|
||
if (win32 && isDir) {
|
||
data.mode = 493;
|
||
}
|
||
} else if (data.mode === null) {
|
||
data.mode = isDir ? 493 : 420;
|
||
}
|
||
|
||
if (data.stats && data.date === null) {
|
||
data.date = data.stats.mtime;
|
||
} else {
|
||
data.date = util.dateify(data.date);
|
||
}
|
||
|
||
return data;
|
||
};
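// Example of the mode handling above: a stat mode of 0o100644 (regular file,
// rw-r--r--) masked with 4095 (0o7777) yields 420 (0o644); on win32 the mask
// 511 (0o777) additionally drops the setuid/setgid/sticky bits, and directory
// entries are forced to 493 (0o755).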
|
||
|
||
/**
|
||
* Error listener that re-emits error on to our internal stream.
|
||
*
|
||
* @private
|
||
* @param {Error} err
|
||
* @return void
|
||
*/
|
||
Archiver.prototype._onModuleError = function(err) {
|
||
/**
|
||
* @event Archiver#error
|
||
* @type {ErrorData}
|
||
*/
|
||
this.emit('error', err);
|
||
};
|
||
|
||
/**
|
||
* Checks the various state variables after queue has drained to determine if
|
||
* we need to `finalize`.
|
||
*
|
||
* @private
|
||
* @return void
|
||
*/
|
||
Archiver.prototype._onQueueDrain = function() {
|
||
if (this._state.finalizing || this._state.finalized || this._state.aborted) {
|
||
return;
|
||
}
|
||
|
||
if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) {
|
||
this._finalize();
|
||
}
|
||
};
|
||
|
||
/**
|
||
* Appends each queue task to the module.
|
||
*
|
||
* @private
|
||
* @param {Object} task
|
||
* @param {Function} callback
|
||
* @return void
|
||
*/
|
||
Archiver.prototype._onQueueTask = function(task, callback) {
|
||
var fullCallback = () => {
|
||
if(task.data.callback) {
|
||
task.data.callback();
|
||
}
|
||
callback();
|
||
}
|
||
|
||
if (this._state.finalizing || this._state.finalized || this._state.aborted) {
|
||
fullCallback();
|
||
return;
|
||
}
|
||
|
||
this._task = task;
|
||
this._moduleAppend(task.source, task.data, fullCallback);
|
||
};
|
||
|
||
/**
|
||
* Performs a file stat and reinjects the task back into the queue.
|
||
*
|
||
* @private
|
||
* @param {Object} task
|
||
* @param {Function} callback
|
||
* @return void
|
||
*/
|
||
Archiver.prototype._onStatQueueTask = function(task, callback) {
|
||
if (this._state.finalizing || this._state.finalized || this._state.aborted) {
|
||
callback();
|
||
return;
|
||
}
|
||
|
||
fs.lstat(task.filepath, function(err, stats) {
|
||
if (this._state.aborted) {
|
||
setImmediate(callback);
|
||
return;
|
||
}
|
||
|
||
if (err) {
|
||
this._entriesCount--;
|
||
|
||
/**
|
||
* @event Archiver#warning
|
||
* @type {ErrorData}
|
||
*/
|
||
this.emit('warning', err);
|
||
setImmediate(callback);
|
||
return;
|
||
}
|
||
|
||
task = this._updateQueueTaskWithStats(task, stats);
|
||
|
||
if (task) {
|
||
if (stats.size) {
|
||
this._fsEntriesTotalBytes += stats.size;
|
||
}
|
||
|
||
this._queue.push(task);
|
||
}
|
||
|
||
setImmediate(callback);
|
||
}.bind(this));
|
||
};
|
||
|
||
/**
|
||
* Unpipes the module and ends our internal stream.
|
||
*
|
||
* @private
|
||
* @return void
|
||
*/
|
||
Archiver.prototype._shutdown = function() {
|
||
this._moduleUnpipe();
|
||
this.end();
|
||
};
|
||
|
||
/**
|
||
* Tracks the bytes emitted by our internal stream.
|
||
*
|
||
* @private
|
||
* @param {Buffer} chunk
|
||
* @param {String} encoding
|
||
* @param {Function} callback
|
||
* @return void
|
||
*/
|
||
Archiver.prototype._transform = function(chunk, encoding, callback) {
|
||
if (chunk) {
|
||
this._pointer += chunk.length;
|
||
}
|
||
|
||
callback(null, chunk);
|
||
};
|
||
|
||
/**
|
||
* Updates and normalizes a queue task using stats data.
|
||
*
|
||
* @private
|
||
* @param {Object} task
|
||
* @param {fs.Stats} stats
|
||
* @return {Object}
|
||
*/
|
||
Archiver.prototype._updateQueueTaskWithStats = function(task, stats) {
|
||
if (stats.isFile()) {
|
||
task.data.type = 'file';
|
||
task.data.sourceType = 'stream';
|
||
task.source = util.lazyReadStream(task.filepath);
|
||
} else if (stats.isDirectory() && this._moduleSupports('directory')) {
|
||
task.data.name = util.trailingSlashIt(task.data.name);
|
||
task.data.type = 'directory';
|
||
task.data.sourcePath = util.trailingSlashIt(task.filepath);
|
||
task.data.sourceType = 'buffer';
|
||
task.source = Buffer.concat([]);
|
||
} else if (stats.isSymbolicLink() && this._moduleSupports('symlink')) {
|
||
var linkPath = fs.readlinkSync(task.filepath);
|
||
var dirName = path.dirname(task.filepath);
|
||
task.data.type = 'symlink';
|
||
task.data.linkname = path.relative(dirName, path.resolve(dirName, linkPath));
|
||
task.data.sourceType = 'buffer';
|
||
task.source = Buffer.concat([]);
|
||
} else {
|
||
if (stats.isDirectory()) {
|
||
this.emit('warning', new ArchiverError('DIRECTORYNOTSUPPORTED', task.data));
|
||
} else if (stats.isSymbolicLink()) {
|
||
this.emit('warning', new ArchiverError('SYMLINKNOTSUPPORTED', task.data));
|
||
} else {
|
||
this.emit('warning', new ArchiverError('ENTRYNOTSUPPORTED', task.data));
|
||
}
|
||
|
||
return null;
|
||
}
|
||
|
||
task.data = this._normalizeEntryData(task.data, stats);
|
||
|
||
return task;
|
||
};
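// Summary of the branches above: regular files become lazy read streams (so a
// file descriptor is only opened once the entry is actually appended),
// directories and symlinks are represented by an empty buffer plus metadata,
// and any other entry type is reported via a 'warning' event instead of
// failing the whole archive.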
|
||
|
||
/**
|
||
* Aborts the archiving process, taking a best-effort approach, by:
|
||
*
|
||
* - removing any pending queue tasks
|
||
* - allowing any active queue workers to finish
|
||
* - detaching internal module pipes
|
||
* - ending both sides of the Transform stream
|
||
*
|
||
* It will NOT drain any remaining sources.
|
||
*
|
||
* @return {this}
|
||
*/
|
||
Archiver.prototype.abort = function() {
|
||
if (this._state.aborted || this._state.finalized) {
|
||
return this;
|
||
}
|
||
|
||
this._abort();
|
||
|
||
return this;
|
||
};
|
||
|
||
/**
|
||
* Appends an input source (text string, buffer, or stream) to the instance.
|
||
*
|
||
* When the instance has received, processed, and emitted the input, the `entry`
|
||
* event is fired.
|
||
*
|
||
* @fires Archiver#entry
|
||
* @param {(Buffer|Stream|String)} source The input source.
|
||
* @param {EntryData} data See also {@link ZipEntryData} and {@link TarEntryData}.
|
||
* @return {this}
|
||
*/
|
||
Archiver.prototype.append = function(source, data) {
|
||
if (this._state.finalize || this._state.aborted) {
|
||
this.emit('error', new ArchiverError('QUEUECLOSED'));
|
||
return this;
|
||
}
|
||
|
||
data = this._normalizeEntryData(data);
|
||
|
||
if (typeof data.name !== 'string' || data.name.length === 0) {
|
||
this.emit('error', new ArchiverError('ENTRYNAMEREQUIRED'));
|
||
return this;
|
||
}
|
||
|
||
if (data.type === 'directory' && !this._moduleSupports('directory')) {
|
||
this.emit('error', new ArchiverError('DIRECTORYNOTSUPPORTED', { name: data.name }));
|
||
return this;
|
||
}
|
||
|
||
source = util.normalizeInputSource(source);
|
||
|
||
if (Buffer.isBuffer(source)) {
|
||
data.sourceType = 'buffer';
|
||
} else if (util.isStream(source)) {
|
||
data.sourceType = 'stream';
|
||
} else {
|
||
this.emit('error', new ArchiverError('INPUTSTEAMBUFFERREQUIRED', { name: data.name }));
|
||
return this;
|
||
}
|
||
|
||
this._entriesCount++;
|
||
this._queue.push({
|
||
data: data,
|
||
source: source
|
||
});
|
||
|
||
return this;
|
||
};
|
||
|
||
/**
|
||
* Appends a directory and its files, recursively, given its dirpath.
|
||
*
|
||
* @param {String} dirpath The source directory path.
|
||
* @param {String} destpath The destination path within the archive.
|
||
* @param {(EntryData|Function)} data See also [ZipEntryData]{@link ZipEntryData} and
|
||
* [TarEntryData]{@link TarEntryData}.
|
||
* @return {this}
|
||
*/
|
||
Archiver.prototype.directory = function(dirpath, destpath, data) {
|
||
if (this._state.finalize || this._state.aborted) {
|
||
this.emit('error', new ArchiverError('QUEUECLOSED'));
|
||
return this;
|
||
}
|
||
|
||
if (typeof dirpath !== 'string' || dirpath.length === 0) {
|
||
this.emit('error', new ArchiverError('DIRECTORYDIRPATHREQUIRED'));
|
||
return this;
|
||
}
|
||
|
||
this._pending++;
|
||
|
||
if (destpath === false) {
|
||
destpath = '';
|
||
} else if (typeof destpath !== 'string'){
|
||
destpath = dirpath;
|
||
}
|
||
|
||
var dataFunction = false;
|
||
if (typeof data === 'function') {
|
||
dataFunction = data;
|
||
data = {};
|
||
} else if (typeof data !== 'object') {
|
||
data = {};
|
||
}
|
||
|
||
var globOptions = {
|
||
stat: true,
|
||
dot: true
|
||
};
|
||
|
||
function onGlobEnd() {
|
||
this._pending--;
|
||
this._maybeFinalize();
|
||
}
|
||
|
||
function onGlobError(err) {
|
||
this.emit('error', err);
|
||
}
|
||
|
||
function onGlobMatch(match){
|
||
globber.pause();
|
||
|
||
var ignoreMatch = false;
|
||
var entryData = Object.assign({}, data);
|
||
entryData.name = match.relative;
|
||
entryData.prefix = destpath;
|
||
entryData.stats = match.stat;
|
||
entryData.callback = globber.resume.bind(globber);
|
||
|
||
try {
|
||
if (dataFunction) {
|
||
entryData = dataFunction(entryData);
|
||
|
||
if (entryData === false) {
|
||
ignoreMatch = true;
|
||
} else if (typeof entryData !== 'object') {
|
||
throw new ArchiverError('DIRECTORYFUNCTIONINVALIDDATA', { dirpath: dirpath });
|
||
}
|
||
}
|
||
} catch(e) {
|
||
this.emit('error', e);
|
||
return;
|
||
}
|
||
|
||
if (ignoreMatch) {
|
||
globber.resume();
|
||
return;
|
||
}
|
||
|
||
this._append(match.absolute, entryData);
|
||
}
|
||
|
||
var globber = glob(dirpath, globOptions);
|
||
globber.on('error', onGlobError.bind(this));
|
||
globber.on('match', onGlobMatch.bind(this));
|
||
globber.on('end', onGlobEnd.bind(this));
|
||
|
||
return this;
|
||
};
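// Illustrative calls (paths are examples only):
//
//   archive.directory('build/', 'release');   // build/* stored under release/
//   archive.directory('assets/', false);      // assets/* stored at archive root
//   archive.directory('logs/', 'logs', function(entry) {
//     return entry.name.endsWith('.tmp') ? false : entry;  // skip temp files
//   });
//
// Returning false from the data function skips the matched file, as handled by
// the ignoreMatch branch above.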
|
||
|
||
/**
|
||
* Appends a file given its filepath using a
|
||
* [lazystream]{@link https://github.com/jpommerening/node-lazystream} wrapper to
|
||
* prevent issues with open file limits.
|
||
*
|
||
* When the instance has received, processed, and emitted the file, the `entry`
|
||
* event is fired.
|
||
*
|
||
* @param {String} filepath The source filepath.
|
||
* @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and
|
||
* [TarEntryData]{@link TarEntryData}.
|
||
* @return {this}
|
||
*/
|
||
Archiver.prototype.file = function(filepath, data) {
|
||
if (this._state.finalize || this._state.aborted) {
|
||
this.emit('error', new ArchiverError('QUEUECLOSED'));
|
||
return this;
|
||
}
|
||
|
||
if (typeof filepath !== 'string' || filepath.length === 0) {
|
||
this.emit('error', new ArchiverError('FILEFILEPATHREQUIRED'));
|
||
return this;
|
||
}
|
||
|
||
this._append(filepath, data);
|
||
|
||
return this;
|
||
};
|
||
|
||
/**
|
||
* Appends multiple files that match a glob pattern.
|
||
*
|
||
* @param {String} pattern The [glob pattern]{@link https://github.com/isaacs/minimatch} to match.
|
||
* @param {Object} options See [node-readdir-glob]{@link https://github.com/yqnn/node-readdir-glob#options}.
|
||
* @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and
|
||
* [TarEntryData]{@link TarEntryData}.
|
||
* @return {this}
|
||
*/
|
||
Archiver.prototype.glob = function(pattern, options, data) {
|
||
this._pending++;
|
||
|
||
options = util.defaults(options, {
|
||
stat: true,
|
||
pattern: pattern
|
||
});
|
||
|
||
function onGlobEnd() {
|
||
this._pending--;
|
||
this._maybeFinalize();
|
||
}
|
||
|
||
function onGlobError(err) {
|
||
this.emit('error', err);
|
||
}
|
||
|
||
function onGlobMatch(match){
|
||
globber.pause();
|
||
var entryData = Object.assign({}, data);
|
||
entryData.callback = globber.resume.bind(globber);
|
||
entryData.stats = match.stat;
|
||
entryData.name = match.relative;
|
||
|
||
this._append(match.absolute, entryData);
|
||
}
|
||
|
||
var globber = glob(options.cwd || '.', options);
|
||
globber.on('error', onGlobError.bind(this));
|
||
globber.on('match', onGlobMatch.bind(this));
|
||
globber.on('end', onGlobEnd.bind(this));
|
||
|
||
return this;
|
||
};
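// Illustrative call (pattern and cwd are examples only):
//
//   archive.glob('**/*.js', { cwd: 'src' });
//
// The options object is forwarded to readdir-glob together with `stat: true`
// and the pattern, as set up above; cwd defaults to '.'.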
|
||
|
||
/**
|
||
* Finalizes the instance and prevents further appending to the archive
|
||
* structure (queue will continue til drained).
|
||
*
|
||
* The `end`, `close` or `finish` events on the destination stream may fire
|
||
* right after calling this method so you should set listeners beforehand to
|
||
* properly detect stream completion.
|
||
*
|
||
* @return {Promise}
|
||
*/
|
||
Archiver.prototype.finalize = function() {
|
||
if (this._state.aborted) {
|
||
var abortedError = new ArchiverError('ABORTED');
|
||
this.emit('error', abortedError);
|
||
return Promise.reject(abortedError);
|
||
}
|
||
|
||
if (this._state.finalize) {
|
||
var finalizingError = new ArchiverError('FINALIZING');
|
||
this.emit('error', finalizingError);
|
||
return Promise.reject(finalizingError);
|
||
}
|
||
|
||
this._state.finalize = true;
|
||
|
||
if (this._pending === 0 && this._queue.idle() && this._statQueue.idle()) {
|
||
this._finalize();
|
||
}
|
||
|
||
var self = this;
|
||
|
||
return new Promise(function(resolve, reject) {
|
||
var errored;
|
||
|
||
self._module.on('end', function() {
|
||
if (!errored) {
|
||
resolve();
|
||
}
|
||
})
|
||
|
||
self._module.on('error', function(err) {
|
||
errored = true;
|
||
reject(err);
|
||
})
|
||
})
|
||
};
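// Callers usually attach 'close'/'end'/'error' listeners on the destination
// stream before calling finalize(), since those events can fire immediately
// afterwards; the promise returned above only tracks the format module's own
// 'end' and 'error' events.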
|
||
|
||
/**
|
||
* Sets the module format name used for archiving.
|
||
*
|
||
* @param {String} format The name of the format.
|
||
* @return {this}
|
||
*/
|
||
Archiver.prototype.setFormat = function(format) {
|
||
if (this._format) {
|
||
this.emit('error', new ArchiverError('FORMATSET'));
|
||
return this;
|
||
}
|
||
|
||
this._format = format;
|
||
|
||
return this;
|
||
};
|
||
|
||
/**
|
||
* Sets the module used for archiving.
|
||
*
|
||
* @param {Function} module The function for archiver to interact with.
|
||
* @return {this}
|
||
*/
|
||
Archiver.prototype.setModule = function(module) {
|
||
if (this._state.aborted) {
|
||
this.emit('error', new ArchiverError('ABORTED'));
|
||
return this;
|
||
}
|
||
|
||
if (this._state.module) {
|
||
this.emit('error', new ArchiverError('MODULESET'));
|
||
return this;
|
||
}
|
||
|
||
this._module = module;
|
||
this._modulePipe();
|
||
|
||
return this;
|
||
};
|
||
|
||
/**
|
||
* Appends a symlink to the instance.
|
||
*
|
||
* This does NOT interact with filesystem and is used for programmatically creating symlinks.
|
||
*
|
||
* @param {String} filepath The symlink path (within archive).
|
||
* @param {String} target The target path (within archive).
|
||
* @param {Number} mode Sets the entry permissions.
|
||
* @return {this}
|
||
*/
|
||
Archiver.prototype.symlink = function(filepath, target, mode) {
|
||
if (this._state.finalize || this._state.aborted) {
|
||
this.emit('error', new ArchiverError('QUEUECLOSED'));
|
||
return this;
|
||
}
|
||
|
||
if (typeof filepath !== 'string' || filepath.length === 0) {
|
||
this.emit('error', new ArchiverError('SYMLINKFILEPATHREQUIRED'));
|
||
return this;
|
||
}
|
||
|
||
if (typeof target !== 'string' || target.length === 0) {
|
||
this.emit('error', new ArchiverError('SYMLINKTARGETREQUIRED', { filepath: filepath }));
|
||
return this;
|
||
}
|
||
|
||
if (!this._moduleSupports('symlink')) {
|
||
this.emit('error', new ArchiverError('SYMLINKNOTSUPPORTED', { filepath: filepath }));
|
||
return this;
|
||
}
|
||
|
||
var data = {};
|
||
data.type = 'symlink';
|
||
data.name = filepath.replace(/\\/g, '/');
|
||
data.linkname = target.replace(/\\/g, '/');
|
||
data.sourceType = 'buffer';
|
||
|
||
if (typeof mode === "number") {
|
||
data.mode = mode;
|
||
}
|
||
|
||
this._entriesCount++;
|
||
this._queue.push({
|
||
data: data,
|
||
source: Buffer.concat([])
|
||
});
|
||
|
||
return this;
|
||
};
|
||
|
||
/**
|
||
* Returns the current length (in bytes) that has been emitted.
|
||
*
|
||
* @return {Number}
|
||
*/
|
||
Archiver.prototype.pointer = function() {
|
||
return this._pointer;
|
||
};
|
||
|
||
/**
|
||
* Middleware-like helper that has yet to be fully implemented.
|
||
*
|
||
* @private
|
||
* @param {Function} plugin
|
||
* @return {this}
|
||
*/
|
||
Archiver.prototype.use = function(plugin) {
|
||
this._streams.push(plugin);
|
||
return this;
|
||
};
|
||
|
||
module.exports = Archiver;
|
||
|
||
/**
|
||
* @typedef {Object} CoreOptions
|
||
* @global
|
||
* @property {Number} [statConcurrency=4] Sets the number of workers used to
|
||
* process the internal fs stat queue.
|
||
*/
|
||
|
||
/**
|
||
* @typedef {Object} TransformOptions
|
||
* @property {Boolean} [allowHalfOpen=true] If set to false, then the stream
|
||
* will automatically end the readable side when the writable side ends and vice
|
||
* versa.
|
||
* @property {Boolean} [readableObjectMode=false] Sets objectMode for readable
|
||
* side of the stream. Has no effect if objectMode is true.
|
||
* @property {Boolean} [writableObjectMode=false] Sets objectMode for writable
|
||
* side of the stream. Has no effect if objectMode is true.
|
||
* @property {Boolean} [decodeStrings=true] Whether or not to decode strings
|
||
* into Buffers before passing them to _write(). `Writable`
|
||
* @property {String} [encoding=NULL] If specified, then buffers will be decoded
|
||
* to strings using the specified encoding. `Readable`
|
||
* @property {Number} [highWaterMark=16kb] The maximum number of bytes to store
|
||
* in the internal buffer before ceasing to read from the underlying resource.
|
||
* `Readable` `Writable`
|
||
* @property {Boolean} [objectMode=false] Whether this stream should behave as a
|
||
* stream of objects. Meaning that stream.read(n) returns a single value instead
|
||
* of a Buffer of size n. `Readable` `Writable`
|
||
*/
|
||
|
||
/**
|
||
* @typedef {Object} EntryData
|
||
* @property {String} name Sets the entry name including internal path.
|
||
* @property {(String|Date)} [date=NOW()] Sets the entry date.
|
||
* @property {Number} [mode=D:0755/F:0644] Sets the entry permissions.
|
||
* @property {String} [prefix] Sets a path prefix for the entry name. Useful
|
||
* when working with methods like `directory` or `glob`.
|
||
* @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing
|
||
* for reduction of fs stat calls when stat data is already known.
|
||
*/
|
||
|
||
/**
|
||
* @typedef {Object} ErrorData
|
||
* @property {String} message The message of the error.
|
||
* @property {String} code The error code assigned to this error.
|
||
* @property {String} data Additional data provided for reporting or debugging (where available).
|
||
*/
|
||
|
||
/**
|
||
* @typedef {Object} ProgressData
|
||
* @property {Object} entries
|
||
* @property {Number} entries.total Number of entries that have been appended.
|
||
* @property {Number} entries.processed Number of entries that have been processed.
|
||
* @property {Object} fs
|
||
 * @property {Number} fs.totalBytes Number of bytes that have been appended. Calculated asynchronously and might not be accurate: it grows while entries are added. (based on fs.Stats)
|
||
* @property {Number} fs.processedBytes Number of bytes that have been processed. (based on fs.Stats)
|
||
*/
|
||
|
||
|
||
/***/ }),

/***/ 8730:
/***/ ((module, exports, __nccwpck_require__) => {

/**
 * Archiver Core
 *
 * @ignore
 * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE}
 * @copyright (c) 2012-2014 Chris Talkington, contributors.
 */

var util = __nccwpck_require__(1669);

const ERROR_CODES = {
  'ABORTED': 'archive was aborted',
  'DIRECTORYDIRPATHREQUIRED': 'diretory dirpath argument must be a non-empty string value',
  'DIRECTORYFUNCTIONINVALIDDATA': 'invalid data returned by directory custom data function',
  'ENTRYNAMEREQUIRED': 'entry name must be a non-empty string value',
  'FILEFILEPATHREQUIRED': 'file filepath argument must be a non-empty string value',
  'FINALIZING': 'archive already finalizing',
  'QUEUECLOSED': 'queue closed',
  'NOENDMETHOD': 'no suitable finalize/end method defined by module',
  'DIRECTORYNOTSUPPORTED': 'support for directory entries not defined by module',
  'FORMATSET': 'archive format already set',
  'INPUTSTEAMBUFFERREQUIRED': 'input source must be valid Stream or Buffer instance',
  'MODULESET': 'module already set',
  'SYMLINKNOTSUPPORTED': 'support for symlink entries not defined by module',
  'SYMLINKFILEPATHREQUIRED': 'symlink filepath argument must be a non-empty string value',
  'SYMLINKTARGETREQUIRED': 'symlink target argument must be a non-empty string value',
  'ENTRYNOTSUPPORTED': 'entry not supported'
};

function ArchiverError(code, data) {
  Error.captureStackTrace(this, this.constructor);
  //this.name = this.constructor.name;
  this.message = ERROR_CODES[code] || code;
  this.code = code;
  this.data = data;
}

util.inherits(ArchiverError, Error);

exports = module.exports = ArchiverError;

/***/ }),

/***/ 7313:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

/**
 * JSON Format Plugin
 *
 * @module plugins/json
 * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE}
 * @copyright (c) 2012-2014 Chris Talkington, contributors.
 */
var inherits = __nccwpck_require__(1669).inherits;
var Transform = __nccwpck_require__(2498).Transform;

var crc32 = __nccwpck_require__(8085);
var util = __nccwpck_require__(9451);

/**
 * @constructor
 * @param {(JsonOptions|TransformOptions)} options
 */
var Json = function(options) {
  if (!(this instanceof Json)) {
    return new Json(options);
  }

  options = this.options = util.defaults(options, {});

  Transform.call(this, options);

  this.supports = {
    directory: true,
    symlink: true
  };

  this.files = [];
};

inherits(Json, Transform);

/**
 * [_transform description]
 *
 * @private
 * @param {Buffer} chunk
 * @param {String} encoding
 * @param {Function} callback
 * @return void
 */
Json.prototype._transform = function(chunk, encoding, callback) {
  callback(null, chunk);
};

/**
 * [_writeStringified description]
 *
 * @private
 * @return void
 */
Json.prototype._writeStringified = function() {
  var fileString = JSON.stringify(this.files);
  this.write(fileString);
};

/**
 * [append description]
 *
 * @param {(Buffer|Stream)} source
 * @param {EntryData} data
 * @param {Function} callback
 * @return void
 */
Json.prototype.append = function(source, data, callback) {
  var self = this;

  data.crc32 = 0;

  function onend(err, sourceBuffer) {
    if (err) {
      callback(err);
      return;
    }

    data.size = sourceBuffer.length || 0;
    data.crc32 = crc32.unsigned(sourceBuffer);

    self.files.push(data);

    callback(null, data);
  }

  if (data.sourceType === 'buffer') {
    onend(null, source);
  } else if (data.sourceType === 'stream') {
    util.collectStream(source, onend);
  }
};

/**
 * [finalize description]
 *
 * @return void
 */
Json.prototype.finalize = function() {
  this._writeStringified();
  this.end();
};

module.exports = Json;

/**
 * @typedef {Object} JsonOptions
 * @global
 */


/***/ }),

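// Note on the 'json' format defined above: rather than producing a real
// archive container, it collects entry metadata (name, size, crc32, ...) and
// writes it out as a single JSON document on finalize(), which makes it mainly
// useful for testing and inspecting what would be archived.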
/***/ 6608:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
/**
|
||
* TAR Format Plugin
|
||
*
|
||
* @module plugins/tar
|
||
* @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE}
|
||
* @copyright (c) 2012-2014 Chris Talkington, contributors.
|
||
*/
|
||
var zlib = __nccwpck_require__(8761);
|
||
|
||
var engine = __nccwpck_require__(4703);
|
||
var util = __nccwpck_require__(9451);
|
||
|
||
/**
|
||
* @constructor
|
||
* @param {TarOptions} options
|
||
*/
|
||
var Tar = function(options) {
|
||
if (!(this instanceof Tar)) {
|
||
return new Tar(options);
|
||
}
|
||
|
||
options = this.options = util.defaults(options, {
|
||
gzip: false
|
||
});
|
||
|
||
if (typeof options.gzipOptions !== 'object') {
|
||
options.gzipOptions = {};
|
||
}
|
||
|
||
this.supports = {
|
||
directory: true,
|
||
symlink: true
|
||
};
|
||
|
||
this.engine = engine.pack(options);
|
||
this.compressor = false;
|
||
|
||
if (options.gzip) {
|
||
this.compressor = zlib.createGzip(options.gzipOptions);
|
||
this.compressor.on('error', this._onCompressorError.bind(this));
|
||
}
|
||
};
|
||
|
||
/**
|
||
* [_onCompressorError description]
|
||
*
|
||
* @private
|
||
* @param {Error} err
|
||
* @return void
|
||
*/
|
||
Tar.prototype._onCompressorError = function(err) {
|
||
this.engine.emit('error', err);
|
||
};
|
||
|
||
/**
|
||
* [append description]
|
||
*
|
||
* @param {(Buffer|Stream)} source
|
||
* @param {TarEntryData} data
|
||
* @param {Function} callback
|
||
* @return void
|
||
*/
|
||
Tar.prototype.append = function(source, data, callback) {
|
||
var self = this;
|
||
|
||
data.mtime = data.date;
|
||
|
||
function append(err, sourceBuffer) {
|
||
if (err) {
|
||
callback(err);
|
||
return;
|
||
}
|
||
|
||
self.engine.entry(data, sourceBuffer, function(err) {
|
||
callback(err, data);
|
||
});
|
||
}
|
||
|
||
if (data.sourceType === 'buffer') {
|
||
append(null, source);
|
||
} else if (data.sourceType === 'stream' && data.stats) {
|
||
data.size = data.stats.size;
|
||
|
||
var entry = self.engine.entry(data, function(err) {
|
||
callback(err, data);
|
||
});
|
||
|
||
source.pipe(entry);
|
||
} else if (data.sourceType === 'stream') {
|
||
util.collectStream(source, append);
|
||
}
|
||
};
|
||
|
||
/**
|
||
* [finalize description]
|
||
*
|
||
* @return void
|
||
*/
|
||
Tar.prototype.finalize = function() {
|
||
this.engine.finalize();
|
||
};
|
||
|
||
/**
|
||
* [on description]
|
||
*
|
||
* @return this.engine
|
||
*/
|
||
Tar.prototype.on = function() {
|
||
return this.engine.on.apply(this.engine, arguments);
|
||
};
|
||
|
||
/**
|
||
* [pipe description]
|
||
*
|
||
* @param {String} destination
|
||
* @param {Object} options
|
||
* @return this.engine
|
||
*/
|
||
Tar.prototype.pipe = function(destination, options) {
|
||
if (this.compressor) {
|
||
return this.engine.pipe.apply(this.engine, [this.compressor]).pipe(destination, options);
|
||
} else {
|
||
return this.engine.pipe.apply(this.engine, arguments);
|
||
}
|
||
};
|
||
|
||
/**
|
||
* [unpipe description]
|
||
*
|
||
* @return this.engine
|
||
*/
|
||
Tar.prototype.unpipe = function() {
|
||
if (this.compressor) {
|
||
return this.compressor.unpipe.apply(this.compressor, arguments);
|
||
} else {
|
||
return this.engine.unpipe.apply(this.engine, arguments);
|
||
}
|
||
};
|
||
|
||
module.exports = Tar;
|
||
|
||
/**
|
||
* @typedef {Object} TarOptions
|
||
* @global
|
||
* @property {Boolean} [gzip=false] Compress the tar archive using gzip.
|
||
* @property {Object} [gzipOptions] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options}
|
||
* to control compression.
|
||
* @property {*} [*] See [tar-stream]{@link https://github.com/mafintosh/tar-stream} documentation for additional properties.
|
||
*/
|
||
|
||
/**
|
||
* @typedef {Object} TarEntryData
|
||
* @global
|
||
* @property {String} name Sets the entry name including internal path.
|
||
* @property {(String|Date)} [date=NOW()] Sets the entry date.
|
||
* @property {Number} [mode=D:0755/F:0644] Sets the entry permissions.
|
||
* @property {String} [prefix] Sets a path prefix for the entry name. Useful
|
||
* when working with methods like `directory` or `glob`.
|
||
* @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing
|
||
* for reduction of fs stat calls when stat data is already known.
|
||
*/
|
||
|
||
/**
|
||
* TarStream Module
|
||
* @external TarStream
|
||
* @see {@link https://github.com/mafintosh/tar-stream}
|
||
*/
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 577:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
/**
|
||
* ZIP Format Plugin
|
||
*
|
||
* @module plugins/zip
|
||
* @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE}
|
||
* @copyright (c) 2012-2014 Chris Talkington, contributors.
|
||
*/
|
||
var engine = __nccwpck_require__(1922);
|
||
var util = __nccwpck_require__(9451);
|
||
|
||
/**
|
||
* @constructor
|
||
* @param {ZipOptions} [options]
|
||
* @param {String} [options.comment] Sets the zip archive comment.
|
||
* @param {Boolean} [options.forceLocalTime=false] Forces the archive to contain local file times instead of UTC.
|
||
* @param {Boolean} [options.forceZip64=false] Forces the archive to contain ZIP64 headers.
|
||
* @param {Boolean} [options.namePrependSlash=false] Prepends a forward slash to archive file paths.
|
||
* @param {Boolean} [options.store=false] Sets the compression method to STORE.
|
||
* @param {Object} [options.zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options}
|
||
*/
|
||
var Zip = function(options) {
|
||
if (!(this instanceof Zip)) {
|
||
return new Zip(options);
|
||
}
|
||
|
||
options = this.options = util.defaults(options, {
|
||
comment: '',
|
||
forceUTC: false,
|
||
namePrependSlash: false,
|
||
store: false
|
||
});
|
||
|
||
this.supports = {
|
||
directory: true,
|
||
symlink: true
|
||
};
|
||
|
||
this.engine = new engine(options);
|
||
};
|
||
|
||
/**
|
||
* @param {(Buffer|Stream)} source
|
||
* @param {ZipEntryData} data
|
||
* @param {String} data.name Sets the entry name including internal path.
|
||
* @param {(String|Date)} [data.date=NOW()] Sets the entry date.
|
||
* @param {Number} [data.mode=D:0755/F:0644] Sets the entry permissions.
|
||
* @param {String} [data.prefix] Sets a path prefix for the entry name. Useful
|
||
* when working with methods like `directory` or `glob`.
|
||
* @param {fs.Stats} [data.stats] Sets the fs stat data for this entry allowing
|
||
* for reduction of fs stat calls when stat data is already known.
|
||
* @param {Boolean} [data.store=ZipOptions.store] Sets the compression method to STORE.
|
||
* @param {Function} callback
|
||
* @return void
|
||
*/
|
||
Zip.prototype.append = function(source, data, callback) {
|
||
this.engine.entry(source, data, callback);
|
||
};
|
||
|
||
/**
|
||
* @return void
|
||
*/
|
||
Zip.prototype.finalize = function() {
|
||
this.engine.finalize();
|
||
};
|
||
|
||
/**
|
||
* @return this.engine
|
||
*/
|
||
Zip.prototype.on = function() {
|
||
return this.engine.on.apply(this.engine, arguments);
|
||
};
|
||
|
||
/**
|
||
* @return this.engine
|
||
*/
|
||
Zip.prototype.pipe = function() {
|
||
return this.engine.pipe.apply(this.engine, arguments);
|
||
};
|
||
|
||
/**
|
||
* @return this.engine
|
||
*/
|
||
Zip.prototype.unpipe = function() {
|
||
return this.engine.unpipe.apply(this.engine, arguments);
|
||
};
|
||
|
||
module.exports = Zip;
|
||
|
||
/**
|
||
* @typedef {Object} ZipOptions
|
||
* @global
|
||
* @property {String} [comment] Sets the zip archive comment.
|
||
* @property {Boolean} [forceLocalTime=false] Forces the archive to contain local file times instead of UTC.
|
||
* @property {Boolean} [forceZip64=false] Forces the archive to contain ZIP64 headers.
|
||
 * @property {Boolean} [namePrependSlash=false] Prepends a forward slash to archive file paths.
|
||
* @property {Boolean} [store=false] Sets the compression method to STORE.
|
||
* @property {Object} [zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options}
|
||
* to control compression.
|
||
* @property {*} [*] See [zip-stream]{@link https://archiverjs.com/zip-stream/ZipStream.html} documentation for current list of properties.
|
||
*/
|
||
|
||
/**
|
||
* @typedef {Object} ZipEntryData
|
||
* @global
|
||
* @property {String} name Sets the entry name including internal path.
|
||
* @property {(String|Date)} [date=NOW()] Sets the entry date.
|
||
* @property {Number} [mode=D:0755/F:0644] Sets the entry permissions.
|
||
* @property {Boolean} [namePrependSlash=ZipOptions.namePrependSlash] Prepends a forward slash to archive file paths.
|
||
* @property {String} [prefix] Sets a path prefix for the entry name. Useful
|
||
* when working with methods like `directory` or `glob`.
|
||
* @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing
|
||
* for reduction of fs stat calls when stat data is already known.
|
||
* @property {Boolean} [store=ZipOptions.store] Sets the compression method to STORE.
|
||
*/
|
||
|
||
/**
|
||
* ZipStream Module
|
||
* @external ZipStream
|
||
* @see {@link https://www.archiverjs.com/zip-stream/ZipStream.html}
|
||
*/
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 9003:
|
||
/***/ (function(__unused_webpack_module, exports) {
|
||
|
||
(function (global, factory) {
|
||
true ? factory(exports) :
|
||
0;
|
||
}(this, (function (exports) { 'use strict';
|
||
|
||
/**
|
||
* Creates a continuation function with some arguments already applied.
|
||
*
|
||
* Useful as a shorthand when combined with other control flow functions. Any
|
||
* arguments passed to the returned function are added to the arguments
|
||
* originally passed to apply.
|
||
*
|
||
* @name apply
|
||
* @static
|
||
* @memberOf module:Utils
|
||
* @method
|
||
* @category Util
|
||
* @param {Function} fn - The function you want to eventually apply all
|
||
* arguments to. Invokes with (arguments...).
|
||
* @param {...*} arguments... - Any number of arguments to automatically apply
|
||
* when the continuation is called.
|
||
* @returns {Function} the partially-applied function
|
||
* @example
|
||
*
|
||
* // using apply
|
||
* async.parallel([
|
||
* async.apply(fs.writeFile, 'testfile1', 'test1'),
|
||
* async.apply(fs.writeFile, 'testfile2', 'test2')
|
||
* ]);
|
||
*
|
||
*
|
||
* // the same process without using apply
|
||
* async.parallel([
|
||
* function(callback) {
|
||
* fs.writeFile('testfile1', 'test1', callback);
|
||
* },
|
||
* function(callback) {
|
||
* fs.writeFile('testfile2', 'test2', callback);
|
||
* }
|
||
* ]);
|
||
*
|
||
* // It's possible to pass any number of additional arguments when calling the
|
||
* // continuation:
|
||
*
|
||
* node> var fn = async.apply(sys.puts, 'one');
|
||
* node> fn('two', 'three');
|
||
* one
|
||
* two
|
||
* three
|
||
*/
|
||
function apply(fn, ...args) {
|
||
return (...callArgs) => fn(...args,...callArgs);
|
||
}
|
||
|
||
function initialParams (fn) {
|
||
return function (...args/*, callback*/) {
|
||
var callback = args.pop();
|
||
return fn.call(this, args, callback);
|
||
};
|
||
}
|
||
|
||
/* istanbul ignore file */
|
||
|
||
var hasSetImmediate = typeof setImmediate === 'function' && setImmediate;
|
||
var hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function';
|
||
|
||
function fallback(fn) {
|
||
setTimeout(fn, 0);
|
||
}
|
||
|
||
function wrap(defer) {
|
||
return (fn, ...args) => defer(() => fn(...args));
|
||
}
|
||
|
||
var _defer;
|
||
|
||
if (hasSetImmediate) {
|
||
_defer = setImmediate;
|
||
} else if (hasNextTick) {
|
||
_defer = process.nextTick;
|
||
} else {
|
||
_defer = fallback;
|
||
}
|
||
|
||
var setImmediate$1 = wrap(_defer);
|
||
|
||
/**
|
||
* Take a sync function and make it async, passing its return value to a
|
||
* callback. This is useful for plugging sync functions into a waterfall,
|
||
* series, or other async functions. Any arguments passed to the generated
|
||
* function will be passed to the wrapped function (except for the final
|
||
* callback argument). Errors thrown will be passed to the callback.
|
||
*
|
||
* If the function passed to `asyncify` returns a Promise, that promises's
|
||
* resolved/rejected state will be used to call the callback, rather than simply
|
||
* the synchronous return value.
|
||
*
|
||
* This also means you can asyncify ES2017 `async` functions.
|
||
*
|
||
* @name asyncify
|
||
* @static
|
||
* @memberOf module:Utils
|
||
* @method
|
||
* @alias wrapSync
|
||
* @category Util
|
||
* @param {Function} func - The synchronous function, or Promise-returning
|
||
* function to convert to an {@link AsyncFunction}.
|
||
* @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be
|
||
* invoked with `(args..., callback)`.
|
||
* @example
|
||
*
|
||
* // passing a regular synchronous function
|
||
* async.waterfall([
|
||
* async.apply(fs.readFile, filename, "utf8"),
|
||
* async.asyncify(JSON.parse),
|
||
* function (data, next) {
|
||
* // data is the result of parsing the text.
|
||
* // If there was a parsing error, it would have been caught.
|
||
* }
|
||
* ], callback);
|
||
*
|
||
* // passing a function returning a promise
|
||
* async.waterfall([
|
||
* async.apply(fs.readFile, filename, "utf8"),
|
||
* async.asyncify(function (contents) {
|
||
* return db.model.create(contents);
|
||
* }),
|
||
* function (model, next) {
|
||
* // `model` is the instantiated model object.
|
||
* // If there was an error, this function would be skipped.
|
||
* }
|
||
* ], callback);
|
||
*
|
||
* // es2017 example, though `asyncify` is not needed if your JS environment
|
||
* // supports async functions out of the box
|
||
* var q = async.queue(async.asyncify(async function(file) {
|
||
* var intermediateStep = await processFile(file);
|
||
* return await somePromise(intermediateStep)
|
||
* }));
|
||
*
|
||
* q.push(files);
|
||
*/
|
||
function asyncify(func) {
|
||
if (isAsync(func)) {
|
||
return function (...args/*, callback*/) {
|
||
const callback = args.pop();
|
||
const promise = func.apply(this, args);
|
||
return handlePromise(promise, callback)
|
||
}
|
||
}
|
||
|
||
return initialParams(function (args, callback) {
|
||
var result;
|
||
try {
|
||
result = func.apply(this, args);
|
||
} catch (e) {
|
||
return callback(e);
|
||
}
|
||
// if result is Promise object
|
||
if (result && typeof result.then === 'function') {
|
||
return handlePromise(result, callback)
|
||
} else {
|
||
callback(null, result);
|
||
}
|
||
});
|
||
}
|
||
|
||
function handlePromise(promise, callback) {
|
||
return promise.then(value => {
|
||
invokeCallback(callback, null, value);
|
||
}, err => {
|
||
invokeCallback(callback, err && err.message ? err : new Error(err));
|
||
});
|
||
}
|
||
|
||
function invokeCallback(callback, error, value) {
|
||
try {
|
||
callback(error, value);
|
||
} catch (err) {
|
||
setImmediate$1(e => { throw e }, err);
|
||
}
|
||
}
|
||
|
||
function isAsync(fn) {
|
||
return fn[Symbol.toStringTag] === 'AsyncFunction';
|
||
}
|
||
|
||
function isAsyncGenerator(fn) {
|
||
return fn[Symbol.toStringTag] === 'AsyncGenerator';
|
||
}
|
||
|
||
function isAsyncIterable(obj) {
|
||
return typeof obj[Symbol.asyncIterator] === 'function';
|
||
}
|
||
|
||
function wrapAsync(asyncFn) {
|
||
if (typeof asyncFn !== 'function') throw new Error('expected a function')
|
||
return isAsync(asyncFn) ? asyncify(asyncFn) : asyncFn;
|
||
}
|
||
|
||
// conditionally promisify a function.
|
||
// only return a promise if a callback is omitted
|
||
function awaitify (asyncFn, arity = asyncFn.length) {
|
||
if (!arity) throw new Error('arity is undefined')
|
||
function awaitable (...args) {
|
||
if (typeof args[arity - 1] === 'function') {
|
||
return asyncFn.apply(this, args)
|
||
}
|
||
|
||
return new Promise((resolve, reject) => {
|
||
args[arity - 1] = (err, ...cbArgs) => {
|
||
if (err) return reject(err)
|
||
resolve(cbArgs.length > 1 ? cbArgs : cbArgs[0]);
|
||
};
|
||
asyncFn.apply(this, args);
|
||
})
|
||
}
|
||
|
||
return awaitable
|
||
}
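// awaitify makes a callback-style function usable in both styles: if the last
// argument is a function it behaves as before, otherwise it returns a Promise.
// This is how eachOfLimit$2 and eachOf$1 below accept either a callback or an
// awaited call.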
|
||
|
||
function applyEach (eachfn) {
|
||
return function applyEach(fns, ...callArgs) {
|
||
const go = awaitify(function (callback) {
|
||
var that = this;
|
||
return eachfn(fns, (fn, cb) => {
|
||
wrapAsync(fn).apply(that, callArgs.concat(cb));
|
||
}, callback);
|
||
});
|
||
return go;
|
||
};
|
||
}
|
||
|
||
function _asyncMap(eachfn, arr, iteratee, callback) {
|
||
arr = arr || [];
|
||
var results = [];
|
||
var counter = 0;
|
||
var _iteratee = wrapAsync(iteratee);
|
||
|
||
return eachfn(arr, (value, _, iterCb) => {
|
||
var index = counter++;
|
||
_iteratee(value, (err, v) => {
|
||
results[index] = v;
|
||
iterCb(err);
|
||
});
|
||
}, err => {
|
||
callback(err, results);
|
||
});
|
||
}
|
||
|
||
function isArrayLike(value) {
|
||
return value &&
|
||
typeof value.length === 'number' &&
|
||
value.length >= 0 &&
|
||
value.length % 1 === 0;
|
||
}
|
||
|
||
// A temporary value used to identify if the loop should be broken.
|
||
// See #1064, #1293
|
||
const breakLoop = {};
|
||
|
||
function once(fn) {
|
||
function wrapper (...args) {
|
||
if (fn === null) return;
|
||
var callFn = fn;
|
||
fn = null;
|
||
callFn.apply(this, args);
|
||
}
|
||
Object.assign(wrapper, fn);
|
||
return wrapper
|
||
}
|
||
|
||
function getIterator (coll) {
|
||
return coll[Symbol.iterator] && coll[Symbol.iterator]();
|
||
}
|
||
|
||
function createArrayIterator(coll) {
|
||
var i = -1;
|
||
var len = coll.length;
|
||
return function next() {
|
||
return ++i < len ? {value: coll[i], key: i} : null;
|
||
}
|
||
}
|
||
|
||
function createES2015Iterator(iterator) {
|
||
var i = -1;
|
||
return function next() {
|
||
var item = iterator.next();
|
||
if (item.done)
|
||
return null;
|
||
i++;
|
||
return {value: item.value, key: i};
|
||
}
|
||
}
|
||
|
||
function createObjectIterator(obj) {
|
||
var okeys = obj ? Object.keys(obj) : [];
|
||
var i = -1;
|
||
var len = okeys.length;
|
||
return function next() {
|
||
var key = okeys[++i];
|
||
return i < len ? {value: obj[key], key} : null;
|
||
};
|
||
}
|
||
|
||
function createIterator(coll) {
|
||
if (isArrayLike(coll)) {
|
||
return createArrayIterator(coll);
|
||
}
|
||
|
||
var iterator = getIterator(coll);
|
||
return iterator ? createES2015Iterator(iterator) : createObjectIterator(coll);
|
||
}
|
||
|
||
function onlyOnce(fn) {
|
||
return function (...args) {
|
||
if (fn === null) throw new Error("Callback was already called.");
|
||
var callFn = fn;
|
||
fn = null;
|
||
callFn.apply(this, args);
|
||
};
|
||
}
|
||
|
||
// for async generators
|
||
function asyncEachOfLimit(generator, limit, iteratee, callback) {
|
||
let done = false;
|
||
let canceled = false;
|
||
let awaiting = false;
|
||
let running = 0;
|
||
let idx = 0;
|
||
|
||
function replenish() {
|
||
//console.log('replenish')
|
||
if (running >= limit || awaiting || done) return
|
||
//console.log('replenish awaiting')
|
||
awaiting = true;
|
||
generator.next().then(({value, done: iterDone}) => {
|
||
//console.log('got value', value)
|
||
if (canceled || done) return
|
||
awaiting = false;
|
||
if (iterDone) {
|
||
done = true;
|
||
if (running <= 0) {
|
||
//console.log('done nextCb')
|
||
callback(null);
|
||
}
|
||
return;
|
||
}
|
||
running++;
|
||
iteratee(value, idx, iterateeCallback);
|
||
idx++;
|
||
replenish();
|
||
}).catch(handleError);
|
||
}
|
||
|
||
function iterateeCallback(err, result) {
|
||
//console.log('iterateeCallback')
|
||
running -= 1;
|
||
if (canceled) return
|
||
if (err) return handleError(err)
|
||
|
||
if (err === false) {
|
||
done = true;
|
||
canceled = true;
|
||
return
|
||
}
|
||
|
||
if (result === breakLoop || (done && running <= 0)) {
|
||
done = true;
|
||
//console.log('done iterCb')
|
||
return callback(null);
|
||
}
|
||
replenish();
|
||
}
|
||
|
||
function handleError(err) {
|
||
if (canceled) return
|
||
awaiting = false;
|
||
done = true;
|
||
callback(err);
|
||
}
|
||
|
||
replenish();
|
||
}
|
||
|
||
var eachOfLimit = (limit) => {
|
||
return (obj, iteratee, callback) => {
|
||
callback = once(callback);
|
||
if (limit <= 0) {
|
||
throw new RangeError('concurrency limit cannot be less than 1')
|
||
}
|
||
if (!obj) {
|
||
return callback(null);
|
||
}
|
||
if (isAsyncGenerator(obj)) {
|
||
return asyncEachOfLimit(obj, limit, iteratee, callback)
|
||
}
|
||
if (isAsyncIterable(obj)) {
|
||
return asyncEachOfLimit(obj[Symbol.asyncIterator](), limit, iteratee, callback)
|
||
}
|
||
var nextElem = createIterator(obj);
|
||
var done = false;
|
||
var canceled = false;
|
||
var running = 0;
|
||
var looping = false;
|
||
|
||
function iterateeCallback(err, value) {
|
||
if (canceled) return
|
||
running -= 1;
|
||
if (err) {
|
||
done = true;
|
||
callback(err);
|
||
}
|
||
else if (err === false) {
|
||
done = true;
|
||
canceled = true;
|
||
}
|
||
else if (value === breakLoop || (done && running <= 0)) {
|
||
done = true;
|
||
return callback(null);
|
||
}
|
||
else if (!looping) {
|
||
replenish();
|
||
}
|
||
}
|
||
|
||
function replenish () {
|
||
looping = true;
|
||
while (running < limit && !done) {
|
||
var elem = nextElem();
|
||
if (elem === null) {
|
||
done = true;
|
||
if (running <= 0) {
|
||
callback(null);
|
||
}
|
||
return;
|
||
}
|
||
running += 1;
|
||
iteratee(elem.value, elem.key, onlyOnce(iterateeCallback));
|
||
}
|
||
looping = false;
|
||
}
|
||
|
||
replenish();
|
||
};
|
||
};
|
||
|
||
/**
|
||
* The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a
|
||
* time.
|
||
*
|
||
* @name eachOfLimit
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.eachOf]{@link module:Collections.eachOf}
|
||
* @alias forEachOfLimit
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {number} limit - The maximum number of async operations at a time.
|
||
* @param {AsyncFunction} iteratee - An async function to apply to each
|
||
* item in `coll`. The `key` is the item's key, or index in the case of an
|
||
* array.
|
||
* Invoked with (item, key, callback).
|
||
* @param {Function} [callback] - A callback which is called when all
|
||
* `iteratee` functions have finished, or an error occurs. Invoked with (err).
|
||
* @returns {Promise} a promise, if a callback is omitted
|
||
*/
|
||
function eachOfLimit$1(coll, limit, iteratee, callback) {
|
||
return eachOfLimit(limit)(coll, wrapAsync(iteratee), callback);
|
||
}
|
||
|
||
var eachOfLimit$2 = awaitify(eachOfLimit$1, 4);
|
||
|
||
// eachOf implementation optimized for array-likes
|
||
function eachOfArrayLike(coll, iteratee, callback) {
|
||
callback = once(callback);
|
||
var index = 0,
|
||
completed = 0,
|
||
{length} = coll,
|
||
canceled = false;
|
||
if (length === 0) {
|
||
callback(null);
|
||
}
|
||
|
||
function iteratorCallback(err, value) {
|
||
if (err === false) {
|
||
canceled = true;
|
||
}
|
||
if (canceled === true) return
|
||
if (err) {
|
||
callback(err);
|
||
} else if ((++completed === length) || value === breakLoop) {
|
||
callback(null);
|
||
}
|
||
}
|
||
|
||
for (; index < length; index++) {
|
||
iteratee(coll[index], index, onlyOnce(iteratorCallback));
|
||
}
|
||
}
|
||
|
||
// a generic version of eachOf which can handle array, object, and iterator cases.
|
||
function eachOfGeneric (coll, iteratee, callback) {
|
||
return eachOfLimit$2(coll, Infinity, iteratee, callback);
|
||
}
|
||
|
||
/**
|
||
* Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument
|
||
* to the iteratee.
|
||
*
|
||
* @name eachOf
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @alias forEachOf
|
||
* @category Collection
|
||
* @see [async.each]{@link module:Collections.each}
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - A function to apply to each
|
||
* item in `coll`.
|
||
* The `key` is the item's key, or index in the case of an array.
|
||
* Invoked with (item, key, callback).
|
||
* @param {Function} [callback] - A callback which is called when all
|
||
* `iteratee` functions have finished, or an error occurs. Invoked with (err).
|
||
* @returns {Promise} a promise, if a callback is omitted
|
||
* @example
|
||
*
|
||
* var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"};
|
||
* var configs = {};
|
||
*
|
||
* async.forEachOf(obj, function (value, key, callback) {
|
||
* fs.readFile(__dirname + value, "utf8", function (err, data) {
|
||
* if (err) return callback(err);
|
||
* try {
|
||
* configs[key] = JSON.parse(data);
|
||
* } catch (e) {
|
||
* return callback(e);
|
||
* }
|
||
* callback();
|
||
* });
|
||
* }, function (err) {
|
||
* if (err) console.error(err.message);
|
||
* // configs is now a map of JSON data
|
||
* doSomethingWith(configs);
|
||
* });
|
||
*/
|
||
function eachOf(coll, iteratee, callback) {
|
||
var eachOfImplementation = isArrayLike(coll) ? eachOfArrayLike : eachOfGeneric;
|
||
return eachOfImplementation(coll, wrapAsync(iteratee), callback);
|
||
}
|
||
|
||
var eachOf$1 = awaitify(eachOf, 3);
|
||
|
||
/**
|
||
* Produces a new collection of values by mapping each value in `coll` through
|
||
* the `iteratee` function. The `iteratee` is called with an item from `coll`
|
||
* and a callback for when it has finished processing. Each of these callbacks
|
||
* takes 2 arguments: an `error`, and the transformed item from `coll`. If
|
||
* `iteratee` passes an error to its callback, the main `callback` (for the
|
||
* `map` function) is immediately called with the error.
|
||
*
|
||
* Note, that since this function applies the `iteratee` to each item in
|
||
* parallel, there is no guarantee that the `iteratee` functions will complete
|
||
* in order. However, the results array will be in the same order as the
|
||
* original `coll`.
|
||
*
|
||
* If `map` is passed an Object, the results will be an Array. The results
|
||
* will roughly be in the order of the original Objects' keys (but this can
|
||
* vary across JavaScript engines).
|
||
*
|
||
* @name map
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - An async function to apply to each item in
|
||
* `coll`.
|
||
* The iteratee should complete with the transformed item.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called when all `iteratee`
|
||
* functions have finished, or an error occurs. Results is an Array of the
|
||
* transformed items from the `coll`. Invoked with (err, results).
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
* @example
|
||
*
|
||
* async.map(['file1','file2','file3'], fs.stat, function(err, results) {
|
||
* // results is now an array of stats for each file
|
||
* });
|
||
*/
|
||
function map (coll, iteratee, callback) {
|
||
return _asyncMap(eachOf$1, coll, iteratee, callback)
|
||
}
|
||
var map$1 = awaitify(map, 3);
|
||
|
||
/**
|
||
* Applies the provided arguments to each function in the array, calling
|
||
* `callback` after all functions have completed. If you only provide the first
|
||
* argument, `fns`, then it will return a function which lets you pass in the
|
||
* arguments as if it were a single function call. If more arguments are
|
||
* provided, `callback` is required while `args` is still optional. The results
|
||
* for each of the applied async functions are passed to the final callback
|
||
* as an array.
|
||
*
|
||
* @name applyEach
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @category Control Flow
|
||
* @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s
|
||
* to all call with the same arguments
|
||
* @param {...*} [args] - any number of separate arguments to pass to the
|
||
* function.
|
||
* @param {Function} [callback] - the final argument should be the callback,
|
||
* called when all functions have completed processing.
|
||
* @returns {AsyncFunction} - Returns a function that takes no args other than
|
||
* an optional callback, that is the result of applying the `args` to each
|
||
* of the functions.
|
||
* @example
|
||
*
|
||
* const appliedFn = async.applyEach([enableSearch, updateSchema], 'bucket')
|
||
*
|
||
* appliedFn((err, results) => {
|
||
* // results[0] is the results for `enableSearch`
|
||
* // results[1] is the results for `updateSchema`
|
||
* });
|
||
*
|
||
* // partial application example:
|
||
* async.each(
|
||
* buckets,
|
||
* async (bucket) => async.applyEach([enableSearch, updateSchema], bucket)(),
|
||
* callback
|
||
* );
|
||
*/
|
||
var applyEach$1 = applyEach(map$1);
|
||
|
||
/**
|
||
* The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time.
|
||
*
|
||
* @name eachOfSeries
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.eachOf]{@link module:Collections.eachOf}
|
||
* @alias forEachOfSeries
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - An async function to apply to each item in
|
||
* `coll`.
|
||
* Invoked with (item, key, callback).
|
||
* @param {Function} [callback] - A callback which is called when all `iteratee`
|
||
* functions have finished, or an error occurs. Invoked with (err).
|
||
* @returns {Promise} a promise, if a callback is omitted
|
||
*/
|
||
function eachOfSeries(coll, iteratee, callback) {
|
||
return eachOfLimit$2(coll, 1, iteratee, callback)
|
||
}
|
||
var eachOfSeries$1 = awaitify(eachOfSeries, 3);
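/*
 * Usage sketch, assuming the public `async.eachOfSeries` API exposed by this
 * inlined library; `records` and `saveRecord` are illustrative placeholders.
 *
 *   async.eachOfSeries(records, function (record, index, cb) {
 *       // records are processed strictly one at a time, in order
 *       saveRecord(record, cb);
 *   }, function (err) {
 *       if (err) console.error(err);
 *   });
 */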
|
||
|
||
/**
|
||
* The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time.
|
||
*
|
||
* @name mapSeries
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.map]{@link module:Collections.map}
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - An async function to apply to each item in
|
||
* `coll`.
|
||
* The iteratee should complete with the transformed item.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called when all `iteratee`
|
||
* functions have finished, or an error occurs. Results is an array of the
|
||
* transformed items from the `coll`. Invoked with (err, results).
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
*/
|
||
function mapSeries (coll, iteratee, callback) {
|
||
return _asyncMap(eachOfSeries$1, coll, iteratee, callback)
|
||
}
|
||
var mapSeries$1 = awaitify(mapSeries, 3);
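/*
 * Usage sketch, assuming the public `async.mapSeries` API; `fs.stat` keeps it
 * consistent with the `map` example above.
 *
 *   async.mapSeries(['file1', 'file2', 'file3'], fs.stat, function (err, results) {
 *       // stats are gathered one file at a time,
 *       // and results preserve the input order
 *   });
 */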
|
||
|
||
/**
|
||
* The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time.
|
||
*
|
||
* @name applyEachSeries
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @see [async.applyEach]{@link module:ControlFlow.applyEach}
|
||
* @category Control Flow
|
||
* @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s to all
|
||
* call with the same arguments
|
||
* @param {...*} [args] - any number of separate arguments to pass to the
|
||
* function.
|
||
* @param {Function} [callback] - the final argument should be the callback,
|
||
* called when all functions have completed processing.
|
||
* @returns {AsyncFunction} - A function, that when called, is the result of
|
||
* applying the `args` to the list of functions. It takes no args, other than
|
||
* a callback.
|
||
*/
|
||
var applyEachSeries = applyEach(mapSeries$1);
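/*
 * Usage sketch, assuming the public `async.applyEachSeries` API;
 * `enableSearch` and `updateSchema` mirror the `applyEach` example above.
 *
 *   const appliedFn = async.applyEachSeries([enableSearch, updateSchema], 'bucket');
 *   appliedFn(function (err, results) {
 *       // the two functions were called one after the other, not in parallel
 *   });
 */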
|
||
|
||
const PROMISE_SYMBOL = Symbol('promiseCallback');
|
||
|
||
function promiseCallback () {
|
||
let resolve, reject;
|
||
function callback (err, ...args) {
|
||
if (err) return reject(err)
|
||
resolve(args.length > 1 ? args : args[0]);
|
||
}
|
||
|
||
callback[PROMISE_SYMBOL] = new Promise((res, rej) => {
|
||
resolve = res,
|
||
reject = rej;
|
||
});
|
||
|
||
return callback
|
||
}
|
||
|
||
/**
|
||
* Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on
|
||
* their requirements. Each function can optionally depend on other functions
|
||
* being completed first, and each function is run as soon as its requirements
|
||
* are satisfied.
|
||
*
|
||
* If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence
|
||
* will stop. Further tasks will not execute (so any other functions depending
|
||
* on it will not run), and the main `callback` is immediately called with the
|
||
* error.
|
||
*
|
||
* {@link AsyncFunction}s also receive an object containing the results of functions which
|
||
* have completed so far as the first argument, if they have dependencies. If a
|
||
* task function has no dependencies, it will only be passed a callback.
|
||
*
|
||
* @name auto
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @category Control Flow
|
||
* @param {Object} tasks - An object. Each of its properties is either a
|
||
* function or an array of requirements, with the {@link AsyncFunction} itself the last item
|
||
* in the array. The object's key of a property serves as the name of the task
|
||
* defined by that property, i.e. can be used when specifying requirements for
|
||
* other tasks. The function receives one or two arguments:
|
||
* * a `results` object, containing the results of the previously executed
|
||
* functions, only passed if the task has any dependencies,
|
||
* * a `callback(err, result)` function, which must be called when finished,
|
||
* passing an `error` (which can be `null`) and the result of the function's
|
||
* execution.
|
||
* @param {number} [concurrency=Infinity] - An optional `integer` for
|
||
* determining the maximum number of tasks that can be run in parallel. By
|
||
* default, as many as possible.
|
||
* @param {Function} [callback] - An optional callback which is called when all
|
||
* the tasks have been completed. It receives the `err` argument if any `tasks`
|
||
* pass an error to their callback. Results are always returned; however, if an
|
||
* error occurs, no further `tasks` will be performed, and the results object
|
||
* will only contain partial results. Invoked with (err, results).
|
||
* @returns {Promise} a promise, if a callback is not passed
|
||
* @example
|
||
*
|
||
* async.auto({
|
||
* // this function will just be passed a callback
|
||
* readData: async.apply(fs.readFile, 'data.txt', 'utf-8'),
|
||
* showData: ['readData', function(results, cb) {
|
||
* // results.readData is the file's contents
|
||
* // ...
|
||
* }]
|
||
* }, callback);
|
||
*
|
||
* async.auto({
|
||
* get_data: function(callback) {
|
||
* console.log('in get_data');
|
||
* // async code to get some data
|
||
* callback(null, 'data', 'converted to array');
|
||
* },
|
||
* make_folder: function(callback) {
|
||
* console.log('in make_folder');
|
||
* // async code to create a directory to store a file in
|
||
* // this is run at the same time as getting the data
|
||
* callback(null, 'folder');
|
||
* },
|
||
* write_file: ['get_data', 'make_folder', function(results, callback) {
|
||
* console.log('in write_file', JSON.stringify(results));
|
||
* // once there is some data and the directory exists,
|
||
* // write the data to a file in the directory
|
||
* callback(null, 'filename');
|
||
* }],
|
||
* email_link: ['write_file', function(results, callback) {
|
||
* console.log('in email_link', JSON.stringify(results));
|
||
* // once the file is written let's email a link to it...
|
||
* // results.write_file contains the filename returned by write_file.
|
||
* callback(null, {'file':results.write_file, 'email':'user@example.com'});
|
||
* }]
|
||
* }, function(err, results) {
|
||
* console.log('err = ', err);
|
||
* console.log('results = ', results);
|
||
* });
|
||
*/
|
||
function auto(tasks, concurrency, callback) {
|
||
if (typeof concurrency !== 'number') {
|
||
// concurrency is optional, shift the args.
|
||
callback = concurrency;
|
||
concurrency = null;
|
||
}
|
||
callback = once(callback || promiseCallback());
|
||
var numTasks = Object.keys(tasks).length;
|
||
if (!numTasks) {
|
||
return callback(null);
|
||
}
|
||
if (!concurrency) {
|
||
concurrency = numTasks;
|
||
}
|
||
|
||
var results = {};
|
||
var runningTasks = 0;
|
||
var canceled = false;
|
||
var hasError = false;
|
||
|
||
var listeners = Object.create(null);
|
||
|
||
var readyTasks = [];
|
||
|
||
// for cycle detection:
|
||
var readyToCheck = []; // tasks that have been identified as reachable
|
||
// without the possibility of returning to an ancestor task
|
||
var uncheckedDependencies = {};
|
||
|
||
Object.keys(tasks).forEach(key => {
|
||
var task = tasks[key];
|
||
if (!Array.isArray(task)) {
|
||
// no dependencies
|
||
enqueueTask(key, [task]);
|
||
readyToCheck.push(key);
|
||
return;
|
||
}
|
||
|
||
var dependencies = task.slice(0, task.length - 1);
|
||
var remainingDependencies = dependencies.length;
|
||
if (remainingDependencies === 0) {
|
||
enqueueTask(key, task);
|
||
readyToCheck.push(key);
|
||
return;
|
||
}
|
||
uncheckedDependencies[key] = remainingDependencies;
|
||
|
||
dependencies.forEach(dependencyName => {
|
||
if (!tasks[dependencyName]) {
|
||
throw new Error('async.auto task `' + key +
|
||
'` has a non-existent dependency `' +
|
||
dependencyName + '` in ' +
|
||
dependencies.join(', '));
|
||
}
|
||
addListener(dependencyName, () => {
|
||
remainingDependencies--;
|
||
if (remainingDependencies === 0) {
|
||
enqueueTask(key, task);
|
||
}
|
||
});
|
||
});
|
||
});
|
||
|
||
checkForDeadlocks();
|
||
processQueue();
|
||
|
||
function enqueueTask(key, task) {
|
||
readyTasks.push(() => runTask(key, task));
|
||
}
|
||
|
||
function processQueue() {
|
||
if (canceled) return
|
||
if (readyTasks.length === 0 && runningTasks === 0) {
|
||
return callback(null, results);
|
||
}
|
||
while(readyTasks.length && runningTasks < concurrency) {
|
||
var run = readyTasks.shift();
|
||
run();
|
||
}
|
||
|
||
}
|
||
|
||
function addListener(taskName, fn) {
|
||
var taskListeners = listeners[taskName];
|
||
if (!taskListeners) {
|
||
taskListeners = listeners[taskName] = [];
|
||
}
|
||
|
||
taskListeners.push(fn);
|
||
}
|
||
|
||
function taskComplete(taskName) {
|
||
var taskListeners = listeners[taskName] || [];
|
||
taskListeners.forEach(fn => fn());
|
||
processQueue();
|
||
}
|
||
|
||
|
||
function runTask(key, task) {
|
||
if (hasError) return;
|
||
|
||
var taskCallback = onlyOnce((err, ...result) => {
|
||
runningTasks--;
|
||
if (err === false) {
|
||
canceled = true;
|
||
return
|
||
}
|
||
if (result.length < 2) {
|
||
[result] = result;
|
||
}
|
||
if (err) {
|
||
var safeResults = {};
|
||
Object.keys(results).forEach(rkey => {
|
||
safeResults[rkey] = results[rkey];
|
||
});
|
||
safeResults[key] = result;
|
||
hasError = true;
|
||
listeners = Object.create(null);
|
||
if (canceled) return
|
||
callback(err, safeResults);
|
||
} else {
|
||
results[key] = result;
|
||
taskComplete(key);
|
||
}
|
||
});
|
||
|
||
runningTasks++;
|
||
var taskFn = wrapAsync(task[task.length - 1]);
|
||
if (task.length > 1) {
|
||
taskFn(results, taskCallback);
|
||
} else {
|
||
taskFn(taskCallback);
|
||
}
|
||
}
|
||
|
||
function checkForDeadlocks() {
|
||
// Kahn's algorithm
|
||
// https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm
|
||
// http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html
|
||
var currentTask;
|
||
var counter = 0;
|
||
while (readyToCheck.length) {
|
||
currentTask = readyToCheck.pop();
|
||
counter++;
|
||
getDependents(currentTask).forEach(dependent => {
|
||
if (--uncheckedDependencies[dependent] === 0) {
|
||
readyToCheck.push(dependent);
|
||
}
|
||
});
|
||
}
|
||
|
||
if (counter !== numTasks) {
|
||
throw new Error(
|
||
'async.auto cannot execute tasks due to a recursive dependency'
|
||
);
|
||
}
|
||
}
|
||
|
||
function getDependents(taskName) {
|
||
var result = [];
|
||
Object.keys(tasks).forEach(key => {
|
||
const task = tasks[key];
|
||
if (Array.isArray(task) && task.indexOf(taskName) >= 0) {
|
||
result.push(key);
|
||
}
|
||
});
|
||
return result;
|
||
}
|
||
|
||
return callback[PROMISE_SYMBOL]
|
||
}
|
||
|
||
var FN_ARGS = /^(?:async\s+)?(?:function)?\s*\w*\s*\(\s*([^)]+)\s*\)(?:\s*{)/;
|
||
var ARROW_FN_ARGS = /^(?:async\s+)?\(?\s*([^)=]+)\s*\)?(?:\s*=>)/;
|
||
var FN_ARG_SPLIT = /,/;
|
||
var FN_ARG = /(=.+)?(\s*)$/;
|
||
var STRIP_COMMENTS = /((\/\/.*$)|(\/\*[\s\S]*?\*\/))/mg;
|
||
|
||
function parseParams(func) {
|
||
const src = func.toString().replace(STRIP_COMMENTS, '');
|
||
let match = src.match(FN_ARGS);
|
||
if (!match) {
|
||
match = src.match(ARROW_FN_ARGS);
|
||
}
|
||
if (!match) throw new Error('could not parse args in autoInject\nSource:\n' + src)
|
||
let [, args] = match;
|
||
return args
|
||
.replace(/\s/g, '')
|
||
.split(FN_ARG_SPLIT)
|
||
.map((arg) => arg.replace(FN_ARG, '').trim());
|
||
}
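/*
 * Rough illustration of what parseParams produces (internal helper; this is
 * only a sketch of the observable behaviour of the regexes above):
 *
 *   parseParams(function (get_data, make_folder, callback) {})
 *   //=> ['get_data', 'make_folder', 'callback']
 *
 * autoInject() below pops the trailing callback name for non-async functions
 * and treats the remaining names as task dependencies.
 */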
|
||
|
||
/**
|
||
* A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent
|
||
* tasks are specified as parameters to the function, after the usual callback
|
||
* parameter, with the parameter names matching the names of the tasks it
|
||
* depends on. This can provide even more readable task graphs which can be
|
||
* easier to maintain.
|
||
*
|
||
* If a final callback is specified, the task results are similarly injected,
|
||
* specified as named parameters after the initial error parameter.
|
||
*
|
||
* The autoInject function is purely syntactic sugar and its semantics are
|
||
* otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}.
|
||
*
|
||
* @name autoInject
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @see [async.auto]{@link module:ControlFlow.auto}
|
||
* @category Control Flow
|
||
* @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of
|
||
* the form 'func([dependencies...], callback)'. The object's key of a property
|
||
* serves as the name of the task defined by that property, i.e. can be used
|
||
* when specifying requirements for other tasks.
|
||
* * The `callback` parameter is a `callback(err, result)` which must be called
|
||
* when finished, passing an `error` (which can be `null`) and the result of
|
||
* the function's execution. The remaining parameters name other tasks on
|
||
* which the task is dependent, and the results from those tasks are the
|
||
* arguments of those parameters.
|
||
* @param {Function} [callback] - An optional callback which is called when all
|
||
* the tasks have been completed. It receives the `err` argument if any `tasks`
|
||
* pass an error to their callback, and a `results` object with any completed
|
||
* task results, similar to `auto`.
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
* @example
|
||
*
|
||
* // The example from `auto` can be rewritten as follows:
|
||
* async.autoInject({
|
||
* get_data: function(callback) {
|
||
* // async code to get some data
|
||
* callback(null, 'data', 'converted to array');
|
||
* },
|
||
* make_folder: function(callback) {
|
||
* // async code to create a directory to store a file in
|
||
* // this is run at the same time as getting the data
|
||
* callback(null, 'folder');
|
||
* },
|
||
* write_file: function(get_data, make_folder, callback) {
|
||
* // once there is some data and the directory exists,
|
||
* // write the data to a file in the directory
|
||
* callback(null, 'filename');
|
||
* },
|
||
* email_link: function(write_file, callback) {
|
||
* // once the file is written let's email a link to it...
|
||
* // write_file contains the filename returned by write_file.
|
||
* callback(null, {'file':write_file, 'email':'user@example.com'});
|
||
* }
|
||
* }, function(err, results) {
|
||
* console.log('err = ', err);
|
||
* console.log('email_link = ', results.email_link);
|
||
* });
|
||
*
|
||
* // If you are using a JS minifier that mangles parameter names, `autoInject`
|
||
* // will not work with plain functions, since the parameter names will be
|
||
* // collapsed to a single letter identifier. To work around this, you can
|
||
* // explicitly specify the names of the parameters your task function needs
|
||
* // in an array, similar to Angular.js dependency injection.
|
||
*
|
||
* // This still has an advantage over plain `auto`, since the results a task
|
||
* // depends on are still spread into arguments.
|
||
* async.autoInject({
|
||
* //...
|
||
* write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) {
|
||
* callback(null, 'filename');
|
||
* }],
|
||
* email_link: ['write_file', function(write_file, callback) {
|
||
* callback(null, {'file':write_file, 'email':'user@example.com'});
|
||
* }]
|
||
* //...
|
||
* }, function(err, results) {
|
||
* console.log('err = ', err);
|
||
* console.log('email_link = ', results.email_link);
|
||
* });
|
||
*/
|
||
function autoInject(tasks, callback) {
|
||
var newTasks = {};
|
||
|
||
Object.keys(tasks).forEach(key => {
|
||
var taskFn = tasks[key];
|
||
var params;
|
||
var fnIsAsync = isAsync(taskFn);
|
||
var hasNoDeps =
|
||
(!fnIsAsync && taskFn.length === 1) ||
|
||
(fnIsAsync && taskFn.length === 0);
|
||
|
||
if (Array.isArray(taskFn)) {
|
||
params = [...taskFn];
|
||
taskFn = params.pop();
|
||
|
||
newTasks[key] = params.concat(params.length > 0 ? newTask : taskFn);
|
||
} else if (hasNoDeps) {
|
||
// no dependencies, use the function as-is
|
||
newTasks[key] = taskFn;
|
||
} else {
|
||
params = parseParams(taskFn);
|
||
if ((taskFn.length === 0 && !fnIsAsync) && params.length === 0) {
|
||
throw new Error("autoInject task functions require explicit parameters.");
|
||
}
|
||
|
||
// remove callback param
|
||
if (!fnIsAsync) params.pop();
|
||
|
||
newTasks[key] = params.concat(newTask);
|
||
}
|
||
|
||
function newTask(results, taskCb) {
|
||
var newArgs = params.map(name => results[name]);
|
||
newArgs.push(taskCb);
|
||
wrapAsync(taskFn)(...newArgs);
|
||
}
|
||
});
|
||
|
||
return auto(newTasks, callback);
|
||
}
|
||
|
||
// Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation
|
||
// used for queues. This implementation assumes that the node provided by the user can be modified
|
||
// to adjust the next and last properties. We implement only the minimal functionality
|
||
// for queue support.
|
||
class DLL {
|
||
constructor() {
|
||
this.head = this.tail = null;
|
||
this.length = 0;
|
||
}
|
||
|
||
removeLink(node) {
|
||
if (node.prev) node.prev.next = node.next;
|
||
else this.head = node.next;
|
||
if (node.next) node.next.prev = node.prev;
|
||
else this.tail = node.prev;
|
||
|
||
node.prev = node.next = null;
|
||
this.length -= 1;
|
||
return node;
|
||
}
|
||
|
||
empty () {
|
||
while(this.head) this.shift();
|
||
return this;
|
||
}
|
||
|
||
insertAfter(node, newNode) {
|
||
newNode.prev = node;
|
||
newNode.next = node.next;
|
||
if (node.next) node.next.prev = newNode;
|
||
else this.tail = newNode;
|
||
node.next = newNode;
|
||
this.length += 1;
|
||
}
|
||
|
||
insertBefore(node, newNode) {
|
||
newNode.prev = node.prev;
|
||
newNode.next = node;
|
||
if (node.prev) node.prev.next = newNode;
|
||
else this.head = newNode;
|
||
node.prev = newNode;
|
||
this.length += 1;
|
||
}
|
||
|
||
unshift(node) {
|
||
if (this.head) this.insertBefore(this.head, node);
|
||
else setInitial(this, node);
|
||
}
|
||
|
||
push(node) {
|
||
if (this.tail) this.insertAfter(this.tail, node);
|
||
else setInitial(this, node);
|
||
}
|
||
|
||
shift() {
|
||
return this.head && this.removeLink(this.head);
|
||
}
|
||
|
||
pop() {
|
||
return this.tail && this.removeLink(this.tail);
|
||
}
|
||
|
||
toArray() {
|
||
return [...this]
|
||
}
|
||
|
||
*[Symbol.iterator] () {
|
||
var cur = this.head;
|
||
while (cur) {
|
||
yield cur.data;
|
||
cur = cur.next;
|
||
}
|
||
}
|
||
|
||
remove (testFn) {
|
||
var curr = this.head;
|
||
while(curr) {
|
||
var {next} = curr;
|
||
if (testFn(curr)) {
|
||
this.removeLink(curr);
|
||
}
|
||
curr = next;
|
||
}
|
||
return this;
|
||
}
|
||
}
|
||
|
||
function setInitial(dll, node) {
|
||
dll.length = 1;
|
||
dll.head = dll.tail = node;
|
||
}
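/*
 * Minimal sketch of how the queue below uses this list (internal only; the
 * nodes are plain objects carrying a `data` property):
 *
 *   const list = new DLL();
 *   list.push({data: 'a'});
 *   list.push({data: 'b'});
 *   list.unshift({data: 'c'});
 *   list.toArray();      //=> ['c', 'a', 'b']
 *   list.shift().data;   //=> 'c'
 */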
|
||
|
||
function queue(worker, concurrency, payload) {
|
||
if (concurrency == null) {
|
||
concurrency = 1;
|
||
}
|
||
else if(concurrency === 0) {
|
||
throw new RangeError('Concurrency must not be zero');
|
||
}
|
||
|
||
var _worker = wrapAsync(worker);
|
||
var numRunning = 0;
|
||
var workersList = [];
|
||
const events = {
|
||
error: [],
|
||
drain: [],
|
||
saturated: [],
|
||
unsaturated: [],
|
||
empty: []
|
||
};
|
||
|
||
function on (event, handler) {
|
||
events[event].push(handler);
|
||
}
|
||
|
||
function once (event, handler) {
|
||
const handleAndRemove = (...args) => {
|
||
off(event, handleAndRemove);
|
||
handler(...args);
|
||
};
|
||
events[event].push(handleAndRemove);
|
||
}
|
||
|
||
function off (event, handler) {
|
||
if (!event) return Object.keys(events).forEach(ev => events[ev] = [])
|
||
if (!handler) return events[event] = []
|
||
events[event] = events[event].filter(ev => ev !== handler);
|
||
}
|
||
|
||
function trigger (event, ...args) {
|
||
events[event].forEach(handler => handler(...args));
|
||
}
|
||
|
||
var processingScheduled = false;
|
||
function _insert(data, insertAtFront, rejectOnError, callback) {
|
||
if (callback != null && typeof callback !== 'function') {
|
||
throw new Error('task callback must be a function');
|
||
}
|
||
q.started = true;
|
||
|
||
var res, rej;
|
||
function promiseCallback (err, ...args) {
|
||
// we don't care about the error, let the global error handler
|
||
// deal with it
|
||
if (err) return rejectOnError ? rej(err) : res()
|
||
if (args.length <= 1) return res(args[0])
|
||
res(args);
|
||
}
|
||
|
||
var item = {
|
||
data,
|
||
callback: rejectOnError ?
|
||
promiseCallback :
|
||
(callback || promiseCallback)
|
||
};
|
||
|
||
if (insertAtFront) {
|
||
q._tasks.unshift(item);
|
||
} else {
|
||
q._tasks.push(item);
|
||
}
|
||
|
||
if (!processingScheduled) {
|
||
processingScheduled = true;
|
||
setImmediate$1(() => {
|
||
processingScheduled = false;
|
||
q.process();
|
||
});
|
||
}
|
||
|
||
if (rejectOnError || !callback) {
|
||
return new Promise((resolve, reject) => {
|
||
res = resolve;
|
||
rej = reject;
|
||
})
|
||
}
|
||
}
|
||
|
||
function _createCB(tasks) {
|
||
return function (err, ...args) {
|
||
numRunning -= 1;
|
||
|
||
for (var i = 0, l = tasks.length; i < l; i++) {
|
||
var task = tasks[i];
|
||
|
||
var index = workersList.indexOf(task);
|
||
if (index === 0) {
|
||
workersList.shift();
|
||
} else if (index > 0) {
|
||
workersList.splice(index, 1);
|
||
}
|
||
|
||
task.callback(err, ...args);
|
||
|
||
if (err != null) {
|
||
trigger('error', err, task.data);
|
||
}
|
||
}
|
||
|
||
if (numRunning <= (q.concurrency - q.buffer) ) {
|
||
trigger('unsaturated');
|
||
}
|
||
|
||
if (q.idle()) {
|
||
trigger('drain');
|
||
}
|
||
q.process();
|
||
};
|
||
}
|
||
|
||
function _maybeDrain(data) {
|
||
if (data.length === 0 && q.idle()) {
|
||
// call drain immediately if there are no tasks
|
||
setImmediate$1(() => trigger('drain'));
|
||
return true
|
||
}
|
||
return false
|
||
}
|
||
|
||
const eventMethod = (name) => (handler) => {
|
||
if (!handler) {
|
||
return new Promise((resolve, reject) => {
|
||
once(name, (err, data) => {
|
||
if (err) return reject(err)
|
||
resolve(data);
|
||
});
|
||
})
|
||
}
|
||
off(name);
|
||
on(name, handler);
|
||
|
||
};
|
||
|
||
var isProcessing = false;
|
||
var q = {
|
||
_tasks: new DLL(),
|
||
*[Symbol.iterator] () {
|
||
yield* q._tasks[Symbol.iterator]();
|
||
},
|
||
concurrency,
|
||
payload,
|
||
buffer: concurrency / 4,
|
||
started: false,
|
||
paused: false,
|
||
push (data, callback) {
|
||
if (Array.isArray(data)) {
|
||
if (_maybeDrain(data)) return
|
||
return data.map(datum => _insert(datum, false, false, callback))
|
||
}
|
||
return _insert(data, false, false, callback);
|
||
},
|
||
pushAsync (data, callback) {
|
||
if (Array.isArray(data)) {
|
||
if (_maybeDrain(data)) return
|
||
return data.map(datum => _insert(datum, false, true, callback))
|
||
}
|
||
return _insert(data, false, true, callback);
|
||
},
|
||
kill () {
|
||
off();
|
||
q._tasks.empty();
|
||
},
|
||
unshift (data, callback) {
|
||
if (Array.isArray(data)) {
|
||
if (_maybeDrain(data)) return
|
||
return data.map(datum => _insert(datum, true, false, callback))
|
||
}
|
||
return _insert(data, true, false, callback);
|
||
},
|
||
unshiftAsync (data, callback) {
|
||
if (Array.isArray(data)) {
|
||
if (_maybeDrain(data)) return
|
||
return data.map(datum => _insert(datum, true, true, callback))
|
||
}
|
||
return _insert(data, true, true, callback);
|
||
},
|
||
remove (testFn) {
|
||
q._tasks.remove(testFn);
|
||
},
|
||
process () {
|
||
// Avoid trying to start too many processing operations. This can occur
|
||
// when callbacks resolve synchronously (#1267).
|
||
if (isProcessing) {
|
||
return;
|
||
}
|
||
isProcessing = true;
|
||
while(!q.paused && numRunning < q.concurrency && q._tasks.length){
|
||
var tasks = [], data = [];
|
||
var l = q._tasks.length;
|
||
if (q.payload) l = Math.min(l, q.payload);
|
||
for (var i = 0; i < l; i++) {
|
||
var node = q._tasks.shift();
|
||
tasks.push(node);
|
||
workersList.push(node);
|
||
data.push(node.data);
|
||
}
|
||
|
||
numRunning += 1;
|
||
|
||
if (q._tasks.length === 0) {
|
||
trigger('empty');
|
||
}
|
||
|
||
if (numRunning === q.concurrency) {
|
||
trigger('saturated');
|
||
}
|
||
|
||
var cb = onlyOnce(_createCB(tasks));
|
||
_worker(data, cb);
|
||
}
|
||
isProcessing = false;
|
||
},
|
||
length () {
|
||
return q._tasks.length;
|
||
},
|
||
running () {
|
||
return numRunning;
|
||
},
|
||
workersList () {
|
||
return workersList;
|
||
},
|
||
idle() {
|
||
return q._tasks.length + numRunning === 0;
|
||
},
|
||
pause () {
|
||
q.paused = true;
|
||
},
|
||
resume () {
|
||
if (q.paused === false) { return; }
|
||
q.paused = false;
|
||
setImmediate$1(q.process);
|
||
}
|
||
};
|
||
// define these as fixed properties, so people get useful errors when updating
|
||
Object.defineProperties(q, {
|
||
saturated: {
|
||
writable: false,
|
||
value: eventMethod('saturated')
|
||
},
|
||
unsaturated: {
|
||
writable: false,
|
||
value: eventMethod('unsaturated')
|
||
},
|
||
empty: {
|
||
writable: false,
|
||
value: eventMethod('empty')
|
||
},
|
||
drain: {
|
||
writable: false,
|
||
value: eventMethod('drain')
|
||
},
|
||
error: {
|
||
writable: false,
|
||
value: eventMethod('error')
|
||
},
|
||
});
|
||
return q;
|
||
}
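/*
 * Usage sketch for the queue factory above, assuming the public `async.queue`
 * wrapper of this inlined library, which hands the worker one task at a time;
 * `sendEmail` is an illustrative placeholder.
 *
 *   const q = async.queue(function (task, cb) {
 *       sendEmail(task.to, task.body, cb);
 *   }, 2); // at most 2 tasks processed concurrently
 *
 *   q.push({to: 'a@example.com', body: 'hi'});
 *   q.drain(() => console.log('all items have been processed'));
 *   q.error((err, task) => console.error('task failed', task, err));
 */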
|
||
|
||
/**
|
||
* Creates a `cargo` object with the specified payload. Tasks added to the
|
||
* cargo will be processed altogether (up to the `payload` limit). If the
|
||
* `worker` is in progress, the task is queued until it becomes available. Once
|
||
* the `worker` has completed some tasks, each callback of those tasks is
|
||
* called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966)
|
||
* for how `cargo` and `queue` work.
|
||
*
|
||
* While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers
|
||
* at a time, cargo passes an array of tasks to a single worker, repeating
|
||
* when the worker is finished.
|
||
*
|
||
* @name cargo
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @see [async.queue]{@link module:ControlFlow.queue}
|
||
* @category Control Flow
|
||
* @param {AsyncFunction} worker - An asynchronous function for processing an array
|
||
* of queued tasks. Invoked with `(tasks, callback)`.
|
||
* @param {number} [payload=Infinity] - An optional `integer` for determining
|
||
* how many tasks should be processed per round; if omitted, the default is
|
||
* unlimited.
|
||
* @returns {module:ControlFlow.QueueObject} A cargo object to manage the tasks. Callbacks can
|
||
* be attached as certain properties to listen for specific events during the
|
||
* lifecycle of the cargo and inner queue.
|
||
* @example
|
||
*
|
||
* // create a cargo object with payload 2
|
||
* var cargo = async.cargo(function(tasks, callback) {
|
||
* for (var i=0; i<tasks.length; i++) {
|
||
* console.log('hello ' + tasks[i].name);
|
||
* }
|
||
* callback();
|
||
* }, 2);
|
||
*
|
||
* // add some items
|
||
* cargo.push({name: 'foo'}, function(err) {
|
||
* console.log('finished processing foo');
|
||
* });
|
||
* cargo.push({name: 'bar'}, function(err) {
|
||
* console.log('finished processing bar');
|
||
* });
|
||
* await cargo.push({name: 'baz'});
|
||
* console.log('finished processing baz');
|
||
*/
|
||
function cargo(worker, payload) {
|
||
return queue(worker, 1, payload);
|
||
}
|
||
|
||
/**
|
||
* Creates a `cargoQueue` object with the specified payload. Tasks added to the
|
||
* cargoQueue will be processed together (up to the `payload` limit) in `concurrency` parallel workers.
|
||
* If all `workers` are in progress, the task is queued until one becomes available. Once
|
||
* a `worker` has completed some tasks, each callback of those tasks is
|
||
* called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966)
|
||
* for how `cargo` and `queue` work.
|
||
*
|
||
* While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers
|
||
* at a time, and [`cargo`]{@link module:ControlFlow.cargo} passes an array of tasks to a single worker,
|
||
* the cargoQueue passes an array of tasks to multiple parallel workers.
|
||
*
|
||
* @name cargoQueue
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @see [async.queue]{@link module:ControlFlow.queue}
|
||
* @see [async.cargo]{@link module:ControlFlow.cargo}
|
||
* @category Control Flow
|
||
* @param {AsyncFunction} worker - An asynchronous function for processing an array
|
||
* of queued tasks. Invoked with `(tasks, callback)`.
|
||
* @param {number} [concurrency=1] - An `integer` for determining how many
|
||
* `worker` functions should be run in parallel. If omitted, the concurrency
|
||
* defaults to `1`. If the concurrency is `0`, an error is thrown.
|
||
* @param {number} [payload=Infinity] - An optional `integer` for determining
|
||
* how many tasks should be processed per round; if omitted, the default is
|
||
* unlimited.
|
||
* @returns {module:ControlFlow.QueueObject} A cargoQueue object to manage the tasks. Callbacks can
|
||
* be attached as certain properties to listen for specific events during the
|
||
* lifecycle of the cargoQueue and inner queue.
|
||
* @example
|
||
*
|
||
* // create a cargoQueue object with payload 2 and concurrency 2
|
||
* var cargoQueue = async.cargoQueue(function(tasks, callback) {
|
||
* for (var i=0; i<tasks.length; i++) {
|
||
* console.log('hello ' + tasks[i].name);
|
||
* }
|
||
* callback();
|
||
* }, 2, 2);
|
||
*
|
||
* // add some items
|
||
* cargoQueue.push({name: 'foo'}, function(err) {
|
||
* console.log('finished processing foo');
|
||
* });
|
||
* cargoQueue.push({name: 'bar'}, function(err) {
|
||
* console.log('finished processing bar');
|
||
* });
|
||
* cargoQueue.push({name: 'baz'}, function(err) {
|
||
* console.log('finished processing baz');
|
||
* });
|
||
* cargoQueue.push({name: 'boo'}, function(err) {
|
||
* console.log('finished processing boo');
|
||
* });
|
||
*/
|
||
function cargo$1(worker, concurrency, payload) {
|
||
return queue(worker, concurrency, payload);
|
||
}
|
||
|
||
/**
|
||
* Reduces `coll` into a single value using an async `iteratee` to return each
|
||
* successive step. `memo` is the initial state of the reduction. This function
|
||
* only operates in series.
|
||
*
|
||
* For performance reasons, it may make sense to split a call to this function
|
||
* into a parallel map, and then use the normal `Array.prototype.reduce` on the
|
||
* results. This function is for situations where each step in the reduction
|
||
* needs to be async; if you can get the data before reducing it, then it's
|
||
* probably a good idea to do so.
|
||
*
|
||
* @name reduce
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @alias inject
|
||
* @alias foldl
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {*} memo - The initial state of the reduction.
|
||
* @param {AsyncFunction} iteratee - A function applied to each item in the
|
||
* array to produce the next step in the reduction.
|
||
* The `iteratee` should complete with the next state of the reduction.
|
||
* If the iteratee completes with an error, the reduction is stopped and the
|
||
* main `callback` is immediately called with the error.
|
||
* Invoked with (memo, item, callback).
|
||
* @param {Function} [callback] - A callback which is called after all the
|
||
* `iteratee` functions have finished. Result is the reduced value. Invoked with
|
||
* (err, result).
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
* @example
|
||
*
|
||
* async.reduce([1,2,3], 0, function(memo, item, callback) {
|
||
* // pointless async:
|
||
* process.nextTick(function() {
|
||
* callback(null, memo + item)
|
||
* });
|
||
* }, function(err, result) {
|
||
* // result is now equal to the last value of memo, which is 6
|
||
* });
|
||
*/
|
||
function reduce(coll, memo, iteratee, callback) {
|
||
callback = once(callback);
|
||
var _iteratee = wrapAsync(iteratee);
|
||
return eachOfSeries$1(coll, (x, i, iterCb) => {
|
||
_iteratee(memo, x, (err, v) => {
|
||
memo = v;
|
||
iterCb(err);
|
||
});
|
||
}, err => callback(err, memo));
|
||
}
|
||
var reduce$1 = awaitify(reduce, 4);
|
||
|
||
/**
|
||
* Version of the compose function that is more natural to read. Each function
|
||
* consumes the return value of the previous function. It is the equivalent of
|
||
* [compose]{@link module:ControlFlow.compose} with the arguments reversed.
|
||
*
|
||
* Each function is executed with the `this` binding of the composed function.
|
||
*
|
||
* @name seq
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @see [async.compose]{@link module:ControlFlow.compose}
|
||
* @category Control Flow
|
||
* @param {...AsyncFunction} functions - the asynchronous functions to compose
|
||
* @returns {Function} a function that composes the `functions` in order
|
||
* @example
|
||
*
|
||
* // Requires lodash (or underscore), express3 and dresende's orm2.
|
||
* // Part of an app, that fetches cats of the logged user.
|
||
* // This example uses `seq` function to avoid overnesting and error
|
||
* // handling clutter.
|
||
* app.get('/cats', function(request, response) {
|
||
* var User = request.models.User;
|
||
* async.seq(
|
||
* _.bind(User.get, User), // 'User.get' has signature (id, callback(err, data))
|
||
* function(user, fn) {
|
||
* user.getCats(fn); // 'getCats' has signature (callback(err, data))
|
||
* }
|
||
* )(request.session.user_id, function (err, cats) {
|
||
* if (err) {
|
||
* console.error(err);
|
||
* response.json({ status: 'error', message: err.message });
|
||
* } else {
|
||
* response.json({ status: 'ok', message: 'Cats found', data: cats });
|
||
* }
|
||
* });
|
||
* });
|
||
*/
|
||
function seq(...functions) {
|
||
var _functions = functions.map(wrapAsync);
|
||
return function (...args) {
|
||
var that = this;
|
||
|
||
var cb = args[args.length - 1];
|
||
if (typeof cb == 'function') {
|
||
args.pop();
|
||
} else {
|
||
cb = promiseCallback();
|
||
}
|
||
|
||
reduce$1(_functions, args, (newargs, fn, iterCb) => {
|
||
fn.apply(that, newargs.concat((err, ...nextargs) => {
|
||
iterCb(err, nextargs);
|
||
}));
|
||
},
|
||
(err, results) => cb(err, ...results));
|
||
|
||
return cb[PROMISE_SYMBOL]
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Creates a function which is a composition of the passed asynchronous
|
||
* functions. Each function consumes the return value of the function that
|
||
* follows. Composing functions `f()`, `g()`, and `h()` would produce the result
|
||
* of `f(g(h()))`, only this version uses callbacks to obtain the return values.
|
||
*
|
||
* If the last argument to the composed function is not a function, a promise
|
||
* is returned when you call it.
|
||
*
|
||
* Each function is executed with the `this` binding of the composed function.
|
||
*
|
||
* @name compose
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @category Control Flow
|
||
* @param {...AsyncFunction} functions - the asynchronous functions to compose
|
||
* @returns {Function} an asynchronous function that is the composed
|
||
* asynchronous `functions`
|
||
* @example
|
||
*
|
||
* function add1(n, callback) {
|
||
* setTimeout(function () {
|
||
* callback(null, n + 1);
|
||
* }, 10);
|
||
* }
|
||
*
|
||
* function mul3(n, callback) {
|
||
* setTimeout(function () {
|
||
* callback(null, n * 3);
|
||
* }, 10);
|
||
* }
|
||
*
|
||
* var add1mul3 = async.compose(mul3, add1);
|
||
* add1mul3(4, function (err, result) {
|
||
* // result now equals 15
|
||
* });
|
||
*/
|
||
function compose(...args) {
|
||
return seq(...args.reverse());
|
||
}
|
||
|
||
/**
|
||
* The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time.
|
||
*
|
||
* @name mapLimit
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.map]{@link module:Collections.map}
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {number} limit - The maximum number of async operations at a time.
|
||
* @param {AsyncFunction} iteratee - An async function to apply to each item in
|
||
* `coll`.
|
||
* The iteratee should complete with the transformed item.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called when all `iteratee`
|
||
* functions have finished, or an error occurs. Results is an array of the
|
||
* transformed items from the `coll`. Invoked with (err, results).
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
*/
|
||
function mapLimit (coll, limit, iteratee, callback) {
|
||
return _asyncMap(eachOfLimit(limit), coll, iteratee, callback)
|
||
}
|
||
var mapLimit$1 = awaitify(mapLimit, 4);
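/*
 * Usage sketch, assuming the public `async.mapLimit` API; mirrors the `map`
 * example above but caps concurrency at 2.
 *
 *   async.mapLimit(['file1', 'file2', 'file3'], 2, fs.stat, function (err, results) {
 *       // at most two fs.stat calls are in flight at once;
 *       // results still follow the input order
 *   });
 */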
|
||
|
||
/**
|
||
* The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time.
|
||
*
|
||
* @name concatLimit
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.concat]{@link module:Collections.concat}
|
||
* @category Collection
|
||
* @alias flatMapLimit
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {number} limit - The maximum number of async operations at a time.
|
||
* @param {AsyncFunction} iteratee - A function to apply to each item in `coll`,
|
||
* which should use an array as its result. Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called after all the
|
||
* `iteratee` functions have finished, or an error occurs. Results is an array
|
||
* containing the concatenated results of the `iteratee` function. Invoked with
|
||
* (err, results).
|
||
* @returns A Promise, if no callback is passed
|
||
*/
|
||
function concatLimit(coll, limit, iteratee, callback) {
|
||
var _iteratee = wrapAsync(iteratee);
|
||
return mapLimit$1(coll, limit, (val, iterCb) => {
|
||
_iteratee(val, (err, ...args) => {
|
||
if (err) return iterCb(err);
|
||
return iterCb(err, args);
|
||
});
|
||
}, (err, mapResults) => {
|
||
var result = [];
|
||
for (var i = 0; i < mapResults.length; i++) {
|
||
if (mapResults[i]) {
|
||
result = result.concat(...mapResults[i]);
|
||
}
|
||
}
|
||
|
||
return callback(err, result);
|
||
});
|
||
}
|
||
var concatLimit$1 = awaitify(concatLimit, 4);
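/*
 * Usage sketch, assuming the public `async.concatLimit` API; mirrors the
 * `concat` example below but reads at most two directories at a time.
 *
 *   async.concatLimit(['dir1', 'dir2', 'dir3'], 2, fs.readdir, function (err, files) {
 *       // files is a single flattened list of the directory entries
 *   });
 */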
|
||
|
||
/**
|
||
* Applies `iteratee` to each item in `coll`, concatenating the results. Returns
|
||
* the concatenated list. The `iteratee`s are called in parallel, and the
|
||
* results are concatenated as they return. The results array will be returned in
|
||
* the original order of `coll` passed to the `iteratee` function.
|
||
*
|
||
* @name concat
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @category Collection
|
||
* @alias flatMap
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - A function to apply to each item in `coll`,
|
||
* which should use an array as its result. Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called after all the
|
||
* `iteratee` functions have finished, or an error occurs. Results is an array
|
||
* containing the concatenated results of the `iteratee` function. Invoked with
|
||
* (err, results).
|
||
* @returns A Promise, if no callback is passed
|
||
* @example
|
||
*
|
||
* async.concat(['dir1','dir2','dir3'], fs.readdir, function(err, files) {
|
||
* // files is now a list of filenames that exist in the 3 directories
|
||
* });
|
||
*/
|
||
function concat(coll, iteratee, callback) {
|
||
return concatLimit$1(coll, Infinity, iteratee, callback)
|
||
}
|
||
var concat$1 = awaitify(concat, 3);
|
||
|
||
/**
|
||
* The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time.
|
||
*
|
||
* @name concatSeries
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.concat]{@link module:Collections.concat}
|
||
* @category Collection
|
||
* @alias flatMapSeries
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - A function to apply to each item in `coll`.
|
||
* The iteratee should complete with an array of results.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called after all the
|
||
* `iteratee` functions have finished, or an error occurs. Results is an array
|
||
* containing the concatenated results of the `iteratee` function. Invoked with
|
||
* (err, results).
|
||
* @returns A Promise, if no callback is passed
|
||
*/
|
||
function concatSeries(coll, iteratee, callback) {
|
||
return concatLimit$1(coll, 1, iteratee, callback)
|
||
}
|
||
var concatSeries$1 = awaitify(concatSeries, 3);
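/*
 * Usage sketch, assuming the public `async.concatSeries` API.
 *
 *   async.concatSeries(['dir1', 'dir2', 'dir3'], fs.readdir, function (err, files) {
 *       // directories are read one at a time; files is the concatenated result
 *   });
 */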
|
||
|
||
/**
|
||
* Returns a function that when called, calls-back with the values provided.
|
||
* Useful as the first function in a [`waterfall`]{@link module:ControlFlow.waterfall}, or for plugging values in to
|
||
* [`auto`]{@link module:ControlFlow.auto}.
|
||
*
|
||
* @name constant
|
||
* @static
|
||
* @memberOf module:Utils
|
||
* @method
|
||
* @category Util
|
||
* @param {...*} arguments... - Any number of arguments to automatically invoke
|
||
* callback with.
|
||
* @returns {AsyncFunction} Returns a function that when invoked, automatically
|
||
* invokes the callback with the previous given arguments.
|
||
* @example
|
||
*
|
||
* async.waterfall([
|
||
* async.constant(42),
|
||
* function (value, next) {
|
||
* // value === 42
|
||
* },
|
||
* //...
|
||
* ], callback);
|
||
*
|
||
* async.waterfall([
|
||
* async.constant(filename, "utf8"),
|
||
* fs.readFile,
|
||
* function (fileData, next) {
|
||
* //...
|
||
* }
|
||
* //...
|
||
* ], callback);
|
||
*
|
||
* async.auto({
|
||
* hostname: async.constant("https://server.net/"),
|
||
* port: findFreePort,
|
||
* launchServer: ["hostname", "port", function (options, cb) {
|
||
* startServer(options, cb);
|
||
* }],
|
||
* //...
|
||
* }, callback);
|
||
*/
|
||
function constant(...args) {
|
||
return function (...ignoredArgs/*, callback*/) {
|
||
var callback = ignoredArgs.pop();
|
||
return callback(null, ...args);
|
||
};
|
||
}
|
||
|
||
function _createTester(check, getResult) {
|
||
return (eachfn, arr, _iteratee, cb) => {
|
||
var testPassed = false;
|
||
var testResult;
|
||
const iteratee = wrapAsync(_iteratee);
|
||
eachfn(arr, (value, _, callback) => {
|
||
iteratee(value, (err, result) => {
|
||
if (err || err === false) return callback(err);
|
||
|
||
if (check(result) && !testResult) {
|
||
testPassed = true;
|
||
testResult = getResult(true, value);
|
||
return callback(null, breakLoop);
|
||
}
|
||
callback();
|
||
});
|
||
}, err => {
|
||
if (err) return cb(err);
|
||
cb(null, testPassed ? testResult : getResult(false));
|
||
});
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Returns the first value in `coll` that passes an async truth test. The
|
||
* `iteratee` is applied in parallel, meaning the first iteratee to return
|
||
* `true` will fire the detect `callback` with that result. That means the
|
||
* result might not be the first item in the original `coll` (in terms of order)
|
||
* that passes the test.
|
||
|
||
* If order within the original `coll` is important, then look at
|
||
* [`detectSeries`]{@link module:Collections.detectSeries}.
|
||
*
|
||
* @name detect
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @alias find
|
||
* @category Collections
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`.
|
||
* The iteratee must complete with a boolean value as its result.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called as soon as any
|
||
* iteratee returns `true`, or after all the `iteratee` functions have finished.
|
||
* Result will be the first item in the array that passes the truth test
|
||
* (iteratee) or the value `undefined` if none passed. Invoked with
|
||
* (err, result).
|
||
* @returns A Promise, if no callback is passed
|
||
* @example
|
||
*
|
||
* async.detect(['file1','file2','file3'], function(filePath, callback) {
|
||
* fs.access(filePath, function(err) {
|
||
* callback(null, !err)
|
||
* });
|
||
* }, function(err, result) {
|
||
* // result now equals the first file in the list that exists
|
||
* });
|
||
*/
|
||
function detect(coll, iteratee, callback) {
|
||
return _createTester(bool => bool, (res, item) => item)(eachOf$1, coll, iteratee, callback)
|
||
}
|
||
var detect$1 = awaitify(detect, 3);
|
||
|
||
/**
|
||
* The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a
|
||
* time.
|
||
*
|
||
* @name detectLimit
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.detect]{@link module:Collections.detect}
|
||
* @alias findLimit
|
||
* @category Collections
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {number} limit - The maximum number of async operations at a time.
|
||
* @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`.
|
||
* The iteratee must complete with a boolean value as its result.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called as soon as any
|
||
* iteratee returns `true`, or after all the `iteratee` functions have finished.
|
||
* Result will be the first item in the array that passes the truth test
|
||
* (iteratee) or the value `undefined` if none passed. Invoked with
|
||
* (err, result).
|
||
* @returns a Promise if no callback is passed
|
||
*/
|
||
function detectLimit(coll, limit, iteratee, callback) {
|
||
return _createTester(bool => bool, (res, item) => item)(eachOfLimit(limit), coll, iteratee, callback)
|
||
}
|
||
var detectLimit$1 = awaitify(detectLimit, 4);
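/*
 * Usage sketch, assuming the public `async.detectLimit` API; mirrors the
 * `detect` example above with at most two fs.access checks in flight.
 *
 *   async.detectLimit(['file1', 'file2', 'file3'], 2, function (filePath, callback) {
 *       fs.access(filePath, (err) => callback(null, !err));
 *   }, function (err, result) {
 *       // result is the first file found to exist
 *   });
 */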
|
||
|
||
/**
|
||
* The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time.
|
||
*
|
||
* @name detectSeries
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.detect]{@link module:Collections.detect}
|
||
* @alias findSeries
|
||
* @category Collections
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`.
|
||
* The iteratee must complete with a boolean value as its result.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called as soon as any
|
||
* iteratee returns `true`, or after all the `iteratee` functions have finished.
|
||
* Result will be the first item in the array that passes the truth test
|
||
* (iteratee) or the value `undefined` if none passed. Invoked with
|
||
* (err, result).
|
||
* @returns a Promise if no callback is passed
|
||
*/
|
||
function detectSeries(coll, iteratee, callback) {
|
||
return _createTester(bool => bool, (res, item) => item)(eachOfLimit(1), coll, iteratee, callback)
|
||
}
|
||
|
||
var detectSeries$1 = awaitify(detectSeries, 3);
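/*
 * Usage sketch, assuming the public `async.detectSeries` API; files are
 * checked strictly in order, so the result is the first match by position.
 *
 *   async.detectSeries(['file1', 'file2', 'file3'], function (filePath, callback) {
 *       fs.access(filePath, (err) => callback(null, !err));
 *   }, function (err, result) {
 *       // result is the first existing file in list order
 *   });
 */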
|
||
|
||
function consoleFunc(name) {
|
||
return (fn, ...args) => wrapAsync(fn)(...args, (err, ...resultArgs) => {
|
||
if (typeof console === 'object') {
|
||
if (err) {
|
||
if (console.error) {
|
||
console.error(err);
|
||
}
|
||
} else if (console[name]) {
|
||
resultArgs.forEach(x => console[name](x));
|
||
}
|
||
}
|
||
})
|
||
}
|
||
|
||
/**
|
||
* Logs the result of an [`async` function]{@link AsyncFunction} to the
|
||
* `console` using `console.dir` to display the properties of the resulting object.
|
||
* Only works in Node.js or in browsers that support `console.dir` and
|
||
* `console.error` (such as FF and Chrome).
|
||
* If multiple arguments are returned from the async function,
|
||
* `console.dir` is called on each argument in order.
|
||
*
|
||
* @name dir
|
||
* @static
|
||
* @memberOf module:Utils
|
||
* @method
|
||
* @category Util
|
||
* @param {AsyncFunction} function - The function you want to eventually apply
|
||
* all arguments to.
|
||
* @param {...*} arguments... - Any number of arguments to apply to the function.
|
||
* @example
|
||
*
|
||
* // in a module
|
||
* var hello = function(name, callback) {
|
||
* setTimeout(function() {
|
||
* callback(null, {hello: name});
|
||
* }, 1000);
|
||
* };
|
||
*
|
||
* // in the node repl
|
||
* node> async.dir(hello, 'world');
|
||
* {hello: 'world'}
|
||
*/
|
||
var dir = consoleFunc('dir');
|
||
|
||
/**
|
||
* The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in
|
||
* the order of operations, the arguments `test` and `iteratee` are switched.
|
||
*
|
||
* `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript.
|
||
*
|
||
* @name doWhilst
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @see [async.whilst]{@link module:ControlFlow.whilst}
|
||
* @category Control Flow
|
||
* @param {AsyncFunction} iteratee - A function which is called each time `test`
|
||
* passes. Invoked with (callback).
|
||
* @param {AsyncFunction} test - asynchronous truth test to perform after each
|
||
* execution of `iteratee`. Invoked with (...args, callback), where `...args` are the
|
||
* non-error args from the previous callback of `iteratee`.
|
||
* @param {Function} [callback] - A callback which is called after the test
|
||
* function has failed and repeated execution of `iteratee` has stopped.
|
||
* `callback` will be passed an error and any arguments passed to the final
|
||
* `iteratee`'s callback. Invoked with (err, [results]);
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
*/
|
||
function doWhilst(iteratee, test, callback) {
|
||
callback = onlyOnce(callback);
|
||
var _fn = wrapAsync(iteratee);
|
||
var _test = wrapAsync(test);
|
||
var results;
|
||
|
||
function next(err, ...args) {
|
||
if (err) return callback(err);
|
||
if (err === false) return;
|
||
results = args;
|
||
_test(...args, check);
|
||
}
|
||
|
||
function check(err, truth) {
|
||
if (err) return callback(err);
|
||
if (err === false) return;
|
||
if (!truth) return callback(null, ...results);
|
||
_fn(next);
|
||
}
|
||
|
||
return check(null, true);
|
||
}
|
||
|
||
var doWhilst$1 = awaitify(doWhilst, 3);
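// A minimal usage sketch for doWhilst, assuming `async` is the public export; the
// iteratee always runs once before the test is evaluated:
//
//   let count = 0;
//   async.doWhilst(callback => {
//       count++;
//       callback(null, count);
//   }, (c, callback) => callback(null, c < 5), (err, finalCount) => {
//       // finalCount === 5
//   });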
/**
|
||
* Like [`doWhilst`]{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the
|
||
* argument ordering differs from `until`.
|
||
*
|
||
* @name doUntil
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @see [async.doWhilst]{@link module:ControlFlow.doWhilst}
|
||
* @category Control Flow
|
||
* @param {AsyncFunction} iteratee - An async function which is called each time
|
||
* `test` fails. Invoked with (callback).
|
||
* @param {AsyncFunction} test - asynchronous truth test to perform after each
|
||
* execution of `iteratee`. Invoked with (...args, callback), where `...args` are the
|
||
* non-error args from the previous callback of `iteratee`
|
||
* @param {Function} [callback] - A callback which is called after the test
|
||
* function has passed and repeated execution of `iteratee` has stopped. `callback`
|
||
* will be passed an error and any arguments passed to the final `iteratee`'s
|
||
* callback. Invoked with (err, [results]);
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
*/
|
||
function doUntil(iteratee, test, callback) {
|
||
const _test = wrapAsync(test);
|
||
return doWhilst$1(iteratee, (...args) => {
|
||
const cb = args.pop();
|
||
_test(...args, (err, truth) => cb (err, !truth));
|
||
}, callback);
|
||
}
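// A minimal usage sketch for doUntil (same shape as doWhilst, but looping stops once
// the test passes), assuming `async` is the public export:
//
//   let attempts = 0;
//   async.doUntil(callback => {
//       attempts++;
//       callback(null, attempts);
//   }, (n, callback) => callback(null, n >= 3), (err, total) => {
//       // total === 3
//   });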
function _withoutIndex(iteratee) {
|
||
return (value, index, callback) => iteratee(value, callback);
|
||
}
|
||
|
||
/**
|
||
* Applies the function `iteratee` to each item in `coll`, in parallel.
|
||
* The `iteratee` is called with an item from the list, and a callback for when
|
||
* it has finished. If the `iteratee` passes an error to its `callback`, the
|
||
* main `callback` (for the `each` function) is immediately called with the
|
||
* error.
|
||
*
|
||
* Note that since this function applies `iteratee` to each item in parallel,
|
||
* there is no guarantee that the iteratee functions will complete in order.
|
||
*
|
||
* @name each
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @alias forEach
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - An async function to apply to
|
||
* each item in `coll`. Invoked with (item, callback).
|
||
* The array index is not passed to the iteratee.
|
||
* If you need the index, use `eachOf`.
|
||
* @param {Function} [callback] - A callback which is called when all
|
||
* `iteratee` functions have finished, or an error occurs. Invoked with (err).
|
||
* @returns {Promise} a promise, if a callback is omitted
|
||
* @example
|
||
*
|
||
* // assuming openFiles is an array of file names and saveFile is a function
|
||
* // to save the modified contents of that file:
|
||
*
|
||
* async.each(openFiles, saveFile, function(err){
|
||
* // if any of the saves produced an error, err would equal that error
|
||
* });
|
||
*
|
||
* // assuming openFiles is an array of file names
|
||
* async.each(openFiles, function(file, callback) {
|
||
*
|
||
* // Perform operation on file here.
|
||
* console.log('Processing file ' + file);
|
||
*
|
||
* if( file.length > 32 ) {
|
||
* console.log('This file name is too long');
|
||
* callback('File name too long');
|
||
* } else {
|
||
* // Do work to process file here
|
||
* console.log('File processed');
|
||
* callback();
|
||
* }
|
||
* }, function(err) {
|
||
* // if any of the file processing produced an error, err would equal that error
|
||
* if( err ) {
|
||
* // One of the iterations produced an error.
|
||
* // All processing will now stop.
|
||
* console.log('A file failed to process');
|
||
* } else {
|
||
* console.log('All files have been processed successfully');
|
||
* }
|
||
* });
|
||
*/
|
||
function eachLimit(coll, iteratee, callback) {
|
||
return eachOf$1(coll, _withoutIndex(wrapAsync(iteratee)), callback);
|
||
}
|
||
|
||
var each = awaitify(eachLimit, 3);
|
||
|
||
/**
|
||
* The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time.
|
||
*
|
||
* @name eachLimit
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.each]{@link module:Collections.each}
|
||
* @alias forEachLimit
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {number} limit - The maximum number of async operations at a time.
|
||
* @param {AsyncFunction} iteratee - An async function to apply to each item in
|
||
* `coll`.
|
||
* The array index is not passed to the iteratee.
|
||
* If you need the index, use `eachOfLimit`.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called when all
|
||
* `iteratee` functions have finished, or an error occurs. Invoked with (err).
|
||
* @returns {Promise} a promise, if a callback is omitted
|
||
*/
|
||
function eachLimit$1(coll, limit, iteratee, callback) {
|
||
return eachOfLimit(limit)(coll, _withoutIndex(wrapAsync(iteratee)), callback);
|
||
}
|
||
var eachLimit$2 = awaitify(eachLimit$1, 4);
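// A minimal usage sketch for eachLimit, assuming `async` is the public export,
// `openFiles` is an array of file names, and `saveFile` is a hypothetical
// (file, callback) function:
//
//   async.eachLimit(openFiles, 5, saveFile, err => {
//       // at most 5 saveFile calls are in flight at any time
//   });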
/**
|
||
* The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time.
|
||
*
|
||
* Note that unlike [`each`]{@link module:Collections.each}, this function applies `iteratee` to each item
|
||
* in series and therefore the iteratee functions will complete in order.
|
||
|
||
* @name eachSeries
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.each]{@link module:Collections.each}
|
||
* @alias forEachSeries
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - An async function to apply to each
|
||
* item in `coll`.
|
||
* The array index is not passed to the iteratee.
|
||
* If you need the index, use `eachOfSeries`.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called when all
|
||
* `iteratee` functions have finished, or an error occurs. Invoked with (err).
|
||
* @returns {Promise} a promise, if a callback is omitted
|
||
*/
|
||
function eachSeries(coll, iteratee, callback) {
|
||
return eachLimit$2(coll, 1, iteratee, callback)
|
||
}
|
||
var eachSeries$1 = awaitify(eachSeries, 3);
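// A minimal usage sketch for eachSeries under the same assumptions (`openFiles` is an
// array of file names, `saveFile` a hypothetical (file, callback) function):
//
//   async.eachSeries(openFiles, saveFile, err => {
//       // files were saved one at a time, in array order
//   });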
/**
|
||
* Wrap an async function and ensure it calls its callback on a later tick of
|
||
* the event loop. If the function already calls its callback on a next tick,
|
||
* no extra deferral is added. This is useful for preventing stack overflows
|
||
* (`RangeError: Maximum call stack size exceeded`) and generally keeping
|
||
* [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony)
|
||
* contained. ES2017 `async` functions are returned as-is -- they are immune
|
||
* to Zalgo's corrupting influences, as they always resolve on a later tick.
|
||
*
|
||
* @name ensureAsync
|
||
* @static
|
||
* @memberOf module:Utils
|
||
* @method
|
||
* @category Util
|
||
* @param {AsyncFunction} fn - an async function, one that expects a node-style
|
||
* callback as its last argument.
|
||
* @returns {AsyncFunction} Returns a wrapped function with the exact same call
|
||
* signature as the function passed in.
|
||
* @example
|
||
*
|
||
* function sometimesAsync(arg, callback) {
|
||
* if (cache[arg]) {
|
||
* return callback(null, cache[arg]); // this would be synchronous!!
|
||
* } else {
|
||
* doSomeIO(arg, callback); // this IO would be asynchronous
|
||
* }
|
||
* }
|
||
*
|
||
* // this has a risk of stack overflows if many results are cached in a row
|
||
* async.mapSeries(args, sometimesAsync, done);
|
||
*
|
||
* // this will defer sometimesAsync's callback if necessary,
|
||
* // preventing stack overflows
|
||
* async.mapSeries(args, async.ensureAsync(sometimesAsync), done);
|
||
*/
|
||
function ensureAsync(fn) {
|
||
if (isAsync(fn)) return fn;
|
||
return function (...args/*, callback*/) {
|
||
var callback = args.pop();
|
||
var sync = true;
|
||
args.push((...innerArgs) => {
|
||
if (sync) {
|
||
setImmediate$1(() => callback(...innerArgs));
|
||
} else {
|
||
callback(...innerArgs);
|
||
}
|
||
});
|
||
fn.apply(this, args);
|
||
sync = false;
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Returns `true` if every element in `coll` satisfies an async test. If any
|
||
* iteratee call returns `false`, the main `callback` is immediately called.
|
||
*
|
||
* @name every
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @alias all
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - An async truth test to apply to each item
|
||
* in the collection in parallel.
|
||
* The iteratee must complete with a boolean result value.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called after all the
|
||
* `iteratee` functions have finished. Result will be either `true` or `false`
|
||
* depending on the values of the async tests. Invoked with (err, result).
|
||
* @returns {Promise} a promise, if no callback provided
|
||
* @example
|
||
*
|
||
* async.every(['file1','file2','file3'], function(filePath, callback) {
|
||
* fs.access(filePath, function(err) {
|
||
* callback(null, !err)
|
||
* });
|
||
* }, function(err, result) {
|
||
* // if result is true then every file exists
|
||
* });
|
||
*/
|
||
function every(coll, iteratee, callback) {
|
||
return _createTester(bool => !bool, res => !res)(eachOf$1, coll, iteratee, callback)
|
||
}
|
||
var every$1 = awaitify(every, 3);
|
||
|
||
/**
|
||
* The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time.
|
||
*
|
||
* @name everyLimit
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.every]{@link module:Collections.every}
|
||
* @alias allLimit
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {number} limit - The maximum number of async operations at a time.
|
||
* @param {AsyncFunction} iteratee - An async truth test to apply to each item
|
||
* in the collection in parallel.
|
||
* The iteratee must complete with a boolean result value.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called after all the
|
||
* `iteratee` functions have finished. Result will be either `true` or `false`
|
||
* depending on the values of the async tests. Invoked with (err, result).
|
||
* @returns {Promise} a promise, if no callback provided
|
||
*/
|
||
function everyLimit(coll, limit, iteratee, callback) {
|
||
return _createTester(bool => !bool, res => !res)(eachOfLimit(limit), coll, iteratee, callback)
|
||
}
|
||
var everyLimit$1 = awaitify(everyLimit, 4);
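// A minimal usage sketch for everyLimit, assuming `async` is the public export and
// `fs` is Node's fs module; at most 2 access checks run at a time:
//
//   async.everyLimit(['file1', 'file2', 'file3'], 2, (filePath, callback) => {
//       fs.access(filePath, err => callback(null, !err));
//   }, (err, allExist) => {
//       // allExist is true only if every file passed the check
//   });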
/**
|
||
* The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time.
|
||
*
|
||
* @name everySeries
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.every]{@link module:Collections.every}
|
||
* @alias allSeries
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - An async truth test to apply to each item
|
||
* in the collection in series.
|
||
* The iteratee must complete with a boolean result value.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called after all the
|
||
* `iteratee` functions have finished. Result will be either `true` or `false`
|
||
* depending on the values of the async tests. Invoked with (err, result).
|
||
* @returns {Promise} a promise, if no callback provided
|
||
*/
|
||
function everySeries(coll, iteratee, callback) {
|
||
return _createTester(bool => !bool, res => !res)(eachOfSeries$1, coll, iteratee, callback)
|
||
}
|
||
var everySeries$1 = awaitify(everySeries, 3);
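// A minimal usage sketch for everySeries under the same assumptions (`async` is the
// public export, `fs` is Node's fs module); items are checked one at a time:
//
//   async.everySeries(['file1', 'file2', 'file3'], (filePath, callback) => {
//       fs.access(filePath, err => callback(null, !err));
//   }, (err, allExist) => {
//       // allExist is true only if every file passed the check
//   });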
function filterArray(eachfn, arr, iteratee, callback) {
|
||
var truthValues = new Array(arr.length);
|
||
eachfn(arr, (x, index, iterCb) => {
|
||
iteratee(x, (err, v) => {
|
||
truthValues[index] = !!v;
|
||
iterCb(err);
|
||
});
|
||
}, err => {
|
||
if (err) return callback(err);
|
||
var results = [];
|
||
for (var i = 0; i < arr.length; i++) {
|
||
if (truthValues[i]) results.push(arr[i]);
|
||
}
|
||
callback(null, results);
|
||
});
|
||
}
|
||
|
||
function filterGeneric(eachfn, coll, iteratee, callback) {
|
||
var results = [];
|
||
eachfn(coll, (x, index, iterCb) => {
|
||
iteratee(x, (err, v) => {
|
||
if (err) return iterCb(err);
|
||
if (v) {
|
||
results.push({index, value: x});
|
||
}
|
||
iterCb(err);
|
||
});
|
||
}, err => {
|
||
if (err) return callback(err);
|
||
callback(null, results
|
||
.sort((a, b) => a.index - b.index)
|
||
.map(v => v.value));
|
||
});
|
||
}
|
||
|
||
function _filter(eachfn, coll, iteratee, callback) {
|
||
var filter = isArrayLike(coll) ? filterArray : filterGeneric;
|
||
return filter(eachfn, coll, wrapAsync(iteratee), callback);
|
||
}
|
||
|
||
/**
|
||
* Returns a new array of all the values in `coll` which pass an async truth
|
||
* test. This operation is performed in parallel, but the results array will be
|
||
* in the same order as the original.
|
||
*
|
||
* @name filter
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @alias select
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {Function} iteratee - A truth test to apply to each item in `coll`.
|
||
* The `iteratee` is passed a `callback(err, truthValue)`, which must be called
|
||
* with a boolean argument once it has completed. Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called after all the
|
||
* `iteratee` functions have finished. Invoked with (err, results).
|
||
* @returns {Promise} a promise, if no callback provided
|
||
* @example
|
||
*
|
||
* async.filter(['file1','file2','file3'], function(filePath, callback) {
|
||
* fs.access(filePath, function(err) {
|
||
* callback(null, !err)
|
||
* });
|
||
* }, function(err, results) {
|
||
* // results now equals an array of the existing files
|
||
* });
|
||
*/
|
||
function filter (coll, iteratee, callback) {
|
||
return _filter(eachOf$1, coll, iteratee, callback)
|
||
}
|
||
var filter$1 = awaitify(filter, 3);
|
||
|
||
/**
|
||
* The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a
|
||
* time.
|
||
*
|
||
* @name filterLimit
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.filter]{@link module:Collections.filter}
|
||
* @alias selectLimit
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {number} limit - The maximum number of async operations at a time.
|
||
* @param {Function} iteratee - A truth test to apply to each item in `coll`.
|
||
* The `iteratee` is passed a `callback(err, truthValue)`, which must be called
|
||
* with a boolean argument once it has completed. Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called after all the
|
||
* `iteratee` functions have finished. Invoked with (err, results).
|
||
* @returns {Promise} a promise, if no callback provided
|
||
*/
|
||
function filterLimit (coll, limit, iteratee, callback) {
|
||
return _filter(eachOfLimit(limit), coll, iteratee, callback)
|
||
}
|
||
var filterLimit$1 = awaitify(filterLimit, 4);
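// A minimal usage sketch for filterLimit, assuming `async` is the public export and
// `fs` is Node's fs module; results keep the original order:
//
//   async.filterLimit(['file1', 'file2', 'file3'], 2, (filePath, callback) => {
//       fs.access(filePath, err => callback(null, !err));
//   }, (err, existingFiles) => {
//       // existingFiles contains only the paths that passed the truth test
//   });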
/**
|
||
* The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time.
|
||
*
|
||
* @name filterSeries
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.filter]{@link module:Collections.filter}
|
||
* @alias selectSeries
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {Function} iteratee - A truth test to apply to each item in `coll`.
|
||
* The `iteratee` is passed a `callback(err, truthValue)`, which must be called
|
||
* with a boolean argument once it has completed. Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called after all the
|
||
* `iteratee` functions have finished. Invoked with (err, results)
|
||
* @returns {Promise} a promise, if no callback provided
|
||
*/
|
||
function filterSeries (coll, iteratee, callback) {
|
||
return _filter(eachOfSeries$1, coll, iteratee, callback)
|
||
}
|
||
var filterSeries$1 = awaitify(filterSeries, 3);
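// A minimal usage sketch for filterSeries under the same assumptions; the truth test
// runs on one item at a time and results keep the original order:
//
//   async.filterSeries(['file1', 'file2', 'file3'], (filePath, callback) => {
//       fs.access(filePath, err => callback(null, !err));
//   }, (err, existingFiles) => {
//       // existingFiles contains only the paths that passed the check
//   });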
/**
|
||
* Calls the asynchronous function `fn` with a callback parameter that allows it
|
||
* to call itself again, in series, indefinitely.
|
||
|
||
* If an error is passed to the callback then `errback` is called with the
|
||
* error, and execution stops, otherwise it will never be called.
|
||
*
|
||
* @name forever
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @category Control Flow
|
||
* @param {AsyncFunction} fn - an async function to call repeatedly.
|
||
* Invoked with (next).
|
||
* @param {Function} [errback] - when `fn` passes an error to its callback,
|
||
* this function will be called, and execution stops. Invoked with (err).
|
||
* @returns {Promise} a promise that rejects if an error occurs and an errback
|
||
* is not passed
|
||
* @example
|
||
*
|
||
* async.forever(
|
||
* function(next) {
|
||
* // next is suitable for passing to things that need a callback(err [, whatever]);
|
||
* // it will result in this function being called again.
|
||
* },
|
||
* function(err) {
|
||
* // if next is called with a value in its first parameter, it will appear
|
||
* // in here as 'err', and execution will stop.
|
||
* }
|
||
* );
|
||
*/
|
||
function forever(fn, errback) {
|
||
var done = onlyOnce(errback);
|
||
var task = wrapAsync(ensureAsync(fn));
|
||
|
||
function next(err) {
|
||
if (err) return done(err);
|
||
if (err === false) return;
|
||
task(next);
|
||
}
|
||
return next();
|
||
}
|
||
var forever$1 = awaitify(forever, 2);
|
||
|
||
/**
|
||
* The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time.
|
||
*
|
||
* @name groupByLimit
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.groupBy]{@link module:Collections.groupBy}
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {number} limit - The maximum number of async operations at a time.
|
||
* @param {AsyncFunction} iteratee - An async function to apply to each item in
|
||
* `coll`.
|
||
* The iteratee should complete with a `key` to group the value under.
|
||
* Invoked with (value, callback).
|
||
* @param {Function} [callback] - A callback which is called when all `iteratee`
|
||
* functions have finished, or an error occurs. Result is an `Object` whose
|
||
* properties are arrays of values which returned the corresponding key.
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
*/
|
||
function groupByLimit(coll, limit, iteratee, callback) {
|
||
var _iteratee = wrapAsync(iteratee);
|
||
return mapLimit$1(coll, limit, (val, iterCb) => {
|
||
_iteratee(val, (err, key) => {
|
||
if (err) return iterCb(err);
|
||
return iterCb(err, {key, val});
|
||
});
|
||
}, (err, mapResults) => {
|
||
var result = {};
|
||
// from MDN, handle object having an `hasOwnProperty` prop
|
||
var {hasOwnProperty} = Object.prototype;
|
||
|
||
for (var i = 0; i < mapResults.length; i++) {
|
||
if (mapResults[i]) {
|
||
var {key} = mapResults[i];
|
||
var {val} = mapResults[i];
|
||
|
||
if (hasOwnProperty.call(result, key)) {
|
||
result[key].push(val);
|
||
} else {
|
||
result[key] = [val];
|
||
}
|
||
}
|
||
}
|
||
|
||
return callback(err, result);
|
||
});
|
||
}
|
||
|
||
var groupByLimit$1 = awaitify(groupByLimit, 4);
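// A minimal usage sketch for groupByLimit, assuming `async` is the public export and
// `db.findById` is a hypothetical (id, callback) lookup; at most 5 lookups run at a time:
//
//   async.groupByLimit(['userId1', 'userId2', 'userId3'], 5, (userId, callback) => {
//       db.findById(userId, (err, user) => {
//           if (err) return callback(err);
//           callback(null, user.age);
//       });
//   }, (err, usersByAge) => {
//       // usersByAge maps each age to the array of userIds that resolved to it
//   });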
/**
|
||
* Returns a new object, where each value corresponds to an array of items, from
|
||
* `coll`, that returned the corresponding key. That is, the keys of the object
|
||
* correspond to the values passed to the `iteratee` callback.
|
||
*
|
||
* Note: Since this function applies the `iteratee` to each item in parallel,
|
||
* there is no guarantee that the `iteratee` functions will complete in order.
|
||
* However, the values for each key in the `result` will be in the same order as
|
||
* the original `coll`. For Objects, the values will roughly be in the order of
|
||
* the original Objects' keys (but this can vary across JavaScript engines).
|
||
*
|
||
* @name groupBy
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - An async function to apply to each item in
|
||
* `coll`.
|
||
* The iteratee should complete with a `key` to group the value under.
|
||
* Invoked with (value, callback).
|
||
* @param {Function} [callback] - A callback which is called when all `iteratee`
|
||
* functions have finished, or an error occurs. Result is an `Object` whose
|
||
* properties are arrays of values which returned the corresponding key.
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
* @example
|
||
*
|
||
* async.groupBy(['userId1', 'userId2', 'userId3'], function(userId, callback) {
|
||
* db.findById(userId, function(err, user) {
|
||
* if (err) return callback(err);
|
||
* return callback(null, user.age);
|
||
* });
|
||
* }, function(err, result) {
|
||
* // result is object containing the userIds grouped by age
|
||
* // e.g. { 30: ['userId1', 'userId3'], 42: ['userId2']};
|
||
* });
|
||
*/
|
||
function groupBy (coll, iteratee, callback) {
|
||
return groupByLimit$1(coll, Infinity, iteratee, callback)
|
||
}
|
||
|
||
/**
|
||
* The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time.
|
||
*
|
||
* @name groupBySeries
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.groupBy]{@link module:Collections.groupBy}
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - An async function to apply to each item in
|
||
* `coll`.
|
||
* The iteratee should complete with a `key` to group the value under.
|
||
* Invoked with (value, callback).
|
||
* @param {Function} [callback] - A callback which is called when all `iteratee`
|
||
* functions have finished, or an error occurs. Result is an `Object` whose
|
||
* properties are arrays of values which returned the corresponding key.
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
*/
|
||
function groupBySeries (coll, iteratee, callback) {
|
||
return groupByLimit$1(coll, 1, iteratee, callback)
|
||
}
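// A minimal usage sketch for groupBySeries, assuming `async` is the public export and
// `db.findById` is a hypothetical (id, callback) lookup; one lookup runs at a time:
//
//   async.groupBySeries(['userId1', 'userId2'], (userId, callback) => {
//       db.findById(userId, (err, user) => err ? callback(err) : callback(null, user.age));
//   }, (err, usersByAge) => {
//       // usersByAge maps each age to the array of userIds that resolved to it
//   });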
/**
|
||
* Logs the result of an `async` function to the `console`. Only works in
|
||
* Node.js or in browsers that support `console.log` and `console.error` (such
|
||
* as FF and Chrome). If multiple arguments are returned from the async
|
||
* function, `console.log` is called on each argument in order.
|
||
*
|
||
* @name log
|
||
* @static
|
||
* @memberOf module:Utils
|
||
* @method
|
||
* @category Util
|
||
* @param {AsyncFunction} function - The function you want to eventually apply
|
||
* all arguments to.
|
||
* @param {...*} arguments... - Any number of arguments to apply to the function.
|
||
* @example
|
||
*
|
||
* // in a module
|
||
* var hello = function(name, callback) {
|
||
* setTimeout(function() {
|
||
* callback(null, 'hello ' + name);
|
||
* }, 1000);
|
||
* };
|
||
*
|
||
* // in the node repl
|
||
* node> async.log(hello, 'world');
|
||
* 'hello world'
|
||
*/
|
||
var log = consoleFunc('log');
|
||
|
||
/**
|
||
* The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a
|
||
* time.
|
||
*
|
||
* @name mapValuesLimit
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.mapValues]{@link module:Collections.mapValues}
|
||
* @category Collection
|
||
* @param {Object} obj - A collection to iterate over.
|
||
* @param {number} limit - The maximum number of async operations at a time.
|
||
* @param {AsyncFunction} iteratee - A function to apply to each value and key
|
||
* in `coll`.
|
||
* The iteratee should complete with the transformed value as its result.
|
||
* Invoked with (value, key, callback).
|
||
* @param {Function} [callback] - A callback which is called when all `iteratee`
|
||
* functions have finished, or an error occurs. `result` is a new object consisting
|
||
* of each key from `obj`, with each transformed value on the right-hand side.
|
||
* Invoked with (err, result).
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
*/
|
||
function mapValuesLimit(obj, limit, iteratee, callback) {
|
||
callback = once(callback);
|
||
var newObj = {};
|
||
var _iteratee = wrapAsync(iteratee);
|
||
return eachOfLimit(limit)(obj, (val, key, next) => {
|
||
_iteratee(val, key, (err, result) => {
|
||
if (err) return next(err);
|
||
newObj[key] = result;
|
||
next(err);
|
||
});
|
||
}, err => callback(err, newObj));
|
||
}
|
||
|
||
var mapValuesLimit$1 = awaitify(mapValuesLimit, 4);
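// A minimal usage sketch for mapValuesLimit, assuming `async` is the public export and
// `fs` is Node's fs module; at most 2 stat calls run at a time:
//
//   async.mapValuesLimit({ f1: 'file1', f2: 'file2', f3: 'file3' }, 2,
//       (file, key, callback) => fs.stat(file, callback),
//       (err, statsByKey) => {
//           // statsByKey has the same keys, with each value replaced by its fs.Stats
//       });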
/**
|
||
* A relative of [`map`]{@link module:Collections.map}, designed for use with objects.
|
||
*
|
||
* Produces a new Object by mapping each value of `obj` through the `iteratee`
|
||
* function. The `iteratee` is called with each `value` and `key` from `obj` and a
|
||
* callback for when it has finished processing. Each of these callbacks takes
|
||
* two arguments: an `error`, and the transformed item from `obj`. If `iteratee`
|
||
* passes an error to its callback, the main `callback` (for the `mapValues`
|
||
* function) is immediately called with the error.
|
||
*
|
||
* Note, the order of the keys in the result is not guaranteed. The keys will
|
||
* be roughly in the order they complete (but this is very engine-specific)
|
||
*
|
||
* @name mapValues
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @category Collection
|
||
* @param {Object} obj - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - A function to apply to each value and key
|
||
* in `coll`.
|
||
* The iteratee should complete with the transformed value as its result.
|
||
* Invoked with (value, key, callback).
|
||
* @param {Function} [callback] - A callback which is called when all `iteratee`
|
||
* functions have finished, or an error occurs. `result` is a new object consisting
|
||
* of each key from `obj`, with each transformed value on the right-hand side.
|
||
* Invoked with (err, result).
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
* @example
|
||
*
|
||
* async.mapValues({
|
||
* f1: 'file1',
|
||
* f2: 'file2',
|
||
* f3: 'file3'
|
||
* }, function (file, key, callback) {
|
||
* fs.stat(file, callback);
|
||
* }, function(err, result) {
|
||
* // result is now a map of stats for each file, e.g.
|
||
* // {
|
||
* // f1: [stats for file1],
|
||
* // f2: [stats for file2],
|
||
* // f3: [stats for file3]
|
||
* // }
|
||
* });
|
||
*/
|
||
function mapValues(obj, iteratee, callback) {
|
||
return mapValuesLimit$1(obj, Infinity, iteratee, callback)
|
||
}
|
||
|
||
/**
|
||
* The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time.
|
||
*
|
||
* @name mapValuesSeries
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.mapValues]{@link module:Collections.mapValues}
|
||
* @category Collection
|
||
* @param {Object} obj - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - A function to apply to each value and key
|
||
* in `coll`.
|
||
* The iteratee should complete with the transformed value as its result.
|
||
* Invoked with (value, key, callback).
|
||
* @param {Function} [callback] - A callback which is called when all `iteratee`
|
||
* functions have finished, or an error occurs. `result` is a new object consisting
|
||
* of each key from `obj`, with each transformed value on the right-hand side.
|
||
* Invoked with (err, result).
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
*/
|
||
function mapValuesSeries(obj, iteratee, callback) {
|
||
return mapValuesLimit$1(obj, 1, iteratee, callback)
|
||
}
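// A minimal usage sketch for mapValuesSeries, assuming `async` is the public export and
// `fs` is Node's fs module; values are transformed one key at a time:
//
//   async.mapValuesSeries({ f1: 'file1', f2: 'file2' }, (file, key, callback) => {
//       fs.stat(file, callback);
//   }, (err, statsByKey) => {
//       // statsByKey has the same keys, with each value replaced by its fs.Stats
//   });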
/**
|
||
* Caches the results of an async function. When creating a hash to store
|
||
* function results against, the callback is omitted from the hash and an
|
||
* optional hash function can be used.
|
||
*
|
||
* **Note: if the async function errs, the result will not be cached and
|
||
* subsequent calls will call the wrapped function.**
|
||
*
|
||
* If no hash function is specified, the first argument is used as a hash key,
|
||
* which may work reasonably if it is a string or a data type that converts to a
|
||
* distinct string. Note that objects and arrays will not behave reasonably.
|
||
* Neither will cases where the other arguments are significant. In such cases,
|
||
* specify your own hash function.
|
||
*
|
||
* The cache of results is exposed as the `memo` property of the function
|
||
* returned by `memoize`.
|
||
*
|
||
* @name memoize
|
||
* @static
|
||
* @memberOf module:Utils
|
||
* @method
|
||
* @category Util
|
||
* @param {AsyncFunction} fn - The async function to proxy and cache results from.
|
||
* @param {Function} hasher - An optional function for generating a custom hash
|
||
* for storing results. It has all the arguments applied to it apart from the
|
||
* callback, and must be synchronous.
|
||
* @returns {AsyncFunction} a memoized version of `fn`
|
||
* @example
|
||
*
|
||
* var slow_fn = function(name, callback) {
|
||
* // do something
|
||
* callback(null, result);
|
||
* };
|
||
* var fn = async.memoize(slow_fn);
|
||
*
|
||
* // fn can now be used as if it were slow_fn
|
||
* fn('some name', function() {
|
||
* // callback
|
||
* });
|
||
*/
|
||
function memoize(fn, hasher = v => v) {
|
||
var memo = Object.create(null);
|
||
var queues = Object.create(null);
|
||
var _fn = wrapAsync(fn);
|
||
var memoized = initialParams((args, callback) => {
|
||
var key = hasher(...args);
|
||
if (key in memo) {
|
||
setImmediate$1(() => callback(null, ...memo[key]));
|
||
} else if (key in queues) {
|
||
queues[key].push(callback);
|
||
} else {
|
||
queues[key] = [callback];
|
||
_fn(...args, (err, ...resultArgs) => {
|
||
// #1465 don't memoize if an error occurred
|
||
if (!err) {
|
||
memo[key] = resultArgs;
|
||
}
|
||
var q = queues[key];
|
||
delete queues[key];
|
||
for (var i = 0, l = q.length; i < l; i++) {
|
||
q[i](err, ...resultArgs);
|
||
}
|
||
});
|
||
}
|
||
});
|
||
memoized.memo = memo;
|
||
memoized.unmemoized = fn;
|
||
return memoized;
|
||
}
|
||
|
||
/**
|
||
* Calls `callback` on a later loop around the event loop. In Node.js this just
|
||
* calls `process.nextTick`. In the browser it will use `setImmediate` if
|
||
* available, otherwise `setTimeout(callback, 0)`, which means other higher
|
||
* priority events may precede the execution of `callback`.
|
||
*
|
||
* This is used internally for browser-compatibility purposes.
|
||
*
|
||
* @name nextTick
|
||
* @static
|
||
* @memberOf module:Utils
|
||
* @method
|
||
* @see [async.setImmediate]{@link module:Utils.setImmediate}
|
||
* @category Util
|
||
* @param {Function} callback - The function to call on a later loop around
|
||
* the event loop. Invoked with (args...).
|
||
* @param {...*} args... - any number of additional arguments to pass to the
|
||
* callback on the next tick.
|
||
* @example
|
||
*
|
||
* var call_order = [];
|
||
* async.nextTick(function() {
|
||
* call_order.push('two');
|
||
* // call_order now equals ['one','two']
|
||
* });
|
||
* call_order.push('one');
|
||
*
|
||
* async.setImmediate(function (a, b, c) {
|
||
* // a, b, and c equal 1, 2, and 3
|
||
* }, 1, 2, 3);
|
||
*/
|
||
var _defer$1;
|
||
|
||
if (hasNextTick) {
|
||
_defer$1 = process.nextTick;
|
||
} else if (hasSetImmediate) {
|
||
_defer$1 = setImmediate;
|
||
} else {
|
||
_defer$1 = fallback;
|
||
}
|
||
|
||
var nextTick = wrap(_defer$1);
|
||
|
||
var parallel = awaitify((eachfn, tasks, callback) => {
|
||
var results = isArrayLike(tasks) ? [] : {};
|
||
|
||
eachfn(tasks, (task, key, taskCb) => {
|
||
wrapAsync(task)((err, ...result) => {
|
||
if (result.length < 2) {
|
||
[result] = result;
|
||
}
|
||
results[key] = result;
|
||
taskCb(err);
|
||
});
|
||
}, err => callback(err, results));
|
||
}, 3);
|
||
|
||
/**
|
||
* Run the `tasks` collection of functions in parallel, without waiting until
|
||
* the previous function has completed. If any of the functions pass an error to
|
||
* its callback, the main `callback` is immediately called with the value of the
|
||
* error. Once the `tasks` have completed, the results are passed to the final
|
||
* `callback` as an array.
|
||
*
|
||
* **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about
|
||
* parallel execution of code. If your tasks do not use any timers or perform
|
||
* any I/O, they will actually be executed in series. Any synchronous setup
|
||
* sections for each task will happen one after the other. JavaScript remains
|
||
* single-threaded.
|
||
*
|
||
* **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the
|
||
* execution of other tasks when a task fails.
|
||
*
|
||
* It is also possible to use an object instead of an array. Each property will
|
||
* be run as a function and the results will be passed to the final `callback`
|
||
* as an object instead of an array. This can be a more readable way of handling
|
||
* results from {@link async.parallel}.
|
||
*
|
||
* @name parallel
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @category Control Flow
|
||
* @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of
|
||
* [async functions]{@link AsyncFunction} to run.
|
||
* Each async function can complete with any number of optional `result` values.
|
||
* @param {Function} [callback] - An optional callback to run once all the
|
||
* functions have completed successfully. This function gets a results array
|
||
* (or object) containing all the result arguments passed to the task callbacks.
|
||
* Invoked with (err, results).
|
||
* @returns {Promise} a promise, if a callback is not passed
|
||
*
|
||
* @example
|
||
* async.parallel([
|
||
* function(callback) {
|
||
* setTimeout(function() {
|
||
* callback(null, 'one');
|
||
* }, 200);
|
||
* },
|
||
* function(callback) {
|
||
* setTimeout(function() {
|
||
* callback(null, 'two');
|
||
* }, 100);
|
||
* }
|
||
* ],
|
||
* // optional callback
|
||
* function(err, results) {
|
||
* // the results array will equal ['one','two'] even though
|
||
* // the second function had a shorter timeout.
|
||
* });
|
||
*
|
||
* // an example using an object instead of an array
|
||
* async.parallel({
|
||
* one: function(callback) {
|
||
* setTimeout(function() {
|
||
* callback(null, 1);
|
||
* }, 200);
|
||
* },
|
||
* two: function(callback) {
|
||
* setTimeout(function() {
|
||
* callback(null, 2);
|
||
* }, 100);
|
||
* }
|
||
* }, function(err, results) {
|
||
* // results now equals: {one: 1, two: 2}
|
||
* });
|
||
*/
|
||
function parallel$1(tasks, callback) {
|
||
return parallel(eachOf$1, tasks, callback);
|
||
}
|
||
|
||
/**
|
||
* The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a
|
||
* time.
|
||
*
|
||
* @name parallelLimit
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @see [async.parallel]{@link module:ControlFlow.parallel}
|
||
* @category Control Flow
|
||
* @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of
|
||
* [async functions]{@link AsyncFunction} to run.
|
||
* Each async function can complete with any number of optional `result` values.
|
||
* @param {number} limit - The maximum number of async operations at a time.
|
||
* @param {Function} [callback] - An optional callback to run once all the
|
||
* functions have completed successfully. This function gets a results array
|
||
* (or object) containing all the result arguments passed to the task callbacks.
|
||
* Invoked with (err, results).
|
||
* @returns {Promise} a promise, if a callback is not passed
|
||
*/
|
||
function parallelLimit(tasks, limit, callback) {
|
||
return parallel(eachOfLimit(limit), tasks, callback);
|
||
}
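// A minimal usage sketch for parallelLimit, assuming `async` is the public export; only
// 2 of the 3 tasks are started at once:
//
//   async.parallelLimit([
//       callback => setTimeout(() => callback(null, 'one'), 200),
//       callback => setTimeout(() => callback(null, 'two'), 100),
//       callback => setTimeout(() => callback(null, 'three'), 50)
//   ], 2, (err, results) => {
//       // results === ['one', 'two', 'three'], ordered by task position
//   });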
/**
|
||
* A queue of tasks for the worker function to complete.
|
||
* @typedef {Iterable} QueueObject
|
||
* @memberOf module:ControlFlow
|
||
* @property {Function} length - a function returning the number of items
|
||
* waiting to be processed. Invoke with `queue.length()`.
|
||
* @property {boolean} started - a boolean indicating whether or not any
|
||
* items have been pushed and processed by the queue.
|
||
* @property {Function} running - a function returning the number of items
|
||
* currently being processed. Invoke with `queue.running()`.
|
||
* @property {Function} workersList - a function returning the array of items
|
||
* currently being processed. Invoke with `queue.workersList()`.
|
||
* @property {Function} idle - a function returning false if there are items
|
||
* waiting or being processed, or true if not. Invoke with `queue.idle()`.
|
||
* @property {number} concurrency - an integer for determining how many `worker`
|
||
* functions should be run in parallel. This property can be changed after a
|
||
* `queue` is created to alter the concurrency on-the-fly.
|
||
* @property {number} payload - an integer that specifies how many items are
|
||
* passed to the worker function at a time. Only applies if this is a
|
||
* [cargo]{@link module:ControlFlow.cargo} object
|
||
* @property {AsyncFunction} push - add a new task to the `queue`. Calls `callback`
|
||
* once the `worker` has finished processing the task. Instead of a single task,
|
||
* a `tasks` array can be submitted. The respective callback is used for every
|
||
* task in the list. Invoke with `queue.push(task, [callback])`,
|
||
* @property {AsyncFunction} unshift - add a new task to the front of the `queue`.
|
||
* Invoke with `queue.unshift(task, [callback])`.
|
||
* @property {AsyncFunction} pushAsync - the same as `q.push`, except this returns
|
||
* a promise that rejects if an error occurs.
|
||
* @property {AsyncFunction} unshiftAsync - the same as `q.unshift`, except this returns
|
||
* a promise that rejects if an error occurs.
|
||
* @property {Function} remove - remove items from the queue that match a test
|
||
* function. The test function will be passed an object with a `data` property,
|
||
* and a `priority` property, if this is a
|
||
* [priorityQueue]{@link module:ControlFlow.priorityQueue} object.
|
||
* Invoked with `queue.remove(testFn)`, where `testFn` is of the form
|
||
* `function ({data, priority}) {}` and returns a Boolean.
|
||
* @property {Function} saturated - a function that sets a callback that is
|
||
* called when the number of running workers hits the `concurrency` limit, and
|
||
* further tasks will be queued. If the callback is omitted, `q.saturated()`
|
||
* returns a promise for the next occurrence.
|
||
* @property {Function} unsaturated - a function that sets a callback that is
|
||
* called when the number of running workers is less than the `concurrency` &
|
||
* `buffer` limits, and further tasks will not be queued. If the callback is
|
||
* omitted, `q.unsaturated()` returns a promise for the next occurrence.
|
||
* @property {number} buffer - A minimum threshold buffer in order to say that
|
||
* the `queue` is `unsaturated`.
|
||
* @property {Function} empty - a function that sets a callback that is called
|
||
* when the last item from the `queue` is given to a `worker`. If the callback
|
||
* is omitted, `q.empty()` returns a promise for the next occurrence.
|
||
* @property {Function} drain - a function that sets a callback that is called
|
||
* when the last item from the `queue` has returned from the `worker`. If the
|
||
* callback is omitted, `q.drain()` returns a promise for the next occurrence.
|
||
* @property {Function} error - a function that sets a callback that is called
|
||
* when a task errors. Has the signature `function(error, task)`. If the
|
||
* callback is omitted, `error()` returns a promise that rejects on the next
|
||
* error.
|
||
* @property {boolean} paused - a boolean for determining whether the queue is
|
||
* in a paused state.
|
||
* @property {Function} pause - a function that pauses the processing of tasks
|
||
* until `resume()` is called. Invoke with `queue.pause()`.
|
||
* @property {Function} resume - a function that resumes the processing of
|
||
* queued tasks when the queue is paused. Invoke with `queue.resume()`.
|
||
* @property {Function} kill - a function that removes the `drain` callback and
|
||
* empties remaining tasks from the queue forcing it to go idle. No more tasks
|
||
* should be pushed to the queue after calling this function. Invoke with `queue.kill()`.
|
||
*
|
||
* @example
|
||
* const q = async.queue(worker, 2)
|
||
* q.push(item1)
|
||
* q.push(item2)
|
||
* q.push(item3)
|
||
* // queues are iterable, spread into an array to inspect
|
||
* const items = [...q] // [item1, item2, item3]
|
||
* // or use for of
|
||
* for (let item of q) {
|
||
* console.log(item)
|
||
* }
|
||
*
|
||
* q.drain(() => {
|
||
* console.log('all done')
|
||
* })
|
||
* // or
|
||
* await q.drain()
|
||
*/
|
||
|
||
/**
|
||
* Creates a `queue` object with the specified `concurrency`. Tasks added to the
|
||
* `queue` are processed in parallel (up to the `concurrency` limit). If all
|
||
* `worker`s are in progress, the task is queued until one becomes available.
|
||
* Once a `worker` completes a `task`, that `task`'s callback is called.
|
||
*
|
||
* @name queue
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @category Control Flow
|
||
* @param {AsyncFunction} worker - An async function for processing a queued task.
|
||
* If you want to handle errors from an individual task, pass a callback to
|
||
* `q.push()`. Invoked with (task, callback).
|
||
* @param {number} [concurrency=1] - An `integer` for determining how many
|
||
* `worker` functions should be run in parallel. If omitted, the concurrency
|
||
* defaults to `1`. If the concurrency is `0`, an error is thrown.
|
||
* @returns {module:ControlFlow.QueueObject} A queue object to manage the tasks. Callbacks can be
|
||
* attached as certain properties to listen for specific events during the
|
||
* lifecycle of the queue.
|
||
* @example
|
||
*
|
||
* // create a queue object with concurrency 2
|
||
* var q = async.queue(function(task, callback) {
|
||
* console.log('hello ' + task.name);
|
||
* callback();
|
||
* }, 2);
|
||
*
|
||
* // assign a callback
|
||
* q.drain(function() {
|
||
* console.log('all items have been processed');
|
||
* });
|
||
* // or await the end
|
||
* await q.drain()
|
||
*
|
||
* // assign an error callback
|
||
* q.error(function(err, task) {
|
||
* console.error('task experienced an error');
|
||
* });
|
||
*
|
||
* // add some items to the queue
|
||
* q.push({name: 'foo'}, function(err) {
|
||
* console.log('finished processing foo');
|
||
* });
|
||
* // callback is optional
|
||
* q.push({name: 'bar'});
|
||
*
|
||
* // add some items to the queue (batch-wise)
|
||
* q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function(err) {
|
||
* console.log('finished processing item');
|
||
* });
|
||
*
|
||
* // add some items to the front of the queue
|
||
* q.unshift({name: 'bar'}, function (err) {
|
||
* console.log('finished processing bar');
|
||
* });
|
||
*/
|
||
function queue$1 (worker, concurrency) {
|
||
var _worker = wrapAsync(worker);
|
||
return queue((items, cb) => {
|
||
_worker(items[0], cb);
|
||
}, concurrency, 1);
|
||
}
|
||
|
||
// Binary min-heap implementation used for priority queue.
|
||
// Implementation is stable, i.e. push time is considered for equal priorities
|
||
class Heap {
|
||
constructor() {
|
||
this.heap = [];
|
||
this.pushCount = Number.MIN_SAFE_INTEGER;
|
||
}
|
||
|
||
get length() {
|
||
return this.heap.length;
|
||
}
|
||
|
||
empty () {
|
||
this.heap = [];
|
||
return this;
|
||
}
|
||
|
||
percUp(index) {
|
||
let p;
|
||
|
||
while (index > 0 && smaller(this.heap[index], this.heap[p=parent(index)])) {
|
||
let t = this.heap[index];
|
||
this.heap[index] = this.heap[p];
|
||
this.heap[p] = t;
|
||
|
||
index = p;
|
||
}
|
||
}
|
||
|
||
percDown(index) {
|
||
let l;
|
||
|
||
while ((l=leftChi(index)) < this.heap.length) {
|
||
if (l+1 < this.heap.length && smaller(this.heap[l+1], this.heap[l])) {
|
||
l = l+1;
|
||
}
|
||
|
||
if (smaller(this.heap[index], this.heap[l])) {
|
||
break;
|
||
}
|
||
|
||
let t = this.heap[index];
|
||
this.heap[index] = this.heap[l];
|
||
this.heap[l] = t;
|
||
|
||
index = l;
|
||
}
|
||
}
|
||
|
||
push(node) {
|
||
node.pushCount = ++this.pushCount;
|
||
this.heap.push(node);
|
||
this.percUp(this.heap.length-1);
|
||
}
|
||
|
||
unshift(node) {
|
||
return this.heap.push(node);
|
||
}
|
||
|
||
shift() {
|
||
let [top] = this.heap;
|
||
|
||
this.heap[0] = this.heap[this.heap.length-1];
|
||
this.heap.pop();
|
||
this.percDown(0);
|
||
|
||
return top;
|
||
}
|
||
|
||
toArray() {
|
||
return [...this];
|
||
}
|
||
|
||
*[Symbol.iterator] () {
|
||
for (let i = 0; i < this.heap.length; i++) {
|
||
yield this.heap[i].data;
|
||
}
|
||
}
|
||
|
||
remove (testFn) {
|
||
let j = 0;
|
||
for (let i = 0; i < this.heap.length; i++) {
|
||
if (!testFn(this.heap[i])) {
|
||
this.heap[j] = this.heap[i];
|
||
j++;
|
||
}
|
||
}
|
||
|
||
this.heap.splice(j);
|
||
|
||
for (let i = parent(this.heap.length-1); i >= 0; i--) {
|
||
this.percDown(i);
|
||
}
|
||
|
||
return this;
|
||
}
|
||
}
|
||
|
||
function leftChi(i) {
|
||
return (i<<1)+1;
|
||
}
|
||
|
||
function parent(i) {
|
||
return ((i+1)>>1)-1;
|
||
}
|
||
|
||
function smaller(x, y) {
|
||
if (x.priority !== y.priority) {
|
||
return x.priority < y.priority;
|
||
}
|
||
else {
|
||
return x.pushCount < y.pushCount;
|
||
}
|
||
}
|
||
|
||
/**
|
||
* The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and
|
||
* completed in ascending priority order.
|
||
*
|
||
* @name priorityQueue
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @see [async.queue]{@link module:ControlFlow.queue}
|
||
* @category Control Flow
|
||
* @param {AsyncFunction} worker - An async function for processing a queued task.
|
||
* If you want to handle errors from an individual task, pass a callback to
|
||
* `q.push()`.
|
||
* Invoked with (task, callback).
|
||
* @param {number} concurrency - An `integer` for determining how many `worker`
|
||
* functions should be run in parallel. If omitted, the concurrency defaults to
|
||
* `1`. If the concurrency is `0`, an error is thrown.
|
||
* @returns {module:ControlFlow.QueueObject} A priorityQueue object to manage the tasks. There are two
|
||
* differences between `queue` and `priorityQueue` objects:
|
||
* * `push(task, priority, [callback])` - `priority` should be a number. If an
|
||
* array of `tasks` is given, all tasks will be assigned the same priority.
|
||
* * The `unshift` method was removed.
|
||
*/
|
||
function priorityQueue(worker, concurrency) {
|
||
// Start with a normal queue
|
||
var q = queue$1(worker, concurrency);
|
||
|
||
q._tasks = new Heap();
|
||
|
||
// Override push to accept second parameter representing priority
|
||
q.push = function(data, priority = 0, callback = () => {}) {
|
||
if (typeof callback !== 'function') {
|
||
throw new Error('task callback must be a function');
|
||
}
|
||
q.started = true;
|
||
if (!Array.isArray(data)) {
|
||
data = [data];
|
||
}
|
||
if (data.length === 0 && q.idle()) {
|
||
// call drain immediately if there are no tasks
|
||
return setImmediate$1(() => q.drain());
|
||
}
|
||
|
||
for (var i = 0, l = data.length; i < l; i++) {
|
||
var item = {
|
||
data: data[i],
|
||
priority,
|
||
callback
|
||
};
|
||
|
||
q._tasks.push(item);
|
||
}
|
||
|
||
setImmediate$1(q.process);
|
||
};
|
||
|
||
// Remove unshift function
|
||
delete q.unshift;
|
||
|
||
return q;
|
||
}
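// A minimal usage sketch for priorityQueue, assuming `async` is the public export;
// smaller priority numbers are processed first:
//
//   const pq = async.priorityQueue((task, callback) => {
//       console.log('processing ' + task.name);
//       callback();
//   }, 1);
//   pq.push({ name: 'low' }, 3);
//   pq.push({ name: 'high' }, 1); // processed before 'low', since 1 < 3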
/**
|
||
* Runs the `tasks` array of functions in parallel, without waiting until the
|
||
* previous function has completed. Once any of the `tasks` complete or pass an
|
||
* error to its callback, the main `callback` is immediately called. It's
|
||
* equivalent to `Promise.race()`.
|
||
*
|
||
* @name race
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @category Control Flow
|
||
* @param {Array} tasks - An array containing [async functions]{@link AsyncFunction}
|
||
* to run. Each function can complete with an optional `result` value.
|
||
* @param {Function} callback - A callback to run once any of the functions have
|
||
* completed. This function gets an error or result from the first function that
|
||
* completed. Invoked with (err, result).
|
||
* @returns undefined
|
||
* @example
|
||
*
|
||
* async.race([
|
||
* function(callback) {
|
||
* setTimeout(function() {
|
||
* callback(null, 'one');
|
||
* }, 200);
|
||
* },
|
||
* function(callback) {
|
||
* setTimeout(function() {
|
||
* callback(null, 'two');
|
||
* }, 100);
|
||
* }
|
||
* ],
|
||
* // main callback
|
||
* function(err, result) {
|
||
* // the result will be equal to 'two' as it finishes earlier
|
||
* });
|
||
*/
|
||
function race(tasks, callback) {
|
||
callback = once(callback);
|
||
if (!Array.isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions'));
|
||
if (!tasks.length) return callback();
|
||
for (var i = 0, l = tasks.length; i < l; i++) {
|
||
wrapAsync(tasks[i])(callback);
|
||
}
|
||
}
|
||
|
||
var race$1 = awaitify(race, 2);
|
||
|
||
/**
|
||
* Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order.
|
||
*
|
||
* @name reduceRight
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.reduce]{@link module:Collections.reduce}
|
||
* @alias foldr
|
||
* @category Collection
|
||
* @param {Array} array - A collection to iterate over.
|
||
* @param {*} memo - The initial state of the reduction.
|
||
* @param {AsyncFunction} iteratee - A function applied to each item in the
|
||
* array to produce the next step in the reduction.
|
||
* The `iteratee` should complete with the next state of the reduction.
|
||
* If the iteratee completes with an error, the reduction is stopped and the
|
||
* main `callback` is immediately called with the error.
|
||
* Invoked with (memo, item, callback).
|
||
* @param {Function} [callback] - A callback which is called after all the
|
||
* `iteratee` functions have finished. Result is the reduced value. Invoked with
|
||
* (err, result).
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
*/
|
||
function reduceRight (array, memo, iteratee, callback) {
|
||
var reversed = [...array].reverse();
|
||
return reduce$1(reversed, memo, iteratee, callback);
|
||
}
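// A minimal usage sketch for reduceRight, assuming `async` is the public export; the
// array is folded from the right, so string concatenation reverses the order:
//
//   async.reduceRight([1, 2, 3], '', (memo, item, callback) => {
//       callback(null, memo + item);
//   }, (err, result) => {
//       // result === '321'
//   });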
/**
|
||
* Wraps the async function in another function that always completes with a
|
||
* result object, even when it errors.
|
||
*
|
||
* The result object has either the property `error` or `value`.
|
||
*
|
||
* @name reflect
|
||
* @static
|
||
* @memberOf module:Utils
|
||
* @method
|
||
* @category Util
|
||
* @param {AsyncFunction} fn - The async function you want to wrap
|
||
* @returns {Function} - A function that always passes null to its callback as
|
||
* the error. The second argument to the callback will be an `object` with
|
||
* either an `error` or a `value` property.
|
||
* @example
|
||
*
|
||
* async.parallel([
|
||
* async.reflect(function(callback) {
|
||
* // do some stuff ...
|
||
* callback(null, 'one');
|
||
* }),
|
||
* async.reflect(function(callback) {
|
||
* // do some more stuff but error ...
|
||
* callback('bad stuff happened');
|
||
* }),
|
||
* async.reflect(function(callback) {
|
||
* // do some more stuff ...
|
||
* callback(null, 'two');
|
||
* })
|
||
* ],
|
||
* // optional callback
|
||
* function(err, results) {
|
||
* // values
|
||
* // results[0].value = 'one'
|
||
* // results[1].error = 'bad stuff happened'
|
||
* // results[2].value = 'two'
|
||
* });
|
||
*/
|
||
function reflect(fn) {
|
||
var _fn = wrapAsync(fn);
|
||
return initialParams(function reflectOn(args, reflectCallback) {
|
||
args.push((error, ...cbArgs) => {
|
||
let retVal = {};
|
||
if (error) {
|
||
retVal.error = error;
|
||
}
|
||
if (cbArgs.length > 0){
|
||
var value = cbArgs;
|
||
if (cbArgs.length <= 1) {
|
||
[value] = cbArgs;
|
||
}
|
||
retVal.value = value;
|
||
}
|
||
reflectCallback(null, retVal);
|
||
});
|
||
|
||
return _fn.apply(this, args);
|
||
});
|
||
}
|
||
|
||
/**
|
||
* A helper function that wraps an array or an object of functions with `reflect`.
|
||
*
|
||
* @name reflectAll
|
||
* @static
|
||
* @memberOf module:Utils
|
||
* @method
|
||
* @see [async.reflect]{@link module:Utils.reflect}
|
||
* @category Util
|
||
* @param {Array|Object|Iterable} tasks - The collection of
|
||
* [async functions]{@link AsyncFunction} to wrap in `async.reflect`.
|
||
* @returns {Array} Returns an array of async functions, each wrapped in
|
||
* `async.reflect`
|
||
* @example
|
||
*
|
||
* let tasks = [
|
||
* function(callback) {
|
||
* setTimeout(function() {
|
||
* callback(null, 'one');
|
||
* }, 200);
|
||
* },
|
||
* function(callback) {
|
||
* // do some more stuff but error ...
|
||
* callback(new Error('bad stuff happened'));
|
||
* },
|
||
* function(callback) {
|
||
* setTimeout(function() {
|
||
* callback(null, 'two');
|
||
* }, 100);
|
||
* }
|
||
* ];
|
||
*
|
||
* async.parallel(async.reflectAll(tasks),
|
||
* // optional callback
|
||
* function(err, results) {
|
||
* // values
|
||
* // results[0].value = 'one'
|
||
* // results[1].error = Error('bad stuff happened')
|
||
* // results[2].value = 'two'
|
||
* });
|
||
*
|
||
* // an example using an object instead of an array
|
||
* let tasks = {
|
||
* one: function(callback) {
|
||
* setTimeout(function() {
|
||
* callback(null, 'one');
|
||
* }, 200);
|
||
* },
|
||
* two: function(callback) {
|
||
* callback('two');
|
||
* },
|
||
* three: function(callback) {
|
||
* setTimeout(function() {
|
||
* callback(null, 'three');
|
||
* }, 100);
|
||
* }
|
||
* };
|
||
*
|
||
* async.parallel(async.reflectAll(tasks),
|
||
* // optional callback
|
||
* function(err, results) {
|
||
* // values
|
||
* // results.one.value = 'one'
|
||
* // results.two.error = 'two'
|
||
* // results.three.value = 'three'
|
||
* });
|
||
*/
|
||
function reflectAll(tasks) {
    var results;
    if (Array.isArray(tasks)) {
        results = tasks.map(reflect);
    } else {
        results = {};
        Object.keys(tasks).forEach(key => {
            results[key] = reflect.call(this, tasks[key]);
        });
    }
    return results;
}

function reject(eachfn, arr, _iteratee, callback) {
    const iteratee = wrapAsync(_iteratee);
    return _filter(eachfn, arr, (value, cb) => {
        iteratee(value, (err, v) => {
            cb(err, !v);
        });
    }, callback);
}
|
||
|
||
/**
|
||
* The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test.
|
||
*
|
||
* @name reject
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.filter]{@link module:Collections.filter}
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {Function} iteratee - An async truth test to apply to each item in
|
||
* `coll`.
|
||
 * The iteratee should complete with a boolean value as its `result`.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called after all the
|
||
* `iteratee` functions have finished. Invoked with (err, results).
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
* @example
|
||
*
|
||
* async.reject(['file1','file2','file3'], function(filePath, callback) {
|
||
* fs.access(filePath, function(err) {
|
||
* callback(null, !err)
|
||
* });
|
||
* }, function(err, results) {
|
||
* // results now equals an array of missing files
|
||
* createFiles(results);
|
||
* });
|
||
*/
|
||
function reject$1 (coll, iteratee, callback) {
|
||
return reject(eachOf$1, coll, iteratee, callback)
|
||
}
|
||
var reject$2 = awaitify(reject$1, 3);
|
||
|
||
/**
|
||
* The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a
|
||
* time.
|
||
*
|
||
* @name rejectLimit
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.reject]{@link module:Collections.reject}
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {number} limit - The maximum number of async operations at a time.
|
||
* @param {Function} iteratee - An async truth test to apply to each item in
|
||
* `coll`.
|
||
 * The iteratee should complete with a boolean value as its `result`.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called after all the
|
||
* `iteratee` functions have finished. Invoked with (err, results).
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
*/
|
||
function rejectLimit (coll, limit, iteratee, callback) {
|
||
return reject(eachOfLimit(limit), coll, iteratee, callback)
|
||
}
|
||
var rejectLimit$1 = awaitify(rejectLimit, 4);
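/*
 * A minimal usage sketch for `rejectLimit` (kept as a comment so the bundled
 * module is unchanged). It reuses the `fs.access` truth test from the `reject`
 * example above; the `2` is the concurrency limit and `createFiles` is a
 * hypothetical helper, not part of this bundle.
 *
 * async.rejectLimit(['file1','file2','file3'], 2, function(filePath, callback) {
 *     fs.access(filePath, function(err) {
 *         callback(null, !err);
 *     });
 * }, function(err, results) {
 *     // results contains the files that failed the truth test,
 *     // checked at most two at a time
 *     createFiles(results);
 * });
 */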
|
||
|
||
/**
|
||
* The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time.
|
||
*
|
||
* @name rejectSeries
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.reject]{@link module:Collections.reject}
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {Function} iteratee - An async truth test to apply to each item in
|
||
* `coll`.
|
||
 * The iteratee should complete with a boolean value as its `result`.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called after all the
|
||
* `iteratee` functions have finished. Invoked with (err, results).
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
*/
|
||
function rejectSeries (coll, iteratee, callback) {
|
||
return reject(eachOfSeries$1, coll, iteratee, callback)
|
||
}
|
||
var rejectSeries$1 = awaitify(rejectSeries, 3);
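/*
 * A minimal usage sketch for `rejectSeries`, reusing the `fs.access` truth test
 * from the `reject` example above; the files are checked one at a time and
 * `createFiles` is a hypothetical helper.
 *
 * async.rejectSeries(['file1','file2','file3'], function(filePath, callback) {
 *     fs.access(filePath, function(err) {
 *         callback(null, !err);
 *     });
 * }, function(err, results) {
 *     createFiles(results);
 * });
 */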
|
||
|
||
function constant$1(value) {
|
||
return function () {
|
||
return value;
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Attempts to get a successful response from `task` no more than `times` times
|
||
* before returning an error. If the task is successful, the `callback` will be
|
||
* passed the result of the successful task. If all attempts fail, the callback
|
||
* will be passed the error and result (if any) of the final attempt.
|
||
*
|
||
* @name retry
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @category Control Flow
|
||
* @see [async.retryable]{@link module:ControlFlow.retryable}
|
||
* @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an
|
||
* object with `times` and `interval` or a number.
|
||
* * `times` - The number of attempts to make before giving up. The default
|
||
* is `5`.
|
||
* * `interval` - The time to wait between retries, in milliseconds. The
|
||
* default is `0`. The interval may also be specified as a function of the
|
||
* retry count (see example).
|
||
* * `errorFilter` - An optional synchronous function that is invoked on
|
||
* erroneous result. If it returns `true` the retry attempts will continue;
|
||
* if the function returns `false` the retry flow is aborted with the current
|
||
* attempt's error and result being returned to the final callback.
|
||
* Invoked with (err).
|
||
* * If `opts` is a number, the number specifies the number of times to retry,
|
||
* with the default interval of `0`.
|
||
* @param {AsyncFunction} task - An async function to retry.
|
||
* Invoked with (callback).
|
||
* @param {Function} [callback] - An optional callback which is called when the
|
||
* task has succeeded, or after the final failed attempt. It receives the `err`
|
||
* and `result` arguments of the last attempt at completing the `task`. Invoked
|
||
* with (err, results).
|
||
* @returns {Promise} a promise if no callback provided
|
||
*
|
||
* @example
|
||
*
|
||
* // The `retry` function can be used as a stand-alone control flow by passing
|
||
* // a callback, as shown below:
|
||
*
|
||
* // try calling apiMethod 3 times
|
||
* async.retry(3, apiMethod, function(err, result) {
|
||
* // do something with the result
|
||
* });
|
||
*
|
||
* // try calling apiMethod 3 times, waiting 200 ms between each retry
|
||
* async.retry({times: 3, interval: 200}, apiMethod, function(err, result) {
|
||
* // do something with the result
|
||
* });
|
||
*
|
||
* // try calling apiMethod 10 times with exponential backoff
|
||
* // (i.e. intervals of 100, 200, 400, 800, 1600, ... milliseconds)
|
||
* async.retry({
|
||
* times: 10,
|
||
* interval: function(retryCount) {
|
||
* return 50 * Math.pow(2, retryCount);
|
||
* }
|
||
* }, apiMethod, function(err, result) {
|
||
* // do something with the result
|
||
* });
|
||
*
|
||
 * // try calling apiMethod the default 5 times with no delay between each retry
|
||
* async.retry(apiMethod, function(err, result) {
|
||
* // do something with the result
|
||
* });
|
||
*
|
||
* // try calling apiMethod only when error condition satisfies, all other
|
||
* // errors will abort the retry control flow and return to final callback
|
||
* async.retry({
|
||
* errorFilter: function(err) {
|
||
* return err.message === 'Temporary error'; // only retry on a specific error
|
||
* }
|
||
* }, apiMethod, function(err, result) {
|
||
* // do something with the result
|
||
* });
|
||
*
|
||
* // to retry individual methods that are not as reliable within other
|
||
* // control flow functions, use the `retryable` wrapper:
|
||
* async.auto({
|
||
* users: api.getUsers.bind(api),
|
||
* payments: async.retryable(3, api.getPayments.bind(api))
|
||
* }, function(err, results) {
|
||
* // do something with the results
|
||
* });
|
||
*
|
||
*/
|
||
const DEFAULT_TIMES = 5;
|
||
const DEFAULT_INTERVAL = 0;
|
||
|
||
function retry(opts, task, callback) {
    var options = {
        times: DEFAULT_TIMES,
        intervalFunc: constant$1(DEFAULT_INTERVAL)
    };

    if (arguments.length < 3 && typeof opts === 'function') {
        callback = task || promiseCallback();
        task = opts;
    } else {
        parseTimes(options, opts);
        callback = callback || promiseCallback();
    }

    if (typeof task !== 'function') {
        throw new Error("Invalid arguments for async.retry");
    }

    var _task = wrapAsync(task);

    var attempt = 1;
    function retryAttempt() {
        _task((err, ...args) => {
            if (err === false) return
            if (err && attempt++ < options.times &&
                (typeof options.errorFilter != 'function' ||
                    options.errorFilter(err))) {
                setTimeout(retryAttempt, options.intervalFunc(attempt - 1));
            } else {
                callback(err, ...args);
            }
        });
    }

    retryAttempt();
    return callback[PROMISE_SYMBOL]
}
|
||
|
||
function parseTimes(acc, t) {
|
||
if (typeof t === 'object') {
|
||
acc.times = +t.times || DEFAULT_TIMES;
|
||
|
||
acc.intervalFunc = typeof t.interval === 'function' ?
|
||
t.interval :
|
||
constant$1(+t.interval || DEFAULT_INTERVAL);
|
||
|
||
acc.errorFilter = t.errorFilter;
|
||
} else if (typeof t === 'number' || typeof t === 'string') {
|
||
acc.times = +t || DEFAULT_TIMES;
|
||
} else {
|
||
throw new Error("Invalid arguments for async.retry");
|
||
}
|
||
}
|
||
|
||
/**
|
||
* A close relative of [`retry`]{@link module:ControlFlow.retry}. This method
|
||
* wraps a task and makes it retryable, rather than immediately calling it
|
||
* with retries.
|
||
*
|
||
* @name retryable
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @see [async.retry]{@link module:ControlFlow.retry}
|
||
* @category Control Flow
|
||
* @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional
|
||
* options, exactly the same as from `retry`, except for a `opts.arity` that
|
||
* is the arity of the `task` function, defaulting to `task.length`
|
||
* @param {AsyncFunction} task - the asynchronous function to wrap.
|
||
* This function will be passed any arguments passed to the returned wrapper.
|
||
* Invoked with (...args, callback).
|
||
* @returns {AsyncFunction} The wrapped function, which when invoked, will
|
||
* retry on an error, based on the parameters specified in `opts`.
|
||
* This function will accept the same parameters as `task`.
|
||
* @example
|
||
*
|
||
* async.auto({
|
||
* dep1: async.retryable(3, getFromFlakyService),
|
||
* process: ["dep1", async.retryable(3, function (results, cb) {
|
||
* maybeProcessData(results.dep1, cb);
|
||
* })]
|
||
* }, callback);
|
||
*/
|
||
function retryable (opts, task) {
|
||
if (!task) {
|
||
task = opts;
|
||
opts = null;
|
||
}
|
||
let arity = (opts && opts.arity) || task.length;
|
||
if (isAsync(task)) {
|
||
arity += 1;
|
||
}
|
||
var _task = wrapAsync(task);
|
||
return initialParams((args, callback) => {
|
||
if (args.length < arity - 1 || callback == null) {
|
||
args.push(callback);
|
||
callback = promiseCallback();
|
||
}
|
||
function taskFn(cb) {
|
||
_task(...args, cb);
|
||
}
|
||
|
||
if (opts) retry(opts, taskFn, callback);
|
||
else retry(taskFn, callback);
|
||
|
||
return callback[PROMISE_SYMBOL]
|
||
});
|
||
}
|
||
|
||
/**
|
||
* Run the functions in the `tasks` collection in series, each one running once
|
||
* the previous function has completed. If any functions in the series pass an
|
||
* error to its callback, no more functions are run, and `callback` is
|
||
* immediately called with the value of the error. Otherwise, `callback`
|
||
* receives an array of results when `tasks` have completed.
|
||
*
|
||
* It is also possible to use an object instead of an array. Each property will
|
||
* be run as a function, and the results will be passed to the final `callback`
|
||
* as an object instead of an array. This can be a more readable way of handling
|
||
* results from {@link async.series}.
|
||
*
|
||
* **Note** that while many implementations preserve the order of object
|
||
* properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6)
|
||
* explicitly states that
|
||
*
|
||
* > The mechanics and order of enumerating the properties is not specified.
|
||
*
|
||
* So if you rely on the order in which your series of functions are executed,
|
||
* and want this to work on all platforms, consider using an array.
|
||
*
|
||
* @name series
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @category Control Flow
|
||
* @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing
|
||
* [async functions]{@link AsyncFunction} to run in series.
|
||
* Each function can complete with any number of optional `result` values.
|
||
* @param {Function} [callback] - An optional callback to run once all the
|
||
* functions have completed. This function gets a results array (or object)
|
||
* containing all the result arguments passed to the `task` callbacks. Invoked
|
||
* with (err, result).
|
||
* @return {Promise} a promise, if no callback is passed
|
||
* @example
|
||
* async.series([
|
||
* function(callback) {
|
||
* // do some stuff ...
|
||
* callback(null, 'one');
|
||
* },
|
||
* function(callback) {
|
||
* // do some more stuff ...
|
||
* callback(null, 'two');
|
||
* }
|
||
* ],
|
||
* // optional callback
|
||
* function(err, results) {
|
||
* // results is now equal to ['one', 'two']
|
||
* });
|
||
*
|
||
* async.series({
|
||
* one: function(callback) {
|
||
* setTimeout(function() {
|
||
* callback(null, 1);
|
||
* }, 200);
|
||
* },
|
||
* two: function(callback){
|
||
* setTimeout(function() {
|
||
* callback(null, 2);
|
||
* }, 100);
|
||
* }
|
||
* }, function(err, results) {
|
||
* // results is now equal to: {one: 1, two: 2}
|
||
* });
|
||
*/
|
||
function series(tasks, callback) {
|
||
return parallel(eachOfSeries$1, tasks, callback);
|
||
}
|
||
|
||
/**
|
||
* Returns `true` if at least one element in the `coll` satisfies an async test.
|
||
* If any iteratee call returns `true`, the main `callback` is immediately
|
||
* called.
|
||
*
|
||
* @name some
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @alias any
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - An async truth test to apply to each item
|
||
* in the collections in parallel.
|
||
* The iteratee should complete with a boolean `result` value.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called as soon as any
|
||
* iteratee returns `true`, or after all the iteratee functions have finished.
|
||
* Result will be either `true` or `false` depending on the values of the async
|
||
* tests. Invoked with (err, result).
|
||
* @returns {Promise} a promise, if no callback provided
|
||
* @example
|
||
*
|
||
* async.some(['file1','file2','file3'], function(filePath, callback) {
|
||
* fs.access(filePath, function(err) {
|
||
* callback(null, !err)
|
||
* });
|
||
* }, function(err, result) {
|
||
* // if result is true then at least one of the files exists
|
||
* });
|
||
*/
|
||
function some(coll, iteratee, callback) {
|
||
return _createTester(Boolean, res => res)(eachOf$1, coll, iteratee, callback)
|
||
}
|
||
var some$1 = awaitify(some, 3);
|
||
|
||
/**
|
||
* The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time.
|
||
*
|
||
* @name someLimit
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.some]{@link module:Collections.some}
|
||
* @alias anyLimit
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {number} limit - The maximum number of async operations at a time.
|
||
* @param {AsyncFunction} iteratee - An async truth test to apply to each item
|
||
* in the collections in parallel.
|
||
* The iteratee should complete with a boolean `result` value.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called as soon as any
|
||
* iteratee returns `true`, or after all the iteratee functions have finished.
|
||
* Result will be either `true` or `false` depending on the values of the async
|
||
* tests. Invoked with (err, result).
|
||
* @returns {Promise} a promise, if no callback provided
|
||
*/
|
||
function someLimit(coll, limit, iteratee, callback) {
|
||
return _createTester(Boolean, res => res)(eachOfLimit(limit), coll, iteratee, callback)
|
||
}
|
||
var someLimit$1 = awaitify(someLimit, 4);
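/*
 * A minimal usage sketch for `someLimit`, assuming the same `fs.access` truth
 * test as the `some` example above, with at most two checks in flight:
 *
 * async.someLimit(['file1','file2','file3'], 2, function(filePath, callback) {
 *     fs.access(filePath, function(err) {
 *         callback(null, !err);
 *     });
 * }, function(err, result) {
 *     // result is true if at least one of the files exists
 * });
 */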
|
||
|
||
/**
|
||
* The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time.
|
||
*
|
||
* @name someSeries
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @see [async.some]{@link module:Collections.some}
|
||
* @alias anySeries
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - An async truth test to apply to each item
|
||
* in the collections in series.
|
||
* The iteratee should complete with a boolean `result` value.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} [callback] - A callback which is called as soon as any
|
||
* iteratee returns `true`, or after all the iteratee functions have finished.
|
||
* Result will be either `true` or `false` depending on the values of the async
|
||
* tests. Invoked with (err, result).
|
||
* @returns {Promise} a promise, if no callback provided
|
||
*/
|
||
function someSeries(coll, iteratee, callback) {
|
||
return _createTester(Boolean, res => res)(eachOfSeries$1, coll, iteratee, callback)
|
||
}
|
||
var someSeries$1 = awaitify(someSeries, 3);
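/*
 * A minimal usage sketch for `someSeries`: the same truth test as the `some`
 * example above, but the files are checked one after another and iteration
 * stops as soon as a test passes:
 *
 * async.someSeries(['file1','file2','file3'], function(filePath, callback) {
 *     fs.access(filePath, function(err) {
 *         callback(null, !err);
 *     });
 * }, function(err, result) {
 *     // result is true if at least one of the files exists
 * });
 */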
|
||
|
||
/**
|
||
* Sorts a list by the results of running each `coll` value through an async
|
||
* `iteratee`.
|
||
*
|
||
* @name sortBy
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {AsyncFunction} iteratee - An async function to apply to each item in
|
||
* `coll`.
|
||
* The iteratee should complete with a value to use as the sort criteria as
|
||
* its `result`.
|
||
* Invoked with (item, callback).
|
||
* @param {Function} callback - A callback which is called after all the
|
||
* `iteratee` functions have finished, or an error occurs. Results is the items
|
||
* from the original `coll` sorted by the values returned by the `iteratee`
|
||
* calls. Invoked with (err, results).
|
||
* @returns {Promise} a promise, if no callback passed
|
||
* @example
|
||
*
|
||
* async.sortBy(['file1','file2','file3'], function(file, callback) {
|
||
* fs.stat(file, function(err, stats) {
|
||
* callback(err, stats.mtime);
|
||
* });
|
||
* }, function(err, results) {
|
||
* // results is now the original array of files sorted by
|
||
* // modified date
|
||
* });
|
||
*
|
||
* // By modifying the callback parameter the
|
||
* // sorting order can be influenced:
|
||
*
|
||
* // ascending order
|
||
* async.sortBy([1,9,3,5], function(x, callback) {
|
||
* callback(null, x);
|
||
* }, function(err,result) {
|
||
* // result callback
|
||
* });
|
||
*
|
||
* // descending order
|
||
* async.sortBy([1,9,3,5], function(x, callback) {
|
||
* callback(null, x*-1); //<- x*-1 instead of x, turns the order around
|
||
* }, function(err,result) {
|
||
* // result callback
|
||
* });
|
||
*/
|
||
function sortBy (coll, iteratee, callback) {
|
||
var _iteratee = wrapAsync(iteratee);
|
||
return map$1(coll, (x, iterCb) => {
|
||
_iteratee(x, (err, criteria) => {
|
||
if (err) return iterCb(err);
|
||
iterCb(err, {value: x, criteria});
|
||
});
|
||
}, (err, results) => {
|
||
if (err) return callback(err);
|
||
callback(null, results.sort(comparator).map(v => v.value));
|
||
});
|
||
|
||
function comparator(left, right) {
|
||
var a = left.criteria, b = right.criteria;
|
||
return a < b ? -1 : a > b ? 1 : 0;
|
||
}
|
||
}
|
||
var sortBy$1 = awaitify(sortBy, 3);
|
||
|
||
/**
|
||
* Sets a time limit on an asynchronous function. If the function does not call
|
||
* its callback within the specified milliseconds, it will be called with a
|
||
* timeout error. The code property for the error object will be `'ETIMEDOUT'`.
|
||
*
|
||
* @name timeout
|
||
* @static
|
||
* @memberOf module:Utils
|
||
* @method
|
||
* @category Util
|
||
* @param {AsyncFunction} asyncFn - The async function to limit in time.
|
||
* @param {number} milliseconds - The specified time limit.
|
||
* @param {*} [info] - Any variable you want attached (`string`, `object`, etc)
|
||
 * to the timeout Error, for more information.
|
||
* @returns {AsyncFunction} Returns a wrapped function that can be used with any
|
||
* of the control flow functions.
|
||
* Invoke this function with the same parameters as you would `asyncFunc`.
|
||
* @example
|
||
*
|
||
* function myFunction(foo, callback) {
|
||
* doAsyncTask(foo, function(err, data) {
|
||
* // handle errors
|
||
* if (err) return callback(err);
|
||
*
|
||
* // do some stuff ...
|
||
*
|
||
* // return processed data
|
||
* return callback(null, data);
|
||
* });
|
||
* }
|
||
*
|
||
* var wrapped = async.timeout(myFunction, 1000);
|
||
*
|
||
* // call `wrapped` as you would `myFunction`
|
||
* wrapped({ bar: 'bar' }, function(err, data) {
|
||
* // if `myFunction` takes < 1000 ms to execute, `err`
|
||
* // and `data` will have their expected values
|
||
*
|
||
* // else `err` will be an Error with the code 'ETIMEDOUT'
|
||
* });
|
||
*/
|
||
function timeout(asyncFn, milliseconds, info) {
|
||
var fn = wrapAsync(asyncFn);
|
||
|
||
return initialParams((args, callback) => {
|
||
var timedOut = false;
|
||
var timer;
|
||
|
||
function timeoutCallback() {
|
||
var name = asyncFn.name || 'anonymous';
|
||
var error = new Error('Callback function "' + name + '" timed out.');
|
||
error.code = 'ETIMEDOUT';
|
||
if (info) {
|
||
error.info = info;
|
||
}
|
||
timedOut = true;
|
||
callback(error);
|
||
}
|
||
|
||
args.push((...cbArgs) => {
|
||
if (!timedOut) {
|
||
callback(...cbArgs);
|
||
clearTimeout(timer);
|
||
}
|
||
});
|
||
|
||
// setup timer and call original function
|
||
timer = setTimeout(timeoutCallback, milliseconds);
|
||
fn(...args);
|
||
});
|
||
}
|
||
|
||
function range(size) {
|
||
var result = Array(size);
|
||
while (size--) {
|
||
result[size] = size;
|
||
}
|
||
return result;
|
||
}
|
||
|
||
/**
|
||
* The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a
|
||
* time.
|
||
*
|
||
* @name timesLimit
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @see [async.times]{@link module:ControlFlow.times}
|
||
* @category Control Flow
|
||
* @param {number} count - The number of times to run the function.
|
||
* @param {number} limit - The maximum number of async operations at a time.
|
||
* @param {AsyncFunction} iteratee - The async function to call `n` times.
|
||
* Invoked with the iteration index and a callback: (n, next).
|
||
* @param {Function} callback - see [async.map]{@link module:Collections.map}.
|
||
* @returns {Promise} a promise, if no callback is provided
|
||
*/
|
||
function timesLimit(count, limit, iteratee, callback) {
|
||
var _iteratee = wrapAsync(iteratee);
|
||
return mapLimit$1(range(count), limit, _iteratee, callback);
|
||
}
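/*
 * A minimal usage sketch for `timesLimit`, reusing the hypothetical `createUser`
 * factory from the `times` example below and creating at most two users
 * concurrently:
 *
 * async.timesLimit(5, 2, function(n, next) {
 *     createUser(n, next);
 * }, function(err, users) {
 *     // users now contains the 5 created users
 * });
 */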
|
||
|
||
/**
|
||
* Calls the `iteratee` function `n` times, and accumulates results in the same
|
||
* manner you would use with [map]{@link module:Collections.map}.
|
||
*
|
||
* @name times
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @see [async.map]{@link module:Collections.map}
|
||
* @category Control Flow
|
||
* @param {number} n - The number of times to run the function.
|
||
* @param {AsyncFunction} iteratee - The async function to call `n` times.
|
||
* Invoked with the iteration index and a callback: (n, next).
|
||
* @param {Function} callback - see {@link module:Collections.map}.
|
||
* @returns {Promise} a promise, if no callback is provided
|
||
* @example
|
||
*
|
||
* // Pretend this is some complicated async factory
|
||
* var createUser = function(id, callback) {
|
||
* callback(null, {
|
||
* id: 'user' + id
|
||
* });
|
||
* };
|
||
*
|
||
* // generate 5 users
|
||
* async.times(5, function(n, next) {
|
||
* createUser(n, function(err, user) {
|
||
* next(err, user);
|
||
* });
|
||
* }, function(err, users) {
|
||
* // we should now have 5 users
|
||
* });
|
||
*/
|
||
function times (n, iteratee, callback) {
|
||
return timesLimit(n, Infinity, iteratee, callback)
|
||
}
|
||
|
||
/**
|
||
* The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time.
|
||
*
|
||
* @name timesSeries
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @see [async.times]{@link module:ControlFlow.times}
|
||
* @category Control Flow
|
||
* @param {number} n - The number of times to run the function.
|
||
* @param {AsyncFunction} iteratee - The async function to call `n` times.
|
||
* Invoked with the iteration index and a callback: (n, next).
|
||
* @param {Function} callback - see {@link module:Collections.map}.
|
||
* @returns {Promise} a promise, if no callback is provided
|
||
*/
|
||
function timesSeries (n, iteratee, callback) {
|
||
return timesLimit(n, 1, iteratee, callback)
|
||
}
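/*
 * A minimal usage sketch for `timesSeries`, reusing the hypothetical `createUser`
 * factory from the `times` example above; the users are created strictly one
 * after another:
 *
 * async.timesSeries(5, function(n, next) {
 *     createUser(n, next);
 * }, function(err, users) {
 *     // users now contains the 5 created users, in order
 * });
 */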
|
||
|
||
/**
|
||
* A relative of `reduce`. Takes an Object or Array, and iterates over each
|
||
* element in parallel, each step potentially mutating an `accumulator` value.
|
||
* The type of the accumulator defaults to the type of collection passed in.
|
||
*
|
||
* @name transform
|
||
* @static
|
||
* @memberOf module:Collections
|
||
* @method
|
||
* @category Collection
|
||
* @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
|
||
* @param {*} [accumulator] - The initial state of the transform. If omitted,
|
||
* it will default to an empty Object or Array, depending on the type of `coll`
|
||
* @param {AsyncFunction} iteratee - A function applied to each item in the
|
||
* collection that potentially modifies the accumulator.
|
||
* Invoked with (accumulator, item, key, callback).
|
||
* @param {Function} [callback] - A callback which is called after all the
|
||
* `iteratee` functions have finished. Result is the transformed accumulator.
|
||
* Invoked with (err, result).
|
||
* @returns {Promise} a promise, if no callback provided
|
||
* @example
|
||
*
|
||
* async.transform([1,2,3], function(acc, item, index, callback) {
|
||
* // pointless async:
|
||
* process.nextTick(function() {
|
||
* acc[index] = item * 2
|
||
* callback(null)
|
||
* });
|
||
* }, function(err, result) {
|
||
* // result is now equal to [2, 4, 6]
|
||
* });
|
||
*
|
||
* @example
|
||
*
|
||
* async.transform({a: 1, b: 2, c: 3}, function (obj, val, key, callback) {
|
||
* setImmediate(function () {
|
||
* obj[key] = val * 2;
|
||
* callback();
|
||
* })
|
||
* }, function (err, result) {
|
||
* // result is equal to {a: 2, b: 4, c: 6}
|
||
* })
|
||
*/
|
||
function transform (coll, accumulator, iteratee, callback) {
|
||
if (arguments.length <= 3 && typeof accumulator === 'function') {
|
||
callback = iteratee;
|
||
iteratee = accumulator;
|
||
accumulator = Array.isArray(coll) ? [] : {};
|
||
}
|
||
callback = once(callback || promiseCallback());
|
||
var _iteratee = wrapAsync(iteratee);
|
||
|
||
eachOf$1(coll, (v, k, cb) => {
|
||
_iteratee(accumulator, v, k, cb);
|
||
}, err => callback(err, accumulator));
|
||
return callback[PROMISE_SYMBOL]
|
||
}
|
||
|
||
/**
|
||
* It runs each task in series but stops whenever any of the functions were
|
||
* successful. If one of the tasks were successful, the `callback` will be
|
||
* passed the result of the successful task. If all tasks fail, the callback
|
||
* will be passed the error and result (if any) of the final attempt.
|
||
*
|
||
* @name tryEach
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @category Control Flow
|
||
* @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing functions to
|
||
* run, each function is passed a `callback(err, result)` it must call on
|
||
* completion with an error `err` (which can be `null`) and an optional `result`
|
||
* value.
|
||
* @param {Function} [callback] - An optional callback which is called when one
|
||
* of the tasks has succeeded, or all have failed. It receives the `err` and
|
||
* `result` arguments of the last attempt at completing the `task`. Invoked with
|
||
* (err, results).
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
* @example
|
||
* async.tryEach([
|
||
* function getDataFromFirstWebsite(callback) {
|
||
* // Try getting the data from the first website
|
||
* callback(err, data);
|
||
* },
|
||
* function getDataFromSecondWebsite(callback) {
|
||
* // First website failed,
|
||
* // Try getting the data from the backup website
|
||
* callback(err, data);
|
||
* }
|
||
* ],
|
||
* // optional callback
|
||
* function(err, results) {
|
||
 *     // Now do something with the data.
|
||
* });
|
||
*
|
||
*/
|
||
function tryEach(tasks, callback) {
|
||
var error = null;
|
||
var result;
|
||
return eachSeries$1(tasks, (task, taskCb) => {
|
||
wrapAsync(task)((err, ...args) => {
|
||
if (err === false) return taskCb(err);
|
||
|
||
if (args.length < 2) {
|
||
[result] = args;
|
||
} else {
|
||
result = args;
|
||
}
|
||
error = err;
|
||
taskCb(err ? null : {});
|
||
});
|
||
}, () => callback(error, result));
|
||
}
|
||
|
||
var tryEach$1 = awaitify(tryEach);
|
||
|
||
/**
|
||
* Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original,
|
||
* unmemoized form. Handy for testing.
|
||
*
|
||
* @name unmemoize
|
||
* @static
|
||
* @memberOf module:Utils
|
||
* @method
|
||
* @see [async.memoize]{@link module:Utils.memoize}
|
||
* @category Util
|
||
* @param {AsyncFunction} fn - the memoized function
|
||
* @returns {AsyncFunction} a function that calls the original unmemoized function
|
||
*/
|
||
function unmemoize(fn) {
|
||
return (...args) => {
|
||
return (fn.unmemoized || fn)(...args);
|
||
};
|
||
}
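/*
 * A minimal usage sketch for `unmemoize`, assuming a hypothetical async `lookup`
 * function: the unmemoized wrapper always calls the original function and
 * bypasses the memoization cache.
 *
 * var cachedLookup = async.memoize(lookup);
 * var freshLookup = async.unmemoize(cachedLookup);
 * freshLookup('key', function(err, value) {
 *     // value was recomputed, not read from the cache
 * });
 */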
|
||
|
||
/**
|
||
* Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when
|
||
* stopped, or an error occurs.
|
||
*
|
||
* @name whilst
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @category Control Flow
|
||
* @param {AsyncFunction} test - asynchronous truth test to perform before each
|
||
 * execution of `iteratee`. Invoked with (callback).
|
||
* @param {AsyncFunction} iteratee - An async function which is called each time
|
||
* `test` passes. Invoked with (callback).
|
||
* @param {Function} [callback] - A callback which is called after the test
|
||
* function has failed and repeated execution of `iteratee` has stopped. `callback`
|
||
* will be passed an error and any arguments passed to the final `iteratee`'s
|
||
* callback. Invoked with (err, [results]);
|
||
* @returns {Promise} a promise, if no callback is passed
|
||
* @example
|
||
*
|
||
* var count = 0;
|
||
* async.whilst(
|
||
* function test(cb) { cb(null, count < 5); },
|
||
* function iter(callback) {
|
||
* count++;
|
||
* setTimeout(function() {
|
||
* callback(null, count);
|
||
* }, 1000);
|
||
* },
|
||
* function (err, n) {
|
||
* // 5 seconds have passed, n = 5
|
||
* }
|
||
* );
|
||
*/
|
||
function whilst(test, iteratee, callback) {
|
||
callback = onlyOnce(callback);
|
||
var _fn = wrapAsync(iteratee);
|
||
var _test = wrapAsync(test);
|
||
var results = [];
|
||
|
||
function next(err, ...rest) {
|
||
if (err) return callback(err);
|
||
results = rest;
|
||
if (err === false) return;
|
||
_test(check);
|
||
}
|
||
|
||
function check(err, truth) {
|
||
if (err) return callback(err);
|
||
if (err === false) return;
|
||
if (!truth) return callback(null, ...results);
|
||
_fn(next);
|
||
}
|
||
|
||
return _test(check);
|
||
}
|
||
var whilst$1 = awaitify(whilst, 3);
|
||
|
||
/**
|
||
* Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when
|
||
* stopped, or an error occurs. `callback` will be passed an error and any
|
||
* arguments passed to the final `iteratee`'s callback.
|
||
*
|
||
* The inverse of [whilst]{@link module:ControlFlow.whilst}.
|
||
*
|
||
* @name until
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @see [async.whilst]{@link module:ControlFlow.whilst}
|
||
* @category Control Flow
|
||
* @param {AsyncFunction} test - asynchronous truth test to perform before each
|
||
* execution of `iteratee`. Invoked with (callback).
|
||
* @param {AsyncFunction} iteratee - An async function which is called each time
|
||
* `test` fails. Invoked with (callback).
|
||
* @param {Function} [callback] - A callback which is called after the test
|
||
* function has passed and repeated execution of `iteratee` has stopped. `callback`
|
||
* will be passed an error and any arguments passed to the final `iteratee`'s
|
||
* callback. Invoked with (err, [results]);
|
||
* @returns {Promise} a promise, if a callback is not passed
|
||
*
|
||
* @example
|
||
 * let results = []
 * let finished = false
 * async.until(function test(cb) {
|
||
* cb(null, finished)
|
||
* }, function iter(next) {
|
||
* fetchPage(url, (err, body) => {
|
||
* if (err) return next(err)
|
||
* results = results.concat(body.objects)
|
||
* finished = !!body.next
|
||
* next(err)
|
||
* })
|
||
* }, function done (err) {
|
||
* // all pages have been fetched
|
||
* })
|
||
*/
|
||
function until(test, iteratee, callback) {
    const _test = wrapAsync(test);
    return whilst$1((cb) => _test((err, truth) => cb(err, !truth)), iteratee, callback);
}
|
||
|
||
/**
|
||
* Runs the `tasks` array of functions in series, each passing their results to
|
||
* the next in the array. However, if any of the `tasks` pass an error to their
|
||
* own callback, the next function is not executed, and the main `callback` is
|
||
* immediately called with the error.
|
||
*
|
||
* @name waterfall
|
||
* @static
|
||
* @memberOf module:ControlFlow
|
||
* @method
|
||
* @category Control Flow
|
||
* @param {Array} tasks - An array of [async functions]{@link AsyncFunction}
|
||
* to run.
|
||
* Each function should complete with any number of `result` values.
|
||
* The `result` values will be passed as arguments, in order, to the next task.
|
||
* @param {Function} [callback] - An optional callback to run once all the
|
||
* functions have completed. This will be passed the results of the last task's
|
||
* callback. Invoked with (err, [results]).
|
||
* @returns undefined
|
||
* @example
|
||
*
|
||
* async.waterfall([
|
||
* function(callback) {
|
||
* callback(null, 'one', 'two');
|
||
* },
|
||
* function(arg1, arg2, callback) {
|
||
* // arg1 now equals 'one' and arg2 now equals 'two'
|
||
* callback(null, 'three');
|
||
* },
|
||
* function(arg1, callback) {
|
||
* // arg1 now equals 'three'
|
||
* callback(null, 'done');
|
||
* }
|
||
* ], function (err, result) {
|
||
* // result now equals 'done'
|
||
* });
|
||
*
|
||
* // Or, with named functions:
|
||
* async.waterfall([
|
||
* myFirstFunction,
|
||
* mySecondFunction,
|
||
* myLastFunction,
|
||
* ], function (err, result) {
|
||
* // result now equals 'done'
|
||
* });
|
||
* function myFirstFunction(callback) {
|
||
* callback(null, 'one', 'two');
|
||
* }
|
||
* function mySecondFunction(arg1, arg2, callback) {
|
||
* // arg1 now equals 'one' and arg2 now equals 'two'
|
||
* callback(null, 'three');
|
||
* }
|
||
* function myLastFunction(arg1, callback) {
|
||
* // arg1 now equals 'three'
|
||
* callback(null, 'done');
|
||
* }
|
||
*/
|
||
function waterfall (tasks, callback) {
    callback = once(callback);
    if (!Array.isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions'));
    if (!tasks.length) return callback();
    var taskIndex = 0;

    function nextTask(args) {
        var task = wrapAsync(tasks[taskIndex++]);
        task(...args, onlyOnce(next));
    }

    function next(err, ...args) {
        if (err === false) return
        if (err || taskIndex === tasks.length) {
            return callback(err, ...args);
        }
        nextTask(args);
    }

    nextTask([]);
}

var waterfall$1 = awaitify(waterfall);
|
||
|
||
/**
|
||
* An "async function" in the context of Async is an asynchronous function with
|
||
* a variable number of parameters, with the final parameter being a callback.
|
||
* (`function (arg1, arg2, ..., callback) {}`)
|
||
* The final callback is of the form `callback(err, results...)`, which must be
|
||
* called once the function is completed. The callback should be called with a
|
||
* Error as its first argument to signal that an error occurred.
|
||
* Otherwise, if no error occurred, it should be called with `null` as the first
|
||
* argument, and any additional `result` arguments that may apply, to signal
|
||
* successful completion.
|
||
* The callback must be called exactly once, ideally on a later tick of the
|
||
* JavaScript event loop.
|
||
*
|
||
* This type of function is also referred to as a "Node-style async function",
|
||
* or a "continuation passing-style function" (CPS). Most of the methods of this
|
||
* library are themselves CPS/Node-style async functions, or functions that
|
||
* return CPS/Node-style async functions.
|
||
*
|
||
* Wherever we accept a Node-style async function, we also directly accept an
|
||
* [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}.
|
||
* In this case, the `async` function will not be passed a final callback
|
||
* argument, and any thrown error will be used as the `err` argument of the
|
||
* implicit callback, and the return value will be used as the `result` value.
|
||
* (i.e. a `rejected` of the returned Promise becomes the `err` callback
|
||
* argument, and a `resolved` value becomes the `result`.)
|
||
*
|
||
* Note, due to JavaScript limitations, we can only detect native `async`
|
||
 * functions and not transpiled implementations.
|
||
* Your environment must have `async`/`await` support for this to work.
|
||
* (e.g. Node > v7.6, or a recent version of a modern browser).
|
||
* If you are using `async` functions through a transpiler (e.g. Babel), you
|
||
* must still wrap the function with [asyncify]{@link module:Utils.asyncify},
|
||
* because the `async function` will be compiled to an ordinary function that
|
||
* returns a promise.
|
||
*
|
||
* @typedef {Function} AsyncFunction
|
||
* @static
|
||
*/
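/*
 * A minimal sketch of the two accepted shapes described above; `readConfig`,
 * `fetchConfig` and `getJSON` are hypothetical names:
 *
 * // Node-style: the last argument is the callback
 * function readConfig(path, callback) {
 *     fs.readFile(path, 'utf8', callback);
 * }
 *
 * // ES2017 async function: no callback argument; the resolved value becomes
 * // the result and a rejection becomes the error
 * async function fetchConfig(url) {
 *     const data = await getJSON(url);
 *     return data;
 * }
 */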
|
||
|
||
var index = {
|
||
apply,
|
||
applyEach: applyEach$1,
|
||
applyEachSeries,
|
||
asyncify,
|
||
auto,
|
||
autoInject,
|
||
cargo,
|
||
cargoQueue: cargo$1,
|
||
compose,
|
||
concat: concat$1,
|
||
concatLimit: concatLimit$1,
|
||
concatSeries: concatSeries$1,
|
||
constant,
|
||
detect: detect$1,
|
||
detectLimit: detectLimit$1,
|
||
detectSeries: detectSeries$1,
|
||
dir,
|
||
doUntil,
|
||
doWhilst: doWhilst$1,
|
||
each,
|
||
eachLimit: eachLimit$2,
|
||
eachOf: eachOf$1,
|
||
eachOfLimit: eachOfLimit$2,
|
||
eachOfSeries: eachOfSeries$1,
|
||
eachSeries: eachSeries$1,
|
||
ensureAsync,
|
||
every: every$1,
|
||
everyLimit: everyLimit$1,
|
||
everySeries: everySeries$1,
|
||
filter: filter$1,
|
||
filterLimit: filterLimit$1,
|
||
filterSeries: filterSeries$1,
|
||
forever: forever$1,
|
||
groupBy,
|
||
groupByLimit: groupByLimit$1,
|
||
groupBySeries,
|
||
log,
|
||
map: map$1,
|
||
mapLimit: mapLimit$1,
|
||
mapSeries: mapSeries$1,
|
||
mapValues,
|
||
mapValuesLimit: mapValuesLimit$1,
|
||
mapValuesSeries,
|
||
memoize,
|
||
nextTick,
|
||
parallel: parallel$1,
|
||
parallelLimit,
|
||
priorityQueue,
|
||
queue: queue$1,
|
||
race: race$1,
|
||
reduce: reduce$1,
|
||
reduceRight,
|
||
reflect,
|
||
reflectAll,
|
||
reject: reject$2,
|
||
rejectLimit: rejectLimit$1,
|
||
rejectSeries: rejectSeries$1,
|
||
retry,
|
||
retryable,
|
||
seq,
|
||
series,
|
||
setImmediate: setImmediate$1,
|
||
some: some$1,
|
||
someLimit: someLimit$1,
|
||
someSeries: someSeries$1,
|
||
sortBy: sortBy$1,
|
||
timeout,
|
||
times,
|
||
timesLimit,
|
||
timesSeries,
|
||
transform,
|
||
tryEach: tryEach$1,
|
||
unmemoize,
|
||
until,
|
||
waterfall: waterfall$1,
|
||
whilst: whilst$1,
|
||
|
||
// aliases
|
||
all: every$1,
|
||
allLimit: everyLimit$1,
|
||
allSeries: everySeries$1,
|
||
any: some$1,
|
||
anyLimit: someLimit$1,
|
||
anySeries: someSeries$1,
|
||
find: detect$1,
|
||
findLimit: detectLimit$1,
|
||
findSeries: detectSeries$1,
|
||
flatMap: concat$1,
|
||
flatMapLimit: concatLimit$1,
|
||
flatMapSeries: concatSeries$1,
|
||
forEach: each,
|
||
forEachSeries: eachSeries$1,
|
||
forEachLimit: eachLimit$2,
|
||
forEachOf: eachOf$1,
|
||
forEachOfSeries: eachOfSeries$1,
|
||
forEachOfLimit: eachOfLimit$2,
|
||
inject: reduce$1,
|
||
foldl: reduce$1,
|
||
foldr: reduceRight,
|
||
select: filter$1,
|
||
selectLimit: filterLimit$1,
|
||
selectSeries: filterSeries$1,
|
||
wrapSync: asyncify,
|
||
during: whilst$1,
|
||
doDuring: doWhilst$1
|
||
};
|
||
|
||
exports.default = index;
|
||
exports.apply = apply;
|
||
exports.applyEach = applyEach$1;
|
||
exports.applyEachSeries = applyEachSeries;
|
||
exports.asyncify = asyncify;
|
||
exports.auto = auto;
|
||
exports.autoInject = autoInject;
|
||
exports.cargo = cargo;
|
||
exports.cargoQueue = cargo$1;
|
||
exports.compose = compose;
|
||
exports.concat = concat$1;
|
||
exports.concatLimit = concatLimit$1;
|
||
exports.concatSeries = concatSeries$1;
|
||
exports.constant = constant;
|
||
exports.detect = detect$1;
|
||
exports.detectLimit = detectLimit$1;
|
||
exports.detectSeries = detectSeries$1;
|
||
exports.dir = dir;
|
||
exports.doUntil = doUntil;
|
||
exports.doWhilst = doWhilst$1;
|
||
exports.each = each;
|
||
exports.eachLimit = eachLimit$2;
|
||
exports.eachOf = eachOf$1;
|
||
exports.eachOfLimit = eachOfLimit$2;
|
||
exports.eachOfSeries = eachOfSeries$1;
|
||
exports.eachSeries = eachSeries$1;
|
||
exports.ensureAsync = ensureAsync;
|
||
exports.every = every$1;
|
||
exports.everyLimit = everyLimit$1;
|
||
exports.everySeries = everySeries$1;
|
||
exports.filter = filter$1;
|
||
exports.filterLimit = filterLimit$1;
|
||
exports.filterSeries = filterSeries$1;
|
||
exports.forever = forever$1;
|
||
exports.groupBy = groupBy;
|
||
exports.groupByLimit = groupByLimit$1;
|
||
exports.groupBySeries = groupBySeries;
|
||
exports.log = log;
|
||
exports.map = map$1;
|
||
exports.mapLimit = mapLimit$1;
|
||
exports.mapSeries = mapSeries$1;
|
||
exports.mapValues = mapValues;
|
||
exports.mapValuesLimit = mapValuesLimit$1;
|
||
exports.mapValuesSeries = mapValuesSeries;
|
||
exports.memoize = memoize;
|
||
exports.nextTick = nextTick;
|
||
exports.parallel = parallel$1;
|
||
exports.parallelLimit = parallelLimit;
|
||
exports.priorityQueue = priorityQueue;
|
||
exports.queue = queue$1;
|
||
exports.race = race$1;
|
||
exports.reduce = reduce$1;
|
||
exports.reduceRight = reduceRight;
|
||
exports.reflect = reflect;
|
||
exports.reflectAll = reflectAll;
|
||
exports.reject = reject$2;
|
||
exports.rejectLimit = rejectLimit$1;
|
||
exports.rejectSeries = rejectSeries$1;
|
||
exports.retry = retry;
|
||
exports.retryable = retryable;
|
||
exports.seq = seq;
|
||
exports.series = series;
|
||
exports.setImmediate = setImmediate$1;
|
||
exports.some = some$1;
|
||
exports.someLimit = someLimit$1;
|
||
exports.someSeries = someSeries$1;
|
||
exports.sortBy = sortBy$1;
|
||
exports.timeout = timeout;
|
||
exports.times = times;
|
||
exports.timesLimit = timesLimit;
|
||
exports.timesSeries = timesSeries;
|
||
exports.transform = transform;
|
||
exports.tryEach = tryEach$1;
|
||
exports.unmemoize = unmemoize;
|
||
exports.until = until;
|
||
exports.waterfall = waterfall$1;
|
||
exports.whilst = whilst$1;
|
||
exports.all = every$1;
|
||
exports.allLimit = everyLimit$1;
|
||
exports.allSeries = everySeries$1;
|
||
exports.any = some$1;
|
||
exports.anyLimit = someLimit$1;
|
||
exports.anySeries = someSeries$1;
|
||
exports.find = detect$1;
|
||
exports.findLimit = detectLimit$1;
|
||
exports.findSeries = detectSeries$1;
|
||
exports.flatMap = concat$1;
|
||
exports.flatMapLimit = concatLimit$1;
|
||
exports.flatMapSeries = concatSeries$1;
|
||
exports.forEach = each;
|
||
exports.forEachSeries = eachSeries$1;
|
||
exports.forEachLimit = eachLimit$2;
|
||
exports.forEachOf = eachOf$1;
|
||
exports.forEachOfSeries = eachOfSeries$1;
|
||
exports.forEachOfLimit = eachOfLimit$2;
|
||
exports.inject = reduce$1;
|
||
exports.foldl = reduce$1;
|
||
exports.foldr = reduceRight;
|
||
exports.select = filter$1;
|
||
exports.selectLimit = filterLimit$1;
|
||
exports.selectSeries = filterSeries$1;
|
||
exports.wrapSync = asyncify;
|
||
exports.during = whilst$1;
|
||
exports.doDuring = doWhilst$1;
|
||
|
||
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
||
})));
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 7961:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
module.exports = __nccwpck_require__(9874);
|
||
|
||
/***/ }),
|
||
|
||
/***/ 5683:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var utils = __nccwpck_require__(9708);
|
||
var settle = __nccwpck_require__(4815);
|
||
var buildFullPath = __nccwpck_require__(9493);
|
||
var buildURL = __nccwpck_require__(9416);
|
||
var http = __nccwpck_require__(8605);
|
||
var https = __nccwpck_require__(7211);
|
||
var httpFollow = __nccwpck_require__(6792).http;
|
||
var httpsFollow = __nccwpck_require__(6792).https;
|
||
var url = __nccwpck_require__(8835);
|
||
var zlib = __nccwpck_require__(8761);
|
||
var pkg = __nccwpck_require__(696);
|
||
var createError = __nccwpck_require__(6807);
|
||
var enhanceError = __nccwpck_require__(7765);
|
||
|
||
var isHttps = /https:?/;
|
||
|
||
/**
|
||
*
|
||
* @param {http.ClientRequestArgs} options
|
||
* @param {AxiosProxyConfig} proxy
|
||
* @param {string} location
|
||
*/
|
||
function setProxy(options, proxy, location) {
  options.hostname = proxy.host;
  options.host = proxy.host;
  options.port = proxy.port;
  options.path = location;

  // Basic proxy authorization
  if (proxy.auth) {
    var base64 = Buffer.from(proxy.auth.username + ':' + proxy.auth.password, 'utf8').toString('base64');
    options.headers['Proxy-Authorization'] = 'Basic ' + base64;
  }

  // If a proxy is used, any redirects must also pass through the proxy
  options.beforeRedirect = function beforeRedirect(redirection) {
    redirection.headers.host = redirection.host;
    setProxy(redirection, proxy, redirection.href);
  };
}
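/*
 * A sketch of the arguments `setProxy` expects (values are illustrative only):
 *
 * setProxy(
 *   { headers: {} },                                   // request options being built
 *   { host: '127.0.0.1', port: 8080,
 *     auth: { username: 'user', password: 'pass' } },  // AxiosProxyConfig
 *   'http://example.com/path'                          // absolute location to request
 * );
 */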
|
||
|
||
/*eslint consistent-return:0*/
|
||
module.exports = function httpAdapter(config) {
|
||
return new Promise(function dispatchHttpRequest(resolvePromise, rejectPromise) {
|
||
var resolve = function resolve(value) {
|
||
resolvePromise(value);
|
||
};
|
||
var reject = function reject(value) {
|
||
rejectPromise(value);
|
||
};
|
||
var data = config.data;
|
||
var headers = config.headers;
|
||
|
||
// Set User-Agent (required by some servers)
|
||
// Only set header if it hasn't been set in config
|
||
// See https://github.com/axios/axios/issues/69
|
||
if (!headers['User-Agent'] && !headers['user-agent']) {
|
||
headers['User-Agent'] = 'axios/' + pkg.version;
|
||
}
|
||
|
||
if (data && !utils.isStream(data)) {
|
||
if (Buffer.isBuffer(data)) {
|
||
// Nothing to do...
|
||
} else if (utils.isArrayBuffer(data)) {
|
||
data = Buffer.from(new Uint8Array(data));
|
||
} else if (utils.isString(data)) {
|
||
data = Buffer.from(data, 'utf-8');
|
||
} else {
|
||
return reject(createError(
|
||
'Data after transformation must be a string, an ArrayBuffer, a Buffer, or a Stream',
|
||
config
|
||
));
|
||
}
|
||
|
||
// Add Content-Length header if data exists
|
||
headers['Content-Length'] = data.length;
|
||
}
|
||
|
||
// HTTP basic authentication
|
||
var auth = undefined;
|
||
if (config.auth) {
|
||
var username = config.auth.username || '';
|
||
var password = config.auth.password || '';
|
||
auth = username + ':' + password;
|
||
}
|
||
|
||
// Parse url
|
||
var fullPath = buildFullPath(config.baseURL, config.url);
|
||
var parsed = url.parse(fullPath);
|
||
var protocol = parsed.protocol || 'http:';
|
||
|
||
if (!auth && parsed.auth) {
|
||
var urlAuth = parsed.auth.split(':');
|
||
var urlUsername = urlAuth[0] || '';
|
||
var urlPassword = urlAuth[1] || '';
|
||
auth = urlUsername + ':' + urlPassword;
|
||
}
|
||
|
||
if (auth) {
|
||
delete headers.Authorization;
|
||
}
|
||
|
||
var isHttpsRequest = isHttps.test(protocol);
|
||
var agent = isHttpsRequest ? config.httpsAgent : config.httpAgent;
|
||
|
||
var options = {
|
||
path: buildURL(parsed.path, config.params, config.paramsSerializer).replace(/^\?/, ''),
|
||
method: config.method.toUpperCase(),
|
||
headers: headers,
|
||
agent: agent,
|
||
agents: { http: config.httpAgent, https: config.httpsAgent },
|
||
auth: auth
|
||
};
|
||
|
||
if (config.socketPath) {
|
||
options.socketPath = config.socketPath;
|
||
} else {
|
||
options.hostname = parsed.hostname;
|
||
options.port = parsed.port;
|
||
}
|
||
|
||
var proxy = config.proxy;
|
||
if (!proxy && proxy !== false) {
|
||
var proxyEnv = protocol.slice(0, -1) + '_proxy';
|
||
var proxyUrl = process.env[proxyEnv] || process.env[proxyEnv.toUpperCase()];
|
||
if (proxyUrl) {
|
||
var parsedProxyUrl = url.parse(proxyUrl);
|
||
var noProxyEnv = process.env.no_proxy || process.env.NO_PROXY;
|
||
var shouldProxy = true;
|
||
|
||
if (noProxyEnv) {
|
||
var noProxy = noProxyEnv.split(',').map(function trim(s) {
|
||
return s.trim();
|
||
});
|
||
|
||
shouldProxy = !noProxy.some(function proxyMatch(proxyElement) {
|
||
if (!proxyElement) {
|
||
return false;
|
||
}
|
||
if (proxyElement === '*') {
|
||
return true;
|
||
}
|
||
if (proxyElement[0] === '.' &&
|
||
parsed.hostname.substr(parsed.hostname.length - proxyElement.length) === proxyElement) {
|
||
return true;
|
||
}
|
||
|
||
return parsed.hostname === proxyElement;
|
||
});
|
||
}
|
||
|
||
if (shouldProxy) {
|
||
proxy = {
|
||
host: parsedProxyUrl.hostname,
|
||
port: parsedProxyUrl.port,
|
||
protocol: parsedProxyUrl.protocol
|
||
};
|
||
|
||
if (parsedProxyUrl.auth) {
|
||
var proxyUrlAuth = parsedProxyUrl.auth.split(':');
|
||
proxy.auth = {
|
||
username: proxyUrlAuth[0],
|
||
password: proxyUrlAuth[1]
|
||
};
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
if (proxy) {
|
||
options.headers.host = parsed.hostname + (parsed.port ? ':' + parsed.port : '');
|
||
setProxy(options, proxy, protocol + '//' + parsed.hostname + (parsed.port ? ':' + parsed.port : '') + options.path);
|
||
}
|
||
|
||
var transport;
|
||
var isHttpsProxy = isHttpsRequest && (proxy ? isHttps.test(proxy.protocol) : true);
|
||
if (config.transport) {
|
||
transport = config.transport;
|
||
} else if (config.maxRedirects === 0) {
|
||
transport = isHttpsProxy ? https : http;
|
||
} else {
|
||
if (config.maxRedirects) {
|
||
options.maxRedirects = config.maxRedirects;
|
||
}
|
||
transport = isHttpsProxy ? httpsFollow : httpFollow;
|
||
}
|
||
|
||
if (config.maxBodyLength > -1) {
|
||
options.maxBodyLength = config.maxBodyLength;
|
||
}
|
||
|
||
// Create the request
|
||
var req = transport.request(options, function handleResponse(res) {
|
||
if (req.aborted) return;
|
||
|
||
// uncompress the response body transparently if required
|
||
var stream = res;
|
||
|
||
// return the last request in case of redirects
|
||
var lastRequest = res.req || req;
|
||
|
||
|
||
// if no content, is HEAD request or decompress disabled we should not decompress
|
||
if (res.statusCode !== 204 && lastRequest.method !== 'HEAD' && config.decompress !== false) {
|
||
switch (res.headers['content-encoding']) {
|
||
/*eslint default-case:0*/
|
||
case 'gzip':
|
||
case 'compress':
|
||
case 'deflate':
|
||
// add the unzipper to the body stream processing pipeline
|
||
stream = stream.pipe(zlib.createUnzip());
|
||
|
||
// remove the content-encoding in order to not confuse downstream operations
|
||
delete res.headers['content-encoding'];
|
||
break;
|
||
}
|
||
}
|
||
|
||
var response = {
|
||
status: res.statusCode,
|
||
statusText: res.statusMessage,
|
||
headers: res.headers,
|
||
config: config,
|
||
request: lastRequest
|
||
};
|
||
|
||
if (config.responseType === 'stream') {
|
||
response.data = stream;
|
||
settle(resolve, reject, response);
|
||
} else {
|
||
var responseBuffer = [];
|
||
stream.on('data', function handleStreamData(chunk) {
|
||
responseBuffer.push(chunk);
|
||
|
||
// make sure the content length is not over the maxContentLength if specified
|
||
if (config.maxContentLength > -1 && Buffer.concat(responseBuffer).length > config.maxContentLength) {
|
||
stream.destroy();
|
||
reject(createError('maxContentLength size of ' + config.maxContentLength + ' exceeded',
|
||
config, null, lastRequest));
|
||
}
|
||
});
|
||
|
||
stream.on('error', function handleStreamError(err) {
|
||
if (req.aborted) return;
|
||
reject(enhanceError(err, config, null, lastRequest));
|
||
});
|
||
|
||
stream.on('end', function handleStreamEnd() {
|
||
var responseData = Buffer.concat(responseBuffer);
|
||
if (config.responseType !== 'arraybuffer') {
|
||
responseData = responseData.toString(config.responseEncoding);
|
||
if (!config.responseEncoding || config.responseEncoding === 'utf8') {
|
||
responseData = utils.stripBOM(responseData);
|
||
}
|
||
}
|
||
|
||
response.data = responseData;
|
||
settle(resolve, reject, response);
|
||
});
|
||
}
|
||
});
|
||
|
||
// Handle errors
|
||
req.on('error', function handleRequestError(err) {
|
||
if (req.aborted && err.code !== 'ERR_FR_TOO_MANY_REDIRECTS') return;
|
||
reject(enhanceError(err, config, null, req));
|
||
});
|
||
|
||
// Handle request timeout
|
||
if (config.timeout) {
|
||
      // Sometimes a response is extremely slow or never arrives, and the connect event can be held up by the event loop.
      // The timer callback then fires and abort() runs before the connection is established, producing "socket hang up" with code ECONNRESET.
      // Under heavy request load, Node.js would keep these hung sockets around in the background, their number growing
      // and gradually consuming CPU.
      // ClientRequest.setTimeout fires after the specified milliseconds and guarantees that abort() runs after connect.
|
||
req.setTimeout(config.timeout, function handleRequestTimeout() {
|
||
req.abort();
|
||
reject(createError('timeout of ' + config.timeout + 'ms exceeded', config, 'ECONNABORTED', req));
|
||
});
|
||
}
|
||
|
||
if (config.cancelToken) {
|
||
// Handle cancellation
|
||
config.cancelToken.promise.then(function onCanceled(cancel) {
|
||
if (req.aborted) return;
|
||
|
||
req.abort();
|
||
reject(cancel);
|
||
});
|
||
}
|
||
|
||
// Send the request
|
||
if (utils.isStream(data)) {
|
||
data.on('error', function handleStreamError(err) {
|
||
reject(enhanceError(err, config, null, req));
|
||
}).pipe(req);
|
||
} else {
|
||
req.end(data);
|
||
}
|
||
});
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 2527:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var utils = __nccwpck_require__(9708);
|
||
var settle = __nccwpck_require__(4815);
|
||
var cookies = __nccwpck_require__(5146);
|
||
var buildURL = __nccwpck_require__(9416);
|
||
var buildFullPath = __nccwpck_require__(9493);
|
||
var parseHeaders = __nccwpck_require__(8411);
|
||
var isURLSameOrigin = __nccwpck_require__(6756);
|
||
var createError = __nccwpck_require__(6807);
|
||
|
||
module.exports = function xhrAdapter(config) {
|
||
return new Promise(function dispatchXhrRequest(resolve, reject) {
|
||
var requestData = config.data;
|
||
var requestHeaders = config.headers;
|
||
|
||
if (utils.isFormData(requestData)) {
|
||
delete requestHeaders['Content-Type']; // Let the browser set it
|
||
}
|
||
|
||
var request = new XMLHttpRequest();
|
||
|
||
// HTTP basic authentication
|
||
if (config.auth) {
|
||
var username = config.auth.username || '';
|
||
var password = config.auth.password ? unescape(encodeURIComponent(config.auth.password)) : '';
|
||
requestHeaders.Authorization = 'Basic ' + btoa(username + ':' + password);
|
||
}
|
||
|
||
var fullPath = buildFullPath(config.baseURL, config.url);
|
||
request.open(config.method.toUpperCase(), buildURL(fullPath, config.params, config.paramsSerializer), true);
|
||
|
||
// Set the request timeout in MS
|
||
request.timeout = config.timeout;
|
||
|
||
// Listen for ready state
|
||
request.onreadystatechange = function handleLoad() {
|
||
if (!request || request.readyState !== 4) {
|
||
return;
|
||
}
|
||
|
||
// The request errored out and we didn't get a response, this will be
|
||
// handled by onerror instead
|
||
// With one exception: request that using file: protocol, most browsers
|
||
// will return status as 0 even though it's a successful request
|
||
if (request.status === 0 && !(request.responseURL && request.responseURL.indexOf('file:') === 0)) {
|
||
return;
|
||
}
|
||
|
||
// Prepare the response
|
||
var responseHeaders = 'getAllResponseHeaders' in request ? parseHeaders(request.getAllResponseHeaders()) : null;
|
||
var responseData = !config.responseType || config.responseType === 'text' ? request.responseText : request.response;
|
||
var response = {
|
||
data: responseData,
|
||
status: request.status,
|
||
statusText: request.statusText,
|
||
headers: responseHeaders,
|
||
config: config,
|
||
request: request
|
||
};
|
||
|
||
settle(resolve, reject, response);
|
||
|
||
// Clean up request
|
||
request = null;
|
||
};
|
||
|
||
// Handle browser request cancellation (as opposed to a manual cancellation)
|
||
request.onabort = function handleAbort() {
|
||
if (!request) {
|
||
return;
|
||
}
|
||
|
||
reject(createError('Request aborted', config, 'ECONNABORTED', request));
|
||
|
||
// Clean up request
|
||
request = null;
|
||
};
|
||
|
||
// Handle low level network errors
|
||
request.onerror = function handleError() {
|
||
// Real errors are hidden from us by the browser
|
||
// onerror should only fire if it's a network error
|
||
reject(createError('Network Error', config, null, request));
|
||
|
||
// Clean up request
|
||
request = null;
|
||
};
|
||
|
||
// Handle timeout
|
||
request.ontimeout = function handleTimeout() {
|
||
var timeoutErrorMessage = 'timeout of ' + config.timeout + 'ms exceeded';
|
||
if (config.timeoutErrorMessage) {
|
||
timeoutErrorMessage = config.timeoutErrorMessage;
|
||
}
|
||
reject(createError(timeoutErrorMessage, config, 'ECONNABORTED',
|
||
request));
|
||
|
||
// Clean up request
|
||
request = null;
|
||
};
|
||
|
||
// Add xsrf header
|
||
// This is only done if running in a standard browser environment.
|
||
// Specifically not if we're in a web worker, or react-native.
|
||
if (utils.isStandardBrowserEnv()) {
|
||
// Add xsrf header
|
||
var xsrfValue = (config.withCredentials || isURLSameOrigin(fullPath)) && config.xsrfCookieName ?
|
||
cookies.read(config.xsrfCookieName) :
|
||
undefined;
|
||
|
||
if (xsrfValue) {
|
||
requestHeaders[config.xsrfHeaderName] = xsrfValue;
|
||
}
|
||
}
|
||
|
||
// Add headers to the request
|
||
if ('setRequestHeader' in request) {
|
||
utils.forEach(requestHeaders, function setRequestHeader(val, key) {
|
||
if (typeof requestData === 'undefined' && key.toLowerCase() === 'content-type') {
|
||
// Remove Content-Type if data is undefined
|
||
delete requestHeaders[key];
|
||
} else {
|
||
// Otherwise add header to the request
|
||
request.setRequestHeader(key, val);
|
||
}
|
||
});
|
||
}
|
||
|
||
// Add withCredentials to request if needed
|
||
if (!utils.isUndefined(config.withCredentials)) {
|
||
request.withCredentials = !!config.withCredentials;
|
||
}
|
||
|
||
// Add responseType to request if needed
|
||
if (config.responseType) {
|
||
try {
|
||
request.responseType = config.responseType;
|
||
} catch (e) {
|
||
// Expected DOMException thrown by browsers not compatible XMLHttpRequest Level 2.
|
||
// But, this can be suppressed for 'json' type as it can be parsed by default 'transformResponse' function.
|
||
if (config.responseType !== 'json') {
|
||
throw e;
|
||
}
|
||
}
|
||
}
|
||
|
||
// Handle progress if needed
|
||
if (typeof config.onDownloadProgress === 'function') {
|
||
request.addEventListener('progress', config.onDownloadProgress);
|
||
}
|
||
|
||
// Not all browsers support upload events
|
||
if (typeof config.onUploadProgress === 'function' && request.upload) {
|
||
request.upload.addEventListener('progress', config.onUploadProgress);
|
||
}
|
||
|
||
if (config.cancelToken) {
|
||
// Handle cancellation
|
||
config.cancelToken.promise.then(function onCanceled(cancel) {
|
||
if (!request) {
|
||
return;
|
||
}
|
||
|
||
request.abort();
|
||
reject(cancel);
|
||
// Clean up request
|
||
request = null;
|
||
});
|
||
}
|
||
|
||
if (!requestData) {
|
||
requestData = null;
|
||
}
|
||
|
||
// Send the request
|
||
request.send(requestData);
|
||
});
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 9874:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var utils = __nccwpck_require__(9708);
|
||
var bind = __nccwpck_require__(2897);
|
||
var Axios = __nccwpck_require__(8639);
|
||
var mergeConfig = __nccwpck_require__(3418);
|
||
var defaults = __nccwpck_require__(9740);
|
||
|
||
/**
|
||
* Create an instance of Axios
|
||
*
|
||
* @param {Object} defaultConfig The default config for the instance
|
||
* @return {Axios} A new instance of Axios
|
||
*/
|
||
function createInstance(defaultConfig) {
|
||
var context = new Axios(defaultConfig);
|
||
var instance = bind(Axios.prototype.request, context);
|
||
|
||
// Copy axios.prototype to instance
|
||
utils.extend(instance, Axios.prototype, context);
|
||
|
||
// Copy context to instance
|
||
utils.extend(instance, context);
|
||
|
||
return instance;
|
||
}
|
||
|
||
// Create the default instance to be exported
|
||
var axios = createInstance(defaults);
|
||
|
||
// Expose Axios class to allow class inheritance
|
||
axios.Axios = Axios;
|
||
|
||
// Factory for creating new instances
|
||
axios.create = function create(instanceConfig) {
|
||
return createInstance(mergeConfig(axios.defaults, instanceConfig));
|
||
};
|
||
|
||
// Expose Cancel & CancelToken
|
||
axios.Cancel = __nccwpck_require__(5398);
|
||
axios.CancelToken = __nccwpck_require__(1438);
|
||
axios.isCancel = __nccwpck_require__(9862);
|
||
|
||
// Expose all/spread
|
||
axios.all = function all(promises) {
|
||
return Promise.all(promises);
|
||
};
|
||
axios.spread = __nccwpck_require__(8174);
|
||
|
||
// Expose isAxiosError
|
||
axios.isAxiosError = __nccwpck_require__(3687);
|
||
|
||
module.exports = axios;
|
||
|
||
// Allow use of default import syntax in TypeScript
|
||
module.exports.default = axios;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 5398:
|
||
/***/ ((module) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
/**
|
||
* A `Cancel` is an object that is thrown when an operation is canceled.
|
||
*
|
||
* @class
|
||
* @param {string=} message The message.
|
||
*/
|
||
function Cancel(message) {
|
||
this.message = message;
|
||
}
|
||
|
||
Cancel.prototype.toString = function toString() {
|
||
return 'Cancel' + (this.message ? ': ' + this.message : '');
|
||
};
|
||
|
||
Cancel.prototype.__CANCEL__ = true;
|
||
|
||
module.exports = Cancel;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 1438:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var Cancel = __nccwpck_require__(5398);
|
||
|
||
/**
|
||
* A `CancelToken` is an object that can be used to request cancellation of an operation.
|
||
*
|
||
* @class
|
||
* @param {Function} executor The executor function.
|
||
*/
|
||
function CancelToken(executor) {
|
||
if (typeof executor !== 'function') {
|
||
throw new TypeError('executor must be a function.');
|
||
}
|
||
|
||
var resolvePromise;
|
||
this.promise = new Promise(function promiseExecutor(resolve) {
|
||
resolvePromise = resolve;
|
||
});
|
||
|
||
var token = this;
|
||
executor(function cancel(message) {
|
||
if (token.reason) {
|
||
// Cancellation has already been requested
|
||
return;
|
||
}
|
||
|
||
token.reason = new Cancel(message);
|
||
resolvePromise(token.reason);
|
||
});
|
||
}
|
||
|
||
/**
|
||
* Throws a `Cancel` if cancellation has been requested.
|
||
*/
|
||
CancelToken.prototype.throwIfRequested = function throwIfRequested() {
|
||
if (this.reason) {
|
||
throw this.reason;
|
||
}
|
||
};
|
||
|
||
/**
|
||
* Returns an object that contains a new `CancelToken` and a function that, when called,
|
||
* cancels the `CancelToken`.
|
||
*/
|
||
CancelToken.source = function source() {
|
||
var cancel;
|
||
var token = new CancelToken(function executor(c) {
|
||
cancel = c;
|
||
});
|
||
return {
|
||
token: token,
|
||
cancel: cancel
|
||
};
|
||
};
|
||
|
||
module.exports = CancelToken;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 9862:
|
||
/***/ ((module) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
module.exports = function isCancel(value) {
|
||
return !!(value && value.__CANCEL__);
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 8639:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var utils = __nccwpck_require__(9708);
|
||
var buildURL = __nccwpck_require__(9416);
|
||
var InterceptorManager = __nccwpck_require__(2378);
|
||
var dispatchRequest = __nccwpck_require__(9647);
|
||
var mergeConfig = __nccwpck_require__(3418);
|
||
|
||
/**
|
||
* Create a new instance of Axios
|
||
*
|
||
* @param {Object} instanceConfig The default config for the instance
|
||
*/
|
||
function Axios(instanceConfig) {
|
||
this.defaults = instanceConfig;
|
||
this.interceptors = {
|
||
request: new InterceptorManager(),
|
||
response: new InterceptorManager()
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Dispatch a request
|
||
*
|
||
* @param {Object} config The config specific for this request (merged with this.defaults)
|
||
*/
|
||
Axios.prototype.request = function request(config) {
|
||
/*eslint no-param-reassign:0*/
|
||
// Allow for axios('example/url'[, config]) a la fetch API
|
||
if (typeof config === 'string') {
|
||
config = arguments[1] || {};
|
||
config.url = arguments[0];
|
||
} else {
|
||
config = config || {};
|
||
}
|
||
|
||
config = mergeConfig(this.defaults, config);
|
||
|
||
// Set config.method
|
||
if (config.method) {
|
||
config.method = config.method.toLowerCase();
|
||
} else if (this.defaults.method) {
|
||
config.method = this.defaults.method.toLowerCase();
|
||
} else {
|
||
config.method = 'get';
|
||
}
|
||
|
||
// Hook up interceptors middleware
|
||
var chain = [dispatchRequest, undefined];
|
||
var promise = Promise.resolve(config);
|
||
|
||
this.interceptors.request.forEach(function unshiftRequestInterceptors(interceptor) {
|
||
chain.unshift(interceptor.fulfilled, interceptor.rejected);
|
||
});
|
||
|
||
this.interceptors.response.forEach(function pushResponseInterceptors(interceptor) {
|
||
chain.push(interceptor.fulfilled, interceptor.rejected);
|
||
});
|
||
|
||
while (chain.length) {
|
||
promise = promise.then(chain.shift(), chain.shift());
|
||
}
|
||
|
||
return promise;
|
||
};
|
||
|
||
Axios.prototype.getUri = function getUri(config) {
|
||
config = mergeConfig(this.defaults, config);
|
||
return buildURL(config.url, config.params, config.paramsSerializer).replace(/^\?/, '');
|
||
};
|
||
|
||
// Provide aliases for supported request methods
|
||
utils.forEach(['delete', 'get', 'head', 'options'], function forEachMethodNoData(method) {
|
||
/*eslint func-names:0*/
|
||
Axios.prototype[method] = function(url, config) {
|
||
return this.request(mergeConfig(config || {}, {
|
||
method: method,
|
||
url: url,
|
||
data: (config || {}).data
|
||
}));
|
||
};
|
||
});
|
||
|
||
utils.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) {
|
||
/*eslint func-names:0*/
|
||
Axios.prototype[method] = function(url, data, config) {
|
||
return this.request(mergeConfig(config || {}, {
|
||
method: method,
|
||
url: url,
|
||
data: data
|
||
}));
|
||
};
|
||
});
|
||
|
||
module.exports = Axios;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 2378:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var utils = __nccwpck_require__(9708);
|
||
|
||
function InterceptorManager() {
|
||
this.handlers = [];
|
||
}
|
||
|
||
/**
|
||
* Add a new interceptor to the stack
|
||
*
|
||
* @param {Function} fulfilled The function to handle `then` for a `Promise`
|
||
* @param {Function} rejected The function to handle `reject` for a `Promise`
|
||
*
|
||
* @return {Number} An ID used to remove interceptor later
|
||
*/
|
||
InterceptorManager.prototype.use = function use(fulfilled, rejected) {
|
||
this.handlers.push({
|
||
fulfilled: fulfilled,
|
||
rejected: rejected
|
||
});
|
||
return this.handlers.length - 1;
|
||
};
|
||
|
||
/**
|
||
* Remove an interceptor from the stack
|
||
*
|
||
* @param {Number} id The ID that was returned by `use`
|
||
*/
|
||
InterceptorManager.prototype.eject = function eject(id) {
|
||
if (this.handlers[id]) {
|
||
this.handlers[id] = null;
|
||
}
|
||
};
|
||
|
||
/**
|
||
* Iterate over all the registered interceptors
|
||
*
|
||
* This method is particularly useful for skipping over any
|
||
* interceptors that may have become `null` calling `eject`.
|
||
*
|
||
* @param {Function} fn The function to call for each interceptor
|
||
*/
|
||
InterceptorManager.prototype.forEach = function forEach(fn) {
|
||
utils.forEach(this.handlers, function forEachHandler(h) {
|
||
if (h !== null) {
|
||
fn(h);
|
||
}
|
||
});
|
||
};
|
||
|
||
module.exports = InterceptorManager;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 9493:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var isAbsoluteURL = __nccwpck_require__(4803);
|
||
var combineURLs = __nccwpck_require__(4584);
|
||
|
||
/**
|
||
* Creates a new URL by combining the baseURL with the requestedURL,
|
||
* only when the requestedURL is not already an absolute URL.
|
||
* If the requestURL is absolute, this function returns the requestedURL untouched.
|
||
*
|
||
* @param {string} baseURL The base URL
|
||
* @param {string} requestedURL Absolute or relative URL to combine
|
||
* @returns {string} The combined full path
|
||
*/
|
||
module.exports = function buildFullPath(baseURL, requestedURL) {
|
||
if (baseURL && !isAbsoluteURL(requestedURL)) {
|
||
return combineURLs(baseURL, requestedURL);
|
||
}
|
||
return requestedURL;
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 6807:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var enhanceError = __nccwpck_require__(7765);
|
||
|
||
/**
|
||
* Create an Error with the specified message, config, error code, request and response.
|
||
*
|
||
* @param {string} message The error message.
|
||
* @param {Object} config The config.
|
||
* @param {string} [code] The error code (for example, 'ECONNABORTED').
|
||
* @param {Object} [request] The request.
|
||
* @param {Object} [response] The response.
|
||
* @returns {Error} The created error.
|
||
*/
|
||
module.exports = function createError(message, config, code, request, response) {
|
||
var error = new Error(message);
|
||
return enhanceError(error, config, code, request, response);
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 9647:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var utils = __nccwpck_require__(9708);
|
||
var transformData = __nccwpck_require__(3554);
|
||
var isCancel = __nccwpck_require__(9862);
|
||
var defaults = __nccwpck_require__(9740);
|
||
|
||
/**
|
||
* Throws a `Cancel` if cancellation has been requested.
|
||
*/
|
||
function throwIfCancellationRequested(config) {
|
||
if (config.cancelToken) {
|
||
config.cancelToken.throwIfRequested();
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Dispatch a request to the server using the configured adapter.
|
||
*
|
||
* @param {object} config The config that is to be used for the request
|
||
* @returns {Promise} The Promise to be fulfilled
|
||
*/
|
||
module.exports = function dispatchRequest(config) {
|
||
throwIfCancellationRequested(config);
|
||
|
||
// Ensure headers exist
|
||
config.headers = config.headers || {};
|
||
|
||
// Transform request data
|
||
config.data = transformData(
|
||
config.data,
|
||
config.headers,
|
||
config.transformRequest
|
||
);
|
||
|
||
// Flatten headers
|
||
config.headers = utils.merge(
|
||
config.headers.common || {},
|
||
config.headers[config.method] || {},
|
||
config.headers
|
||
);
|
||
|
||
utils.forEach(
|
||
['delete', 'get', 'head', 'post', 'put', 'patch', 'common'],
|
||
function cleanHeaderConfig(method) {
|
||
delete config.headers[method];
|
||
}
|
||
);
|
||
|
||
var adapter = config.adapter || defaults.adapter;
|
||
|
||
return adapter(config).then(function onAdapterResolution(response) {
|
||
throwIfCancellationRequested(config);
|
||
|
||
// Transform response data
|
||
response.data = transformData(
|
||
response.data,
|
||
response.headers,
|
||
config.transformResponse
|
||
);
|
||
|
||
return response;
|
||
}, function onAdapterRejection(reason) {
|
||
if (!isCancel(reason)) {
|
||
throwIfCancellationRequested(config);
|
||
|
||
// Transform response data
|
||
if (reason && reason.response) {
|
||
reason.response.data = transformData(
|
||
reason.response.data,
|
||
reason.response.headers,
|
||
config.transformResponse
|
||
);
|
||
}
|
||
}
|
||
|
||
return Promise.reject(reason);
|
||
});
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 7765:
|
||
/***/ ((module) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
/**
|
||
* Update an Error with the specified config, error code, and response.
|
||
*
|
||
* @param {Error} error The error to update.
|
||
* @param {Object} config The config.
|
||
* @param {string} [code] The error code (for example, 'ECONNABORTED').
|
||
* @param {Object} [request] The request.
|
||
* @param {Object} [response] The response.
|
||
* @returns {Error} The error.
|
||
*/
|
||
module.exports = function enhanceError(error, config, code, request, response) {
|
||
error.config = config;
|
||
if (code) {
|
||
error.code = code;
|
||
}
|
||
|
||
error.request = request;
|
||
error.response = response;
|
||
error.isAxiosError = true;
|
||
|
||
error.toJSON = function toJSON() {
|
||
return {
|
||
// Standard
|
||
message: this.message,
|
||
name: this.name,
|
||
// Microsoft
|
||
description: this.description,
|
||
number: this.number,
|
||
// Mozilla
|
||
fileName: this.fileName,
|
||
lineNumber: this.lineNumber,
|
||
columnNumber: this.columnNumber,
|
||
stack: this.stack,
|
||
// Axios
|
||
config: this.config,
|
||
code: this.code
|
||
};
|
||
};
|
||
return error;
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 3418:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var utils = __nccwpck_require__(9708);
|
||
|
||
/**
|
||
* Config-specific merge-function which creates a new config-object
|
||
* by merging two configuration objects together.
|
||
*
|
||
* @param {Object} config1
|
||
* @param {Object} config2
|
||
* @returns {Object} New object resulting from merging config2 to config1
|
||
*/
|
||
module.exports = function mergeConfig(config1, config2) {
|
||
// eslint-disable-next-line no-param-reassign
|
||
config2 = config2 || {};
|
||
var config = {};
|
||
|
||
var valueFromConfig2Keys = ['url', 'method', 'data'];
|
||
var mergeDeepPropertiesKeys = ['headers', 'auth', 'proxy', 'params'];
|
||
var defaultToConfig2Keys = [
|
||
'baseURL', 'transformRequest', 'transformResponse', 'paramsSerializer',
|
||
'timeout', 'timeoutMessage', 'withCredentials', 'adapter', 'responseType', 'xsrfCookieName',
|
||
'xsrfHeaderName', 'onUploadProgress', 'onDownloadProgress', 'decompress',
|
||
'maxContentLength', 'maxBodyLength', 'maxRedirects', 'transport', 'httpAgent',
|
||
'httpsAgent', 'cancelToken', 'socketPath', 'responseEncoding'
|
||
];
|
||
var directMergeKeys = ['validateStatus'];
|
||
|
||
function getMergedValue(target, source) {
|
||
if (utils.isPlainObject(target) && utils.isPlainObject(source)) {
|
||
return utils.merge(target, source);
|
||
} else if (utils.isPlainObject(source)) {
|
||
return utils.merge({}, source);
|
||
} else if (utils.isArray(source)) {
|
||
return source.slice();
|
||
}
|
||
return source;
|
||
}
|
||
|
||
function mergeDeepProperties(prop) {
|
||
if (!utils.isUndefined(config2[prop])) {
|
||
config[prop] = getMergedValue(config1[prop], config2[prop]);
|
||
} else if (!utils.isUndefined(config1[prop])) {
|
||
config[prop] = getMergedValue(undefined, config1[prop]);
|
||
}
|
||
}
|
||
|
||
utils.forEach(valueFromConfig2Keys, function valueFromConfig2(prop) {
|
||
if (!utils.isUndefined(config2[prop])) {
|
||
config[prop] = getMergedValue(undefined, config2[prop]);
|
||
}
|
||
});
|
||
|
||
utils.forEach(mergeDeepPropertiesKeys, mergeDeepProperties);
|
||
|
||
utils.forEach(defaultToConfig2Keys, function defaultToConfig2(prop) {
|
||
if (!utils.isUndefined(config2[prop])) {
|
||
config[prop] = getMergedValue(undefined, config2[prop]);
|
||
} else if (!utils.isUndefined(config1[prop])) {
|
||
config[prop] = getMergedValue(undefined, config1[prop]);
|
||
}
|
||
});
|
||
|
||
utils.forEach(directMergeKeys, function merge(prop) {
|
||
if (prop in config2) {
|
||
config[prop] = getMergedValue(config1[prop], config2[prop]);
|
||
} else if (prop in config1) {
|
||
config[prop] = getMergedValue(undefined, config1[prop]);
|
||
}
|
||
});
|
||
|
||
var axiosKeys = valueFromConfig2Keys
|
||
.concat(mergeDeepPropertiesKeys)
|
||
.concat(defaultToConfig2Keys)
|
||
.concat(directMergeKeys);
|
||
|
||
var otherKeys = Object
|
||
.keys(config1)
|
||
.concat(Object.keys(config2))
|
||
.filter(function filterAxiosKeys(key) {
|
||
return axiosKeys.indexOf(key) === -1;
|
||
});
|
||
|
||
utils.forEach(otherKeys, mergeDeepProperties);
|
||
|
||
return config;
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 4815:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var createError = __nccwpck_require__(6807);
|
||
|
||
/**
|
||
* Resolve or reject a Promise based on response status.
|
||
*
|
||
* @param {Function} resolve A function that resolves the promise.
|
||
* @param {Function} reject A function that rejects the promise.
|
||
* @param {object} response The response.
|
||
*/
|
||
module.exports = function settle(resolve, reject, response) {
|
||
var validateStatus = response.config.validateStatus;
|
||
if (!response.status || !validateStatus || validateStatus(response.status)) {
|
||
resolve(response);
|
||
} else {
|
||
reject(createError(
|
||
'Request failed with status code ' + response.status,
|
||
response.config,
|
||
null,
|
||
response.request,
|
||
response
|
||
));
|
||
}
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 3554:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var utils = __nccwpck_require__(9708);
|
||
|
||
/**
|
||
* Transform the data for a request or a response
|
||
*
|
||
* @param {Object|String} data The data to be transformed
|
||
* @param {Array} headers The headers for the request or response
|
||
* @param {Array|Function} fns A single function or Array of functions
|
||
* @returns {*} The resulting transformed data
|
||
*/
|
||
module.exports = function transformData(data, headers, fns) {
|
||
/*eslint no-param-reassign:0*/
|
||
utils.forEach(fns, function transform(fn) {
|
||
data = fn(data, headers);
|
||
});
|
||
|
||
return data;
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 9740:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var utils = __nccwpck_require__(9708);
|
||
var normalizeHeaderName = __nccwpck_require__(6906);
|
||
|
||
var DEFAULT_CONTENT_TYPE = {
|
||
'Content-Type': 'application/x-www-form-urlencoded'
|
||
};
|
||
|
||
function setContentTypeIfUnset(headers, value) {
|
||
if (!utils.isUndefined(headers) && utils.isUndefined(headers['Content-Type'])) {
|
||
headers['Content-Type'] = value;
|
||
}
|
||
}
|
||
|
||
function getDefaultAdapter() {
|
||
var adapter;
|
||
if (typeof XMLHttpRequest !== 'undefined') {
|
||
// For browsers use XHR adapter
|
||
adapter = __nccwpck_require__(2527);
|
||
} else if (typeof process !== 'undefined' && Object.prototype.toString.call(process) === '[object process]') {
|
||
// For node use HTTP adapter
|
||
adapter = __nccwpck_require__(5683);
|
||
}
|
||
return adapter;
|
||
}
|
||
|
||
var defaults = {
|
||
adapter: getDefaultAdapter(),
|
||
|
||
transformRequest: [function transformRequest(data, headers) {
|
||
normalizeHeaderName(headers, 'Accept');
|
||
normalizeHeaderName(headers, 'Content-Type');
|
||
if (utils.isFormData(data) ||
|
||
utils.isArrayBuffer(data) ||
|
||
utils.isBuffer(data) ||
|
||
utils.isStream(data) ||
|
||
utils.isFile(data) ||
|
||
utils.isBlob(data)
|
||
) {
|
||
return data;
|
||
}
|
||
if (utils.isArrayBufferView(data)) {
|
||
return data.buffer;
|
||
}
|
||
if (utils.isURLSearchParams(data)) {
|
||
setContentTypeIfUnset(headers, 'application/x-www-form-urlencoded;charset=utf-8');
|
||
return data.toString();
|
||
}
|
||
if (utils.isObject(data)) {
|
||
setContentTypeIfUnset(headers, 'application/json;charset=utf-8');
|
||
return JSON.stringify(data);
|
||
}
|
||
return data;
|
||
}],
|
||
|
||
transformResponse: [function transformResponse(data) {
|
||
/*eslint no-param-reassign:0*/
|
||
if (typeof data === 'string') {
|
||
try {
|
||
data = JSON.parse(data);
|
||
} catch (e) { /* Ignore */ }
|
||
}
|
||
return data;
|
||
}],
|
||
|
||
/**
|
||
* A timeout in milliseconds to abort a request. If set to 0 (default) a
|
||
* timeout is not created.
|
||
*/
|
||
timeout: 0,
|
||
|
||
xsrfCookieName: 'XSRF-TOKEN',
|
||
xsrfHeaderName: 'X-XSRF-TOKEN',
|
||
|
||
maxContentLength: -1,
|
||
maxBodyLength: -1,
|
||
|
||
validateStatus: function validateStatus(status) {
|
||
return status >= 200 && status < 300;
|
||
}
|
||
};
|
||
|
||
defaults.headers = {
|
||
common: {
|
||
'Accept': 'application/json, text/plain, */*'
|
||
}
|
||
};
|
||
|
||
utils.forEach(['delete', 'get', 'head'], function forEachMethodNoData(method) {
|
||
defaults.headers[method] = {};
|
||
});
|
||
|
||
utils.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) {
|
||
defaults.headers[method] = utils.merge(DEFAULT_CONTENT_TYPE);
|
||
});
|
||
|
||
module.exports = defaults;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 2897:
|
||
/***/ ((module) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
module.exports = function bind(fn, thisArg) {
|
||
return function wrap() {
|
||
var args = new Array(arguments.length);
|
||
for (var i = 0; i < args.length; i++) {
|
||
args[i] = arguments[i];
|
||
}
|
||
return fn.apply(thisArg, args);
|
||
};
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 9416:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var utils = __nccwpck_require__(9708);
|
||
|
||
function encode(val) {
|
||
return encodeURIComponent(val).
|
||
replace(/%3A/gi, ':').
|
||
replace(/%24/g, '$').
|
||
replace(/%2C/gi, ',').
|
||
replace(/%20/g, '+').
|
||
replace(/%5B/gi, '[').
|
||
replace(/%5D/gi, ']');
|
||
}
|
||
|
||
/**
|
||
* Build a URL by appending params to the end
|
||
*
|
||
* @param {string} url The base of the url (e.g., http://www.google.com)
|
||
* @param {object} [params] The params to be appended
|
||
* @returns {string} The formatted url
|
||
*/
|
||
module.exports = function buildURL(url, params, paramsSerializer) {
|
||
/*eslint no-param-reassign:0*/
|
||
if (!params) {
|
||
return url;
|
||
}
|
||
|
||
var serializedParams;
|
||
if (paramsSerializer) {
|
||
serializedParams = paramsSerializer(params);
|
||
} else if (utils.isURLSearchParams(params)) {
|
||
serializedParams = params.toString();
|
||
} else {
|
||
var parts = [];
|
||
|
||
utils.forEach(params, function serialize(val, key) {
|
||
if (val === null || typeof val === 'undefined') {
|
||
return;
|
||
}
|
||
|
||
if (utils.isArray(val)) {
|
||
key = key + '[]';
|
||
} else {
|
||
val = [val];
|
||
}
|
||
|
||
utils.forEach(val, function parseValue(v) {
|
||
if (utils.isDate(v)) {
|
||
v = v.toISOString();
|
||
} else if (utils.isObject(v)) {
|
||
v = JSON.stringify(v);
|
||
}
|
||
parts.push(encode(key) + '=' + encode(v));
|
||
});
|
||
});
|
||
|
||
serializedParams = parts.join('&');
|
||
}
|
||
|
||
if (serializedParams) {
|
||
var hashmarkIndex = url.indexOf('#');
|
||
if (hashmarkIndex !== -1) {
|
||
url = url.slice(0, hashmarkIndex);
|
||
}
|
||
|
||
url += (url.indexOf('?') === -1 ? '?' : '&') + serializedParams;
|
||
}
|
||
|
||
return url;
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 4584:
|
||
/***/ ((module) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
/**
|
||
* Creates a new URL by combining the specified URLs
|
||
*
|
||
* @param {string} baseURL The base URL
|
||
* @param {string} relativeURL The relative URL
|
||
* @returns {string} The combined URL
|
||
*/
|
||
module.exports = function combineURLs(baseURL, relativeURL) {
|
||
return relativeURL
|
||
? baseURL.replace(/\/+$/, '') + '/' + relativeURL.replace(/^\/+/, '')
|
||
: baseURL;
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 5146:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var utils = __nccwpck_require__(9708);
|
||
|
||
module.exports = (
|
||
utils.isStandardBrowserEnv() ?
|
||
|
||
// Standard browser envs support document.cookie
|
||
(function standardBrowserEnv() {
|
||
return {
|
||
write: function write(name, value, expires, path, domain, secure) {
|
||
var cookie = [];
|
||
cookie.push(name + '=' + encodeURIComponent(value));
|
||
|
||
if (utils.isNumber(expires)) {
|
||
cookie.push('expires=' + new Date(expires).toGMTString());
|
||
}
|
||
|
||
if (utils.isString(path)) {
|
||
cookie.push('path=' + path);
|
||
}
|
||
|
||
if (utils.isString(domain)) {
|
||
cookie.push('domain=' + domain);
|
||
}
|
||
|
||
if (secure === true) {
|
||
cookie.push('secure');
|
||
}
|
||
|
||
document.cookie = cookie.join('; ');
|
||
},
|
||
|
||
read: function read(name) {
|
||
var match = document.cookie.match(new RegExp('(^|;\\s*)(' + name + ')=([^;]*)'));
|
||
return (match ? decodeURIComponent(match[3]) : null);
|
||
},
|
||
|
||
remove: function remove(name) {
|
||
this.write(name, '', Date.now() - 86400000);
|
||
}
|
||
};
|
||
})() :
|
||
|
||
// Non standard browser env (web workers, react-native) lack needed support.
|
||
(function nonStandardBrowserEnv() {
|
||
return {
|
||
write: function write() {},
|
||
read: function read() { return null; },
|
||
remove: function remove() {}
|
||
};
|
||
})()
|
||
);
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 4803:
|
||
/***/ ((module) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
/**
|
||
* Determines whether the specified URL is absolute
|
||
*
|
||
* @param {string} url The URL to test
|
||
* @returns {boolean} True if the specified URL is absolute, otherwise false
|
||
*/
|
||
module.exports = function isAbsoluteURL(url) {
|
||
// A URL is considered absolute if it begins with "<scheme>://" or "//" (protocol-relative URL).
|
||
// RFC 3986 defines scheme name as a sequence of characters beginning with a letter and followed
|
||
// by any combination of letters, digits, plus, period, or hyphen.
|
||
return /^([a-z][a-z\d\+\-\.]*:)?\/\//i.test(url);
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 3687:
|
||
/***/ ((module) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
/**
|
||
* Determines whether the payload is an error thrown by Axios
|
||
*
|
||
* @param {*} payload The value to test
|
||
* @returns {boolean} True if the payload is an error thrown by Axios, otherwise false
|
||
*/
|
||
module.exports = function isAxiosError(payload) {
|
||
return (typeof payload === 'object') && (payload.isAxiosError === true);
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 6756:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var utils = __nccwpck_require__(9708);
|
||
|
||
module.exports = (
|
||
utils.isStandardBrowserEnv() ?
|
||
|
||
// Standard browser envs have full support of the APIs needed to test
|
||
// whether the request URL is of the same origin as current location.
|
||
(function standardBrowserEnv() {
|
||
var msie = /(msie|trident)/i.test(navigator.userAgent);
|
||
var urlParsingNode = document.createElement('a');
|
||
var originURL;
|
||
|
||
/**
|
||
* Parse a URL to discover it's components
|
||
*
|
||
* @param {String} url The URL to be parsed
|
||
* @returns {Object}
|
||
*/
|
||
function resolveURL(url) {
|
||
var href = url;
|
||
|
||
if (msie) {
|
||
// IE needs attribute set twice to normalize properties
|
||
urlParsingNode.setAttribute('href', href);
|
||
href = urlParsingNode.href;
|
||
}
|
||
|
||
urlParsingNode.setAttribute('href', href);
|
||
|
||
// urlParsingNode provides the UrlUtils interface - http://url.spec.whatwg.org/#urlutils
|
||
return {
|
||
href: urlParsingNode.href,
|
||
protocol: urlParsingNode.protocol ? urlParsingNode.protocol.replace(/:$/, '') : '',
|
||
host: urlParsingNode.host,
|
||
search: urlParsingNode.search ? urlParsingNode.search.replace(/^\?/, '') : '',
|
||
hash: urlParsingNode.hash ? urlParsingNode.hash.replace(/^#/, '') : '',
|
||
hostname: urlParsingNode.hostname,
|
||
port: urlParsingNode.port,
|
||
pathname: (urlParsingNode.pathname.charAt(0) === '/') ?
|
||
urlParsingNode.pathname :
|
||
'/' + urlParsingNode.pathname
|
||
};
|
||
}
|
||
|
||
originURL = resolveURL(window.location.href);
|
||
|
||
/**
|
||
* Determine if a URL shares the same origin as the current location
|
||
*
|
||
* @param {String} requestURL The URL to test
|
||
* @returns {boolean} True if URL shares the same origin, otherwise false
|
||
*/
|
||
return function isURLSameOrigin(requestURL) {
|
||
var parsed = (utils.isString(requestURL)) ? resolveURL(requestURL) : requestURL;
|
||
return (parsed.protocol === originURL.protocol &&
|
||
parsed.host === originURL.host);
|
||
};
|
||
})() :
|
||
|
||
// Non standard browser envs (web workers, react-native) lack needed support.
|
||
(function nonStandardBrowserEnv() {
|
||
return function isURLSameOrigin() {
|
||
return true;
|
||
};
|
||
})()
|
||
);
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 6906:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var utils = __nccwpck_require__(9708);
|
||
|
||
module.exports = function normalizeHeaderName(headers, normalizedName) {
|
||
utils.forEach(headers, function processHeader(value, name) {
|
||
if (name !== normalizedName && name.toUpperCase() === normalizedName.toUpperCase()) {
|
||
headers[normalizedName] = value;
|
||
delete headers[name];
|
||
}
|
||
});
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 8411:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var utils = __nccwpck_require__(9708);
|
||
|
||
// Headers whose duplicates are ignored by node
|
||
// c.f. https://nodejs.org/api/http.html#http_message_headers
|
||
var ignoreDuplicateOf = [
|
||
'age', 'authorization', 'content-length', 'content-type', 'etag',
|
||
'expires', 'from', 'host', 'if-modified-since', 'if-unmodified-since',
|
||
'last-modified', 'location', 'max-forwards', 'proxy-authorization',
|
||
'referer', 'retry-after', 'user-agent'
|
||
];
|
||
|
||
/**
|
||
* Parse headers into an object
|
||
*
|
||
* ```
|
||
* Date: Wed, 27 Aug 2014 08:58:49 GMT
|
||
* Content-Type: application/json
|
||
* Connection: keep-alive
|
||
* Transfer-Encoding: chunked
|
||
* ```
|
||
*
|
||
* @param {String} headers Headers needing to be parsed
|
||
* @returns {Object} Headers parsed into an object
|
||
*/
|
||
module.exports = function parseHeaders(headers) {
|
||
var parsed = {};
|
||
var key;
|
||
var val;
|
||
var i;
|
||
|
||
if (!headers) { return parsed; }
|
||
|
||
utils.forEach(headers.split('\n'), function parser(line) {
|
||
i = line.indexOf(':');
|
||
key = utils.trim(line.substr(0, i)).toLowerCase();
|
||
val = utils.trim(line.substr(i + 1));
|
||
|
||
if (key) {
|
||
if (parsed[key] && ignoreDuplicateOf.indexOf(key) >= 0) {
|
||
return;
|
||
}
|
||
if (key === 'set-cookie') {
|
||
parsed[key] = (parsed[key] ? parsed[key] : []).concat([val]);
|
||
} else {
|
||
parsed[key] = parsed[key] ? parsed[key] + ', ' + val : val;
|
||
}
|
||
}
|
||
});
|
||
|
||
return parsed;
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 8174:
|
||
/***/ ((module) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
/**
|
||
* Syntactic sugar for invoking a function and expanding an array for arguments.
|
||
*
|
||
* Common use case would be to use `Function.prototype.apply`.
|
||
*
|
||
* ```js
|
||
* function f(x, y, z) {}
|
||
* var args = [1, 2, 3];
|
||
* f.apply(null, args);
|
||
* ```
|
||
*
|
||
* With `spread` this example can be re-written.
|
||
*
|
||
* ```js
|
||
* spread(function(x, y, z) {})([1, 2, 3]);
|
||
* ```
|
||
*
|
||
* @param {Function} callback
|
||
* @returns {Function}
|
||
*/
|
||
module.exports = function spread(callback) {
|
||
return function wrap(arr) {
|
||
return callback.apply(null, arr);
|
||
};
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 9708:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var bind = __nccwpck_require__(2897);
|
||
|
||
/*global toString:true*/
|
||
|
||
// utils is a library of generic helper functions non-specific to axios
|
||
|
||
var toString = Object.prototype.toString;
|
||
|
||
/**
|
||
* Determine if a value is an Array
|
||
*
|
||
* @param {Object} val The value to test
|
||
* @returns {boolean} True if value is an Array, otherwise false
|
||
*/
|
||
function isArray(val) {
|
||
return toString.call(val) === '[object Array]';
|
||
}
|
||
|
||
/**
|
||
* Determine if a value is undefined
|
||
*
|
||
* @param {Object} val The value to test
|
||
* @returns {boolean} True if the value is undefined, otherwise false
|
||
*/
|
||
function isUndefined(val) {
|
||
return typeof val === 'undefined';
|
||
}
|
||
|
||
/**
|
||
* Determine if a value is a Buffer
|
||
*
|
||
* @param {Object} val The value to test
|
||
* @returns {boolean} True if value is a Buffer, otherwise false
|
||
*/
|
||
function isBuffer(val) {
|
||
return val !== null && !isUndefined(val) && val.constructor !== null && !isUndefined(val.constructor)
|
||
&& typeof val.constructor.isBuffer === 'function' && val.constructor.isBuffer(val);
|
||
}
|
||
|
||
/**
|
||
* Determine if a value is an ArrayBuffer
|
||
*
|
||
* @param {Object} val The value to test
|
||
* @returns {boolean} True if value is an ArrayBuffer, otherwise false
|
||
*/
|
||
function isArrayBuffer(val) {
|
||
return toString.call(val) === '[object ArrayBuffer]';
|
||
}
|
||
|
||
/**
|
||
* Determine if a value is a FormData
|
||
*
|
||
* @param {Object} val The value to test
|
||
* @returns {boolean} True if value is an FormData, otherwise false
|
||
*/
|
||
function isFormData(val) {
|
||
return (typeof FormData !== 'undefined') && (val instanceof FormData);
|
||
}
|
||
|
||
/**
|
||
* Determine if a value is a view on an ArrayBuffer
|
||
*
|
||
* @param {Object} val The value to test
|
||
* @returns {boolean} True if value is a view on an ArrayBuffer, otherwise false
|
||
*/
|
||
function isArrayBufferView(val) {
|
||
var result;
|
||
if ((typeof ArrayBuffer !== 'undefined') && (ArrayBuffer.isView)) {
|
||
result = ArrayBuffer.isView(val);
|
||
} else {
|
||
result = (val) && (val.buffer) && (val.buffer instanceof ArrayBuffer);
|
||
}
|
||
return result;
|
||
}
|
||
|
||
/**
|
||
* Determine if a value is a String
|
||
*
|
||
* @param {Object} val The value to test
|
||
* @returns {boolean} True if value is a String, otherwise false
|
||
*/
|
||
function isString(val) {
|
||
return typeof val === 'string';
|
||
}
|
||
|
||
/**
|
||
* Determine if a value is a Number
|
||
*
|
||
* @param {Object} val The value to test
|
||
* @returns {boolean} True if value is a Number, otherwise false
|
||
*/
|
||
function isNumber(val) {
|
||
return typeof val === 'number';
|
||
}
|
||
|
||
/**
|
||
* Determine if a value is an Object
|
||
*
|
||
* @param {Object} val The value to test
|
||
* @returns {boolean} True if value is an Object, otherwise false
|
||
*/
|
||
function isObject(val) {
|
||
return val !== null && typeof val === 'object';
|
||
}
|
||
|
||
/**
|
||
* Determine if a value is a plain Object
|
||
*
|
||
* @param {Object} val The value to test
|
||
* @return {boolean} True if value is a plain Object, otherwise false
|
||
*/
|
||
function isPlainObject(val) {
|
||
if (toString.call(val) !== '[object Object]') {
|
||
return false;
|
||
}
|
||
|
||
var prototype = Object.getPrototypeOf(val);
|
||
return prototype === null || prototype === Object.prototype;
|
||
}
|
||
|
||
/**
|
||
* Determine if a value is a Date
|
||
*
|
||
* @param {Object} val The value to test
|
||
* @returns {boolean} True if value is a Date, otherwise false
|
||
*/
|
||
function isDate(val) {
|
||
return toString.call(val) === '[object Date]';
|
||
}
|
||
|
||
/**
|
||
* Determine if a value is a File
|
||
*
|
||
* @param {Object} val The value to test
|
||
* @returns {boolean} True if value is a File, otherwise false
|
||
*/
|
||
function isFile(val) {
|
||
return toString.call(val) === '[object File]';
|
||
}
|
||
|
||
/**
|
||
* Determine if a value is a Blob
|
||
*
|
||
* @param {Object} val The value to test
|
||
* @returns {boolean} True if value is a Blob, otherwise false
|
||
*/
|
||
function isBlob(val) {
|
||
return toString.call(val) === '[object Blob]';
|
||
}
|
||
|
||
/**
|
||
* Determine if a value is a Function
|
||
*
|
||
* @param {Object} val The value to test
|
||
* @returns {boolean} True if value is a Function, otherwise false
|
||
*/
|
||
function isFunction(val) {
|
||
return toString.call(val) === '[object Function]';
|
||
}
|
||
|
||
/**
|
||
* Determine if a value is a Stream
|
||
*
|
||
* @param {Object} val The value to test
|
||
* @returns {boolean} True if value is a Stream, otherwise false
|
||
*/
|
||
function isStream(val) {
|
||
return isObject(val) && isFunction(val.pipe);
|
||
}
|
||
|
||
/**
|
||
* Determine if a value is a URLSearchParams object
|
||
*
|
||
* @param {Object} val The value to test
|
||
* @returns {boolean} True if value is a URLSearchParams object, otherwise false
|
||
*/
|
||
function isURLSearchParams(val) {
|
||
return typeof URLSearchParams !== 'undefined' && val instanceof URLSearchParams;
|
||
}
|
||
|
||
/**
|
||
* Trim excess whitespace off the beginning and end of a string
|
||
*
|
||
* @param {String} str The String to trim
|
||
* @returns {String} The String freed of excess whitespace
|
||
*/
|
||
function trim(str) {
|
||
return str.replace(/^\s*/, '').replace(/\s*$/, '');
|
||
}
|
||
|
||
/**
|
||
* Determine if we're running in a standard browser environment
|
||
*
|
||
* This allows axios to run in a web worker, and react-native.
|
||
* Both environments support XMLHttpRequest, but not fully standard globals.
|
||
*
|
||
* web workers:
|
||
* typeof window -> undefined
|
||
* typeof document -> undefined
|
||
*
|
||
* react-native:
|
||
* navigator.product -> 'ReactNative'
|
||
* nativescript
|
||
* navigator.product -> 'NativeScript' or 'NS'
|
||
*/
|
||
function isStandardBrowserEnv() {
|
||
if (typeof navigator !== 'undefined' && (navigator.product === 'ReactNative' ||
|
||
navigator.product === 'NativeScript' ||
|
||
navigator.product === 'NS')) {
|
||
return false;
|
||
}
|
||
return (
|
||
typeof window !== 'undefined' &&
|
||
typeof document !== 'undefined'
|
||
);
|
||
}
|
||
|
||
/**
|
||
* Iterate over an Array or an Object invoking a function for each item.
|
||
*
|
||
* If `obj` is an Array callback will be called passing
|
||
* the value, index, and complete array for each item.
|
||
*
|
||
* If 'obj' is an Object callback will be called passing
|
||
* the value, key, and complete object for each property.
|
||
*
|
||
* @param {Object|Array} obj The object to iterate
|
||
* @param {Function} fn The callback to invoke for each item
|
||
*/
|
||
function forEach(obj, fn) {
|
||
// Don't bother if no value provided
|
||
if (obj === null || typeof obj === 'undefined') {
|
||
return;
|
||
}
|
||
|
||
// Force an array if not already something iterable
|
||
if (typeof obj !== 'object') {
|
||
/*eslint no-param-reassign:0*/
|
||
obj = [obj];
|
||
}
|
||
|
||
if (isArray(obj)) {
|
||
// Iterate over array values
|
||
for (var i = 0, l = obj.length; i < l; i++) {
|
||
fn.call(null, obj[i], i, obj);
|
||
}
|
||
} else {
|
||
// Iterate over object keys
|
||
for (var key in obj) {
|
||
if (Object.prototype.hasOwnProperty.call(obj, key)) {
|
||
fn.call(null, obj[key], key, obj);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Accepts varargs expecting each argument to be an object, then
|
||
* immutably merges the properties of each object and returns result.
|
||
*
|
||
* When multiple objects contain the same key the later object in
|
||
* the arguments list will take precedence.
|
||
*
|
||
* Example:
|
||
*
|
||
* ```js
|
||
* var result = merge({foo: 123}, {foo: 456});
|
||
* console.log(result.foo); // outputs 456
|
||
* ```
|
||
*
|
||
* @param {Object} obj1 Object to merge
|
||
* @returns {Object} Result of all merge properties
|
||
*/
|
||
function merge(/* obj1, obj2, obj3, ... */) {
|
||
var result = {};
|
||
function assignValue(val, key) {
|
||
if (isPlainObject(result[key]) && isPlainObject(val)) {
|
||
result[key] = merge(result[key], val);
|
||
} else if (isPlainObject(val)) {
|
||
result[key] = merge({}, val);
|
||
} else if (isArray(val)) {
|
||
result[key] = val.slice();
|
||
} else {
|
||
result[key] = val;
|
||
}
|
||
}
|
||
|
||
for (var i = 0, l = arguments.length; i < l; i++) {
|
||
forEach(arguments[i], assignValue);
|
||
}
|
||
return result;
|
||
}
|
||
|
||
/**
|
||
* Extends object a by mutably adding to it the properties of object b.
|
||
*
|
||
* @param {Object} a The object to be extended
|
||
* @param {Object} b The object to copy properties from
|
||
* @param {Object} thisArg The object to bind function to
|
||
* @return {Object} The resulting value of object a
|
||
*/
|
||
function extend(a, b, thisArg) {
|
||
forEach(b, function assignValue(val, key) {
|
||
if (thisArg && typeof val === 'function') {
|
||
a[key] = bind(val, thisArg);
|
||
} else {
|
||
a[key] = val;
|
||
}
|
||
});
|
||
return a;
|
||
}
|
||
|
||
/**
|
||
* Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
|
||
*
|
||
* @param {string} content with BOM
|
||
* @return {string} content value without BOM
|
||
*/
|
||
function stripBOM(content) {
|
||
if (content.charCodeAt(0) === 0xFEFF) {
|
||
content = content.slice(1);
|
||
}
|
||
return content;
|
||
}
|
||
|
||
module.exports = {
|
||
isArray: isArray,
|
||
isArrayBuffer: isArrayBuffer,
|
||
isBuffer: isBuffer,
|
||
isFormData: isFormData,
|
||
isArrayBufferView: isArrayBufferView,
|
||
isString: isString,
|
||
isNumber: isNumber,
|
||
isObject: isObject,
|
||
isPlainObject: isPlainObject,
|
||
isUndefined: isUndefined,
|
||
isDate: isDate,
|
||
isFile: isFile,
|
||
isBlob: isBlob,
|
||
isFunction: isFunction,
|
||
isStream: isStream,
|
||
isURLSearchParams: isURLSearchParams,
|
||
isStandardBrowserEnv: isStandardBrowserEnv,
|
||
forEach: forEach,
|
||
merge: merge,
|
||
extend: extend,
|
||
trim: trim,
|
||
stripBOM: stripBOM
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 8976:
|
||
/***/ ((module) => {
|
||
|
||
"use strict";
|
||
|
||
module.exports = balanced;
|
||
function balanced(a, b, str) {
|
||
if (a instanceof RegExp) a = maybeMatch(a, str);
|
||
if (b instanceof RegExp) b = maybeMatch(b, str);
|
||
|
||
var r = range(a, b, str);
|
||
|
||
return r && {
|
||
start: r[0],
|
||
end: r[1],
|
||
pre: str.slice(0, r[0]),
|
||
body: str.slice(r[0] + a.length, r[1]),
|
||
post: str.slice(r[1] + b.length)
|
||
};
|
||
}
|
||
|
||
function maybeMatch(reg, str) {
|
||
var m = str.match(reg);
|
||
return m ? m[0] : null;
|
||
}
|
||
|
||
balanced.range = range;
|
||
function range(a, b, str) {
|
||
var begs, beg, left, right, result;
|
||
var ai = str.indexOf(a);
|
||
var bi = str.indexOf(b, ai + 1);
|
||
var i = ai;
|
||
|
||
if (ai >= 0 && bi > 0) {
|
||
if(a===b) {
|
||
return [ai, bi];
|
||
}
|
||
begs = [];
|
||
left = str.length;
|
||
|
||
while (i >= 0 && !result) {
|
||
if (i == ai) {
|
||
begs.push(i);
|
||
ai = str.indexOf(a, i + 1);
|
||
} else if (begs.length == 1) {
|
||
result = [ begs.pop(), bi ];
|
||
} else {
|
||
beg = begs.pop();
|
||
if (beg < left) {
|
||
left = beg;
|
||
right = bi;
|
||
}
|
||
|
||
bi = str.indexOf(b, i + 1);
|
||
}
|
||
|
||
i = ai < bi && ai >= 0 ? ai : bi;
|
||
}
|
||
|
||
if (begs.length) {
|
||
result = [ left, right ];
|
||
}
|
||
}
|
||
|
||
return result;
|
||
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 8091:
|
||
/***/ (function(module, exports, __nccwpck_require__) {
|
||
|
||
/* module decorator */ module = __nccwpck_require__.nmd(module);
|
||
/*! https://mths.be/base64 v1.0.0 by @mathias | MIT license */
|
||
;(function(root) {
|
||
|
||
// Detect free variables `exports`.
|
||
var freeExports = true && exports;
|
||
|
||
// Detect free variable `module`.
|
||
var freeModule = true && module &&
|
||
module.exports == freeExports && module;
|
||
|
||
// Detect free variable `global`, from Node.js or Browserified code, and use
|
||
// it as `root`.
|
||
var freeGlobal = typeof global == 'object' && global;
|
||
if (freeGlobal.global === freeGlobal || freeGlobal.window === freeGlobal) {
|
||
root = freeGlobal;
|
||
}
|
||
|
||
/*--------------------------------------------------------------------------*/
|
||
|
||
var InvalidCharacterError = function(message) {
|
||
this.message = message;
|
||
};
|
||
InvalidCharacterError.prototype = new Error;
|
||
InvalidCharacterError.prototype.name = 'InvalidCharacterError';
|
||
|
||
var error = function(message) {
|
||
// Note: the error messages used throughout this file match those used by
|
||
// the native `atob`/`btoa` implementation in Chromium.
|
||
throw new InvalidCharacterError(message);
|
||
};
|
||
|
||
var TABLE = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
||
// http://whatwg.org/html/common-microsyntaxes.html#space-character
|
||
var REGEX_SPACE_CHARACTERS = /[\t\n\f\r ]/g;
|
||
|
||
// `decode` is designed to be fully compatible with `atob` as described in the
|
||
// HTML Standard. http://whatwg.org/html/webappapis.html#dom-windowbase64-atob
|
||
// The optimized base64-decoding algorithm used is based on @atk’s excellent
|
||
// implementation. https://gist.github.com/atk/1020396
|
||
var decode = function(input) {
|
||
input = String(input)
|
||
.replace(REGEX_SPACE_CHARACTERS, '');
|
||
var length = input.length;
|
||
if (length % 4 == 0) {
|
||
input = input.replace(/==?$/, '');
|
||
length = input.length;
|
||
}
|
||
if (
|
||
length % 4 == 1 ||
|
||
// http://whatwg.org/C#alphanumeric-ascii-characters
|
||
/[^+a-zA-Z0-9/]/.test(input)
|
||
) {
|
||
error(
|
||
'Invalid character: the string to be decoded is not correctly encoded.'
|
||
);
|
||
}
|
||
var bitCounter = 0;
|
||
var bitStorage;
|
||
var buffer;
|
||
var output = '';
|
||
var position = -1;
|
||
while (++position < length) {
|
||
buffer = TABLE.indexOf(input.charAt(position));
|
||
bitStorage = bitCounter % 4 ? bitStorage * 64 + buffer : buffer;
|
||
// Unless this is the first of a group of 4 characters…
|
||
if (bitCounter++ % 4) {
|
||
// …convert the first 8 bits to a single ASCII character.
|
||
output += String.fromCharCode(
|
||
0xFF & bitStorage >> (-2 * bitCounter & 6)
|
||
);
|
||
}
|
||
}
|
||
return output;
|
||
};
|
||
|
||
// `encode` is designed to be fully compatible with `btoa` as described in the
|
||
// HTML Standard: http://whatwg.org/html/webappapis.html#dom-windowbase64-btoa
|
||
var encode = function(input) {
|
||
input = String(input);
|
||
if (/[^\0-\xFF]/.test(input)) {
|
||
// Note: no need to special-case astral symbols here, as surrogates are
|
||
// matched, and the input is supposed to only contain ASCII anyway.
|
||
error(
|
||
'The string to be encoded contains characters outside of the ' +
|
||
'Latin1 range.'
|
||
);
|
||
}
|
||
var padding = input.length % 3;
|
||
var output = '';
|
||
var position = -1;
|
||
var a;
|
||
var b;
|
||
var c;
|
||
var buffer;
|
||
// Make sure any padding is handled outside of the loop.
|
||
var length = input.length - padding;
|
||
|
||
while (++position < length) {
|
||
// Read three bytes, i.e. 24 bits.
|
||
a = input.charCodeAt(position) << 16;
|
||
b = input.charCodeAt(++position) << 8;
|
||
c = input.charCodeAt(++position);
|
||
buffer = a + b + c;
|
||
// Turn the 24 bits into four chunks of 6 bits each, and append the
|
||
// matching character for each of them to the output.
|
||
output += (
|
||
TABLE.charAt(buffer >> 18 & 0x3F) +
|
||
TABLE.charAt(buffer >> 12 & 0x3F) +
|
||
TABLE.charAt(buffer >> 6 & 0x3F) +
|
||
TABLE.charAt(buffer & 0x3F)
|
||
);
|
||
}
|
||
|
||
if (padding == 2) {
|
||
a = input.charCodeAt(position) << 8;
|
||
b = input.charCodeAt(++position);
|
||
buffer = a + b;
|
||
output += (
|
||
TABLE.charAt(buffer >> 10) +
|
||
TABLE.charAt((buffer >> 4) & 0x3F) +
|
||
TABLE.charAt((buffer << 2) & 0x3F) +
|
||
'='
|
||
);
|
||
} else if (padding == 1) {
|
||
buffer = input.charCodeAt(position);
|
||
output += (
|
||
TABLE.charAt(buffer >> 2) +
|
||
TABLE.charAt((buffer << 4) & 0x3F) +
|
||
'=='
|
||
);
|
||
}
|
||
|
||
return output;
|
||
};
|
||
|
||
var base64 = {
|
||
'encode': encode,
|
||
'decode': decode,
|
||
'version': '1.0.0'
|
||
};
|
||
|
||
// Some AMD build optimizers, like r.js, check for specific condition patterns
|
||
// like the following:
|
||
if (
|
||
typeof define == 'function' &&
|
||
typeof define.amd == 'object' &&
|
||
define.amd
|
||
) {
|
||
define(function() {
|
||
return base64;
|
||
});
|
||
} else if (freeExports && !freeExports.nodeType) {
|
||
if (freeModule) { // in Node.js or RingoJS v0.8.0+
|
||
freeModule.exports = base64;
|
||
} else { // in Narwhal or RingoJS v0.7.0-
|
||
for (var key in base64) {
|
||
base64.hasOwnProperty(key) && (freeExports[key] = base64[key]);
|
||
}
|
||
}
|
||
} else { // in Rhino or a web browser
|
||
root.base64 = base64;
|
||
}
|
||
|
||
}(this));
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 5627:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
const { Buffer } = __nccwpck_require__(4293)
|
||
const symbol = Symbol.for('BufferList')
|
||
|
||
function BufferList (buf) {
|
||
if (!(this instanceof BufferList)) {
|
||
return new BufferList(buf)
|
||
}
|
||
|
||
BufferList._init.call(this, buf)
|
||
}
|
||
|
||
BufferList._init = function _init (buf) {
|
||
Object.defineProperty(this, symbol, { value: true })
|
||
|
||
this._bufs = []
|
||
this.length = 0
|
||
|
||
if (buf) {
|
||
this.append(buf)
|
||
}
|
||
}
|
||
|
||
BufferList.prototype._new = function _new (buf) {
|
||
return new BufferList(buf)
|
||
}
|
||
|
||
BufferList.prototype._offset = function _offset (offset) {
|
||
if (offset === 0) {
|
||
return [0, 0]
|
||
}
|
||
|
||
let tot = 0
|
||
|
||
for (let i = 0; i < this._bufs.length; i++) {
|
||
const _t = tot + this._bufs[i].length
|
||
if (offset < _t || i === this._bufs.length - 1) {
|
||
return [i, offset - tot]
|
||
}
|
||
tot = _t
|
||
}
|
||
}
|
||
|
||
BufferList.prototype._reverseOffset = function (blOffset) {
|
||
const bufferId = blOffset[0]
|
||
let offset = blOffset[1]
|
||
|
||
for (let i = 0; i < bufferId; i++) {
|
||
offset += this._bufs[i].length
|
||
}
|
||
|
||
return offset
|
||
}
|
||
|
||
BufferList.prototype.get = function get (index) {
|
||
if (index > this.length || index < 0) {
|
||
return undefined
|
||
}
|
||
|
||
const offset = this._offset(index)
|
||
|
||
return this._bufs[offset[0]][offset[1]]
|
||
}
|
||
|
||
BufferList.prototype.slice = function slice (start, end) {
|
||
if (typeof start === 'number' && start < 0) {
|
||
start += this.length
|
||
}
|
||
|
||
if (typeof end === 'number' && end < 0) {
|
||
end += this.length
|
||
}
|
||
|
||
return this.copy(null, 0, start, end)
|
||
}
|
||
|
||
BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) {
|
||
if (typeof srcStart !== 'number' || srcStart < 0) {
|
||
srcStart = 0
|
||
}
|
||
|
||
if (typeof srcEnd !== 'number' || srcEnd > this.length) {
|
||
srcEnd = this.length
|
||
}
|
||
|
||
if (srcStart >= this.length) {
|
||
return dst || Buffer.alloc(0)
|
||
}
|
||
|
||
if (srcEnd <= 0) {
|
||
return dst || Buffer.alloc(0)
|
||
}
|
||
|
||
const copy = !!dst
|
||
const off = this._offset(srcStart)
|
||
const len = srcEnd - srcStart
|
||
let bytes = len
|
||
let bufoff = (copy && dstStart) || 0
|
||
let start = off[1]
|
||
|
||
// copy/slice everything
|
||
if (srcStart === 0 && srcEnd === this.length) {
|
||
if (!copy) {
|
||
// slice, but full concat if multiple buffers
|
||
return this._bufs.length === 1
|
||
? this._bufs[0]
|
||
: Buffer.concat(this._bufs, this.length)
|
||
}
|
||
|
||
// copy, need to copy individual buffers
|
||
for (let i = 0; i < this._bufs.length; i++) {
|
||
this._bufs[i].copy(dst, bufoff)
|
||
bufoff += this._bufs[i].length
|
||
}
|
||
|
||
return dst
|
||
}
|
||
|
||
// easy, cheap case where it's a subset of one of the buffers
|
||
if (bytes <= this._bufs[off[0]].length - start) {
|
||
return copy
|
||
? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
|
||
: this._bufs[off[0]].slice(start, start + bytes)
|
||
}
|
||
|
||
if (!copy) {
|
||
// a slice, we need something to copy in to
|
||
dst = Buffer.allocUnsafe(len)
|
||
}
|
||
|
||
for (let i = off[0]; i < this._bufs.length; i++) {
|
||
const l = this._bufs[i].length - start
|
||
|
||
if (bytes > l) {
|
||
this._bufs[i].copy(dst, bufoff, start)
|
||
bufoff += l
|
||
} else {
|
||
this._bufs[i].copy(dst, bufoff, start, start + bytes)
|
||
bufoff += l
|
||
break
|
||
}
|
||
|
||
bytes -= l
|
||
|
||
if (start) {
|
||
start = 0
|
||
}
|
||
}
|
||
|
||
// safeguard so that we don't return uninitialized memory
|
||
if (dst.length > bufoff) return dst.slice(0, bufoff)
|
||
|
||
return dst
|
||
}
|
||
|
||
BufferList.prototype.shallowSlice = function shallowSlice (start, end) {
|
||
start = start || 0
|
||
end = typeof end !== 'number' ? this.length : end
|
||
|
||
if (start < 0) {
|
||
start += this.length
|
||
}
|
||
|
||
if (end < 0) {
|
||
end += this.length
|
||
}
|
||
|
||
if (start === end) {
|
||
return this._new()
|
||
}
|
||
|
||
const startOffset = this._offset(start)
|
||
const endOffset = this._offset(end)
|
||
const buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1)
|
||
|
||
if (endOffset[1] === 0) {
|
||
buffers.pop()
|
||
} else {
|
||
buffers[buffers.length - 1] = buffers[buffers.length - 1].slice(0, endOffset[1])
|
||
}
|
||
|
||
if (startOffset[1] !== 0) {
|
||
buffers[0] = buffers[0].slice(startOffset[1])
|
||
}
|
||
|
||
return this._new(buffers)
|
||
}
|
||
|
||
BufferList.prototype.toString = function toString (encoding, start, end) {
|
||
return this.slice(start, end).toString(encoding)
|
||
}
|
||
|
||
BufferList.prototype.consume = function consume (bytes) {
|
||
// first, normalize the argument, in accordance with how Buffer does it
|
||
bytes = Math.trunc(bytes)
|
||
// do nothing if not a positive number
|
||
if (Number.isNaN(bytes) || bytes <= 0) return this
|
||
|
||
while (this._bufs.length) {
|
||
if (bytes >= this._bufs[0].length) {
|
||
bytes -= this._bufs[0].length
|
||
this.length -= this._bufs[0].length
|
||
this._bufs.shift()
|
||
} else {
|
||
this._bufs[0] = this._bufs[0].slice(bytes)
|
||
this.length -= bytes
|
||
break
|
||
}
|
||
}
|
||
|
||
return this
|
||
}
|
||
|
||
BufferList.prototype.duplicate = function duplicate () {
|
||
const copy = this._new()
|
||
|
||
for (let i = 0; i < this._bufs.length; i++) {
|
||
copy.append(this._bufs[i])
|
||
}
|
||
|
||
return copy
|
||
}
|
||
|
||
BufferList.prototype.append = function append (buf) {
|
||
if (buf == null) {
|
||
return this
|
||
}
|
||
|
||
if (buf.buffer) {
|
||
// append a view of the underlying ArrayBuffer
|
||
this._appendBuffer(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength))
|
||
} else if (Array.isArray(buf)) {
|
||
for (let i = 0; i < buf.length; i++) {
|
||
this.append(buf[i])
|
||
}
|
||
} else if (this._isBufferList(buf)) {
|
||
// unwrap argument into individual BufferLists
|
||
for (let i = 0; i < buf._bufs.length; i++) {
|
||
this.append(buf._bufs[i])
|
||
}
|
||
} else {
|
||
// coerce number arguments to strings, since Buffer(number) does
|
||
// uninitialized memory allocation
|
||
if (typeof buf === 'number') {
|
||
buf = buf.toString()
|
||
}
|
||
|
||
this._appendBuffer(Buffer.from(buf))
|
||
}
|
||
|
||
return this
|
||
}
|
||
|
||
BufferList.prototype._appendBuffer = function appendBuffer (buf) {
|
||
this._bufs.push(buf)
|
||
this.length += buf.length
|
||
}
|
||
|
||
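// Searches for `search` across buffer boundaries: the native Buffer#indexOf
// is used inside each chunk, and a byte-by-byte _match() handles the case
// where the remaining window in a chunk is smaller than the needle.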
BufferList.prototype.indexOf = function (search, offset, encoding) {
|
||
if (encoding === undefined && typeof offset === 'string') {
|
||
encoding = offset
|
||
offset = undefined
|
||
}
|
||
|
||
if (typeof search === 'function' || Array.isArray(search)) {
|
||
throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.')
|
||
} else if (typeof search === 'number') {
|
||
search = Buffer.from([search])
|
||
} else if (typeof search === 'string') {
|
||
search = Buffer.from(search, encoding)
|
||
} else if (this._isBufferList(search)) {
|
||
search = search.slice()
|
||
} else if (Array.isArray(search.buffer)) {
|
||
search = Buffer.from(search.buffer, search.byteOffset, search.byteLength)
|
||
} else if (!Buffer.isBuffer(search)) {
|
||
search = Buffer.from(search)
|
||
}
|
||
|
||
offset = Number(offset || 0)
|
||
|
||
if (isNaN(offset)) {
|
||
offset = 0
|
||
}
|
||
|
||
if (offset < 0) {
|
||
offset = this.length + offset
|
||
}
|
||
|
||
if (offset < 0) {
|
||
offset = 0
|
||
}
|
||
|
||
if (search.length === 0) {
|
||
return offset > this.length ? this.length : offset
|
||
}
|
||
|
||
const blOffset = this._offset(offset)
|
||
let blIndex = blOffset[0] // index of which internal buffer we're working on
|
||
let buffOffset = blOffset[1] // offset of the internal buffer we're working on
|
||
|
||
// scan over each buffer
|
||
for (; blIndex < this._bufs.length; blIndex++) {
|
||
const buff = this._bufs[blIndex]
|
||
|
||
while (buffOffset < buff.length) {
|
||
const availableWindow = buff.length - buffOffset
|
||
|
||
if (availableWindow >= search.length) {
|
||
const nativeSearchResult = buff.indexOf(search, buffOffset)
|
||
|
||
if (nativeSearchResult !== -1) {
|
||
return this._reverseOffset([blIndex, nativeSearchResult])
|
||
}
|
||
|
||
buffOffset = buff.length - search.length + 1 // end of native search window
|
||
} else {
|
||
const revOffset = this._reverseOffset([blIndex, buffOffset])
|
||
|
||
if (this._match(revOffset, search)) {
|
||
return revOffset
|
||
}
|
||
|
||
buffOffset++
|
||
}
|
||
}
|
||
|
||
buffOffset = 0
|
||
}
|
||
|
||
return -1
|
||
}
|
||
|
||
BufferList.prototype._match = function (offset, search) {
|
||
if (this.length - offset < search.length) {
|
||
return false
|
||
}
|
||
|
||
for (let searchOffset = 0; searchOffset < search.length; searchOffset++) {
|
||
if (this.get(offset + searchOffset) !== search[searchOffset]) {
|
||
return false
|
||
}
|
||
}
|
||
return true
|
||
}
|
||
|
||
;(function () {
|
||
const methods = {
|
||
readDoubleBE: 8,
|
||
readDoubleLE: 8,
|
||
readFloatBE: 4,
|
||
readFloatLE: 4,
|
||
readInt32BE: 4,
|
||
readInt32LE: 4,
|
||
readUInt32BE: 4,
|
||
readUInt32LE: 4,
|
||
readInt16BE: 2,
|
||
readInt16LE: 2,
|
||
readUInt16BE: 2,
|
||
readUInt16LE: 2,
|
||
readInt8: 1,
|
||
readUInt8: 1,
|
||
readIntBE: null,
|
||
readIntLE: null,
|
||
readUIntBE: null,
|
||
readUIntLE: null
|
||
}
|
||
|
||
for (const m in methods) {
|
||
(function (m) {
|
||
if (methods[m] === null) {
|
||
BufferList.prototype[m] = function (offset, byteLength) {
|
||
return this.slice(offset, offset + byteLength)[m](0, byteLength)
|
||
}
|
||
} else {
|
||
BufferList.prototype[m] = function (offset = 0) {
|
||
return this.slice(offset, offset + methods[m])[m](0)
|
||
}
|
||
}
|
||
}(m))
|
||
}
|
||
}())
|
||
|
||
// Used internally by the class and also as an indicator of this object being
|
||
// a `BufferList`. It's not possible to use `instanceof BufferList` in a browser
|
||
// environment because there could be multiple different copies of the
|
||
// BufferList class and some `BufferList`s might be `BufferList`s.
|
||
BufferList.prototype._isBufferList = function _isBufferList (b) {
|
||
return b instanceof BufferList || BufferList.isBufferList(b)
|
||
}
|
||
|
||
BufferList.isBufferList = function isBufferList (b) {
|
||
return b != null && b[symbol]
|
||
}
|
||
|
||
module.exports = BufferList
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 1527:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
const DuplexStream = __nccwpck_require__(2498).Duplex
|
||
const inherits = __nccwpck_require__(3753)
|
||
const BufferList = __nccwpck_require__(5627)
|
||
|
||
function BufferListStream (callback) {
|
||
if (!(this instanceof BufferListStream)) {
|
||
return new BufferListStream(callback)
|
||
}
|
||
|
||
if (typeof callback === 'function') {
|
||
this._callback = callback
|
||
|
||
const piper = function piper (err) {
|
||
if (this._callback) {
|
||
this._callback(err)
|
||
this._callback = null
|
||
}
|
||
}.bind(this)
|
||
|
||
this.on('pipe', function onPipe (src) {
|
||
src.on('error', piper)
|
||
})
|
||
this.on('unpipe', function onUnpipe (src) {
|
||
src.removeListener('error', piper)
|
||
})
|
||
|
||
callback = null
|
||
}
|
||
|
||
BufferList._init.call(this, callback)
|
||
DuplexStream.call(this)
|
||
}
|
||
|
||
inherits(BufferListStream, DuplexStream)
|
||
Object.assign(BufferListStream.prototype, BufferList.prototype)
|
||
|
||
BufferListStream.prototype._new = function _new (callback) {
|
||
return new BufferListStream(callback)
|
||
}
|
||
|
||
BufferListStream.prototype._write = function _write (buf, encoding, callback) {
|
||
this._appendBuffer(buf)
|
||
|
||
if (typeof callback === 'function') {
|
||
callback()
|
||
}
|
||
}
|
||
|
||
BufferListStream.prototype._read = function _read (size) {
|
||
if (!this.length) {
|
||
return this.push(null)
|
||
}
|
||
|
||
size = Math.min(size, this.length)
|
||
this.push(this.slice(0, size))
|
||
this.consume(size)
|
||
}
|
||
|
||
BufferListStream.prototype.end = function end (chunk) {
|
||
DuplexStream.prototype.end.call(this, chunk)
|
||
|
||
if (this._callback) {
|
||
this._callback(null, this.slice())
|
||
this._callback = null
|
||
}
|
||
}
|
||
|
||
BufferListStream.prototype._destroy = function _destroy (err, cb) {
|
||
this._bufs.length = 0
|
||
this.length = 0
|
||
cb(err)
|
||
}
|
||
|
||
BufferListStream.prototype._isBufferList = function _isBufferList (b) {
|
||
return b instanceof BufferListStream || b instanceof BufferList || BufferListStream.isBufferList(b)
|
||
}
|
||
|
||
BufferListStream.isBufferList = BufferList.isBufferList
|
||
|
||
module.exports = BufferListStream
|
||
module.exports.BufferListStream = BufferListStream
|
||
module.exports.BufferList = BufferList
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 6363:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
var concatMap = __nccwpck_require__(8463);
|
||
var balanced = __nccwpck_require__(8976);
|
||
|
||
module.exports = expandTop;
|
||
|
||
var escSlash = '\0SLASH'+Math.random()+'\0';
|
||
var escOpen = '\0OPEN'+Math.random()+'\0';
|
||
var escClose = '\0CLOSE'+Math.random()+'\0';
|
||
var escComma = '\0COMMA'+Math.random()+'\0';
|
||
var escPeriod = '\0PERIOD'+Math.random()+'\0';
|
||
|
||
function numeric(str) {
|
||
return parseInt(str, 10) == str
|
||
? parseInt(str, 10)
|
||
: str.charCodeAt(0);
|
||
}
|
||
|
||
function escapeBraces(str) {
|
||
return str.split('\\\\').join(escSlash)
|
||
.split('\\{').join(escOpen)
|
||
.split('\\}').join(escClose)
|
||
.split('\\,').join(escComma)
|
||
.split('\\.').join(escPeriod);
|
||
}
|
||
|
||
function unescapeBraces(str) {
|
||
return str.split(escSlash).join('\\')
|
||
.split(escOpen).join('{')
|
||
.split(escClose).join('}')
|
||
.split(escComma).join(',')
|
||
.split(escPeriod).join('.');
|
||
}
|
||
|
||
|
||
// Basically just str.split(","), but handling cases
|
||
// where we have nested braced sections, which should be
|
||
// treated as individual members, like {a,{b,c},d}
|
||
function parseCommaParts(str) {
|
||
if (!str)
|
||
return [''];
|
||
|
||
var parts = [];
|
||
var m = balanced('{', '}', str);
|
||
|
||
if (!m)
|
||
return str.split(',');
|
||
|
||
var pre = m.pre;
|
||
var body = m.body;
|
||
var post = m.post;
|
||
var p = pre.split(',');
|
||
|
||
p[p.length-1] += '{' + body + '}';
|
||
var postParts = parseCommaParts(post);
|
||
if (post.length) {
|
||
p[p.length-1] += postParts.shift();
|
||
p.push.apply(p, postParts);
|
||
}
|
||
|
||
parts.push.apply(parts, p);
|
||
|
||
return parts;
|
||
}
|
||
|
||
function expandTop(str) {
|
||
if (!str)
|
||
return [];
|
||
|
||
// I don't know why Bash 4.3 does this, but it does.
|
||
// Anything starting with {} will have the first two bytes preserved
|
||
// but *only* at the top level, so {},a}b will not expand to anything,
|
||
// but a{},b}c will be expanded to [a}c,abc].
|
||
// One could argue that this is a bug in Bash, but since the goal of
|
||
// this module is to match Bash's rules, we escape a leading {}
|
||
if (str.substr(0, 2) === '{}') {
|
||
str = '\\{\\}' + str.substr(2);
|
||
}
|
||
|
||
return expand(escapeBraces(str), true).map(unescapeBraces);
|
||
}
|
||
|
||
function identity(e) {
|
||
return e;
|
||
}
|
||
|
||
function embrace(str) {
|
||
return '{' + str + '}';
|
||
}
|
||
function isPadded(el) {
|
||
return /^-?0\d/.test(el);
|
||
}
|
||
|
||
function lte(i, y) {
|
||
return i <= y;
|
||
}
|
||
function gte(i, y) {
|
||
return i >= y;
|
||
}
|
||
|
||
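// Expands the first balanced {...} group in `str`: numeric/alpha ranges
// (e.g. {1..5..2}, {a..e}) become sequences, comma lists become alternatives,
// and each result is combined with the recursively expanded suffix.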
function expand(str, isTop) {
|
||
var expansions = [];
|
||
|
||
var m = balanced('{', '}', str);
|
||
if (!m || /\$$/.test(m.pre)) return [str];
|
||
|
||
var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
|
||
var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
|
||
var isSequence = isNumericSequence || isAlphaSequence;
|
||
var isOptions = m.body.indexOf(',') >= 0;
|
||
if (!isSequence && !isOptions) {
|
||
// {a},b}
|
||
if (m.post.match(/,.*\}/)) {
|
||
str = m.pre + '{' + m.body + escClose + m.post;
|
||
return expand(str);
|
||
}
|
||
return [str];
|
||
}
|
||
|
||
var n;
|
||
if (isSequence) {
|
||
n = m.body.split(/\.\./);
|
||
} else {
|
||
n = parseCommaParts(m.body);
|
||
if (n.length === 1) {
|
||
// x{{a,b}}y ==> x{a}y x{b}y
|
||
n = expand(n[0], false).map(embrace);
|
||
if (n.length === 1) {
|
||
var post = m.post.length
|
||
? expand(m.post, false)
|
||
: [''];
|
||
return post.map(function(p) {
|
||
return m.pre + n[0] + p;
|
||
});
|
||
}
|
||
}
|
||
}
|
||
|
||
// at this point, n is the parts, and we know it's not a comma set
|
||
// with a single entry.
|
||
|
||
// no need to expand pre, since it is guaranteed to be free of brace-sets
|
||
var pre = m.pre;
|
||
var post = m.post.length
|
||
? expand(m.post, false)
|
||
: [''];
|
||
|
||
var N;
|
||
|
||
if (isSequence) {
|
||
var x = numeric(n[0]);
|
||
var y = numeric(n[1]);
|
||
var width = Math.max(n[0].length, n[1].length)
|
||
var incr = n.length == 3
|
||
? Math.abs(numeric(n[2]))
|
||
: 1;
|
||
var test = lte;
|
||
var reverse = y < x;
|
||
if (reverse) {
|
||
incr *= -1;
|
||
test = gte;
|
||
}
|
||
var pad = n.some(isPadded);
|
||
|
||
N = [];
|
||
|
||
for (var i = x; test(i, y); i += incr) {
|
||
var c;
|
||
if (isAlphaSequence) {
|
||
c = String.fromCharCode(i);
|
||
if (c === '\\')
|
||
c = '';
|
||
} else {
|
||
c = String(i);
|
||
if (pad) {
|
||
var need = width - c.length;
|
||
if (need > 0) {
|
||
var z = new Array(need + 1).join('0');
|
||
if (i < 0)
|
||
c = '-' + z + c.slice(1);
|
||
else
|
||
c = z + c;
|
||
}
|
||
}
|
||
}
|
||
N.push(c);
|
||
}
|
||
} else {
|
||
N = concatMap(n, function(el) { return expand(el, false) });
|
||
}
|
||
|
||
for (var j = 0; j < N.length; j++) {
|
||
for (var k = 0; k < post.length; k++) {
|
||
var expansion = pre + N[j] + post[k];
|
||
if (!isTop || isSequence || expansion)
|
||
expansions.push(expansion);
|
||
}
|
||
}
|
||
|
||
return expansions;
|
||
}
|
||
|
||
|
||
|
||
/***/ }),

/***/ 6371:
/***/ ((module) => {

(function () {
  "use strict";

  function btoa(str) {
    var buffer;

    if (str instanceof Buffer) {
      buffer = str;
    } else {
      buffer = Buffer.from(str.toString(), 'binary');
    }

    return buffer.toString('base64');
  }

  module.exports = btoa;
}());


/***/ }),
|
||
|
||
/***/ 8085:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
var Buffer = __nccwpck_require__(4293).Buffer;
|
||
|
||
var CRC_TABLE = [
|
||
0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419,
|
||
0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4,
|
||
0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07,
|
||
0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de,
|
||
0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856,
|
||
0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9,
|
||
0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4,
|
||
0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b,
|
||
0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3,
|
||
0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac, 0x51de003a,
|
||
0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599,
|
||
0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
|
||
0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190,
|
||
0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f,
|
||
0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e,
|
||
0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01,
|
||
0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed,
|
||
0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950,
|
||
0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3,
|
||
0xfbd44c65, 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2,
|
||
0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a,
|
||
0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5,
|
||
0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010,
|
||
0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
|
||
0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17,
|
||
0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6,
|
||
0x03b6e20c, 0x74b1d29a, 0xead54739, 0x9dd277af, 0x04db2615,
|
||
0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8,
|
||
0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344,
|
||
0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb,
|
||
0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a,
|
||
0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5,
|
||
0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1,
|
||
0xa6bc5767, 0x3fb506dd, 0x48b2364b, 0xd80d2bda, 0xaf0a1b4c,
|
||
0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef,
|
||
0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
|
||
0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe,
|
||
0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31,
|
||
0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c,
|
||
0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713,
|
||
0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b,
|
||
0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242,
|
||
0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1,
|
||
0x18b74777, 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c,
|
||
0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278,
|
||
0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7,
|
||
0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66,
|
||
0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
|
||
0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605,
|
||
0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8,
|
||
0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b,
|
||
0x2d02ef8d
|
||
];
|
||
|
||
if (typeof Int32Array !== 'undefined') {
|
||
CRC_TABLE = new Int32Array(CRC_TABLE);
|
||
}
|
||
|
||
function ensureBuffer(input) {
|
||
if (Buffer.isBuffer(input)) {
|
||
return input;
|
||
}
|
||
|
||
var hasNewBufferAPI =
|
||
typeof Buffer.alloc === "function" &&
|
||
typeof Buffer.from === "function";
|
||
|
||
if (typeof input === "number") {
|
||
return hasNewBufferAPI ? Buffer.alloc(input) : new Buffer(input);
|
||
}
|
||
else if (typeof input === "string") {
|
||
return hasNewBufferAPI ? Buffer.from(input) : new Buffer(input);
|
||
}
|
||
else {
|
||
throw new Error("input must be buffer, number, or string, received " +
|
||
typeof input);
|
||
}
|
||
}
|
||
|
||
function bufferizeInt(num) {
|
||
var tmp = ensureBuffer(4);
|
||
tmp.writeInt32BE(num, 0);
|
||
return tmp;
|
||
}
|
||
|
||
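// Table-driven CRC-32; `previous` lets callers chain the checksum across
// multiple buffers.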
function _crc32(buf, previous) {
|
||
buf = ensureBuffer(buf);
|
||
if (Buffer.isBuffer(previous)) {
|
||
previous = previous.readUInt32BE(0);
|
||
}
|
||
var crc = ~~previous ^ -1;
|
||
for (var n = 0; n < buf.length; n++) {
|
||
crc = CRC_TABLE[(crc ^ buf[n]) & 0xff] ^ (crc >>> 8);
|
||
}
|
||
return (crc ^ -1);
|
||
}
|
||
|
||
function crc32() {
|
||
return bufferizeInt(_crc32.apply(null, arguments));
|
||
}
|
||
crc32.signed = function () {
|
||
return _crc32.apply(null, arguments);
|
||
};
|
||
crc32.unsigned = function () {
|
||
return _crc32.apply(null, arguments) >>> 0;
|
||
};
|
||
|
||
module.exports = crc32;
|
||
|
||
|
||
/***/ }),

/***/ 2295:
/***/ ((module) => {

var charenc = {
  // UTF-8 encoding
  utf8: {
    // Convert a string to a byte array
    stringToBytes: function(str) {
      return charenc.bin.stringToBytes(unescape(encodeURIComponent(str)));
    },

    // Convert a byte array to a string
    bytesToString: function(bytes) {
      return decodeURIComponent(escape(charenc.bin.bytesToString(bytes)));
    }
  },

  // Binary encoding
  bin: {
    // Convert a string to a byte array
    stringToBytes: function(str) {
      for (var bytes = [], i = 0; i < str.length; i++)
        bytes.push(str.charCodeAt(i) & 0xFF);
      return bytes;
    },

    // Convert a byte array to a string
    bytesToString: function(bytes) {
      for (var str = [], i = 0; i < bytes.length; i++)
        str.push(String.fromCharCode(bytes[i]));
      return str.join('');
    }
  }
};

module.exports = charenc;


/***/ }),

/***/ 653:
/***/ ((module) => {

/**
 * node-compress-commons
 *
 * Copyright (c) 2014 Chris Talkington, contributors.
 * Licensed under the MIT license.
 * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT
 */
var ArchiveEntry = module.exports = function() {};

ArchiveEntry.prototype.getName = function() {};

ArchiveEntry.prototype.getSize = function() {};

ArchiveEntry.prototype.getLastModifiedDate = function() {};

ArchiveEntry.prototype.isDirectory = function() {};

/***/ }),
|
||
|
||
/***/ 8239:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
/**
|
||
* node-compress-commons
|
||
*
|
||
* Copyright (c) 2014 Chris Talkington, contributors.
|
||
* Licensed under the MIT license.
|
||
* https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT
|
||
*/
|
||
var inherits = __nccwpck_require__(1669).inherits;
|
||
var Transform = __nccwpck_require__(2498).Transform;
|
||
|
||
var ArchiveEntry = __nccwpck_require__(653);
|
||
var util = __nccwpck_require__(2755);
|
||
|
||
var ArchiveOutputStream = module.exports = function(options) {
|
||
if (!(this instanceof ArchiveOutputStream)) {
|
||
return new ArchiveOutputStream(options);
|
||
}
|
||
|
||
Transform.call(this, options);
|
||
|
||
this.offset = 0;
|
||
this._archive = {
|
||
finish: false,
|
||
finished: false,
|
||
processing: false
|
||
};
|
||
};
|
||
|
||
inherits(ArchiveOutputStream, Transform);
|
||
|
||
ArchiveOutputStream.prototype._appendBuffer = function(zae, source, callback) {
|
||
// scaffold only
|
||
};
|
||
|
||
ArchiveOutputStream.prototype._appendStream = function(zae, source, callback) {
|
||
// scaffold only
|
||
};
|
||
|
||
ArchiveOutputStream.prototype._emitErrorCallback = function(err) {
|
||
if (err) {
|
||
this.emit('error', err);
|
||
}
|
||
};
|
||
|
||
ArchiveOutputStream.prototype._finish = function(ae) {
|
||
// scaffold only
|
||
};
|
||
|
||
ArchiveOutputStream.prototype._normalizeEntry = function(ae) {
|
||
// scaffold only
|
||
};
|
||
|
||
ArchiveOutputStream.prototype._transform = function(chunk, encoding, callback) {
|
||
callback(null, chunk);
|
||
};
|
||
|
||
ArchiveOutputStream.prototype.entry = function(ae, source, callback) {
|
||
source = source || null;
|
||
|
||
if (typeof callback !== 'function') {
|
||
callback = this._emitErrorCallback.bind(this);
|
||
}
|
||
|
||
if (!(ae instanceof ArchiveEntry)) {
|
||
callback(new Error('not a valid instance of ArchiveEntry'));
|
||
return;
|
||
}
|
||
|
||
if (this._archive.finish || this._archive.finished) {
|
||
callback(new Error('unacceptable entry after finish'));
|
||
return;
|
||
}
|
||
|
||
if (this._archive.processing) {
|
||
callback(new Error('already processing an entry'));
|
||
return;
|
||
}
|
||
|
||
this._archive.processing = true;
|
||
this._normalizeEntry(ae);
|
||
this._entry = ae;
|
||
|
||
source = util.normalizeInputSource(source);
|
||
|
||
if (Buffer.isBuffer(source)) {
|
||
this._appendBuffer(ae, source, callback);
|
||
} else if (util.isStream(source)) {
|
||
this._appendStream(ae, source, callback);
|
||
} else {
|
||
this._archive.processing = false;
|
||
callback(new Error('input source must be valid Stream or Buffer instance'));
|
||
return;
|
||
}
|
||
|
||
return this;
|
||
};
|
||
|
||
ArchiveOutputStream.prototype.finish = function() {
|
||
if (this._archive.processing) {
|
||
this._archive.finish = true;
|
||
return;
|
||
}
|
||
|
||
this._finish();
|
||
};
|
||
|
||
ArchiveOutputStream.prototype.getBytesWritten = function() {
|
||
return this.offset;
|
||
};
|
||
|
||
ArchiveOutputStream.prototype.write = function(chunk, cb) {
|
||
if (chunk) {
|
||
this.offset += chunk.length;
|
||
}
|
||
|
||
return Transform.prototype.write.call(this, chunk, cb);
|
||
};
|
||
|
||
/***/ }),
|
||
|
||
/***/ 4505:
|
||
/***/ ((module) => {
|
||
|
||
/**
|
||
* node-compress-commons
|
||
*
|
||
* Copyright (c) 2014 Chris Talkington, contributors.
|
||
* Licensed under the MIT license.
|
||
* https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT
|
||
*/
|
||
module.exports = {
|
||
WORD: 4,
|
||
DWORD: 8,
|
||
EMPTY: Buffer.alloc(0),
|
||
|
||
SHORT: 2,
|
||
SHORT_MASK: 0xffff,
|
||
SHORT_SHIFT: 16,
|
||
SHORT_ZERO: Buffer.from(Array(2)),
|
||
LONG: 4,
|
||
LONG_ZERO: Buffer.from(Array(4)),
|
||
|
||
MIN_VERSION_INITIAL: 10,
|
||
MIN_VERSION_DATA_DESCRIPTOR: 20,
|
||
MIN_VERSION_ZIP64: 45,
|
||
VERSION_MADEBY: 45,
|
||
|
||
METHOD_STORED: 0,
|
||
METHOD_DEFLATED: 8,
|
||
|
||
PLATFORM_UNIX: 3,
|
||
PLATFORM_FAT: 0,
|
||
|
||
SIG_LFH: 0x04034b50,
|
||
SIG_DD: 0x08074b50,
|
||
SIG_CFH: 0x02014b50,
|
||
SIG_EOCD: 0x06054b50,
|
||
SIG_ZIP64_EOCD: 0x06064B50,
|
||
SIG_ZIP64_EOCD_LOC: 0x07064B50,
|
||
|
||
ZIP64_MAGIC_SHORT: 0xffff,
|
||
ZIP64_MAGIC: 0xffffffff,
|
||
ZIP64_EXTRA_ID: 0x0001,
|
||
|
||
ZLIB_NO_COMPRESSION: 0,
|
||
ZLIB_BEST_SPEED: 1,
|
||
ZLIB_BEST_COMPRESSION: 9,
|
||
ZLIB_DEFAULT_COMPRESSION: -1,
|
||
|
||
MODE_MASK: 0xFFF,
|
||
DEFAULT_FILE_MODE: 33188, // 010644 = -rw-r--r-- = S_IFREG | S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH
|
||
DEFAULT_DIR_MODE: 16877, // 040755 = drwxr-xr-x = S_IFDIR | S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH | S_IXOTH
|
||
|
||
EXT_FILE_ATTR_DIR: 1106051088, // 010173200020 = drwxr-xr-x = (((S_IFDIR | 0755) << 16) | S_DOS_D)
|
||
EXT_FILE_ATTR_FILE: 2175008800, // 020151000040 = -rw-r--r-- = (((S_IFREG | 0644) << 16) | S_DOS_A) >>> 0
|
||
|
||
// Unix file types
|
||
S_IFMT: 61440, // 0170000 type of file mask
|
||
S_IFIFO: 4096, // 010000 named pipe (fifo)
|
||
S_IFCHR: 8192, // 020000 character special
|
||
S_IFDIR: 16384, // 040000 directory
|
||
S_IFBLK: 24576, // 060000 block special
|
||
S_IFREG: 32768, // 0100000 regular
|
||
S_IFLNK: 40960, // 0120000 symbolic link
|
||
S_IFSOCK: 49152, // 0140000 socket
|
||
|
||
// DOS file type flags
|
||
S_DOS_A: 32, // 040 Archive
|
||
S_DOS_D: 16, // 020 Directory
|
||
S_DOS_V: 8, // 010 Volume
|
||
S_DOS_S: 4, // 04 System
|
||
S_DOS_H: 2, // 02 Hidden
|
||
S_DOS_R: 1 // 01 Read Only
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 2200:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
/**
|
||
* node-compress-commons
|
||
*
|
||
* Copyright (c) 2014 Chris Talkington, contributors.
|
||
* Licensed under the MIT license.
|
||
* https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT
|
||
*/
|
||
var zipUtil = __nccwpck_require__(3249);
|
||
|
||
var DATA_DESCRIPTOR_FLAG = 1 << 3;
|
||
var ENCRYPTION_FLAG = 1 << 0;
|
||
var NUMBER_OF_SHANNON_FANO_TREES_FLAG = 1 << 2;
|
||
var SLIDING_DICTIONARY_SIZE_FLAG = 1 << 1;
|
||
var STRONG_ENCRYPTION_FLAG = 1 << 6;
|
||
var UFT8_NAMES_FLAG = 1 << 11;
|
||
|
||
var GeneralPurposeBit = module.exports = function() {
|
||
if (!(this instanceof GeneralPurposeBit)) {
|
||
return new GeneralPurposeBit();
|
||
}
|
||
|
||
this.descriptor = false;
|
||
this.encryption = false;
|
||
this.utf8 = false;
|
||
this.numberOfShannonFanoTrees = 0;
|
||
this.strongEncryption = false;
|
||
this.slidingDictionarySize = 0;
|
||
|
||
return this;
|
||
};
|
||
|
||
GeneralPurposeBit.prototype.encode = function() {
|
||
return zipUtil.getShortBytes(
|
||
(this.descriptor ? DATA_DESCRIPTOR_FLAG : 0) |
|
||
(this.utf8 ? UFT8_NAMES_FLAG : 0) |
|
||
(this.encryption ? ENCRYPTION_FLAG : 0) |
|
||
(this.strongEncryption ? STRONG_ENCRYPTION_FLAG : 0)
|
||
);
|
||
};
|
||
|
||
GeneralPurposeBit.prototype.parse = function(buf, offset) {
|
||
var flag = zipUtil.getShortBytesValue(buf, offset);
|
||
var gbp = new GeneralPurposeBit();
|
||
|
||
gbp.useDataDescriptor((flag & DATA_DESCRIPTOR_FLAG) !== 0);
|
||
gbp.useUTF8ForNames((flag & UFT8_NAMES_FLAG) !== 0);
|
||
gbp.useStrongEncryption((flag & STRONG_ENCRYPTION_FLAG) !== 0);
|
||
gbp.useEncryption((flag & ENCRYPTION_FLAG) !== 0);
|
||
gbp.setSlidingDictionarySize((flag & SLIDING_DICTIONARY_SIZE_FLAG) !== 0 ? 8192 : 4096);
|
||
gbp.setNumberOfShannonFanoTrees((flag & NUMBER_OF_SHANNON_FANO_TREES_FLAG) !== 0 ? 3 : 2);
|
||
|
||
return gbp;
|
||
};
|
||
|
||
GeneralPurposeBit.prototype.setNumberOfShannonFanoTrees = function(n) {
|
||
this.numberOfShannonFanoTrees = n;
|
||
};
|
||
|
||
GeneralPurposeBit.prototype.getNumberOfShannonFanoTrees = function() {
|
||
return this.numberOfShannonFanoTrees;
|
||
};
|
||
|
||
GeneralPurposeBit.prototype.setSlidingDictionarySize = function(n) {
|
||
this.slidingDictionarySize = n;
|
||
};
|
||
|
||
GeneralPurposeBit.prototype.getSlidingDictionarySize = function() {
|
||
return this.slidingDictionarySize;
|
||
};
|
||
|
||
GeneralPurposeBit.prototype.useDataDescriptor = function(b) {
|
||
this.descriptor = b;
|
||
};
|
||
|
||
GeneralPurposeBit.prototype.usesDataDescriptor = function() {
|
||
return this.descriptor;
|
||
};
|
||
|
||
GeneralPurposeBit.prototype.useEncryption = function(b) {
|
||
this.encryption = b;
|
||
};
|
||
|
||
GeneralPurposeBit.prototype.usesEncryption = function() {
|
||
return this.encryption;
|
||
};
|
||
|
||
GeneralPurposeBit.prototype.useStrongEncryption = function(b) {
|
||
this.strongEncryption = b;
|
||
};
|
||
|
||
GeneralPurposeBit.prototype.usesStrongEncryption = function() {
|
||
return this.strongEncryption;
|
||
};
|
||
|
||
GeneralPurposeBit.prototype.useUTF8ForNames = function(b) {
|
||
this.utf8 = b;
|
||
};
|
||
|
||
GeneralPurposeBit.prototype.usesUTF8ForNames = function() {
|
||
return this.utf8;
|
||
};
|
||
|
||
/***/ }),

/***/ 8642:
/***/ ((module) => {

/**
 * node-compress-commons
 *
 * Copyright (c) 2014 Chris Talkington, contributors.
 * Licensed under the MIT license.
 * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT
 */
module.exports = {
  /**
   * Bits used for permissions (and sticky bit)
   */
  PERM_MASK: 4095, // 07777

  /**
   * Bits used to indicate the filesystem object type.
   */
  FILE_TYPE_FLAG: 61440, // 0170000

  /**
   * Indicates symbolic links.
   */
  LINK_FLAG: 40960, // 0120000

  /**
   * Indicates plain files.
   */
  FILE_FLAG: 32768, // 0100000

  /**
   * Indicates directories.
   */
  DIR_FLAG: 16384, // 040000

  // ----------------------------------------------------------
  // somewhat arbitrary choices that are quite common for shared
  // installations
  // -----------------------------------------------------------

  /**
   * Default permissions for symbolic links.
   */
  DEFAULT_LINK_PERM: 511, // 0777

  /**
   * Default permissions for directories.
   */
  DEFAULT_DIR_PERM: 493, // 0755

  /**
   * Default permissions for plain files.
   */
  DEFAULT_FILE_PERM: 420 // 0644
};

/***/ }),
|
||
|
||
/***/ 3249:
|
||
/***/ ((module) => {
|
||
|
||
/**
|
||
* node-compress-commons
|
||
*
|
||
* Copyright (c) 2014 Chris Talkington, contributors.
|
||
* Licensed under the MIT license.
|
||
* https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT
|
||
*/
|
||
var util = module.exports = {};
|
||
|
||
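// Packs a Date into the 32-bit MS-DOS date/time format used in ZIP headers
// (years counted from 1980, seconds stored at 2-second resolution).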
util.dateToDos = function(d, forceLocalTime) {
|
||
forceLocalTime = forceLocalTime || false;
|
||
|
||
var year = forceLocalTime ? d.getFullYear() : d.getUTCFullYear();
|
||
|
||
if (year < 1980) {
|
||
return 2162688; // 1980-1-1 00:00:00
|
||
} else if (year >= 2044) {
|
||
return 2141175677; // 2043-12-31 23:59:58
|
||
}
|
||
|
||
var val = {
|
||
year: year,
|
||
month: forceLocalTime ? d.getMonth() : d.getUTCMonth(),
|
||
date: forceLocalTime ? d.getDate() : d.getUTCDate(),
|
||
hours: forceLocalTime ? d.getHours() : d.getUTCHours(),
|
||
minutes: forceLocalTime ? d.getMinutes() : d.getUTCMinutes(),
|
||
seconds: forceLocalTime ? d.getSeconds() : d.getUTCSeconds()
|
||
};
|
||
|
||
return ((val.year - 1980) << 25) | ((val.month + 1) << 21) | (val.date << 16) |
|
||
(val.hours << 11) | (val.minutes << 5) | (val.seconds / 2);
|
||
};
|
||
|
||
util.dosToDate = function(dos) {
|
||
return new Date(((dos >> 25) & 0x7f) + 1980, ((dos >> 21) & 0x0f) - 1, (dos >> 16) & 0x1f, (dos >> 11) & 0x1f, (dos >> 5) & 0x3f, (dos & 0x1f) << 1);
|
||
};
|
||
|
||
util.fromDosTime = function(buf) {
|
||
return util.dosToDate(buf.readUInt32LE(0));
|
||
};
|
||
|
||
util.getEightBytes = function(v) {
|
||
var buf = Buffer.alloc(8);
|
||
buf.writeUInt32LE(v % 0x0100000000, 0);
|
||
buf.writeUInt32LE((v / 0x0100000000) | 0, 4);
|
||
|
||
return buf;
|
||
};
|
||
|
||
util.getShortBytes = function(v) {
|
||
var buf = Buffer.alloc(2);
|
||
buf.writeUInt16LE((v & 0xFFFF) >>> 0, 0);
|
||
|
||
return buf;
|
||
};
|
||
|
||
util.getShortBytesValue = function(buf, offset) {
|
||
return buf.readUInt16LE(offset);
|
||
};
|
||
|
||
util.getLongBytes = function(v) {
|
||
var buf = Buffer.alloc(4);
|
||
buf.writeUInt32LE((v & 0xFFFFFFFF) >>> 0, 0);
|
||
|
||
return buf;
|
||
};
|
||
|
||
util.getLongBytesValue = function(buf, offset) {
|
||
return buf.readUInt32LE(offset);
|
||
};
|
||
|
||
util.toDosTime = function(d) {
|
||
return util.getLongBytes(util.dateToDos(d));
|
||
};
|
||
|
||
/***/ }),
|
||
|
||
/***/ 1701:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
/**
|
||
* node-compress-commons
|
||
*
|
||
* Copyright (c) 2014 Chris Talkington, contributors.
|
||
* Licensed under the MIT license.
|
||
* https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT
|
||
*/
|
||
var inherits = __nccwpck_require__(1669).inherits;
|
||
var normalizePath = __nccwpck_require__(1980);
|
||
|
||
var ArchiveEntry = __nccwpck_require__(653);
|
||
var GeneralPurposeBit = __nccwpck_require__(2200);
|
||
var UnixStat = __nccwpck_require__(8642);
|
||
|
||
var constants = __nccwpck_require__(4505);
|
||
var zipUtil = __nccwpck_require__(3249);
|
||
|
||
var ZipArchiveEntry = module.exports = function(name) {
|
||
if (!(this instanceof ZipArchiveEntry)) {
|
||
return new ZipArchiveEntry(name);
|
||
}
|
||
|
||
ArchiveEntry.call(this);
|
||
|
||
this.platform = constants.PLATFORM_FAT;
|
||
this.method = -1;
|
||
|
||
this.name = null;
|
||
this.size = 0;
|
||
this.csize = 0;
|
||
this.gpb = new GeneralPurposeBit();
|
||
this.crc = 0;
|
||
this.time = -1;
|
||
|
||
this.minver = constants.MIN_VERSION_INITIAL;
|
||
this.mode = -1;
|
||
this.extra = null;
|
||
this.exattr = 0;
|
||
this.inattr = 0;
|
||
this.comment = null;
|
||
|
||
if (name) {
|
||
this.setName(name);
|
||
}
|
||
};
|
||
|
||
inherits(ZipArchiveEntry, ArchiveEntry);
|
||
|
||
/**
|
||
* Returns the extra fields related to the entry.
|
||
*
|
||
* @returns {Buffer}
|
||
*/
|
||
ZipArchiveEntry.prototype.getCentralDirectoryExtra = function() {
|
||
return this.getExtra();
|
||
};
|
||
|
||
/**
|
||
* Returns the comment set for the entry.
|
||
*
|
||
* @returns {string}
|
||
*/
|
||
ZipArchiveEntry.prototype.getComment = function() {
|
||
return this.comment !== null ? this.comment : '';
|
||
};
|
||
|
||
/**
|
||
* Returns the compressed size of the entry.
|
||
*
|
||
* @returns {number}
|
||
*/
|
||
ZipArchiveEntry.prototype.getCompressedSize = function() {
|
||
return this.csize;
|
||
};
|
||
|
||
/**
|
||
* Returns the CRC32 digest for the entry.
|
||
*
|
||
* @returns {number}
|
||
*/
|
||
ZipArchiveEntry.prototype.getCrc = function() {
|
||
return this.crc;
|
||
};
|
||
|
||
/**
|
||
* Returns the external file attributes for the entry.
|
||
*
|
||
* @returns {number}
|
||
*/
|
||
ZipArchiveEntry.prototype.getExternalAttributes = function() {
|
||
return this.exattr;
|
||
};
|
||
|
||
/**
|
||
* Returns the extra fields related to the entry.
|
||
*
|
||
* @returns {Buffer}
|
||
*/
|
||
ZipArchiveEntry.prototype.getExtra = function() {
|
||
return this.extra !== null ? this.extra : constants.EMPTY;
|
||
};
|
||
|
||
/**
|
||
* Returns the general purpose bits related to the entry.
|
||
*
|
||
* @returns {GeneralPurposeBit}
|
||
*/
|
||
ZipArchiveEntry.prototype.getGeneralPurposeBit = function() {
|
||
return this.gpb;
|
||
};
|
||
|
||
/**
|
||
* Returns the internal file attributes for the entry.
|
||
*
|
||
* @returns {number}
|
||
*/
|
||
ZipArchiveEntry.prototype.getInternalAttributes = function() {
|
||
return this.inattr;
|
||
};
|
||
|
||
/**
|
||
* Returns the last modified date of the entry.
|
||
*
|
||
* @returns {number}
|
||
*/
|
||
ZipArchiveEntry.prototype.getLastModifiedDate = function() {
|
||
return this.getTime();
|
||
};
|
||
|
||
/**
|
||
* Returns the extra fields related to the entry.
|
||
*
|
||
* @returns {Buffer}
|
||
*/
|
||
ZipArchiveEntry.prototype.getLocalFileDataExtra = function() {
|
||
return this.getExtra();
|
||
};
|
||
|
||
/**
|
||
* Returns the compression method used on the entry.
|
||
*
|
||
* @returns {number}
|
||
*/
|
||
ZipArchiveEntry.prototype.getMethod = function() {
|
||
return this.method;
|
||
};
|
||
|
||
/**
|
||
* Returns the filename of the entry.
|
||
*
|
||
* @returns {string}
|
||
*/
|
||
ZipArchiveEntry.prototype.getName = function() {
|
||
return this.name;
|
||
};
|
||
|
||
/**
|
||
* Returns the platform on which the entry was made.
|
||
*
|
||
* @returns {number}
|
||
*/
|
||
ZipArchiveEntry.prototype.getPlatform = function() {
|
||
return this.platform;
|
||
};
|
||
|
||
/**
|
||
* Returns the size of the entry.
|
||
*
|
||
* @returns {number}
|
||
*/
|
||
ZipArchiveEntry.prototype.getSize = function() {
|
||
return this.size;
|
||
};
|
||
|
||
/**
|
||
* Returns a date object representing the last modified date of the entry.
|
||
*
|
||
* @returns {number|Date}
|
||
*/
|
||
ZipArchiveEntry.prototype.getTime = function() {
|
||
return this.time !== -1 ? zipUtil.dosToDate(this.time) : -1;
|
||
};
|
||
|
||
/**
|
||
* Returns the DOS timestamp for the entry.
|
||
*
|
||
* @returns {number}
|
||
*/
|
||
ZipArchiveEntry.prototype.getTimeDos = function() {
|
||
return this.time !== -1 ? this.time : 0;
|
||
};
|
||
|
||
/**
|
||
* Returns the UNIX file permissions for the entry.
|
||
*
|
||
* @returns {number}
|
||
*/
|
||
ZipArchiveEntry.prototype.getUnixMode = function() {
|
||
return this.platform !== constants.PLATFORM_UNIX ? 0 : ((this.getExternalAttributes() >> constants.SHORT_SHIFT) & constants.SHORT_MASK);
|
||
};
|
||
|
||
/**
|
||
* Returns the version of ZIP needed to extract the entry.
|
||
*
|
||
* @returns {number}
|
||
*/
|
||
ZipArchiveEntry.prototype.getVersionNeededToExtract = function() {
|
||
return this.minver;
|
||
};
|
||
|
||
/**
|
||
* Sets the comment of the entry.
|
||
*
|
||
* @param comment
|
||
*/
|
||
ZipArchiveEntry.prototype.setComment = function(comment) {
|
||
if (Buffer.byteLength(comment) !== comment.length) {
|
||
this.getGeneralPurposeBit().useUTF8ForNames(true);
|
||
}
|
||
|
||
this.comment = comment;
|
||
};
|
||
|
||
/**
|
||
* Sets the compressed size of the entry.
|
||
*
|
||
* @param size
|
||
*/
|
||
ZipArchiveEntry.prototype.setCompressedSize = function(size) {
|
||
if (size < 0) {
|
||
throw new Error('invalid entry compressed size');
|
||
}
|
||
|
||
this.csize = size;
|
||
};
|
||
|
||
/**
|
||
* Sets the checksum of the entry.
|
||
*
|
||
* @param crc
|
||
*/
|
||
ZipArchiveEntry.prototype.setCrc = function(crc) {
|
||
if (crc < 0) {
|
||
throw new Error('invalid entry crc32');
|
||
}
|
||
|
||
this.crc = crc;
|
||
};
|
||
|
||
/**
|
||
* Sets the external file attributes of the entry.
|
||
*
|
||
* @param attr
|
||
*/
|
||
ZipArchiveEntry.prototype.setExternalAttributes = function(attr) {
|
||
this.exattr = attr >>> 0;
|
||
};
|
||
|
||
/**
|
||
* Sets the extra fields related to the entry.
|
||
*
|
||
* @param extra
|
||
*/
|
||
ZipArchiveEntry.prototype.setExtra = function(extra) {
|
||
this.extra = extra;
|
||
};
|
||
|
||
/**
|
||
* Sets the general purpose bits related to the entry.
|
||
*
|
||
* @param gpb
|
||
*/
|
||
ZipArchiveEntry.prototype.setGeneralPurposeBit = function(gpb) {
|
||
if (!(gpb instanceof GeneralPurposeBit)) {
|
||
throw new Error('invalid entry GeneralPurposeBit');
|
||
}
|
||
|
||
this.gpb = gpb;
|
||
};
|
||
|
||
/**
|
||
* Sets the internal file attributes of the entry.
|
||
*
|
||
* @param attr
|
||
*/
|
||
ZipArchiveEntry.prototype.setInternalAttributes = function(attr) {
|
||
this.inattr = attr;
|
||
};
|
||
|
||
/**
|
||
* Sets the compression method of the entry.
|
||
*
|
||
* @param method
|
||
*/
|
||
ZipArchiveEntry.prototype.setMethod = function(method) {
|
||
if (method < 0) {
|
||
throw new Error('invalid entry compression method');
|
||
}
|
||
|
||
this.method = method;
|
||
};
|
||
|
||
/**
|
||
* Sets the name of the entry.
|
||
*
|
||
* @param name
|
||
* @param prependSlash
|
||
*/
|
||
ZipArchiveEntry.prototype.setName = function(name, prependSlash = false) {
|
||
name = normalizePath(name, false)
|
||
.replace(/^\w+:/, '')
|
||
.replace(/^(\.\.\/|\/)+/, '');
|
||
|
||
if (prependSlash) {
|
||
name = `/${name}`;
|
||
}
|
||
|
||
if (Buffer.byteLength(name) !== name.length) {
|
||
this.getGeneralPurposeBit().useUTF8ForNames(true);
|
||
}
|
||
|
||
this.name = name;
|
||
};
|
||
|
||
/**
|
||
* Sets the platform on which the entry was made.
|
||
*
|
||
* @param platform
|
||
*/
|
||
ZipArchiveEntry.prototype.setPlatform = function(platform) {
|
||
this.platform = platform;
|
||
};
|
||
|
||
/**
|
||
* Sets the size of the entry.
|
||
*
|
||
* @param size
|
||
*/
|
||
ZipArchiveEntry.prototype.setSize = function(size) {
|
||
if (size < 0) {
|
||
throw new Error('invalid entry size');
|
||
}
|
||
|
||
this.size = size;
|
||
};
|
||
|
||
/**
|
||
* Sets the time of the entry.
|
||
*
|
||
* @param time
|
||
* @param forceLocalTime
|
||
*/
|
||
ZipArchiveEntry.prototype.setTime = function(time, forceLocalTime) {
|
||
if (!(time instanceof Date)) {
|
||
throw new Error('invalid entry time');
|
||
}
|
||
|
||
this.time = zipUtil.dateToDos(time, forceLocalTime);
|
||
};
|
||
|
||
/**
|
||
* Sets the UNIX file permissions for the entry.
|
||
*
|
||
* @param mode
|
||
*/
|
||
ZipArchiveEntry.prototype.setUnixMode = function(mode) {
|
||
mode |= this.isDirectory() ? constants.S_IFDIR : constants.S_IFREG;
|
||
|
||
var extattr = 0;
|
||
extattr |= (mode << constants.SHORT_SHIFT) | (this.isDirectory() ? constants.S_DOS_D : constants.S_DOS_A);
|
||
|
||
this.setExternalAttributes(extattr);
|
||
this.mode = mode & constants.MODE_MASK;
|
||
this.platform = constants.PLATFORM_UNIX;
|
||
};
|
||
|
||
/**
|
||
* Sets the version of ZIP needed to extract this entry.
|
||
*
|
||
* @param minver
|
||
*/
|
||
ZipArchiveEntry.prototype.setVersionNeededToExtract = function(minver) {
|
||
this.minver = minver;
|
||
};
|
||
|
||
/**
|
||
* Returns true if this entry represents a directory.
|
||
*
|
||
* @returns {boolean}
|
||
*/
|
||
ZipArchiveEntry.prototype.isDirectory = function() {
|
||
return this.getName().slice(-1) === '/';
|
||
};
|
||
|
||
/**
|
||
* Returns true if this entry represents a unix symlink,
|
||
* in which case the entry's content contains the target path
|
||
* for the symlink.
|
||
*
|
||
* @returns {boolean}
|
||
*/
|
||
ZipArchiveEntry.prototype.isUnixSymlink = function() {
|
||
return (this.getUnixMode() & UnixStat.FILE_TYPE_FLAG) === UnixStat.LINK_FLAG;
|
||
};
|
||
|
||
/**
|
||
* Returns true if this entry is using the ZIP64 extension of ZIP.
|
||
*
|
||
* @returns {boolean}
|
||
*/
|
||
ZipArchiveEntry.prototype.isZip64 = function() {
|
||
return this.csize > constants.ZIP64_MAGIC || this.size > constants.ZIP64_MAGIC;
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 5215:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
/**
|
||
* node-compress-commons
|
||
*
|
||
* Copyright (c) 2014 Chris Talkington, contributors.
|
||
* Licensed under the MIT license.
|
||
* https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT
|
||
*/
|
||
var inherits = __nccwpck_require__(1669).inherits;
|
||
var crc32 = __nccwpck_require__(8085);
|
||
var {CRC32Stream} = __nccwpck_require__(915);
|
||
var {DeflateCRC32Stream} = __nccwpck_require__(915);
|
||
|
||
var ArchiveOutputStream = __nccwpck_require__(8239);
|
||
var ZipArchiveEntry = __nccwpck_require__(1701);
|
||
var GeneralPurposeBit = __nccwpck_require__(2200);
|
||
|
||
var constants = __nccwpck_require__(4505);
|
||
var util = __nccwpck_require__(2755);
|
||
var zipUtil = __nccwpck_require__(3249);
|
||
|
||
var ZipArchiveOutputStream = module.exports = function(options) {
|
||
if (!(this instanceof ZipArchiveOutputStream)) {
|
||
return new ZipArchiveOutputStream(options);
|
||
}
|
||
|
||
options = this.options = this._defaults(options);
|
||
|
||
ArchiveOutputStream.call(this, options);
|
||
|
||
this._entry = null;
|
||
this._entries = [];
|
||
this._archive = {
|
||
centralLength: 0,
|
||
centralOffset: 0,
|
||
comment: '',
|
||
finish: false,
|
||
finished: false,
|
||
processing: false,
|
||
forceZip64: options.forceZip64,
|
||
forceLocalTime: options.forceLocalTime
|
||
};
|
||
};
|
||
|
||
inherits(ZipArchiveOutputStream, ArchiveOutputStream);
|
||
|
||
ZipArchiveOutputStream.prototype._afterAppend = function(ae) {
|
||
this._entries.push(ae);
|
||
|
||
if (ae.getGeneralPurposeBit().usesDataDescriptor()) {
|
||
this._writeDataDescriptor(ae);
|
||
}
|
||
|
||
this._archive.processing = false;
|
||
this._entry = null;
|
||
|
||
if (this._archive.finish && !this._archive.finished) {
|
||
this._finish();
|
||
}
|
||
};
|
||
|
||
ZipArchiveOutputStream.prototype._appendBuffer = function(ae, source, callback) {
|
||
if (source.length === 0) {
|
||
ae.setMethod(constants.METHOD_STORED);
|
||
}
|
||
|
||
var method = ae.getMethod();
|
||
|
||
if (method === constants.METHOD_STORED) {
|
||
ae.setSize(source.length);
|
||
ae.setCompressedSize(source.length);
|
||
ae.setCrc(crc32.unsigned(source));
|
||
}
|
||
|
||
this._writeLocalFileHeader(ae);
|
||
|
||
if (method === constants.METHOD_STORED) {
|
||
this.write(source);
|
||
this._afterAppend(ae);
|
||
callback(null, ae);
|
||
return;
|
||
} else if (method === constants.METHOD_DEFLATED) {
|
||
this._smartStream(ae, callback).end(source);
|
||
return;
|
||
} else {
|
||
callback(new Error('compression method ' + method + ' not implemented'));
|
||
return;
|
||
}
|
||
};
|
||
|
||
ZipArchiveOutputStream.prototype._appendStream = function(ae, source, callback) {
|
||
ae.getGeneralPurposeBit().useDataDescriptor(true);
|
||
ae.setVersionNeededToExtract(constants.MIN_VERSION_DATA_DESCRIPTOR);
|
||
|
||
this._writeLocalFileHeader(ae);
|
||
|
||
var smart = this._smartStream(ae, callback);
|
||
source.once('error', function(err) {
|
||
smart.emit('error', err);
|
||
smart.end();
|
||
})
|
||
source.pipe(smart);
|
||
};
|
||
|
||
ZipArchiveOutputStream.prototype._defaults = function(o) {
|
||
if (typeof o !== 'object') {
|
||
o = {};
|
||
}
|
||
|
||
if (typeof o.zlib !== 'object') {
|
||
o.zlib = {};
|
||
}
|
||
|
||
if (typeof o.zlib.level !== 'number') {
|
||
o.zlib.level = constants.ZLIB_BEST_SPEED;
|
||
}
|
||
|
||
o.forceZip64 = !!o.forceZip64;
|
||
o.forceLocalTime = !!o.forceLocalTime;
|
||
|
||
return o;
|
||
};
|
||
|
||
ZipArchiveOutputStream.prototype._finish = function() {
|
||
this._archive.centralOffset = this.offset;
|
||
|
||
this._entries.forEach(function(ae) {
|
||
this._writeCentralFileHeader(ae);
|
||
}.bind(this));
|
||
|
||
this._archive.centralLength = this.offset - this._archive.centralOffset;
|
||
|
||
if (this.isZip64()) {
|
||
this._writeCentralDirectoryZip64();
|
||
}
|
||
|
||
this._writeCentralDirectoryEnd();
|
||
|
||
this._archive.processing = false;
|
||
this._archive.finish = true;
|
||
this._archive.finished = true;
|
||
this.end();
|
||
};
|
||
|
||
ZipArchiveOutputStream.prototype._normalizeEntry = function(ae) {
|
||
if (ae.getMethod() === -1) {
|
||
ae.setMethod(constants.METHOD_DEFLATED);
|
||
}
|
||
|
||
if (ae.getMethod() === constants.METHOD_DEFLATED) {
|
||
ae.getGeneralPurposeBit().useDataDescriptor(true);
|
||
ae.setVersionNeededToExtract(constants.MIN_VERSION_DATA_DESCRIPTOR);
|
||
}
|
||
|
||
if (ae.getTime() === -1) {
|
||
ae.setTime(new Date(), this._archive.forceLocalTime);
|
||
}
|
||
|
||
ae._offsets = {
|
||
file: 0,
|
||
data: 0,
|
||
contents: 0,
|
||
};
|
||
};
|
||
|
||
ZipArchiveOutputStream.prototype._smartStream = function(ae, callback) {
|
||
var deflate = ae.getMethod() === constants.METHOD_DEFLATED;
|
||
var process = deflate ? new DeflateCRC32Stream(this.options.zlib) : new CRC32Stream();
|
||
var error = null;
|
||
|
||
function handleStuff() {
|
||
var digest = process.digest().readUInt32BE(0);
|
||
ae.setCrc(digest);
|
||
ae.setSize(process.size());
|
||
ae.setCompressedSize(process.size(true));
|
||
this._afterAppend(ae);
|
||
callback(error, ae);
|
||
}
|
||
|
||
process.once('end', handleStuff.bind(this));
|
||
process.once('error', function(err) {
|
||
error = err;
|
||
});
|
||
|
||
process.pipe(this, { end: false });
|
||
|
||
return process;
|
||
};
|
||
|
||
ZipArchiveOutputStream.prototype._writeCentralDirectoryEnd = function() {
|
||
var records = this._entries.length;
|
||
var size = this._archive.centralLength;
|
||
var offset = this._archive.centralOffset;
|
||
|
||
if (this.isZip64()) {
|
||
records = constants.ZIP64_MAGIC_SHORT;
|
||
size = constants.ZIP64_MAGIC;
|
||
offset = constants.ZIP64_MAGIC;
|
||
}
|
||
|
||
// signature
|
||
this.write(zipUtil.getLongBytes(constants.SIG_EOCD));
|
||
|
||
// disk numbers
|
||
this.write(constants.SHORT_ZERO);
|
||
this.write(constants.SHORT_ZERO);
|
||
|
||
// number of entries
|
||
this.write(zipUtil.getShortBytes(records));
|
||
this.write(zipUtil.getShortBytes(records));
|
||
|
||
// length and location of CD
|
||
this.write(zipUtil.getLongBytes(size));
|
||
this.write(zipUtil.getLongBytes(offset));
|
||
|
||
// archive comment
|
||
var comment = this.getComment();
|
||
var commentLength = Buffer.byteLength(comment);
|
||
this.write(zipUtil.getShortBytes(commentLength));
|
||
this.write(comment);
|
||
};
|
||
|
||
ZipArchiveOutputStream.prototype._writeCentralDirectoryZip64 = function() {
|
||
// signature
|
||
this.write(zipUtil.getLongBytes(constants.SIG_ZIP64_EOCD));
|
||
|
||
// size of the ZIP64 EOCD record
|
||
this.write(zipUtil.getEightBytes(44));
|
||
|
||
// version made by
|
||
this.write(zipUtil.getShortBytes(constants.MIN_VERSION_ZIP64));
|
||
|
||
// version to extract
|
||
this.write(zipUtil.getShortBytes(constants.MIN_VERSION_ZIP64));
|
||
|
||
// disk numbers
|
||
this.write(constants.LONG_ZERO);
|
||
this.write(constants.LONG_ZERO);
|
||
|
||
// number of entries
|
||
this.write(zipUtil.getEightBytes(this._entries.length));
|
||
this.write(zipUtil.getEightBytes(this._entries.length));
|
||
|
||
// length and location of CD
|
||
this.write(zipUtil.getEightBytes(this._archive.centralLength));
|
||
this.write(zipUtil.getEightBytes(this._archive.centralOffset));
|
||
|
||
// extensible data sector
|
||
// not implemented at this time
|
||
|
||
// end of central directory locator
|
||
this.write(zipUtil.getLongBytes(constants.SIG_ZIP64_EOCD_LOC));
|
||
|
||
// disk number holding the ZIP64 EOCD record
|
||
this.write(constants.LONG_ZERO);
|
||
|
||
// relative offset of the ZIP64 EOCD record
|
||
this.write(zipUtil.getEightBytes(this._archive.centralOffset + this._archive.centralLength));
|
||
|
||
// total number of disks
|
||
this.write(zipUtil.getLongBytes(1));
|
||
};
|
||
|
||
ZipArchiveOutputStream.prototype._writeCentralFileHeader = function(ae) {
|
||
var gpb = ae.getGeneralPurposeBit();
|
||
var method = ae.getMethod();
|
||
var offsets = ae._offsets;
|
||
|
||
var size = ae.getSize();
|
||
var compressedSize = ae.getCompressedSize();
|
||
|
||
if (ae.isZip64() || offsets.file > constants.ZIP64_MAGIC) {
|
||
size = constants.ZIP64_MAGIC;
|
||
compressedSize = constants.ZIP64_MAGIC;
|
||
|
||
ae.setVersionNeededToExtract(constants.MIN_VERSION_ZIP64);
|
||
|
||
var extraBuf = Buffer.concat([
|
||
zipUtil.getShortBytes(constants.ZIP64_EXTRA_ID),
|
||
zipUtil.getShortBytes(24),
|
||
zipUtil.getEightBytes(ae.getSize()),
|
||
zipUtil.getEightBytes(ae.getCompressedSize()),
|
||
zipUtil.getEightBytes(offsets.file)
|
||
], 28);
|
||
|
||
ae.setExtra(extraBuf);
|
||
}
|
||
|
||
// signature
|
||
this.write(zipUtil.getLongBytes(constants.SIG_CFH));
|
||
|
||
// version made by
|
||
this.write(zipUtil.getShortBytes((ae.getPlatform() << 8) | constants.VERSION_MADEBY));
|
||
|
||
// version to extract and general bit flag
|
||
this.write(zipUtil.getShortBytes(ae.getVersionNeededToExtract()));
|
||
this.write(gpb.encode());
|
||
|
||
// compression method
|
||
this.write(zipUtil.getShortBytes(method));
|
||
|
||
// datetime
|
||
this.write(zipUtil.getLongBytes(ae.getTimeDos()));
|
||
|
||
// crc32 checksum
|
||
this.write(zipUtil.getLongBytes(ae.getCrc()));
|
||
|
||
// sizes
|
||
this.write(zipUtil.getLongBytes(compressedSize));
|
||
this.write(zipUtil.getLongBytes(size));
|
||
|
||
var name = ae.getName();
|
||
var comment = ae.getComment();
|
||
var extra = ae.getCentralDirectoryExtra();
|
||
|
||
if (gpb.usesUTF8ForNames()) {
|
||
name = Buffer.from(name);
|
||
comment = Buffer.from(comment);
|
||
}
|
||
|
||
// name length
|
||
this.write(zipUtil.getShortBytes(name.length));
|
||
|
||
// extra length
|
||
this.write(zipUtil.getShortBytes(extra.length));
|
||
|
||
// comments length
|
||
this.write(zipUtil.getShortBytes(comment.length));
|
||
|
||
// disk number start
|
||
this.write(constants.SHORT_ZERO);
|
||
|
||
// internal attributes
|
||
this.write(zipUtil.getShortBytes(ae.getInternalAttributes()));
|
||
|
||
// external attributes
|
||
this.write(zipUtil.getLongBytes(ae.getExternalAttributes()));
|
||
|
||
// relative offset of LFH
|
||
if (offsets.file > constants.ZIP64_MAGIC) {
|
||
this.write(zipUtil.getLongBytes(constants.ZIP64_MAGIC));
|
||
} else {
|
||
this.write(zipUtil.getLongBytes(offsets.file));
|
||
}
|
||
|
||
// name
|
||
this.write(name);
|
||
|
||
// extra
|
||
this.write(extra);
|
||
|
||
// comment
|
||
this.write(comment);
|
||
};
|
||
|
||
ZipArchiveOutputStream.prototype._writeDataDescriptor = function(ae) {
  // signature
  this.write(zipUtil.getLongBytes(constants.SIG_DD));

  // crc32 checksum
  this.write(zipUtil.getLongBytes(ae.getCrc()));

  // sizes
  if (ae.isZip64()) {
    this.write(zipUtil.getEightBytes(ae.getCompressedSize()));
    this.write(zipUtil.getEightBytes(ae.getSize()));
  } else {
    this.write(zipUtil.getLongBytes(ae.getCompressedSize()));
    this.write(zipUtil.getLongBytes(ae.getSize()));
  }
};

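/*
 * Illustrative sketch, not part of the compiled bundle: the data descriptor
 * emitted above follows an entry's compressed data and repeats the CRC and
 * sizes. As written here it is laid out as:
 *
 *   // signature (4) | crc32 (4) | compressed size | uncompressed size
 *   // size fields are 4 bytes each, or 8 bytes each for ZIP64 entries
 *   const descriptorLength = isZip64 ? 4 + 4 + 8 + 8 : 4 + 4 + 4 + 4; // isZip64 is a stand-in boolean
 */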
ZipArchiveOutputStream.prototype._writeLocalFileHeader = function(ae) {
  var gpb = ae.getGeneralPurposeBit();
  var method = ae.getMethod();
  var name = ae.getName();
  var extra = ae.getLocalFileDataExtra();

  if (ae.isZip64()) {
    gpb.useDataDescriptor(true);
    ae.setVersionNeededToExtract(constants.MIN_VERSION_ZIP64);
  }

  if (gpb.usesUTF8ForNames()) {
    name = Buffer.from(name);
  }

  ae._offsets.file = this.offset;

  // signature
  this.write(zipUtil.getLongBytes(constants.SIG_LFH));

  // version to extract and general bit flag
  this.write(zipUtil.getShortBytes(ae.getVersionNeededToExtract()));
  this.write(gpb.encode());

  // compression method
  this.write(zipUtil.getShortBytes(method));

  // datetime
  this.write(zipUtil.getLongBytes(ae.getTimeDos()));

  ae._offsets.data = this.offset;

  // crc32 checksum and sizes
  if (gpb.usesDataDescriptor()) {
    this.write(constants.LONG_ZERO);
    this.write(constants.LONG_ZERO);
    this.write(constants.LONG_ZERO);
  } else {
    this.write(zipUtil.getLongBytes(ae.getCrc()));
    this.write(zipUtil.getLongBytes(ae.getCompressedSize()));
    this.write(zipUtil.getLongBytes(ae.getSize()));
  }

  // name length
  this.write(zipUtil.getShortBytes(name.length));

  // extra length
  this.write(zipUtil.getShortBytes(extra.length));

  // name
  this.write(name);

  // extra
  this.write(extra);

  ae._offsets.contents = this.offset;
};

ZipArchiveOutputStream.prototype.getComment = function(comment) {
  return this._archive.comment !== null ? this._archive.comment : '';
};

ZipArchiveOutputStream.prototype.isZip64 = function() {
  return this._archive.forceZip64 || this._entries.length > constants.ZIP64_MAGIC_SHORT || this._archive.centralLength > constants.ZIP64_MAGIC || this._archive.centralOffset > constants.ZIP64_MAGIC;
};

ZipArchiveOutputStream.prototype.setComment = function(comment) {
  this._archive.comment = comment;
};

/***/ }),
/***/ 2247:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

/**
 * node-compress-commons
 *
 * Copyright (c) 2014 Chris Talkington, contributors.
 * Licensed under the MIT license.
 * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT
 */
module.exports = {
  ArchiveEntry: __nccwpck_require__(653),
  ZipArchiveEntry: __nccwpck_require__(1701),
  ArchiveOutputStream: __nccwpck_require__(8239),
  ZipArchiveOutputStream: __nccwpck_require__(5215)
};

/***/ }),
/***/ 2755:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

/**
 * node-compress-commons
 *
 * Copyright (c) 2014 Chris Talkington, contributors.
 * Licensed under the MIT license.
 * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT
 */
var Stream = __nccwpck_require__(2413).Stream;
var PassThrough = __nccwpck_require__(2498).PassThrough;

var util = module.exports = {};

util.isStream = function(source) {
  return source instanceof Stream;
};

util.normalizeInputSource = function(source) {
  if (source === null) {
    return Buffer.alloc(0);
  } else if (typeof source === 'string') {
    return Buffer.from(source);
  } else if (util.isStream(source) && !source._readableState) {
    var normalized = new PassThrough();
    source.pipe(normalized);

    return normalized;
  }

  return source;
};

/***/ }),
/***/ 8463:
/***/ ((module) => {

module.exports = function (xs, fn) {
  var res = [];
  for (var i = 0; i < xs.length; i++) {
    var x = fn(xs[i], i);
    if (isArray(x)) res.push.apply(res, x);
    else res.push(x);
  }
  return res;
};

var isArray = Array.isArray || function (xs) {
  return Object.prototype.toString.call(xs) === '[object Array]';
};

/***/ }),
/***/ 9300:
|
||
/***/ ((__unused_webpack_module, exports) => {
|
||
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
||
// NOTE: These type checking functions intentionally don't use `instanceof`
|
||
// because it is fragile and can be easily faked with `Object.create()`.
|
||
|
||
function isArray(arg) {
|
||
if (Array.isArray) {
|
||
return Array.isArray(arg);
|
||
}
|
||
return objectToString(arg) === '[object Array]';
|
||
}
|
||
exports.isArray = isArray;
|
||
|
||
function isBoolean(arg) {
|
||
return typeof arg === 'boolean';
|
||
}
|
||
exports.isBoolean = isBoolean;
|
||
|
||
function isNull(arg) {
|
||
return arg === null;
|
||
}
|
||
exports.isNull = isNull;
|
||
|
||
function isNullOrUndefined(arg) {
|
||
return arg == null;
|
||
}
|
||
exports.isNullOrUndefined = isNullOrUndefined;
|
||
|
||
function isNumber(arg) {
|
||
return typeof arg === 'number';
|
||
}
|
||
exports.isNumber = isNumber;
|
||
|
||
function isString(arg) {
|
||
return typeof arg === 'string';
|
||
}
|
||
exports.isString = isString;
|
||
|
||
function isSymbol(arg) {
|
||
return typeof arg === 'symbol';
|
||
}
|
||
exports.isSymbol = isSymbol;
|
||
|
||
function isUndefined(arg) {
|
||
return arg === void 0;
|
||
}
|
||
exports.isUndefined = isUndefined;
|
||
|
||
function isRegExp(re) {
|
||
return objectToString(re) === '[object RegExp]';
|
||
}
|
||
exports.isRegExp = isRegExp;
|
||
|
||
function isObject(arg) {
|
||
return typeof arg === 'object' && arg !== null;
|
||
}
|
||
exports.isObject = isObject;
|
||
|
||
function isDate(d) {
|
||
return objectToString(d) === '[object Date]';
|
||
}
|
||
exports.isDate = isDate;
|
||
|
||
function isError(e) {
|
||
return (objectToString(e) === '[object Error]' || e instanceof Error);
|
||
}
|
||
exports.isError = isError;
|
||
|
||
function isFunction(arg) {
|
||
return typeof arg === 'function';
|
||
}
|
||
exports.isFunction = isFunction;
|
||
|
||
function isPrimitive(arg) {
|
||
return arg === null ||
|
||
typeof arg === 'boolean' ||
|
||
typeof arg === 'number' ||
|
||
typeof arg === 'string' ||
|
||
typeof arg === 'symbol' || // ES6 symbol
|
||
typeof arg === 'undefined';
|
||
}
|
||
exports.isPrimitive = isPrimitive;
|
||
|
||
exports.isBuffer = Buffer.isBuffer;
|
||
|
||
function objectToString(o) {
|
||
return Object.prototype.toString.call(o);
|
||
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 2723:
|
||
/***/ ((__unused_webpack_module, exports) => {
|
||
|
||
/* crc32.js (C) 2014-present SheetJS -- http://sheetjs.com */
|
||
/* vim: set ts=2: */
|
||
/*exported CRC32 */
|
||
var CRC32;
|
||
(function (factory) {
|
||
/*jshint ignore:start */
|
||
/*eslint-disable */
|
||
if(typeof DO_NOT_EXPORT_CRC === 'undefined') {
|
||
if(true) {
|
||
factory(exports);
|
||
} else {}
|
||
} else {
|
||
factory(CRC32 = {});
|
||
}
|
||
/*eslint-enable */
|
||
/*jshint ignore:end */
|
||
}(function(CRC32) {
|
||
CRC32.version = '1.2.0';
|
||
/* see perf/crc32table.js */
|
||
/*global Int32Array */
|
||
function signed_crc_table() {
|
||
var c = 0, table = new Array(256);
|
||
|
||
for(var n =0; n != 256; ++n){
|
||
c = n;
|
||
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
|
||
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
|
||
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
|
||
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
|
||
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
|
||
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
|
||
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
|
||
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
|
||
table[n] = c;
|
||
}
|
||
|
||
return typeof Int32Array !== 'undefined' ? new Int32Array(table) : table;
|
||
}
|
||
|
||
var T = signed_crc_table();
|
||
function crc32_bstr(bstr, seed) {
|
||
var C = seed ^ -1, L = bstr.length - 1;
|
||
for(var i = 0; i < L;) {
|
||
C = (C>>>8) ^ T[(C^bstr.charCodeAt(i++))&0xFF];
|
||
C = (C>>>8) ^ T[(C^bstr.charCodeAt(i++))&0xFF];
|
||
}
|
||
if(i === L) C = (C>>>8) ^ T[(C ^ bstr.charCodeAt(i))&0xFF];
|
||
return C ^ -1;
|
||
}
|
||
|
||
function crc32_buf(buf, seed) {
|
||
if(buf.length > 10000) return crc32_buf_8(buf, seed);
|
||
var C = seed ^ -1, L = buf.length - 3;
|
||
for(var i = 0; i < L;) {
|
||
C = (C>>>8) ^ T[(C^buf[i++])&0xFF];
|
||
C = (C>>>8) ^ T[(C^buf[i++])&0xFF];
|
||
C = (C>>>8) ^ T[(C^buf[i++])&0xFF];
|
||
C = (C>>>8) ^ T[(C^buf[i++])&0xFF];
|
||
}
|
||
while(i < L+3) C = (C>>>8) ^ T[(C^buf[i++])&0xFF];
|
||
return C ^ -1;
|
||
}
|
||
|
||
function crc32_buf_8(buf, seed) {
|
||
var C = seed ^ -1, L = buf.length - 7;
|
||
for(var i = 0; i < L;) {
|
||
C = (C>>>8) ^ T[(C^buf[i++])&0xFF];
|
||
C = (C>>>8) ^ T[(C^buf[i++])&0xFF];
|
||
C = (C>>>8) ^ T[(C^buf[i++])&0xFF];
|
||
C = (C>>>8) ^ T[(C^buf[i++])&0xFF];
|
||
C = (C>>>8) ^ T[(C^buf[i++])&0xFF];
|
||
C = (C>>>8) ^ T[(C^buf[i++])&0xFF];
|
||
C = (C>>>8) ^ T[(C^buf[i++])&0xFF];
|
||
C = (C>>>8) ^ T[(C^buf[i++])&0xFF];
|
||
}
|
||
while(i < L+7) C = (C>>>8) ^ T[(C^buf[i++])&0xFF];
|
||
return C ^ -1;
|
||
}
|
||
|
||
function crc32_str(str, seed) {
|
||
var C = seed ^ -1;
|
||
for(var i = 0, L=str.length, c, d; i < L;) {
|
||
c = str.charCodeAt(i++);
|
||
if(c < 0x80) {
|
||
C = (C>>>8) ^ T[(C ^ c)&0xFF];
|
||
} else if(c < 0x800) {
|
||
C = (C>>>8) ^ T[(C ^ (192|((c>>6)&31)))&0xFF];
|
||
C = (C>>>8) ^ T[(C ^ (128|(c&63)))&0xFF];
|
||
} else if(c >= 0xD800 && c < 0xE000) {
|
||
c = (c&1023)+64; d = str.charCodeAt(i++)&1023;
|
||
C = (C>>>8) ^ T[(C ^ (240|((c>>8)&7)))&0xFF];
|
||
C = (C>>>8) ^ T[(C ^ (128|((c>>2)&63)))&0xFF];
|
||
C = (C>>>8) ^ T[(C ^ (128|((d>>6)&15)|((c&3)<<4)))&0xFF];
|
||
C = (C>>>8) ^ T[(C ^ (128|(d&63)))&0xFF];
|
||
} else {
|
||
C = (C>>>8) ^ T[(C ^ (224|((c>>12)&15)))&0xFF];
|
||
C = (C>>>8) ^ T[(C ^ (128|((c>>6)&63)))&0xFF];
|
||
C = (C>>>8) ^ T[(C ^ (128|(c&63)))&0xFF];
|
||
}
|
||
}
|
||
return C ^ -1;
|
||
}
|
||
CRC32.table = T;
|
||
// $FlowIgnore
|
||
CRC32.bstr = crc32_bstr;
|
||
// $FlowIgnore
|
||
CRC32.buf = crc32_buf;
|
||
// $FlowIgnore
|
||
CRC32.str = crc32_str;
|
||
}));
|
||
|
||
|
||
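/*
 * Illustrative sketch, not part of the compiled bundle: the CRC32 helpers
 * registered above return a signed 32-bit integer; callers in this bundle
 * coerce it with ">>> 0" to get the unsigned checksum. Assuming the module
 * is required as `crc32`:
 *
 *   const signed = crc32.buf(Buffer.from('hello world'));
 *   const unsigned = signed >>> 0;                       // value a ZIP header would store
 *   const resumed = crc32.buf(Buffer.from('!'), signed); // seed with a previous result to continue
 */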
/***/ }),
|
||
|
||
/***/ 6268:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";
/**
 * node-crc32-stream
 *
 * Copyright (c) 2014 Chris Talkington, contributors.
 * Licensed under the MIT license.
 * https://github.com/archiverjs/node-crc32-stream/blob/master/LICENSE-MIT
 */

const {Transform} = __nccwpck_require__(2498);

const crc32 = __nccwpck_require__(2723);

class CRC32Stream extends Transform {
  constructor(options) {
    super(options);
    this.checksum = Buffer.allocUnsafe(4);
    this.checksum.writeInt32BE(0, 0);

    this.rawSize = 0;
  }

  _transform(chunk, encoding, callback) {
    if (chunk) {
      this.checksum = crc32.buf(chunk, this.checksum) >>> 0;
      this.rawSize += chunk.length;
    }

    callback(null, chunk);
  }

  digest(encoding) {
    const checksum = Buffer.allocUnsafe(4);
    checksum.writeUInt32BE(this.checksum >>> 0, 0);
    return encoding ? checksum.toString(encoding) : checksum;
  }

  hex() {
    return this.digest('hex').toUpperCase();
  }

  size() {
    return this.rawSize;
  }
}

module.exports = CRC32Stream;

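/*
 * Illustrative sketch, not part of the compiled bundle: CRC32Stream is a
 * pass-through Transform that checksums and counts every chunk it forwards.
 * Hypothetical usage with `fs` streams:
 *
 *   const checker = new CRC32Stream();
 *   fs.createReadStream('input.bin')
 *     .pipe(checker)
 *     .pipe(fs.createWriteStream('copy.bin'))
 *     .on('close', () => {
 *       checker.hex();  // uppercase hex CRC-32 of the data
 *       checker.size(); // number of bytes that passed through
 *     });
 */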
/***/ }),
/***/ 962:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";
/**
 * node-crc32-stream
 *
 * Copyright (c) 2014 Chris Talkington, contributors.
 * Licensed under the MIT license.
 * https://github.com/archiverjs/node-crc32-stream/blob/master/LICENSE-MIT
 */

const {DeflateRaw} = __nccwpck_require__(8761);

const crc32 = __nccwpck_require__(2723);

class DeflateCRC32Stream extends DeflateRaw {
  constructor(options) {
    super(options);

    this.checksum = Buffer.allocUnsafe(4);
    this.checksum.writeInt32BE(0, 0);

    this.rawSize = 0;
    this.compressedSize = 0;
  }

  push(chunk, encoding) {
    if (chunk) {
      this.compressedSize += chunk.length;
    }

    return super.push(chunk, encoding);
  }

  _transform(chunk, encoding, callback) {
    if (chunk) {
      this.checksum = crc32.buf(chunk, this.checksum) >>> 0;
      this.rawSize += chunk.length;
    }

    super._transform(chunk, encoding, callback);
  }

  digest(encoding) {
    const checksum = Buffer.allocUnsafe(4);
    checksum.writeUInt32BE(this.checksum >>> 0, 0);
    return encoding ? checksum.toString(encoding) : checksum;
  }

  hex() {
    return this.digest('hex').toUpperCase();
  }

  size(compressed = false) {
    if (compressed) {
      return this.compressedSize;
    } else {
      return this.rawSize;
    }
  }
}

module.exports = DeflateCRC32Stream;

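/*
 * Illustrative sketch, not part of the compiled bundle: DeflateCRC32Stream
 * checksums and counts the uncompressed input in _transform(), while push()
 * counts the deflated output, so both totals are available at the end.
 * `source` and `destination` below are stand-ins for existing streams:
 *
 *   const deflater = new DeflateCRC32Stream();
 *   source.pipe(deflater).pipe(destination).on('close', () => {
 *     deflater.size();     // raw (uncompressed) byte count
 *     deflater.size(true); // compressed byte count
 *     deflater.digest();   // 4-byte Buffer with the CRC-32 of the raw data
 *   });
 */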
/***/ }),
/***/ 915:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";
/**
 * node-crc32-stream
 *
 * Copyright (c) 2014 Chris Talkington, contributors.
 * Licensed under the MIT license.
 * https://github.com/archiverjs/node-crc32-stream/blob/master/LICENSE-MIT
 */

module.exports = {
  CRC32Stream: __nccwpck_require__(6268),
  DeflateCRC32Stream: __nccwpck_require__(962)
};

/***/ }),
/***/ 5438:
|
||
/***/ ((module) => {
|
||
|
||
(function() {
|
||
var base64map
|
||
= 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/',
|
||
|
||
crypt = {
|
||
// Bit-wise rotation left
|
||
rotl: function(n, b) {
|
||
return (n << b) | (n >>> (32 - b));
|
||
},
|
||
|
||
// Bit-wise rotation right
|
||
rotr: function(n, b) {
|
||
return (n << (32 - b)) | (n >>> b);
|
||
},
|
||
|
||
// Swap big-endian to little-endian and vice versa
|
||
endian: function(n) {
|
||
// If number given, swap endian
|
||
if (n.constructor == Number) {
|
||
return crypt.rotl(n, 8) & 0x00FF00FF | crypt.rotl(n, 24) & 0xFF00FF00;
|
||
}
|
||
|
||
// Else, assume array and swap all items
|
||
for (var i = 0; i < n.length; i++)
|
||
n[i] = crypt.endian(n[i]);
|
||
return n;
|
||
},
|
||
|
||
// Generate an array of any length of random bytes
|
||
randomBytes: function(n) {
|
||
for (var bytes = []; n > 0; n--)
|
||
bytes.push(Math.floor(Math.random() * 256));
|
||
return bytes;
|
||
},
|
||
|
||
// Convert a byte array to big-endian 32-bit words
|
||
bytesToWords: function(bytes) {
|
||
for (var words = [], i = 0, b = 0; i < bytes.length; i++, b += 8)
|
||
words[b >>> 5] |= bytes[i] << (24 - b % 32);
|
||
return words;
|
||
},
|
||
|
||
// Convert big-endian 32-bit words to a byte array
|
||
wordsToBytes: function(words) {
|
||
for (var bytes = [], b = 0; b < words.length * 32; b += 8)
|
||
bytes.push((words[b >>> 5] >>> (24 - b % 32)) & 0xFF);
|
||
return bytes;
|
||
},
|
||
|
||
// Convert a byte array to a hex string
|
||
bytesToHex: function(bytes) {
|
||
for (var hex = [], i = 0; i < bytes.length; i++) {
|
||
hex.push((bytes[i] >>> 4).toString(16));
|
||
hex.push((bytes[i] & 0xF).toString(16));
|
||
}
|
||
return hex.join('');
|
||
},
|
||
|
||
// Convert a hex string to a byte array
|
||
hexToBytes: function(hex) {
|
||
for (var bytes = [], c = 0; c < hex.length; c += 2)
|
||
bytes.push(parseInt(hex.substr(c, 2), 16));
|
||
return bytes;
|
||
},
|
||
|
||
// Convert a byte array to a base-64 string
|
||
bytesToBase64: function(bytes) {
|
||
for (var base64 = [], i = 0; i < bytes.length; i += 3) {
|
||
var triplet = (bytes[i] << 16) | (bytes[i + 1] << 8) | bytes[i + 2];
|
||
for (var j = 0; j < 4; j++)
|
||
if (i * 8 + j * 6 <= bytes.length * 8)
|
||
base64.push(base64map.charAt((triplet >>> 6 * (3 - j)) & 0x3F));
|
||
else
|
||
base64.push('=');
|
||
}
|
||
return base64.join('');
|
||
},
|
||
|
||
// Convert a base-64 string to a byte array
|
||
base64ToBytes: function(base64) {
|
||
// Remove non-base-64 characters
|
||
base64 = base64.replace(/[^A-Z0-9+\/]/ig, '');
|
||
|
||
for (var bytes = [], i = 0, imod4 = 0; i < base64.length;
|
||
imod4 = ++i % 4) {
|
||
if (imod4 == 0) continue;
|
||
bytes.push(((base64map.indexOf(base64.charAt(i - 1))
|
||
& (Math.pow(2, -2 * imod4 + 8) - 1)) << (imod4 * 2))
|
||
| (base64map.indexOf(base64.charAt(i)) >>> (6 - imod4 * 2)));
|
||
}
|
||
return bytes;
|
||
}
|
||
};
|
||
|
||
module.exports = crypt;
|
||
})();
|
||
|
||
|
||
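/*
 * Illustrative sketch, not part of the compiled bundle: the crypt helpers
 * above convert between byte arrays and hex/base64 strings, for example:
 *
 *   const bytes = crypt.hexToBytes('deadbeef'); // [222, 173, 190, 239]
 *   crypt.bytesToHex(bytes);                    // 'deadbeef'
 *   crypt.bytesToBase64(bytes);                 // '3q2+7w=='
 *   crypt.bytesToWords(bytes);                  // packed big-endian 32-bit words
 */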
/***/ }),
|
||
|
||
/***/ 3860:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
var once = __nccwpck_require__(2940);
|
||
|
||
var noop = function() {};
|
||
|
||
var isRequest = function(stream) {
|
||
return stream.setHeader && typeof stream.abort === 'function';
|
||
};
|
||
|
||
var isChildProcess = function(stream) {
|
||
return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3
|
||
};
|
||
|
||
var eos = function(stream, opts, callback) {
|
||
if (typeof opts === 'function') return eos(stream, null, opts);
|
||
if (!opts) opts = {};
|
||
|
||
callback = once(callback || noop);
|
||
|
||
var ws = stream._writableState;
|
||
var rs = stream._readableState;
|
||
var readable = opts.readable || (opts.readable !== false && stream.readable);
|
||
var writable = opts.writable || (opts.writable !== false && stream.writable);
|
||
var cancelled = false;
|
||
|
||
var onlegacyfinish = function() {
|
||
if (!stream.writable) onfinish();
|
||
};
|
||
|
||
var onfinish = function() {
|
||
writable = false;
|
||
if (!readable) callback.call(stream);
|
||
};
|
||
|
||
var onend = function() {
|
||
readable = false;
|
||
if (!writable) callback.call(stream);
|
||
};
|
||
|
||
var onexit = function(exitCode) {
|
||
callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
|
||
};
|
||
|
||
var onerror = function(err) {
|
||
callback.call(stream, err);
|
||
};
|
||
|
||
var onclose = function() {
|
||
process.nextTick(onclosenexttick);
|
||
};
|
||
|
||
var onclosenexttick = function() {
|
||
if (cancelled) return;
|
||
if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));
|
||
if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));
|
||
};
|
||
|
||
var onrequest = function() {
|
||
stream.req.on('finish', onfinish);
|
||
};
|
||
|
||
if (isRequest(stream)) {
|
||
stream.on('complete', onfinish);
|
||
stream.on('abort', onclose);
|
||
if (stream.req) onrequest();
|
||
else stream.on('request', onrequest);
|
||
} else if (writable && !ws) { // legacy streams
|
||
stream.on('end', onlegacyfinish);
|
||
stream.on('close', onlegacyfinish);
|
||
}
|
||
|
||
if (isChildProcess(stream)) stream.on('exit', onexit);
|
||
|
||
stream.on('end', onend);
|
||
stream.on('finish', onfinish);
|
||
if (opts.error !== false) stream.on('error', onerror);
|
||
stream.on('close', onclose);
|
||
|
||
return function() {
|
||
cancelled = true;
|
||
stream.removeListener('complete', onfinish);
|
||
stream.removeListener('abort', onclose);
|
||
stream.removeListener('request', onrequest);
|
||
if (stream.req) stream.req.removeListener('finish', onfinish);
|
||
stream.removeListener('end', onlegacyfinish);
|
||
stream.removeListener('close', onlegacyfinish);
|
||
stream.removeListener('finish', onfinish);
|
||
stream.removeListener('exit', onexit);
|
||
stream.removeListener('end', onend);
|
||
stream.removeListener('error', onerror);
|
||
stream.removeListener('close', onclose);
|
||
};
|
||
};
|
||
|
||
module.exports = eos;
|
||
|
||
|
||
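/*
 * Illustrative sketch, not part of the compiled bundle: eos() runs its
 * callback once a stream has fully ended/finished or failed, and returns a
 * function that detaches every listener it attached. `someStream` is a
 * stand-in for any readable or writable stream:
 *
 *   const detach = eos(someStream, { readable: true, writable: false }, (err) => {
 *     if (err) console.error('stream errored or closed prematurely', err);
 *     else console.log('stream ended cleanly');
 *   });
 *   // call detach() to stop watching someStream
 */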
/***/ }),
|
||
|
||
/***/ 695:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
//parse Empty Node as self closing node
|
||
const buildOptions = __nccwpck_require__(429).buildOptions;
|
||
|
||
const defaultOptions = {
|
||
attributeNamePrefix: '@_',
|
||
attrNodeName: false,
|
||
textNodeName: '#text',
|
||
ignoreAttributes: true,
|
||
cdataTagName: false,
|
||
cdataPositionChar: '\\c',
|
||
format: false,
|
||
indentBy: ' ',
|
||
supressEmptyNode: false,
|
||
tagValueProcessor: function(a) {
|
||
return a;
|
||
},
|
||
attrValueProcessor: function(a) {
|
||
return a;
|
||
},
|
||
};
|
||
|
||
const props = [
|
||
'attributeNamePrefix',
|
||
'attrNodeName',
|
||
'textNodeName',
|
||
'ignoreAttributes',
|
||
'cdataTagName',
|
||
'cdataPositionChar',
|
||
'format',
|
||
'indentBy',
|
||
'supressEmptyNode',
|
||
'tagValueProcessor',
|
||
'attrValueProcessor',
|
||
];
|
||
|
||
function Parser(options) {
|
||
this.options = buildOptions(options, defaultOptions, props);
|
||
if (this.options.ignoreAttributes || this.options.attrNodeName) {
|
||
this.isAttribute = function(/*a*/) {
|
||
return false;
|
||
};
|
||
} else {
|
||
this.attrPrefixLen = this.options.attributeNamePrefix.length;
|
||
this.isAttribute = isAttribute;
|
||
}
|
||
if (this.options.cdataTagName) {
|
||
this.isCDATA = isCDATA;
|
||
} else {
|
||
this.isCDATA = function(/*a*/) {
|
||
return false;
|
||
};
|
||
}
|
||
this.replaceCDATAstr = replaceCDATAstr;
|
||
this.replaceCDATAarr = replaceCDATAarr;
|
||
|
||
if (this.options.format) {
|
||
this.indentate = indentate;
|
||
this.tagEndChar = '>\n';
|
||
this.newLine = '\n';
|
||
} else {
|
||
this.indentate = function() {
|
||
return '';
|
||
};
|
||
this.tagEndChar = '>';
|
||
this.newLine = '';
|
||
}
|
||
|
||
if (this.options.supressEmptyNode) {
|
||
this.buildTextNode = buildEmptyTextNode;
|
||
this.buildObjNode = buildEmptyObjNode;
|
||
} else {
|
||
this.buildTextNode = buildTextValNode;
|
||
this.buildObjNode = buildObjectNode;
|
||
}
|
||
|
||
this.buildTextValNode = buildTextValNode;
|
||
this.buildObjectNode = buildObjectNode;
|
||
}
|
||
|
||
Parser.prototype.parse = function(jObj) {
|
||
return this.j2x(jObj, 0).val;
|
||
};
|
||
|
||
Parser.prototype.j2x = function(jObj, level) {
|
||
let attrStr = '';
|
||
let val = '';
|
||
const keys = Object.keys(jObj);
|
||
const len = keys.length;
|
||
for (let i = 0; i < len; i++) {
|
||
const key = keys[i];
|
||
if (typeof jObj[key] === 'undefined') {
|
||
// suppress undefined node
|
||
} else if (jObj[key] === null) {
|
||
val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
|
||
} else if (jObj[key] instanceof Date) {
|
||
val += this.buildTextNode(jObj[key], key, '', level);
|
||
} else if (typeof jObj[key] !== 'object') {
|
||
//primitive type
|
||
const attr = this.isAttribute(key);
|
||
if (attr) {
|
||
attrStr += ' ' + attr + '="' + this.options.attrValueProcessor('' + jObj[key]) + '"';
|
||
} else if (this.isCDATA(key)) {
|
||
if (jObj[this.options.textNodeName]) {
|
||
val += this.replaceCDATAstr(jObj[this.options.textNodeName], jObj[key]);
|
||
} else {
|
||
val += this.replaceCDATAstr('', jObj[key]);
|
||
}
|
||
} else {
|
||
//tag value
|
||
if (key === this.options.textNodeName) {
|
||
if (jObj[this.options.cdataTagName]) {
|
||
//value will be added while processing cdata
|
||
} else {
|
||
val += this.options.tagValueProcessor('' + jObj[key]);
|
||
}
|
||
} else {
|
||
val += this.buildTextNode(jObj[key], key, '', level);
|
||
}
|
||
}
|
||
} else if (Array.isArray(jObj[key])) {
|
||
//repeated nodes
|
||
if (this.isCDATA(key)) {
|
||
val += this.indentate(level);
|
||
if (jObj[this.options.textNodeName]) {
|
||
val += this.replaceCDATAarr(jObj[this.options.textNodeName], jObj[key]);
|
||
} else {
|
||
val += this.replaceCDATAarr('', jObj[key]);
|
||
}
|
||
} else {
|
||
//nested nodes
|
||
const arrLen = jObj[key].length;
|
||
for (let j = 0; j < arrLen; j++) {
|
||
const item = jObj[key][j];
|
||
if (typeof item === 'undefined') {
|
||
// suppress undefined node
|
||
} else if (item === null) {
|
||
val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
|
||
} else if (typeof item === 'object') {
|
||
const result = this.j2x(item, level + 1);
|
||
val += this.buildObjNode(result.val, key, result.attrStr, level);
|
||
} else {
|
||
val += this.buildTextNode(item, key, '', level);
|
||
}
|
||
}
|
||
}
|
||
} else {
|
||
//nested node
|
||
if (this.options.attrNodeName && key === this.options.attrNodeName) {
|
||
const Ks = Object.keys(jObj[key]);
|
||
const L = Ks.length;
|
||
for (let j = 0; j < L; j++) {
|
||
attrStr += ' ' + Ks[j] + '="' + this.options.attrValueProcessor('' + jObj[key][Ks[j]]) + '"';
|
||
}
|
||
} else {
|
||
const result = this.j2x(jObj[key], level + 1);
|
||
val += this.buildObjNode(result.val, key, result.attrStr, level);
|
||
}
|
||
}
|
||
}
|
||
return {attrStr: attrStr, val: val};
|
||
};
|
||
|
||
function replaceCDATAstr(str, cdata) {
|
||
str = this.options.tagValueProcessor('' + str);
|
||
if (this.options.cdataPositionChar === '' || str === '') {
|
||
return str + '<![CDATA[' + cdata + ']]' + this.tagEndChar;
|
||
} else {
|
||
return str.replace(this.options.cdataPositionChar, '<![CDATA[' + cdata + ']]' + this.tagEndChar);
|
||
}
|
||
}
|
||
|
||
function replaceCDATAarr(str, cdata) {
|
||
str = this.options.tagValueProcessor('' + str);
|
||
if (this.options.cdataPositionChar === '' || str === '') {
|
||
return str + '<![CDATA[' + cdata.join(']]><![CDATA[') + ']]' + this.tagEndChar;
|
||
} else {
|
||
for (let v in cdata) {
|
||
str = str.replace(this.options.cdataPositionChar, '<![CDATA[' + cdata[v] + ']]>');
|
||
}
|
||
return str + this.newLine;
|
||
}
|
||
}
|
||
|
||
function buildObjectNode(val, key, attrStr, level) {
|
||
if (attrStr && !val.includes('<')) {
|
||
return (
|
||
this.indentate(level) +
|
||
'<' +
|
||
key +
|
||
attrStr +
|
||
'>' +
|
||
val +
|
||
//+ this.newLine
|
||
// + this.indentate(level)
|
||
'</' +
|
||
key +
|
||
this.tagEndChar
|
||
);
|
||
} else {
|
||
return (
|
||
this.indentate(level) +
|
||
'<' +
|
||
key +
|
||
attrStr +
|
||
this.tagEndChar +
|
||
val +
|
||
//+ this.newLine
|
||
this.indentate(level) +
|
||
'</' +
|
||
key +
|
||
this.tagEndChar
|
||
);
|
||
}
|
||
}
|
||
|
||
function buildEmptyObjNode(val, key, attrStr, level) {
|
||
if (val !== '') {
|
||
return this.buildObjectNode(val, key, attrStr, level);
|
||
} else {
|
||
return this.indentate(level) + '<' + key + attrStr + '/' + this.tagEndChar;
|
||
//+ this.newLine
|
||
}
|
||
}
|
||
|
||
function buildTextValNode(val, key, attrStr, level) {
|
||
return (
|
||
this.indentate(level) +
|
||
'<' +
|
||
key +
|
||
attrStr +
|
||
'>' +
|
||
this.options.tagValueProcessor(val) +
|
||
'</' +
|
||
key +
|
||
this.tagEndChar
|
||
);
|
||
}
|
||
|
||
function buildEmptyTextNode(val, key, attrStr, level) {
|
||
if (val !== '') {
|
||
return this.buildTextValNode(val, key, attrStr, level);
|
||
} else {
|
||
return this.indentate(level) + '<' + key + attrStr + '/' + this.tagEndChar;
|
||
}
|
||
}
|
||
|
||
function indentate(level) {
|
||
return this.options.indentBy.repeat(level);
|
||
}
|
||
|
||
function isAttribute(name /*, options*/) {
|
||
if (name.startsWith(this.options.attributeNamePrefix)) {
|
||
return name.substr(this.attrPrefixLen);
|
||
} else {
|
||
return false;
|
||
}
|
||
}
|
||
|
||
function isCDATA(name) {
|
||
return name === this.options.cdataTagName;
|
||
}
|
||
|
||
//formatting
|
||
//indentation
|
||
//\n after each closing or self closing tag
|
||
|
||
module.exports = Parser;
|
||
|
||
|
||
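/*
 * Illustrative sketch, not part of the compiled bundle: this Parser turns a
 * plain object back into XML. With attributes enabled, keys carrying the
 * attributeNamePrefix become attributes and other keys become child tags:
 *
 *   const toXml = new Parser({ ignoreAttributes: false });
 *   toXml.parse({ file: { '@_name': 'a.txt', size: 42 } });
 *   // => '<file name="a.txt"><size>42</size></file>'
 */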
/***/ }),
|
||
|
||
/***/ 5282:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
const char = function(a) {
|
||
return String.fromCharCode(a);
|
||
};
|
||
|
||
const chars = {
|
||
nilChar: char(176),
|
||
missingChar: char(201),
|
||
nilPremitive: char(175),
|
||
missingPremitive: char(200),
|
||
|
||
emptyChar: char(178),
|
||
emptyValue: char(177), //empty Premitive
|
||
|
||
boundryChar: char(179),
|
||
|
||
objStart: char(198),
|
||
arrStart: char(204),
|
||
arrayEnd: char(185),
|
||
};
|
||
|
||
const charsArr = [
|
||
chars.nilChar,
|
||
chars.nilPremitive,
|
||
chars.missingChar,
|
||
chars.missingPremitive,
|
||
chars.boundryChar,
|
||
chars.emptyChar,
|
||
chars.emptyValue,
|
||
chars.arrayEnd,
|
||
chars.objStart,
|
||
chars.arrStart,
|
||
];
|
||
|
||
const _e = function(node, e_schema, options) {
|
||
if (typeof e_schema === 'string') {
|
||
//premitive
|
||
if (node && node[0] && node[0].val !== undefined) {
|
||
return getValue(node[0].val, e_schema);
|
||
} else {
|
||
return getValue(node, e_schema);
|
||
}
|
||
} else {
|
||
const hasValidData = hasData(node);
|
||
if (hasValidData === true) {
|
||
let str = '';
|
||
if (Array.isArray(e_schema)) {
|
||
//attributes can't be repeated, hence check only in child tags
|
||
str += chars.arrStart;
|
||
const itemSchema = e_schema[0];
|
||
//var itemSchemaType = itemSchema;
|
||
const arr_len = node.length;
|
||
|
||
if (typeof itemSchema === 'string') {
|
||
for (let arr_i = 0; arr_i < arr_len; arr_i++) {
|
||
const r = getValue(node[arr_i].val, itemSchema);
|
||
str = processValue(str, r);
|
||
}
|
||
} else {
|
||
for (let arr_i = 0; arr_i < arr_len; arr_i++) {
|
||
const r = _e(node[arr_i], itemSchema, options);
|
||
str = processValue(str, r);
|
||
}
|
||
}
|
||
str += chars.arrayEnd; //indicates that next item is not array item
|
||
} else {
|
||
//object
|
||
str += chars.objStart;
|
||
const keys = Object.keys(e_schema);
|
||
if (Array.isArray(node)) {
|
||
node = node[0];
|
||
}
|
||
for (let i in keys) {
|
||
const key = keys[i];
|
||
//a property defined in schema can be present either in attrsMap or children tags
|
||
//options.textNodeName will not present in both maps, take it's value from val
|
||
//options.attrNodeName will be present in attrsMap
|
||
let r;
|
||
if (!options.ignoreAttributes && node.attrsMap && node.attrsMap[key]) {
|
||
r = _e(node.attrsMap[key], e_schema[key], options);
|
||
} else if (key === options.textNodeName) {
|
||
r = _e(node.val, e_schema[key], options);
|
||
} else {
|
||
r = _e(node.child[key], e_schema[key], options);
|
||
}
|
||
str = processValue(str, r);
|
||
}
|
||
}
|
||
return str;
|
||
} else {
|
||
return hasValidData;
|
||
}
|
||
}
|
||
};
|
||
|
||
const getValue = function(a /*, type*/) {
|
||
switch (a) {
|
||
case undefined:
|
||
return chars.missingPremitive;
|
||
case null:
|
||
return chars.nilPremitive;
|
||
case '':
|
||
return chars.emptyValue;
|
||
default:
|
||
return a;
|
||
}
|
||
};
|
||
|
||
const processValue = function(str, r) {
|
||
if (!isAppChar(r[0]) && !isAppChar(str[str.length - 1])) {
|
||
str += chars.boundryChar;
|
||
}
|
||
return str + r;
|
||
};
|
||
|
||
const isAppChar = function(ch) {
|
||
return charsArr.indexOf(ch) !== -1;
|
||
};
|
||
|
||
function hasData(jObj) {
|
||
if (jObj === undefined) {
|
||
return chars.missingChar;
|
||
} else if (jObj === null) {
|
||
return chars.nilChar;
|
||
} else if (
|
||
jObj.child &&
|
||
Object.keys(jObj.child).length === 0 &&
|
||
(!jObj.attrsMap || Object.keys(jObj.attrsMap).length === 0)
|
||
) {
|
||
return chars.emptyChar;
|
||
} else {
|
||
return true;
|
||
}
|
||
}
|
||
|
||
const x2j = __nccwpck_require__(7926);
|
||
const buildOptions = __nccwpck_require__(429).buildOptions;
|
||
|
||
const convert2nimn = function(node, e_schema, options) {
|
||
options = buildOptions(options, x2j.defaultOptions, x2j.props);
|
||
return _e(node, e_schema, options);
|
||
};
|
||
|
||
exports.convert2nimn = convert2nimn;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 3885:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
const util = __nccwpck_require__(429);
|
||
|
||
const convertToJson = function(node, options, parentTagName) {
|
||
const jObj = {};
|
||
|
||
// when no child node or attr is present
|
||
if ((!node.child || util.isEmptyObject(node.child)) && (!node.attrsMap || util.isEmptyObject(node.attrsMap))) {
|
||
return util.isExist(node.val) ? node.val : '';
|
||
}
|
||
|
||
// otherwise create a textnode if node has some text
|
||
if (util.isExist(node.val) && !(typeof node.val === 'string' && (node.val === '' || node.val === options.cdataPositionChar))) {
|
||
const asArray = util.isTagNameInArrayMode(node.tagname, options.arrayMode, parentTagName)
|
||
jObj[options.textNodeName] = asArray ? [node.val] : node.val;
|
||
}
|
||
|
||
util.merge(jObj, node.attrsMap, options.arrayMode);
|
||
|
||
const keys = Object.keys(node.child);
|
||
for (let index = 0; index < keys.length; index++) {
|
||
const tagName = keys[index];
|
||
if (node.child[tagName] && node.child[tagName].length > 1) {
|
||
jObj[tagName] = [];
|
||
for (let tag in node.child[tagName]) {
|
||
if (node.child[tagName].hasOwnProperty(tag)) {
|
||
jObj[tagName].push(convertToJson(node.child[tagName][tag], options, tagName));
|
||
}
|
||
}
|
||
} else {
|
||
const result = convertToJson(node.child[tagName][0], options, tagName);
|
||
const asArray = (options.arrayMode === true && typeof result === 'object') || util.isTagNameInArrayMode(tagName, options.arrayMode, parentTagName);
|
||
jObj[tagName] = asArray ? [result] : result;
|
||
}
|
||
}
|
||
|
||
//add value
|
||
return jObj;
|
||
};
|
||
|
||
exports.convertToJson = convertToJson;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 3305:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
const util = __nccwpck_require__(429);
|
||
const buildOptions = __nccwpck_require__(429).buildOptions;
|
||
const x2j = __nccwpck_require__(7926);
|
||
|
||
//TODO: do it later
|
||
const convertToJsonString = function(node, options) {
|
||
options = buildOptions(options, x2j.defaultOptions, x2j.props);
|
||
|
||
options.indentBy = options.indentBy || '';
|
||
return _cToJsonStr(node, options, 0);
|
||
};
|
||
|
||
const _cToJsonStr = function(node, options, level) {
|
||
let jObj = '{';
|
||
|
||
//traverse all the children
|
||
const keys = Object.keys(node.child);
|
||
|
||
for (let index = 0; index < keys.length; index++) {
|
||
var tagname = keys[index];
|
||
if (node.child[tagname] && node.child[tagname].length > 1) {
|
||
jObj += '"' + tagname + '" : [ ';
|
||
for (var tag in node.child[tagname]) {
|
||
jObj += _cToJsonStr(node.child[tagname][tag], options) + ' , ';
|
||
}
|
||
jObj = jObj.substr(0, jObj.length - 1) + ' ] '; //remove extra comma in last
|
||
} else {
|
||
jObj += '"' + tagname + '" : ' + _cToJsonStr(node.child[tagname][0], options) + ' ,';
|
||
}
|
||
}
|
||
util.merge(jObj, node.attrsMap);
|
||
//add attrsMap as new children
|
||
if (util.isEmptyObject(jObj)) {
|
||
return util.isExist(node.val) ? node.val : '';
|
||
} else {
|
||
if (util.isExist(node.val)) {
|
||
if (!(typeof node.val === 'string' && (node.val === '' || node.val === options.cdataPositionChar))) {
|
||
jObj += '"' + options.textNodeName + '" : ' + stringval(node.val);
|
||
}
|
||
}
|
||
}
|
||
//add value
|
||
if (jObj[jObj.length - 1] === ',') {
|
||
jObj = jObj.substr(0, jObj.length - 2);
|
||
}
|
||
return jObj + '}';
|
||
};
|
||
|
||
function stringval(v) {
|
||
if (v === true || v === false || !isNaN(v)) {
|
||
return v;
|
||
} else {
|
||
return '"' + v + '"';
|
||
}
|
||
}
|
||
|
||
function indentate(options, level) {
|
||
return options.indentBy.repeat(level);
|
||
}
|
||
|
||
exports.convertToJsonString = convertToJsonString;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 704:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
const nodeToJson = __nccwpck_require__(3885);
|
||
const xmlToNodeobj = __nccwpck_require__(7926);
|
||
const x2xmlnode = __nccwpck_require__(7926);
|
||
const buildOptions = __nccwpck_require__(429).buildOptions;
|
||
const validator = __nccwpck_require__(137);
|
||
|
||
exports.parse = function(xmlData, options, validationOption) {
|
||
if( validationOption){
|
||
if(validationOption === true) validationOption = {}
|
||
|
||
const result = validator.validate(xmlData, validationOption);
|
||
if (result !== true) {
|
||
throw Error( result.err.msg)
|
||
}
|
||
}
|
||
options = buildOptions(options, x2xmlnode.defaultOptions, x2xmlnode.props);
|
||
const traversableObj = xmlToNodeobj.getTraversalObj(xmlData, options)
|
||
//print(traversableObj, " ");
|
||
return nodeToJson.convertToJson(traversableObj, options);
|
||
};
|
||
exports.convertTonimn = __nccwpck_require__(5282).convert2nimn;
|
||
exports.getTraversalObj = xmlToNodeobj.getTraversalObj;
|
||
exports.convertToJson = nodeToJson.convertToJson;
|
||
exports.convertToJsonString = __nccwpck_require__(3305).convertToJsonString;
|
||
exports.validate = validator.validate;
|
||
exports.j2xParser = __nccwpck_require__(695);
|
||
exports.parseToNimn = function(xmlData, schema, options) {
|
||
return exports.convertTonimn(exports.getTraversalObj(xmlData, options), schema, options);
|
||
};
|
||
|
||
|
||
function print(xmlNode, indentation){
|
||
if(xmlNode){
|
||
console.log(indentation + "{")
|
||
console.log(indentation + " \"tagName\": \"" + xmlNode.tagname + "\", ");
|
||
if(xmlNode.parent){
|
||
console.log(indentation + " \"parent\": \"" + xmlNode.parent.tagname + "\", ");
|
||
}
|
||
console.log(indentation + " \"val\": \"" + xmlNode.val + "\", ");
|
||
console.log(indentation + " \"attrs\": " + JSON.stringify(xmlNode.attrsMap,null,4) + ", ");
|
||
|
||
if(xmlNode.child){
|
||
console.log(indentation + "\"child\": {")
|
||
const indentation2 = indentation + indentation;
|
||
Object.keys(xmlNode.child).forEach( function(key) {
|
||
const node = xmlNode.child[key];
|
||
|
||
if(Array.isArray(node)){
|
||
console.log(indentation + "\""+key+"\" :[")
|
||
node.forEach( function(item,index) {
|
||
//console.log(indentation + " \""+index+"\" : [")
|
||
print(item, indentation2);
|
||
})
|
||
console.log(indentation + "],")
|
||
}else{
|
||
console.log(indentation + " \""+key+"\" : {")
|
||
print(node, indentation2);
|
||
console.log(indentation + "},")
|
||
}
|
||
});
|
||
console.log(indentation + "},")
|
||
}
|
||
console.log(indentation + "},")
|
||
}
|
||
}
|
||
|
||
|
||
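/*
 * Illustrative sketch, not part of the compiled bundle: the exported parse()
 * optionally validates first (passing `true` uses the validator's defaults
 * and throws on malformed XML), then builds the traversal tree and converts
 * it to a plain object:
 *
 *   const obj = exports.parse('<a><b>1</b></a>', {}, true);
 *   // => { a: { b: 1 } }  ('1' is converted to a number by default)
 */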
/***/ }),
|
||
|
||
/***/ 429:
|
||
/***/ ((__unused_webpack_module, exports) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD';
|
||
const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040';
|
||
const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*'
|
||
const regexName = new RegExp('^' + nameRegexp + '$');
|
||
|
||
const getAllMatches = function(string, regex) {
|
||
const matches = [];
|
||
let match = regex.exec(string);
|
||
while (match) {
|
||
const allmatches = [];
|
||
const len = match.length;
|
||
for (let index = 0; index < len; index++) {
|
||
allmatches.push(match[index]);
|
||
}
|
||
matches.push(allmatches);
|
||
match = regex.exec(string);
|
||
}
|
||
return matches;
|
||
};
|
||
|
||
const isName = function(string) {
|
||
const match = regexName.exec(string);
|
||
return !(match === null || typeof match === 'undefined');
|
||
};
|
||
|
||
exports.isExist = function(v) {
|
||
return typeof v !== 'undefined';
|
||
};
|
||
|
||
exports.isEmptyObject = function(obj) {
|
||
return Object.keys(obj).length === 0;
|
||
};
|
||
|
||
/**
|
||
* Copy all the properties of a into b.
|
||
* @param {*} target
|
||
* @param {*} a
|
||
*/
|
||
exports.merge = function(target, a, arrayMode) {
|
||
if (a) {
|
||
const keys = Object.keys(a); // will return an array of own properties
|
||
const len = keys.length; //don't make it inline
|
||
for (let i = 0; i < len; i++) {
|
||
if (arrayMode === 'strict') {
|
||
target[keys[i]] = [ a[keys[i]] ];
|
||
} else {
|
||
target[keys[i]] = a[keys[i]];
|
||
}
|
||
}
|
||
}
|
||
};
|
||
/* exports.merge =function (b,a){
|
||
return Object.assign(b,a);
|
||
} */
|
||
|
||
exports.getValue = function(v) {
|
||
if (exports.isExist(v)) {
|
||
return v;
|
||
} else {
|
||
return '';
|
||
}
|
||
};
|
||
|
||
// const fakeCall = function(a) {return a;};
|
||
// const fakeCallNoReturn = function() {};
|
||
|
||
exports.buildOptions = function(options, defaultOptions, props) {
|
||
var newOptions = {};
|
||
if (!options) {
|
||
return defaultOptions; //if there are not options
|
||
}
|
||
|
||
for (let i = 0; i < props.length; i++) {
|
||
if (options[props[i]] !== undefined) {
|
||
newOptions[props[i]] = options[props[i]];
|
||
} else {
|
||
newOptions[props[i]] = defaultOptions[props[i]];
|
||
}
|
||
}
|
||
return newOptions;
|
||
};
|
||
|
||
/**
|
||
* Check if a tag name should be treated as array
|
||
*
|
||
* @param tagName the node tagname
|
||
* @param arrayMode the array mode option
|
||
* @param parentTagName the parent tag name
|
||
* @returns {boolean} true if node should be parsed as array
|
||
*/
|
||
exports.isTagNameInArrayMode = function (tagName, arrayMode, parentTagName) {
|
||
if (arrayMode === false) {
|
||
return false;
|
||
} else if (arrayMode instanceof RegExp) {
|
||
return arrayMode.test(tagName);
|
||
} else if (typeof arrayMode === 'function') {
|
||
return !!arrayMode(tagName, parentTagName);
|
||
}
|
||
|
||
return arrayMode === "strict";
|
||
}
|
||
|
||
exports.isName = isName;
|
||
exports.getAllMatches = getAllMatches;
|
||
exports.nameRegexp = nameRegexp;
|
||
|
||
|
||
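/*
 * Illustrative sketch, not part of the compiled bundle: buildOptions() copies
 * only the whitelisted props, falling back to the defaults for anything the
 * caller omitted; with no options object it returns the defaults themselves:
 *
 *   exports.buildOptions({ b: 2 }, { a: 1, b: 0 }, ['a', 'b']); // => { a: 1, b: 2 }
 */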
/***/ }),
|
||
|
||
/***/ 137:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
const util = __nccwpck_require__(429);
|
||
|
||
const defaultOptions = {
|
||
allowBooleanAttributes: false, //A tag can have attributes without any value
|
||
};
|
||
|
||
const props = ['allowBooleanAttributes'];
|
||
|
||
//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g");
|
||
exports.validate = function (xmlData, options) {
|
||
options = util.buildOptions(options, defaultOptions, props);
|
||
|
||
//xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line
|
||
//xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag
|
||
//xmlData = xmlData.replace(/(<!DOCTYPE[\s\w\"\.\/\-\:]+(\[.*\])*\s*>)/g,"");//Remove DOCTYPE
|
||
const tags = [];
|
||
let tagFound = false;
|
||
|
||
//indicates that the root tag has been closed (aka. depth 0 has been reached)
|
||
let reachedRoot = false;
|
||
|
||
if (xmlData[0] === '\ufeff') {
|
||
// check for byte order mark (BOM)
|
||
xmlData = xmlData.substr(1);
|
||
}
|
||
|
||
for (let i = 0; i < xmlData.length; i++) {
|
||
|
||
if (xmlData[i] === '<' && xmlData[i+1] === '?') {
|
||
i+=2;
|
||
i = readPI(xmlData,i);
|
||
if (i.err) return i;
|
||
}else if (xmlData[i] === '<') {
|
||
//starting of tag
|
||
//read until you reach to '>' avoiding any '>' in attribute value
|
||
|
||
i++;
|
||
|
||
if (xmlData[i] === '!') {
|
||
i = readCommentAndCDATA(xmlData, i);
|
||
continue;
|
||
} else {
|
||
let closingTag = false;
|
||
if (xmlData[i] === '/') {
|
||
//closing tag
|
||
closingTag = true;
|
||
i++;
|
||
}
|
||
//read tagname
|
||
let tagName = '';
|
||
for (; i < xmlData.length &&
|
||
xmlData[i] !== '>' &&
|
||
xmlData[i] !== ' ' &&
|
||
xmlData[i] !== '\t' &&
|
||
xmlData[i] !== '\n' &&
|
||
xmlData[i] !== '\r'; i++
|
||
) {
|
||
tagName += xmlData[i];
|
||
}
|
||
tagName = tagName.trim();
|
||
//console.log(tagName);
|
||
|
||
if (tagName[tagName.length - 1] === '/') {
|
||
//self closing tag without attributes
|
||
tagName = tagName.substring(0, tagName.length - 1);
|
||
//continue;
|
||
i--;
|
||
}
|
||
if (!validateTagName(tagName)) {
|
||
let msg;
|
||
if (tagName.trim().length === 0) {
|
||
msg = "There is an unnecessary space between tag name and backward slash '</ ..'.";
|
||
} else {
|
||
msg = "Tag '"+tagName+"' is an invalid name.";
|
||
}
|
||
return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i));
|
||
}
|
||
|
||
const result = readAttributeStr(xmlData, i);
|
||
if (result === false) {
|
||
return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i));
|
||
}
|
||
let attrStr = result.value;
|
||
i = result.index;
|
||
|
||
if (attrStr[attrStr.length - 1] === '/') {
|
||
//self closing tag
|
||
attrStr = attrStr.substring(0, attrStr.length - 1);
|
||
const isValid = validateAttributeString(attrStr, options);
|
||
if (isValid === true) {
|
||
tagFound = true;
|
||
//continue; //text may presents after self closing tag
|
||
} else {
|
||
//the result from the nested function returns the position of the error within the attribute
|
||
//in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute
|
||
//this gives us the absolute index in the entire xml, which we can use to find the line at last
|
||
return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line));
|
||
}
|
||
} else if (closingTag) {
|
||
if (!result.tagClosed) {
|
||
return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i));
|
||
} else if (attrStr.trim().length > 0) {
|
||
return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, i));
|
||
} else {
|
||
const otg = tags.pop();
|
||
if (tagName !== otg) {
|
||
return getErrorObject('InvalidTag', "Closing tag '"+otg+"' is expected inplace of '"+tagName+"'.", getLineNumberForPosition(xmlData, i));
|
||
}
|
||
|
||
//when there are no more tags, we reached the root level.
|
||
if (tags.length == 0) {
|
||
reachedRoot = true;
|
||
}
|
||
}
|
||
} else {
|
||
const isValid = validateAttributeString(attrStr, options);
|
||
if (isValid !== true) {
|
||
//the result from the nested function returns the position of the error within the attribute
|
||
//in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute
|
||
//this gives us the absolute index in the entire xml, which we can use to find the line at last
|
||
return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line));
|
||
}
|
||
|
||
//if the root level has been reached before ...
|
||
if (reachedRoot === true) {
|
||
return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i));
|
||
} else {
|
||
tags.push(tagName);
|
||
}
|
||
tagFound = true;
|
||
}
|
||
|
||
//skip tag text value
|
||
//It may include comments and CDATA value
|
||
for (i++; i < xmlData.length; i++) {
|
||
if (xmlData[i] === '<') {
|
||
if (xmlData[i + 1] === '!') {
|
||
//comment or CDATA
|
||
i++;
|
||
i = readCommentAndCDATA(xmlData, i);
|
||
continue;
|
||
} else if (xmlData[i+1] === '?') {
|
||
i = readPI(xmlData, ++i);
|
||
if (i.err) return i;
|
||
} else{
|
||
break;
|
||
}
|
||
} else if (xmlData[i] === '&') {
|
||
const afterAmp = validateAmpersand(xmlData, i);
|
||
if (afterAmp == -1)
|
||
return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i));
|
||
i = afterAmp;
|
||
}
|
||
} //end of reading tag text value
|
||
if (xmlData[i] === '<') {
|
||
i--;
|
||
}
|
||
}
|
||
} else {
|
||
if (xmlData[i] === ' ' || xmlData[i] === '\t' || xmlData[i] === '\n' || xmlData[i] === '\r') {
|
||
continue;
|
||
}
|
||
return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i));
|
||
}
|
||
}
|
||
|
||
if (!tagFound) {
|
||
return getErrorObject('InvalidXml', 'Start tag expected.', 1);
|
||
} else if (tags.length > 0) {
|
||
return getErrorObject('InvalidXml', "Invalid '"+JSON.stringify(tags, null, 4).replace(/\r?\n/g, '')+"' found.", 1);
|
||
}
|
||
|
||
return true;
|
||
};
|
||
|
||
/**
|
||
 * Read processing instructions and skip them
|
||
* @param {*} xmlData
|
||
* @param {*} i
|
||
*/
|
||
function readPI(xmlData, i) {
|
||
var start = i;
|
||
for (; i < xmlData.length; i++) {
|
||
if (xmlData[i] == '?' || xmlData[i] == ' ') {
|
||
//tagname
|
||
var tagname = xmlData.substr(start, i - start);
|
||
if (i > 5 && tagname === 'xml') {
|
||
return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i));
|
||
} else if (xmlData[i] == '?' && xmlData[i + 1] == '>') {
|
||
//check if valid attribute string
|
||
i++;
|
||
break;
|
||
} else {
|
||
continue;
|
||
}
|
||
}
|
||
}
|
||
return i;
|
||
}
|
||
|
||
function readCommentAndCDATA(xmlData, i) {
|
||
if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') {
|
||
//comment
|
||
for (i += 3; i < xmlData.length; i++) {
|
||
if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') {
|
||
i += 2;
|
||
break;
|
||
}
|
||
}
|
||
} else if (
|
||
xmlData.length > i + 8 &&
|
||
xmlData[i + 1] === 'D' &&
|
||
xmlData[i + 2] === 'O' &&
|
||
xmlData[i + 3] === 'C' &&
|
||
xmlData[i + 4] === 'T' &&
|
||
xmlData[i + 5] === 'Y' &&
|
||
xmlData[i + 6] === 'P' &&
|
||
xmlData[i + 7] === 'E'
|
||
) {
|
||
let angleBracketsCount = 1;
|
||
for (i += 8; i < xmlData.length; i++) {
|
||
if (xmlData[i] === '<') {
|
||
angleBracketsCount++;
|
||
} else if (xmlData[i] === '>') {
|
||
angleBracketsCount--;
|
||
if (angleBracketsCount === 0) {
|
||
break;
|
||
}
|
||
}
|
||
}
|
||
} else if (
|
||
xmlData.length > i + 9 &&
|
||
xmlData[i + 1] === '[' &&
|
||
xmlData[i + 2] === 'C' &&
|
||
xmlData[i + 3] === 'D' &&
|
||
xmlData[i + 4] === 'A' &&
|
||
xmlData[i + 5] === 'T' &&
|
||
xmlData[i + 6] === 'A' &&
|
||
xmlData[i + 7] === '['
|
||
) {
|
||
for (i += 8; i < xmlData.length; i++) {
|
||
if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') {
|
||
i += 2;
|
||
break;
|
||
}
|
||
}
|
||
}
|
||
|
||
return i;
|
||
}
|
||
|
||
var doubleQuote = '"';
|
||
var singleQuote = "'";
|
||
|
||
/**
|
||
* Keep reading xmlData until '<' is found outside the attribute value.
|
||
* @param {string} xmlData
|
||
* @param {number} i
|
||
*/
|
||
function readAttributeStr(xmlData, i) {
|
||
let attrStr = '';
|
||
let startChar = '';
|
||
let tagClosed = false;
|
||
for (; i < xmlData.length; i++) {
|
||
if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) {
|
||
if (startChar === '') {
|
||
startChar = xmlData[i];
|
||
} else if (startChar !== xmlData[i]) {
|
||
//if the value is enclosed in double quotes then single quotes are allowed inside the value, and vice versa
|
||
continue;
|
||
} else {
|
||
startChar = '';
|
||
}
|
||
} else if (xmlData[i] === '>') {
|
||
if (startChar === '') {
|
||
tagClosed = true;
|
||
break;
|
||
}
|
||
}
|
||
attrStr += xmlData[i];
|
||
}
|
||
if (startChar !== '') {
|
||
return false;
|
||
}
|
||
|
||
return {
|
||
value: attrStr,
|
||
index: i,
|
||
tagClosed: tagClosed
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Select all the attributes whether valid or invalid.
|
||
*/
|
||
const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g');
|
||
|
||
//attr, ="sd", a="amit's", a="sd"b="saf", ab cd=""
|
||
|
||
function validateAttributeString(attrStr, options) {
|
||
//console.log("start:"+attrStr+":end");
|
||
|
||
//if(attrStr.trim().length === 0) return true; //empty string
|
||
|
||
const matches = util.getAllMatches(attrStr, validAttrStrRegxp);
|
||
const attrNames = {};
|
||
|
||
for (let i = 0; i < matches.length; i++) {
|
||
if (matches[i][1].length === 0) {
|
||
//no space before attribute name: a="sd"b="saf"
|
||
return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(attrStr, matches[i][0]))
|
||
} else if (matches[i][3] === undefined && !options.allowBooleanAttributes) {
|
||
//independent attribute: ab
|
||
return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(attrStr, matches[i][0]));
|
||
}
|
||
/* else if(matches[i][6] === undefined){//attribute without value: ab=
|
||
return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}};
|
||
} */
|
||
const attrName = matches[i][2];
|
||
if (!validateAttrName(attrName)) {
|
||
return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(attrStr, matches[i][0]));
|
||
}
|
||
if (!attrNames.hasOwnProperty(attrName)) {
|
||
//check for duplicate attribute.
|
||
attrNames[attrName] = 1;
|
||
} else {
|
||
return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(attrStr, matches[i][0]));
|
||
}
|
||
}
|
||
|
||
return true;
|
||
}
|
||
|
||
function validateNumberAmpersand(xmlData, i) {
|
||
let re = /\d/;
|
||
if (xmlData[i] === 'x') {
|
||
i++;
|
||
re = /[\da-fA-F]/;
|
||
}
|
||
for (; i < xmlData.length; i++) {
|
||
if (xmlData[i] === ';')
|
||
return i;
|
||
if (!xmlData[i].match(re))
|
||
break;
|
||
}
|
||
return -1;
|
||
}
|
||
|
||
function validateAmpersand(xmlData, i) {
|
||
// https://www.w3.org/TR/xml/#dt-charref
|
||
i++;
|
||
if (xmlData[i] === ';')
|
||
return -1;
|
||
if (xmlData[i] === '#') {
|
||
i++;
|
||
return validateNumberAmpersand(xmlData, i);
|
||
}
|
||
let count = 0;
|
||
for (; i < xmlData.length; i++, count++) {
|
||
if (xmlData[i].match(/\w/) && count < 20)
|
||
continue;
|
||
if (xmlData[i] === ';')
|
||
break;
|
||
return -1;
|
||
}
|
||
return i;
|
||
}
|
||
|
||
function getErrorObject(code, message, lineNumber) {
|
||
return {
|
||
err: {
|
||
code: code,
|
||
msg: message,
|
||
line: lineNumber,
|
||
},
|
||
};
|
||
}
|
||
|
||
function validateAttrName(attrName) {
|
||
return util.isName(attrName);
|
||
}
|
||
|
||
// const startsWithXML = /^xml/i;
|
||
|
||
function validateTagName(tagname) {
|
||
return util.isName(tagname) /* && !tagname.match(startsWithXML) */;
|
||
}
|
||
|
||
//this function returns the line number for the character at the given index
|
||
function getLineNumberForPosition(xmlData, index) {
|
||
var lines = xmlData.substring(0, index).split(/\r?\n/);
|
||
return lines.length;
|
||
}
|
||
|
||
//this function returns the position of the last character of match within attrStr
|
||
function getPositionFromMatch(attrStr, match) {
|
||
return attrStr.indexOf(match) + match.length;
|
||
}
|
||
|
||
|
||
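/*
 * Illustrative sketch, not part of the compiled bundle: validate() returns
 * true for well-formed XML and an error descriptor otherwise:
 *
 *   exports.validate('<a attr="1">text</a>'); // => true
 *   exports.validate('<a><b></a>');           // => { err: { code: 'InvalidTag', msg: ..., line: ... } }
 */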
/***/ }),
|
||
|
||
/***/ 3106:
/***/ ((module) => {

"use strict";

module.exports = function(tagname, parent, val) {
  this.tagname = tagname;
  this.parent = parent;
  this.child = {}; //child tags
  this.attrsMap = {}; //attributes map
  this.val = val; //text only
  this.addChild = function(child) {
    if (Array.isArray(this.child[child.tagname])) {
      //already present
      this.child[child.tagname].push(child);
    } else {
      this.child[child.tagname] = [child];
    }
  };
};

/***/ }),
/***/ 7926:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
const util = __nccwpck_require__(429);
|
||
const buildOptions = __nccwpck_require__(429).buildOptions;
|
||
const xmlNode = __nccwpck_require__(3106);
|
||
const regx =
|
||
'<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)'
|
||
.replace(/NAME/g, util.nameRegexp);
|
||
|
||
//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g");
|
||
//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g");
|
||
|
||
//polyfill
|
||
if (!Number.parseInt && window.parseInt) {
|
||
Number.parseInt = window.parseInt;
|
||
}
|
||
if (!Number.parseFloat && window.parseFloat) {
|
||
Number.parseFloat = window.parseFloat;
|
||
}
|
||
|
||
const defaultOptions = {
|
||
attributeNamePrefix: '@_',
|
||
attrNodeName: false,
|
||
textNodeName: '#text',
|
||
ignoreAttributes: true,
|
||
ignoreNameSpace: false,
|
||
allowBooleanAttributes: false, //a tag can have attributes without any value
|
||
//ignoreRootElement : false,
|
||
parseNodeValue: true,
|
||
parseAttributeValue: false,
|
||
arrayMode: false,
|
||
trimValues: true, //Trim string values of tag and attributes
|
||
cdataTagName: false,
|
||
cdataPositionChar: '\\c',
|
||
tagValueProcessor: function(a, tagName) {
|
||
return a;
|
||
},
|
||
attrValueProcessor: function(a, attrName) {
|
||
return a;
|
||
},
|
||
stopNodes: []
|
||
//decodeStrict: false,
|
||
};
|
||
|
||
exports.defaultOptions = defaultOptions;
|
||
|
||
const props = [
|
||
'attributeNamePrefix',
|
||
'attrNodeName',
|
||
'textNodeName',
|
||
'ignoreAttributes',
|
||
'ignoreNameSpace',
|
||
'allowBooleanAttributes',
|
||
'parseNodeValue',
|
||
'parseAttributeValue',
|
||
'arrayMode',
|
||
'trimValues',
|
||
'cdataTagName',
|
||
'cdataPositionChar',
|
||
'tagValueProcessor',
|
||
'attrValueProcessor',
|
||
'parseTrueNumberOnly',
|
||
'stopNodes'
|
||
];
|
||
exports.props = props;
|
||
|
||
/**
|
||
* Trim -> valueProcessor -> parse value
|
||
* @param {string} tagName
|
||
* @param {string} val
|
||
* @param {object} options
|
||
*/
|
||
function processTagValue(tagName, val, options) {
|
||
if (val) {
|
||
if (options.trimValues) {
|
||
val = val.trim();
|
||
}
|
||
val = options.tagValueProcessor(val, tagName);
|
||
val = parseValue(val, options.parseNodeValue, options.parseTrueNumberOnly);
|
||
}
|
||
|
||
return val;
|
||
}
|
||
|
||
function resolveNameSpace(tagname, options) {
|
||
if (options.ignoreNameSpace) {
|
||
const tags = tagname.split(':');
|
||
const prefix = tagname.charAt(0) === '/' ? '/' : '';
|
||
if (tags[0] === 'xmlns') {
|
||
return '';
|
||
}
|
||
if (tags.length === 2) {
|
||
tagname = prefix + tags[1];
|
||
}
|
||
}
|
||
return tagname;
|
||
}
|
||
|
||
function parseValue(val, shouldParse, parseTrueNumberOnly) {
|
||
if (shouldParse && typeof val === 'string') {
|
||
let parsed;
|
||
if (val.trim() === '' || isNaN(val)) {
|
||
parsed = val === 'true' ? true : val === 'false' ? false : val;
|
||
} else {
|
||
if (val.indexOf('0x') !== -1) {
|
||
//support hexadecimal
|
||
parsed = Number.parseInt(val, 16);
|
||
} else if (val.indexOf('.') !== -1) {
|
||
parsed = Number.parseFloat(val);
|
||
val = val.replace(/\.?0+$/, "");
|
||
} else {
|
||
parsed = Number.parseInt(val, 10);
|
||
}
|
||
if (parseTrueNumberOnly) {
|
||
parsed = String(parsed) === val ? parsed : val;
|
||
}
|
||
}
|
||
return parsed;
|
||
} else {
|
||
if (util.isExist(val)) {
|
||
return val;
|
||
} else {
|
||
return '';
|
||
}
|
||
}
|
||
}
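// Quick reference — a sketch of how parseValue treats a few typical inputs
// (second argument shouldParse = true; parseTrueNumberOnly only where noted):
//   parseValue('true', true)        -> true      (boolean literal)
//   parseValue('0x1A', true)        -> 26        (hexadecimal)
//   parseValue('1.50', true)        -> 1.5       (float)
//   parseValue('007', true, true)   -> '007'     (kept as string: String(7) !== '007')
//   parseValue('abc', true)         -> 'abc'     (non-numeric text returned unchanged)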
|
||
|
||
//TODO: change regex to capture NS
|
||
//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm");
|
||
const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])(.*?)\\3)?', 'g');
|
||
|
||
function buildAttributesMap(attrStr, options) {
|
||
if (!options.ignoreAttributes && typeof attrStr === 'string') {
|
||
attrStr = attrStr.replace(/\r?\n/g, ' ');
|
||
//attrStr = attrStr || attrStr.trim();
|
||
|
||
const matches = util.getAllMatches(attrStr, attrsRegx);
|
||
const len = matches.length; //don't make it inline
|
||
const attrs = {};
|
||
for (let i = 0; i < len; i++) {
|
||
const attrName = resolveNameSpace(matches[i][1], options);
|
||
if (attrName.length) {
|
||
if (matches[i][4] !== undefined) {
|
||
if (options.trimValues) {
|
||
matches[i][4] = matches[i][4].trim();
|
||
}
|
||
matches[i][4] = options.attrValueProcessor(matches[i][4], attrName);
|
||
attrs[options.attributeNamePrefix + attrName] = parseValue(
|
||
matches[i][4],
|
||
options.parseAttributeValue,
|
||
options.parseTrueNumberOnly
|
||
);
|
||
} else if (options.allowBooleanAttributes) {
|
||
attrs[options.attributeNamePrefix + attrName] = true;
|
||
}
|
||
}
|
||
}
|
||
if (!Object.keys(attrs).length) {
|
||
return;
|
||
}
|
||
if (options.attrNodeName) {
|
||
const attrCollection = {};
|
||
attrCollection[options.attrNodeName] = attrs;
|
||
return attrCollection;
|
||
}
|
||
return attrs;
|
||
}
|
||
}
|
||
|
||
const getTraversalObj = function(xmlData, options) {
|
||
xmlData = xmlData.replace(/\r\n?/g, "\n");
|
||
options = buildOptions(options, defaultOptions, props);
|
||
const xmlObj = new xmlNode('!xml');
|
||
let currentNode = xmlObj;
|
||
let textData = "";
|
||
|
||
//function match(xmlData){
|
||
for(let i=0; i< xmlData.length; i++){
|
||
const ch = xmlData[i];
|
||
if(ch === '<'){
|
||
if( xmlData[i+1] === '/') {//Closing Tag
|
||
const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.")
|
||
let tagName = xmlData.substring(i+2,closeIndex).trim();
|
||
|
||
if(options.ignoreNameSpace){
|
||
const colonIndex = tagName.indexOf(":");
|
||
if(colonIndex !== -1){
|
||
tagName = tagName.substr(colonIndex+1);
|
||
}
|
||
}
|
||
|
||
/* if (currentNode.parent) {
|
||
currentNode.parent.val = util.getValue(currentNode.parent.val) + '' + processTagValue2(tagName, textData , options);
|
||
} */
|
||
if(currentNode){
|
||
if(currentNode.val){
|
||
currentNode.val = util.getValue(currentNode.val) + '' + processTagValue(tagName, textData , options);
|
||
}else{
|
||
currentNode.val = processTagValue(tagName, textData , options);
|
||
}
|
||
}
|
||
|
||
if (options.stopNodes.length && options.stopNodes.includes(currentNode.tagname)) {
|
||
currentNode.child = []
|
||
if (currentNode.attrsMap == undefined) { currentNode.attrsMap = {}}
|
||
currentNode.val = xmlData.substr(currentNode.startIndex + 1, i - currentNode.startIndex - 1)
|
||
}
|
||
currentNode = currentNode.parent;
|
||
textData = "";
|
||
i = closeIndex;
|
||
} else if( xmlData[i+1] === '?') {
|
||
i = findClosingIndex(xmlData, "?>", i, "Pi Tag is not closed.")
|
||
} else if(xmlData.substr(i + 1, 3) === '!--') {
|
||
i = findClosingIndex(xmlData, "-->", i, "Comment is not closed.")
|
||
} else if( xmlData.substr(i + 1, 2) === '!D') {
|
||
const closeIndex = findClosingIndex(xmlData, ">", i, "DOCTYPE is not closed.")
|
||
const tagExp = xmlData.substring(i, closeIndex);
|
||
if(tagExp.indexOf("[") >= 0){
|
||
i = xmlData.indexOf("]>", i) + 1;
|
||
}else{
|
||
i = closeIndex;
|
||
}
|
||
}else if(xmlData.substr(i + 1, 2) === '![') {
|
||
const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2
|
||
const tagExp = xmlData.substring(i + 9,closeIndex);
|
||
|
||
//considerations
|
||
//1. CDATA will always have parent node
|
||
//2. A tag with CDATA is not a leaf node, so its value will be of string type.
|
||
if(textData){
|
||
currentNode.val = util.getValue(currentNode.val) + '' + processTagValue(currentNode.tagname, textData , options);
|
||
textData = "";
|
||
}
|
||
|
||
if (options.cdataTagName) {
|
||
//add cdata node
|
||
const childNode = new xmlNode(options.cdataTagName, currentNode, tagExp);
|
||
currentNode.addChild(childNode);
|
||
//for backtracking
|
||
currentNode.val = util.getValue(currentNode.val) + options.cdataPositionChar;
|
||
//add rest value to parent node
|
||
if (tagExp) {
|
||
childNode.val = tagExp;
|
||
}
|
||
} else {
|
||
currentNode.val = (currentNode.val || '') + (tagExp || '');
|
||
}
|
||
|
||
i = closeIndex + 2;
|
||
}else {//Opening tag
|
||
const result = closingIndexForOpeningTag(xmlData, i+1)
|
||
let tagExp = result.data;
|
||
const closeIndex = result.index;
|
||
const separatorIndex = tagExp.indexOf(" ");
|
||
let tagName = tagExp;
|
||
let shouldBuildAttributesMap = true;
|
||
if(separatorIndex !== -1){
|
||
tagName = tagExp.substr(0, separatorIndex).replace(/\s\s*$/, '');
|
||
tagExp = tagExp.substr(separatorIndex + 1);
|
||
}
|
||
|
||
if(options.ignoreNameSpace){
|
||
const colonIndex = tagName.indexOf(":");
|
||
if(colonIndex !== -1){
|
||
tagName = tagName.substr(colonIndex+1);
|
||
shouldBuildAttributesMap = tagName !== result.data.substr(colonIndex + 1);
|
||
}
|
||
}
|
||
|
||
//save text to parent node
|
||
if (currentNode && textData) {
|
||
if(currentNode.tagname !== '!xml'){
|
||
currentNode.val = util.getValue(currentNode.val) + '' + processTagValue( currentNode.tagname, textData, options);
|
||
}
|
||
}
|
||
|
||
if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){//selfClosing tag
|
||
|
||
if(tagName[tagName.length - 1] === "/"){ //remove trailing '/'
|
||
tagName = tagName.substr(0, tagName.length - 1);
|
||
tagExp = tagName;
|
||
}else{
|
||
tagExp = tagExp.substr(0, tagExp.length - 1);
|
||
}
|
||
|
||
const childNode = new xmlNode(tagName, currentNode, '');
|
||
if(tagName !== tagExp){
|
||
childNode.attrsMap = buildAttributesMap(tagExp, options);
|
||
}
|
||
currentNode.addChild(childNode);
|
||
}else{//opening tag
|
||
|
||
const childNode = new xmlNode( tagName, currentNode );
|
||
if (options.stopNodes.length && options.stopNodes.includes(childNode.tagname)) {
|
||
childNode.startIndex=closeIndex;
|
||
}
|
||
if(tagName !== tagExp && shouldBuildAttributesMap){
|
||
childNode.attrsMap = buildAttributesMap(tagExp, options);
|
||
}
|
||
currentNode.addChild(childNode);
|
||
currentNode = childNode;
|
||
}
|
||
textData = "";
|
||
i = closeIndex;
|
||
}
|
||
}else{
|
||
textData += xmlData[i];
|
||
}
|
||
}
|
||
return xmlObj;
|
||
}
|
||
|
||
function closingIndexForOpeningTag(data, i){
|
||
let attrBoundary;
|
||
let tagExp = "";
|
||
for (let index = i; index < data.length; index++) {
|
||
let ch = data[index];
|
||
if (attrBoundary) {
|
||
if (ch === attrBoundary) attrBoundary = "";//reset
|
||
} else if (ch === '"' || ch === "'") {
|
||
attrBoundary = ch;
|
||
} else if (ch === '>') {
|
||
return {
|
||
data: tagExp,
|
||
index: index
|
||
}
|
||
} else if (ch === '\t') {
|
||
ch = " "
|
||
}
|
||
tagExp += ch;
|
||
}
|
||
}
|
||
|
||
function findClosingIndex(xmlData, str, i, errMsg){
|
||
const closingIndex = xmlData.indexOf(str, i);
|
||
if(closingIndex === -1){
|
||
throw new Error(errMsg)
|
||
}else{
|
||
return closingIndex + str.length - 1;
|
||
}
|
||
}
|
||
|
||
exports.getTraversalObj = getTraversalObj;
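
// Usage sketch (assuming the defaults declared above, plus ignoreAttributes: false):
//   const root = getTraversalObj('<a id="1">hi</a>', { ignoreAttributes: false });
//   root.tagname               // '!xml' (synthetic root node)
//   root.child.a[0].val        // 'hi'
//   root.child.a[0].attrsMap   // { '@_id': '1' }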
|
||
|
||
|
||
/***/ }),

/***/ 5804:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

var debug;

module.exports = function () {
if (!debug) {
try {
/* eslint global-require: off */
debug = __nccwpck_require__(882)("follow-redirects");
}
catch (error) {
debug = function () { /* */ };
}
}
debug.apply(null, arguments);
};


/***/ }),
|
||
|
||
/***/ 6792:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
var url = __nccwpck_require__(8835);
|
||
var URL = url.URL;
|
||
var http = __nccwpck_require__(8605);
|
||
var https = __nccwpck_require__(7211);
|
||
var Writable = __nccwpck_require__(2413).Writable;
|
||
var assert = __nccwpck_require__(2357);
|
||
var debug = __nccwpck_require__(5804);
|
||
|
||
// Create handlers that pass events from native requests
|
||
var events = ["abort", "aborted", "connect", "error", "socket", "timeout"];
|
||
var eventHandlers = Object.create(null);
|
||
events.forEach(function (event) {
|
||
eventHandlers[event] = function (arg1, arg2, arg3) {
|
||
this._redirectable.emit(event, arg1, arg2, arg3);
|
||
};
|
||
});
|
||
|
||
// Error types with codes
|
||
var RedirectionError = createErrorType(
|
||
"ERR_FR_REDIRECTION_FAILURE",
|
||
""
|
||
);
|
||
var TooManyRedirectsError = createErrorType(
|
||
"ERR_FR_TOO_MANY_REDIRECTS",
|
||
"Maximum number of redirects exceeded"
|
||
);
|
||
var MaxBodyLengthExceededError = createErrorType(
|
||
"ERR_FR_MAX_BODY_LENGTH_EXCEEDED",
|
||
"Request body larger than maxBodyLength limit"
|
||
);
|
||
var WriteAfterEndError = createErrorType(
|
||
"ERR_STREAM_WRITE_AFTER_END",
|
||
"write after end"
|
||
);
|
||
|
||
// An HTTP(S) request that can be redirected
|
||
function RedirectableRequest(options, responseCallback) {
|
||
// Initialize the request
|
||
Writable.call(this);
|
||
this._sanitizeOptions(options);
|
||
this._options = options;
|
||
this._ended = false;
|
||
this._ending = false;
|
||
this._redirectCount = 0;
|
||
this._redirects = [];
|
||
this._requestBodyLength = 0;
|
||
this._requestBodyBuffers = [];
|
||
|
||
// Attach a callback if passed
|
||
if (responseCallback) {
|
||
this.on("response", responseCallback);
|
||
}
|
||
|
||
// React to responses of native requests
|
||
var self = this;
|
||
this._onNativeResponse = function (response) {
|
||
self._processResponse(response);
|
||
};
|
||
|
||
// Perform the first request
|
||
this._performRequest();
|
||
}
|
||
RedirectableRequest.prototype = Object.create(Writable.prototype);
|
||
|
||
RedirectableRequest.prototype.abort = function () {
|
||
abortRequest(this._currentRequest);
|
||
this.emit("abort");
|
||
};
|
||
|
||
// Writes buffered data to the current native request
|
||
RedirectableRequest.prototype.write = function (data, encoding, callback) {
|
||
// Writing is not allowed if end has been called
|
||
if (this._ending) {
|
||
throw new WriteAfterEndError();
|
||
}
|
||
|
||
// Validate input and shift parameters if necessary
|
||
if (!(typeof data === "string" || typeof data === "object" && ("length" in data))) {
|
||
throw new TypeError("data should be a string, Buffer or Uint8Array");
|
||
}
|
||
if (typeof encoding === "function") {
|
||
callback = encoding;
|
||
encoding = null;
|
||
}
|
||
|
||
// Ignore empty buffers, since writing them doesn't invoke the callback
|
||
// https://github.com/nodejs/node/issues/22066
|
||
if (data.length === 0) {
|
||
if (callback) {
|
||
callback();
|
||
}
|
||
return;
|
||
}
|
||
// Only write when we don't exceed the maximum body length
|
||
if (this._requestBodyLength + data.length <= this._options.maxBodyLength) {
|
||
this._requestBodyLength += data.length;
|
||
this._requestBodyBuffers.push({ data: data, encoding: encoding });
|
||
this._currentRequest.write(data, encoding, callback);
|
||
}
|
||
// Error when we exceed the maximum body length
|
||
else {
|
||
this.emit("error", new MaxBodyLengthExceededError());
|
||
this.abort();
|
||
}
|
||
};
|
||
|
||
// Ends the current native request
|
||
RedirectableRequest.prototype.end = function (data, encoding, callback) {
|
||
// Shift parameters if necessary
|
||
if (typeof data === "function") {
|
||
callback = data;
|
||
data = encoding = null;
|
||
}
|
||
else if (typeof encoding === "function") {
|
||
callback = encoding;
|
||
encoding = null;
|
||
}
|
||
|
||
// Write data if needed and end
|
||
if (!data) {
|
||
this._ended = this._ending = true;
|
||
this._currentRequest.end(null, null, callback);
|
||
}
|
||
else {
|
||
var self = this;
|
||
var currentRequest = this._currentRequest;
|
||
this.write(data, encoding, function () {
|
||
self._ended = true;
|
||
currentRequest.end(null, null, callback);
|
||
});
|
||
this._ending = true;
|
||
}
|
||
};
|
||
|
||
// Sets a header value on the current native request
|
||
RedirectableRequest.prototype.setHeader = function (name, value) {
|
||
this._options.headers[name] = value;
|
||
this._currentRequest.setHeader(name, value);
|
||
};
|
||
|
||
// Clears a header value on the current native request
|
||
RedirectableRequest.prototype.removeHeader = function (name) {
|
||
delete this._options.headers[name];
|
||
this._currentRequest.removeHeader(name);
|
||
};
|
||
|
||
// Global timeout for all underlying requests
|
||
RedirectableRequest.prototype.setTimeout = function (msecs, callback) {
|
||
var self = this;
|
||
if (callback) {
|
||
this.on("timeout", callback);
|
||
}
|
||
|
||
function destroyOnTimeout(socket) {
|
||
socket.setTimeout(msecs);
|
||
socket.removeListener("timeout", socket.destroy);
|
||
socket.addListener("timeout", socket.destroy);
|
||
}
|
||
|
||
// Sets up a timer to trigger a timeout event
|
||
function startTimer(socket) {
|
||
if (self._timeout) {
|
||
clearTimeout(self._timeout);
|
||
}
|
||
self._timeout = setTimeout(function () {
|
||
self.emit("timeout");
|
||
clearTimer();
|
||
}, msecs);
|
||
destroyOnTimeout(socket);
|
||
}
|
||
|
||
// Prevent a timeout from triggering
|
||
function clearTimer() {
|
||
clearTimeout(this._timeout);
|
||
if (callback) {
|
||
self.removeListener("timeout", callback);
|
||
}
|
||
if (!this.socket) {
|
||
self._currentRequest.removeListener("socket", startTimer);
|
||
}
|
||
}
|
||
|
||
// Start the timer when the socket is opened
|
||
if (this.socket) {
|
||
startTimer(this.socket);
|
||
}
|
||
else {
|
||
this._currentRequest.once("socket", startTimer);
|
||
}
|
||
|
||
this.on("socket", destroyOnTimeout);
|
||
this.once("response", clearTimer);
|
||
this.once("error", clearTimer);
|
||
|
||
return this;
|
||
};
|
||
|
||
// Proxy all other public ClientRequest methods
|
||
[
|
||
"flushHeaders", "getHeader",
|
||
"setNoDelay", "setSocketKeepAlive",
|
||
].forEach(function (method) {
|
||
RedirectableRequest.prototype[method] = function (a, b) {
|
||
return this._currentRequest[method](a, b);
|
||
};
|
||
});
|
||
|
||
// Proxy all public ClientRequest properties
|
||
["aborted", "connection", "socket"].forEach(function (property) {
|
||
Object.defineProperty(RedirectableRequest.prototype, property, {
|
||
get: function () { return this._currentRequest[property]; },
|
||
});
|
||
});
|
||
|
||
RedirectableRequest.prototype._sanitizeOptions = function (options) {
|
||
// Ensure headers are always present
|
||
if (!options.headers) {
|
||
options.headers = {};
|
||
}
|
||
|
||
// Since http.request treats host as an alias of hostname,
|
||
// but the url module interprets host as hostname plus port,
|
||
// eliminate the host property to avoid confusion.
|
||
if (options.host) {
|
||
// Use hostname if set, because it has precedence
|
||
if (!options.hostname) {
|
||
options.hostname = options.host;
|
||
}
|
||
delete options.host;
|
||
}
|
||
|
||
// Complete the URL object when necessary
|
||
if (!options.pathname && options.path) {
|
||
var searchPos = options.path.indexOf("?");
|
||
if (searchPos < 0) {
|
||
options.pathname = options.path;
|
||
}
|
||
else {
|
||
options.pathname = options.path.substring(0, searchPos);
|
||
options.search = options.path.substring(searchPos);
|
||
}
|
||
}
|
||
};
|
||
|
||
|
||
// Executes the next native request (initial or redirect)
|
||
RedirectableRequest.prototype._performRequest = function () {
|
||
// Load the native protocol
|
||
var protocol = this._options.protocol;
|
||
var nativeProtocol = this._options.nativeProtocols[protocol];
|
||
if (!nativeProtocol) {
|
||
this.emit("error", new TypeError("Unsupported protocol " + protocol));
|
||
return;
|
||
}
|
||
|
||
// If specified, use the agent corresponding to the protocol
|
||
// (HTTP and HTTPS use different types of agents)
|
||
if (this._options.agents) {
|
||
var scheme = protocol.substr(0, protocol.length - 1);
|
||
this._options.agent = this._options.agents[scheme];
|
||
}
|
||
|
||
// Create the native request
|
||
var request = this._currentRequest =
|
||
nativeProtocol.request(this._options, this._onNativeResponse);
|
||
this._currentUrl = url.format(this._options);
|
||
|
||
// Set up event handlers
|
||
request._redirectable = this;
|
||
for (var e = 0; e < events.length; e++) {
|
||
request.on(events[e], eventHandlers[events[e]]);
|
||
}
|
||
|
||
// End a redirected request
|
||
// (The first request must be ended explicitly with RedirectableRequest#end)
|
||
if (this._isRedirect) {
|
||
// Write the request entity and end.
|
||
var i = 0;
|
||
var self = this;
|
||
var buffers = this._requestBodyBuffers;
|
||
(function writeNext(error) {
|
||
// Only write if this request has not been redirected yet
|
||
/* istanbul ignore else */
|
||
if (request === self._currentRequest) {
|
||
// Report any write errors
|
||
/* istanbul ignore if */
|
||
if (error) {
|
||
self.emit("error", error);
|
||
}
|
||
// Write the next buffer if there are still left
|
||
else if (i < buffers.length) {
|
||
var buffer = buffers[i++];
|
||
/* istanbul ignore else */
|
||
if (!request.finished) {
|
||
request.write(buffer.data, buffer.encoding, writeNext);
|
||
}
|
||
}
|
||
// End the request if `end` has been called on us
|
||
else if (self._ended) {
|
||
request.end();
|
||
}
|
||
}
|
||
}());
|
||
}
|
||
};
|
||
|
||
// Processes a response from the current native request
|
||
RedirectableRequest.prototype._processResponse = function (response) {
|
||
// Store the redirected response
|
||
var statusCode = response.statusCode;
|
||
if (this._options.trackRedirects) {
|
||
this._redirects.push({
|
||
url: this._currentUrl,
|
||
headers: response.headers,
|
||
statusCode: statusCode,
|
||
});
|
||
}
|
||
|
||
// RFC7231§6.4: The 3xx (Redirection) class of status code indicates
|
||
// that further action needs to be taken by the user agent in order to
|
||
// fulfill the request. If a Location header field is provided,
|
||
// the user agent MAY automatically redirect its request to the URI
|
||
// referenced by the Location field value,
|
||
// even if the specific status code is not understood.
|
||
var location = response.headers.location;
|
||
if (location && this._options.followRedirects !== false &&
|
||
statusCode >= 300 && statusCode < 400) {
|
||
// Abort the current request
|
||
abortRequest(this._currentRequest);
|
||
// Discard the remainder of the response to avoid waiting for data
|
||
response.destroy();
|
||
|
||
// RFC7231§6.4: A client SHOULD detect and intervene
|
||
// in cyclical redirections (i.e., "infinite" redirection loops).
|
||
if (++this._redirectCount > this._options.maxRedirects) {
|
||
this.emit("error", new TooManyRedirectsError());
|
||
return;
|
||
}
|
||
|
||
// RFC7231§6.4: Automatic redirection needs to be done with
|
||
// care for methods not known to be safe, […]
|
||
// RFC7231§6.4.2–3: For historical reasons, a user agent MAY change
|
||
// the request method from POST to GET for the subsequent request.
|
||
if ((statusCode === 301 || statusCode === 302) && this._options.method === "POST" ||
|
||
// RFC7231§6.4.4: The 303 (See Other) status code indicates that
|
||
// the server is redirecting the user agent to a different resource […]
|
||
// A user agent can perform a retrieval request targeting that URI
|
||
// (a GET or HEAD request if using HTTP) […]
|
||
(statusCode === 303) && !/^(?:GET|HEAD)$/.test(this._options.method)) {
|
||
this._options.method = "GET";
|
||
// Drop a possible entity and headers related to it
|
||
this._requestBodyBuffers = [];
|
||
removeMatchingHeaders(/^content-/i, this._options.headers);
|
||
}
|
||
|
||
// Drop the Host header, as the redirect might lead to a different host
|
||
var previousHostName = removeMatchingHeaders(/^host$/i, this._options.headers) ||
|
||
url.parse(this._currentUrl).hostname;
|
||
|
||
// Create the redirected request
|
||
var redirectUrl = url.resolve(this._currentUrl, location);
|
||
debug("redirecting to", redirectUrl);
|
||
this._isRedirect = true;
|
||
var redirectUrlParts = url.parse(redirectUrl);
|
||
Object.assign(this._options, redirectUrlParts);
|
||
|
||
// Drop the Authorization header if redirecting to another host
|
||
if (redirectUrlParts.hostname !== previousHostName) {
|
||
removeMatchingHeaders(/^authorization$/i, this._options.headers);
|
||
}
|
||
|
||
// Evaluate the beforeRedirect callback
|
||
if (typeof this._options.beforeRedirect === "function") {
|
||
var responseDetails = { headers: response.headers };
|
||
try {
|
||
this._options.beforeRedirect.call(null, this._options, responseDetails);
|
||
}
|
||
catch (err) {
|
||
this.emit("error", err);
|
||
return;
|
||
}
|
||
this._sanitizeOptions(this._options);
|
||
}
|
||
|
||
// Perform the redirected request
|
||
try {
|
||
this._performRequest();
|
||
}
|
||
catch (cause) {
|
||
var error = new RedirectionError("Redirected request failed: " + cause.message);
|
||
error.cause = cause;
|
||
this.emit("error", error);
|
||
}
|
||
}
|
||
else {
|
||
// The response is not a redirect; return it as-is
|
||
response.responseUrl = this._currentUrl;
|
||
response.redirects = this._redirects;
|
||
this.emit("response", response);
|
||
|
||
// Clean up
|
||
this._requestBodyBuffers = [];
|
||
}
|
||
};
|
||
|
||
// Wraps the key/value object of protocols with redirect functionality
|
||
function wrap(protocols) {
|
||
// Default settings
|
||
var exports = {
|
||
maxRedirects: 21,
|
||
maxBodyLength: 10 * 1024 * 1024,
|
||
};
|
||
|
||
// Wrap each protocol
|
||
var nativeProtocols = {};
|
||
Object.keys(protocols).forEach(function (scheme) {
|
||
var protocol = scheme + ":";
|
||
var nativeProtocol = nativeProtocols[protocol] = protocols[scheme];
|
||
var wrappedProtocol = exports[scheme] = Object.create(nativeProtocol);
|
||
|
||
// Executes a request, following redirects
|
||
function request(input, options, callback) {
|
||
// Parse parameters
|
||
if (typeof input === "string") {
|
||
var urlStr = input;
|
||
try {
|
||
input = urlToOptions(new URL(urlStr));
|
||
}
|
||
catch (err) {
|
||
/* istanbul ignore next */
|
||
input = url.parse(urlStr);
|
||
}
|
||
}
|
||
else if (URL && (input instanceof URL)) {
|
||
input = urlToOptions(input);
|
||
}
|
||
else {
|
||
callback = options;
|
||
options = input;
|
||
input = { protocol: protocol };
|
||
}
|
||
if (typeof options === "function") {
|
||
callback = options;
|
||
options = null;
|
||
}
|
||
|
||
// Set defaults
|
||
options = Object.assign({
|
||
maxRedirects: exports.maxRedirects,
|
||
maxBodyLength: exports.maxBodyLength,
|
||
}, input, options);
|
||
options.nativeProtocols = nativeProtocols;
|
||
|
||
assert.equal(options.protocol, protocol, "protocol mismatch");
|
||
debug("options", options);
|
||
return new RedirectableRequest(options, callback);
|
||
}
|
||
|
||
// Executes a GET request, following redirects
|
||
function get(input, options, callback) {
|
||
var wrappedRequest = wrappedProtocol.request(input, options, callback);
|
||
wrappedRequest.end();
|
||
return wrappedRequest;
|
||
}
|
||
|
||
// Expose the properties on the wrapped protocol
|
||
Object.defineProperties(wrappedProtocol, {
|
||
request: { value: request, configurable: true, enumerable: true, writable: true },
|
||
get: { value: get, configurable: true, enumerable: true, writable: true },
|
||
});
|
||
});
|
||
return exports;
|
||
}
|
||
|
||
/* istanbul ignore next */
|
||
function noop() { /* empty */ }
|
||
|
||
// from https://github.com/nodejs/node/blob/master/lib/internal/url.js
|
||
function urlToOptions(urlObject) {
|
||
var options = {
|
||
protocol: urlObject.protocol,
|
||
hostname: urlObject.hostname.startsWith("[") ?
|
||
/* istanbul ignore next */
|
||
urlObject.hostname.slice(1, -1) :
|
||
urlObject.hostname,
|
||
hash: urlObject.hash,
|
||
search: urlObject.search,
|
||
pathname: urlObject.pathname,
|
||
path: urlObject.pathname + urlObject.search,
|
||
href: urlObject.href,
|
||
};
|
||
if (urlObject.port !== "") {
|
||
options.port = Number(urlObject.port);
|
||
}
|
||
return options;
|
||
}
|
||
|
||
function removeMatchingHeaders(regex, headers) {
|
||
var lastValue;
|
||
for (var header in headers) {
|
||
if (regex.test(header)) {
|
||
lastValue = headers[header];
|
||
delete headers[header];
|
||
}
|
||
}
|
||
return lastValue;
|
||
}
|
||
|
||
function createErrorType(code, defaultMessage) {
|
||
function CustomError(message) {
|
||
Error.captureStackTrace(this, this.constructor);
|
||
this.message = message || defaultMessage;
|
||
}
|
||
CustomError.prototype = new Error();
|
||
CustomError.prototype.constructor = CustomError;
|
||
CustomError.prototype.name = "Error [" + code + "]";
|
||
CustomError.prototype.code = code;
|
||
return CustomError;
|
||
}
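
// Sketch: errors produced by createErrorType carry a stable code and default message.
// The code and message below are hypothetical, for illustration only:
//   var ExampleError = createErrorType("ERR_EXAMPLE", "something went wrong");
//   var err = new ExampleError();
//   err.code      // "ERR_EXAMPLE"
//   err.message   // "something went wrong"
//   err.name      // "Error [ERR_EXAMPLE]"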
|
||
|
||
function abortRequest(request) {
|
||
for (var e = 0; e < events.length; e++) {
|
||
request.removeListener(events[e], eventHandlers[events[e]]);
|
||
}
|
||
request.on("error", noop);
|
||
request.abort();
|
||
}
|
||
|
||
// Exports
|
||
module.exports = wrap({ http: http, https: https });
|
||
module.exports.wrap = wrap;
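
// Usage sketch for the wrapped protocols (package name assumed to be 'follow-redirects'):
//   const { https } = require('follow-redirects');
//   https.get('https://example.com/old', { maxRedirects: 5, trackRedirects: true }, function (res) {
//     res.responseUrl;  // final URL after following redirects
//     res.redirects;    // per-hop { url, headers, statusCode } entries when trackRedirects is set
//   });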
|
||
|
||
|
||
/***/ }),

/***/ 5596:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

module.exports = __nccwpck_require__(5747).constants || __nccwpck_require__(7619)


/***/ }),
|
||
|
||
/***/ 6904:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
module.exports = realpath
|
||
realpath.realpath = realpath
|
||
realpath.sync = realpathSync
|
||
realpath.realpathSync = realpathSync
|
||
realpath.monkeypatch = monkeypatch
|
||
realpath.unmonkeypatch = unmonkeypatch
|
||
|
||
var fs = __nccwpck_require__(5747)
|
||
var origRealpath = fs.realpath
|
||
var origRealpathSync = fs.realpathSync
|
||
|
||
var version = process.version
|
||
var ok = /^v[0-5]\./.test(version)
|
||
var old = __nccwpck_require__(1025)
|
||
|
||
function newError (er) {
|
||
return er && er.syscall === 'realpath' && (
|
||
er.code === 'ELOOP' ||
|
||
er.code === 'ENOMEM' ||
|
||
er.code === 'ENAMETOOLONG'
|
||
)
|
||
}
|
||
|
||
function realpath (p, cache, cb) {
|
||
if (ok) {
|
||
return origRealpath(p, cache, cb)
|
||
}
|
||
|
||
if (typeof cache === 'function') {
|
||
cb = cache
|
||
cache = null
|
||
}
|
||
origRealpath(p, cache, function (er, result) {
|
||
if (newError(er)) {
|
||
old.realpath(p, cache, cb)
|
||
} else {
|
||
cb(er, result)
|
||
}
|
||
})
|
||
}
|
||
|
||
function realpathSync (p, cache) {
|
||
if (ok) {
|
||
return origRealpathSync(p, cache)
|
||
}
|
||
|
||
try {
|
||
return origRealpathSync(p, cache)
|
||
} catch (er) {
|
||
if (newError(er)) {
|
||
return old.realpathSync(p, cache)
|
||
} else {
|
||
throw er
|
||
}
|
||
}
|
||
}
|
||
|
||
function monkeypatch () {
|
||
fs.realpath = realpath
|
||
fs.realpathSync = realpathSync
|
||
}
|
||
|
||
function unmonkeypatch () {
|
||
fs.realpath = origRealpath
|
||
fs.realpathSync = origRealpathSync
|
||
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 1025:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
||
var pathModule = __nccwpck_require__(5622);
|
||
var isWindows = process.platform === 'win32';
|
||
var fs = __nccwpck_require__(5747);
|
||
|
||
// JavaScript implementation of realpath, ported from node pre-v6
|
||
|
||
var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG);
|
||
|
||
function rethrow() {
|
||
// Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and
|
||
// is fairly slow to generate.
|
||
var callback;
|
||
if (DEBUG) {
|
||
var backtrace = new Error;
|
||
callback = debugCallback;
|
||
} else
|
||
callback = missingCallback;
|
||
|
||
return callback;
|
||
|
||
function debugCallback(err) {
|
||
if (err) {
|
||
backtrace.message = err.message;
|
||
err = backtrace;
|
||
missingCallback(err);
|
||
}
|
||
}
|
||
|
||
function missingCallback(err) {
|
||
if (err) {
|
||
if (process.throwDeprecation)
|
||
throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs
|
||
else if (!process.noDeprecation) {
|
||
var msg = 'fs: missing callback ' + (err.stack || err.message);
|
||
if (process.traceDeprecation)
|
||
console.trace(msg);
|
||
else
|
||
console.error(msg);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
function maybeCallback(cb) {
|
||
return typeof cb === 'function' ? cb : rethrow();
|
||
}
|
||
|
||
var normalize = pathModule.normalize;
|
||
|
||
// Regexp that finds the next portion of a (partial) path
|
||
// result is [base_with_slash, base], e.g. ['somedir/', 'somedir']
|
||
if (isWindows) {
|
||
var nextPartRe = /(.*?)(?:[\/\\]+|$)/g;
|
||
} else {
|
||
var nextPartRe = /(.*?)(?:[\/]+|$)/g;
|
||
}
|
||
|
||
// Regex to find the device root, including trailing slash. E.g. 'c:\\'.
|
||
if (isWindows) {
|
||
var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/;
|
||
} else {
|
||
var splitRootRe = /^[\/]*/;
|
||
}
|
||
|
||
exports.realpathSync = function realpathSync(p, cache) {
|
||
// make p absolute
|
||
p = pathModule.resolve(p);
|
||
|
||
if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
|
||
return cache[p];
|
||
}
|
||
|
||
var original = p,
|
||
seenLinks = {},
|
||
knownHard = {};
|
||
|
||
// current character position in p
|
||
var pos;
|
||
// the partial path so far, including a trailing slash if any
|
||
var current;
|
||
// the partial path without a trailing slash (except when pointing at a root)
|
||
var base;
|
||
// the partial path scanned in the previous round, with slash
|
||
var previous;
|
||
|
||
start();
|
||
|
||
function start() {
|
||
// Skip over roots
|
||
var m = splitRootRe.exec(p);
|
||
pos = m[0].length;
|
||
current = m[0];
|
||
base = m[0];
|
||
previous = '';
|
||
|
||
// On windows, check that the root exists. On unix there is no need.
|
||
if (isWindows && !knownHard[base]) {
|
||
fs.lstatSync(base);
|
||
knownHard[base] = true;
|
||
}
|
||
}
|
||
|
||
// walk down the path, swapping out linked pathparts for their real
|
||
// values
|
||
// NB: p.length changes.
|
||
while (pos < p.length) {
|
||
// find the next part
|
||
nextPartRe.lastIndex = pos;
|
||
var result = nextPartRe.exec(p);
|
||
previous = current;
|
||
current += result[0];
|
||
base = previous + result[1];
|
||
pos = nextPartRe.lastIndex;
|
||
|
||
// continue if not a symlink
|
||
if (knownHard[base] || (cache && cache[base] === base)) {
|
||
continue;
|
||
}
|
||
|
||
var resolvedLink;
|
||
if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
|
||
// some known symbolic link. no need to stat again.
|
||
resolvedLink = cache[base];
|
||
} else {
|
||
var stat = fs.lstatSync(base);
|
||
if (!stat.isSymbolicLink()) {
|
||
knownHard[base] = true;
|
||
if (cache) cache[base] = base;
|
||
continue;
|
||
}
|
||
|
||
// read the link if it wasn't read before
|
||
// dev/ino always return 0 on windows, so skip the check.
|
||
var linkTarget = null;
|
||
if (!isWindows) {
|
||
var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
|
||
if (seenLinks.hasOwnProperty(id)) {
|
||
linkTarget = seenLinks[id];
|
||
}
|
||
}
|
||
if (linkTarget === null) {
|
||
fs.statSync(base);
|
||
linkTarget = fs.readlinkSync(base);
|
||
}
|
||
resolvedLink = pathModule.resolve(previous, linkTarget);
|
||
// track this, if given a cache.
|
||
if (cache) cache[base] = resolvedLink;
|
||
if (!isWindows) seenLinks[id] = linkTarget;
|
||
}
|
||
|
||
// resolve the link, then start over
|
||
p = pathModule.resolve(resolvedLink, p.slice(pos));
|
||
start();
|
||
}
|
||
|
||
if (cache) cache[original] = p;
|
||
|
||
return p;
|
||
};
|
||
|
||
|
||
exports.realpath = function realpath(p, cache, cb) {
|
||
if (typeof cb !== 'function') {
|
||
cb = maybeCallback(cache);
|
||
cache = null;
|
||
}
|
||
|
||
// make p absolute
|
||
p = pathModule.resolve(p);
|
||
|
||
if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
|
||
return process.nextTick(cb.bind(null, null, cache[p]));
|
||
}
|
||
|
||
var original = p,
|
||
seenLinks = {},
|
||
knownHard = {};
|
||
|
||
// current character position in p
|
||
var pos;
|
||
// the partial path so far, including a trailing slash if any
|
||
var current;
|
||
// the partial path without a trailing slash (except when pointing at a root)
|
||
var base;
|
||
// the partial path scanned in the previous round, with slash
|
||
var previous;
|
||
|
||
start();
|
||
|
||
function start() {
|
||
// Skip over roots
|
||
var m = splitRootRe.exec(p);
|
||
pos = m[0].length;
|
||
current = m[0];
|
||
base = m[0];
|
||
previous = '';
|
||
|
||
// On windows, check that the root exists. On unix there is no need.
|
||
if (isWindows && !knownHard[base]) {
|
||
fs.lstat(base, function(err) {
|
||
if (err) return cb(err);
|
||
knownHard[base] = true;
|
||
LOOP();
|
||
});
|
||
} else {
|
||
process.nextTick(LOOP);
|
||
}
|
||
}
|
||
|
||
// walk down the path, swapping out linked pathparts for their real
|
||
// values
|
||
function LOOP() {
|
||
// stop if scanned past end of path
|
||
if (pos >= p.length) {
|
||
if (cache) cache[original] = p;
|
||
return cb(null, p);
|
||
}
|
||
|
||
// find the next part
|
||
nextPartRe.lastIndex = pos;
|
||
var result = nextPartRe.exec(p);
|
||
previous = current;
|
||
current += result[0];
|
||
base = previous + result[1];
|
||
pos = nextPartRe.lastIndex;
|
||
|
||
// continue if not a symlink
|
||
if (knownHard[base] || (cache && cache[base] === base)) {
|
||
return process.nextTick(LOOP);
|
||
}
|
||
|
||
if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
|
||
// known symbolic link. no need to stat again.
|
||
return gotResolvedLink(cache[base]);
|
||
}
|
||
|
||
return fs.lstat(base, gotStat);
|
||
}
|
||
|
||
function gotStat(err, stat) {
|
||
if (err) return cb(err);
|
||
|
||
// if not a symlink, skip to the next path part
|
||
if (!stat.isSymbolicLink()) {
|
||
knownHard[base] = true;
|
||
if (cache) cache[base] = base;
|
||
return process.nextTick(LOOP);
|
||
}
|
||
|
||
// stat & read the link if not read before
|
||
// call gotTarget as soon as the link target is known
|
||
// dev/ino always return 0 on windows, so skip the check.
|
||
if (!isWindows) {
|
||
var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
|
||
if (seenLinks.hasOwnProperty(id)) {
|
||
return gotTarget(null, seenLinks[id], base);
|
||
}
|
||
}
|
||
fs.stat(base, function(err) {
|
||
if (err) return cb(err);
|
||
|
||
fs.readlink(base, function(err, target) {
|
||
if (!isWindows) seenLinks[id] = target;
|
||
gotTarget(err, target);
|
||
});
|
||
});
|
||
}
|
||
|
||
function gotTarget(err, target, base) {
|
||
if (err) return cb(err);
|
||
|
||
var resolvedLink = pathModule.resolve(previous, target);
|
||
if (cache) cache[base] = resolvedLink;
|
||
gotResolvedLink(resolvedLink);
|
||
}
|
||
|
||
function gotResolvedLink(resolvedLink) {
|
||
// resolve the link, then start over
|
||
p = pathModule.resolve(resolvedLink, p.slice(pos));
|
||
start();
|
||
}
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 1256:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
exports.setopts = setopts
|
||
exports.ownProp = ownProp
|
||
exports.makeAbs = makeAbs
|
||
exports.finish = finish
|
||
exports.mark = mark
|
||
exports.isIgnored = isIgnored
|
||
exports.childrenIgnored = childrenIgnored
|
||
|
||
function ownProp (obj, field) {
|
||
return Object.prototype.hasOwnProperty.call(obj, field)
|
||
}
|
||
|
||
var path = __nccwpck_require__(5622)
|
||
var minimatch = __nccwpck_require__(7066)
|
||
var isAbsolute = __nccwpck_require__(7342)
|
||
var Minimatch = minimatch.Minimatch
|
||
|
||
function alphasort (a, b) {
|
||
return a.localeCompare(b, 'en')
|
||
}
|
||
|
||
function setupIgnores (self, options) {
|
||
self.ignore = options.ignore || []
|
||
|
||
if (!Array.isArray(self.ignore))
|
||
self.ignore = [self.ignore]
|
||
|
||
if (self.ignore.length) {
|
||
self.ignore = self.ignore.map(ignoreMap)
|
||
}
|
||
}
|
||
|
||
// ignore patterns are always in dot:true mode.
|
||
function ignoreMap (pattern) {
|
||
var gmatcher = null
|
||
if (pattern.slice(-3) === '/**') {
|
||
var gpattern = pattern.replace(/(\/\*\*)+$/, '')
|
||
gmatcher = new Minimatch(gpattern, { dot: true })
|
||
}
|
||
|
||
return {
|
||
matcher: new Minimatch(pattern, { dot: true }),
|
||
gmatcher: gmatcher
|
||
}
|
||
}
|
||
|
||
function setopts (self, pattern, options) {
|
||
if (!options)
|
||
options = {}
|
||
|
||
// base-matching: just use globstar for that.
|
||
if (options.matchBase && -1 === pattern.indexOf("/")) {
|
||
if (options.noglobstar) {
|
||
throw new Error("base matching requires globstar")
|
||
}
|
||
pattern = "**/" + pattern
|
||
}
|
||
|
||
self.silent = !!options.silent
|
||
self.pattern = pattern
|
||
self.strict = options.strict !== false
|
||
self.realpath = !!options.realpath
|
||
self.realpathCache = options.realpathCache || Object.create(null)
|
||
self.follow = !!options.follow
|
||
self.dot = !!options.dot
|
||
self.mark = !!options.mark
|
||
self.nodir = !!options.nodir
|
||
if (self.nodir)
|
||
self.mark = true
|
||
self.sync = !!options.sync
|
||
self.nounique = !!options.nounique
|
||
self.nonull = !!options.nonull
|
||
self.nosort = !!options.nosort
|
||
self.nocase = !!options.nocase
|
||
self.stat = !!options.stat
|
||
self.noprocess = !!options.noprocess
|
||
self.absolute = !!options.absolute
|
||
|
||
self.maxLength = options.maxLength || Infinity
|
||
self.cache = options.cache || Object.create(null)
|
||
self.statCache = options.statCache || Object.create(null)
|
||
self.symlinks = options.symlinks || Object.create(null)
|
||
|
||
setupIgnores(self, options)
|
||
|
||
self.changedCwd = false
|
||
var cwd = process.cwd()
|
||
if (!ownProp(options, "cwd"))
|
||
self.cwd = cwd
|
||
else {
|
||
self.cwd = path.resolve(options.cwd)
|
||
self.changedCwd = self.cwd !== cwd
|
||
}
|
||
|
||
self.root = options.root || path.resolve(self.cwd, "/")
|
||
self.root = path.resolve(self.root)
|
||
if (process.platform === "win32")
|
||
self.root = self.root.replace(/\\/g, "/")
|
||
|
||
// TODO: is an absolute `cwd` supposed to be resolved against `root`?
|
||
// e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
|
||
self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd)
|
||
if (process.platform === "win32")
|
||
self.cwdAbs = self.cwdAbs.replace(/\\/g, "/")
|
||
self.nomount = !!options.nomount
|
||
|
||
// disable comments and negation in Minimatch.
|
||
// Note that they are not supported in Glob itself anyway.
|
||
options.nonegate = true
|
||
options.nocomment = true
|
||
|
||
self.minimatch = new Minimatch(pattern, options)
|
||
self.options = self.minimatch.options
|
||
}
|
||
|
||
function finish (self) {
|
||
var nou = self.nounique
|
||
var all = nou ? [] : Object.create(null)
|
||
|
||
for (var i = 0, l = self.matches.length; i < l; i ++) {
|
||
var matches = self.matches[i]
|
||
if (!matches || Object.keys(matches).length === 0) {
|
||
if (self.nonull) {
|
||
// do like the shell, and spit out the literal glob
|
||
var literal = self.minimatch.globSet[i]
|
||
if (nou)
|
||
all.push(literal)
|
||
else
|
||
all[literal] = true
|
||
}
|
||
} else {
|
||
// had matches
|
||
var m = Object.keys(matches)
|
||
if (nou)
|
||
all.push.apply(all, m)
|
||
else
|
||
m.forEach(function (m) {
|
||
all[m] = true
|
||
})
|
||
}
|
||
}
|
||
|
||
if (!nou)
|
||
all = Object.keys(all)
|
||
|
||
if (!self.nosort)
|
||
all = all.sort(alphasort)
|
||
|
||
// at *some* point we statted all of these
|
||
if (self.mark) {
|
||
for (var i = 0; i < all.length; i++) {
|
||
all[i] = self._mark(all[i])
|
||
}
|
||
if (self.nodir) {
|
||
all = all.filter(function (e) {
|
||
var notDir = !(/\/$/.test(e))
|
||
var c = self.cache[e] || self.cache[makeAbs(self, e)]
|
||
if (notDir && c)
|
||
notDir = c !== 'DIR' && !Array.isArray(c)
|
||
return notDir
|
||
})
|
||
}
|
||
}
|
||
|
||
if (self.ignore.length)
|
||
all = all.filter(function(m) {
|
||
return !isIgnored(self, m)
|
||
})
|
||
|
||
self.found = all
|
||
}
|
||
|
||
function mark (self, p) {
|
||
var abs = makeAbs(self, p)
|
||
var c = self.cache[abs]
|
||
var m = p
|
||
if (c) {
|
||
var isDir = c === 'DIR' || Array.isArray(c)
|
||
var slash = p.slice(-1) === '/'
|
||
|
||
if (isDir && !slash)
|
||
m += '/'
|
||
else if (!isDir && slash)
|
||
m = m.slice(0, -1)
|
||
|
||
if (m !== p) {
|
||
var mabs = makeAbs(self, m)
|
||
self.statCache[mabs] = self.statCache[abs]
|
||
self.cache[mabs] = self.cache[abs]
|
||
}
|
||
}
|
||
|
||
return m
|
||
}
|
||
|
||
// lotta situps...
|
||
function makeAbs (self, f) {
|
||
var abs = f
|
||
if (f.charAt(0) === '/') {
|
||
abs = path.join(self.root, f)
|
||
} else if (isAbsolute(f) || f === '') {
|
||
abs = f
|
||
} else if (self.changedCwd) {
|
||
abs = path.resolve(self.cwd, f)
|
||
} else {
|
||
abs = path.resolve(f)
|
||
}
|
||
|
||
if (process.platform === 'win32')
|
||
abs = abs.replace(/\\/g, '/')
|
||
|
||
return abs
|
||
}
|
||
|
||
|
||
// Returns true if the pattern ends with globstar '**', for the accompanying parent directory.
// E.g. if 'node_modules/**' is the pattern, add 'node_modules' to the ignore list along with its contents
|
||
function isIgnored (self, path) {
|
||
if (!self.ignore.length)
|
||
return false
|
||
|
||
return self.ignore.some(function(item) {
|
||
return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))
|
||
})
|
||
}
|
||
|
||
function childrenIgnored (self, path) {
|
||
if (!self.ignore.length)
|
||
return false
|
||
|
||
return self.ignore.some(function(item) {
|
||
return !!(item.gmatcher && item.gmatcher.match(path))
|
||
})
|
||
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 4285:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
// Approach:
|
||
//
|
||
// 1. Get the minimatch set
|
||
// 2. For each pattern in the set, PROCESS(pattern, false)
|
||
// 3. Store matches per-set, then uniq them
|
||
//
|
||
// PROCESS(pattern, inGlobStar)
|
||
// Get the first [n] items from pattern that are all strings
|
||
// Join these together. This is PREFIX.
|
||
// If there is no more remaining, then stat(PREFIX) and
|
||
// add to matches if it succeeds. END.
|
||
//
|
||
// If inGlobStar and PREFIX is symlink and points to dir
|
||
// set ENTRIES = []
|
||
// else readdir(PREFIX) as ENTRIES
|
||
// If fail, END
|
||
//
|
||
// with ENTRIES
|
||
// If pattern[n] is GLOBSTAR
|
||
// // handle the case where the globstar match is empty
|
||
// // by pruning it out, and testing the resulting pattern
|
||
// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
|
||
// // handle other cases.
|
||
// for ENTRY in ENTRIES (not dotfiles)
|
||
// // attach globstar + tail onto the entry
|
||
// // Mark that this entry is a globstar match
|
||
// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
|
||
//
|
||
// else // not globstar
|
||
// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
|
||
// Test ENTRY against pattern[n]
|
||
// If fails, continue
|
||
// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
|
||
//
|
||
// Caveat:
|
||
// Cache all stats and readdirs results to minimize syscall. Since all
|
||
// we ever care about is existence and directory-ness, we can just keep
|
||
// `true` for files, and [children,...] for directories, or `false` for
|
||
// things that don't exist.
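//
// Worked sketch for the pattern 'src/**/*.js' (one minimatch set entry):
//   pattern = ['src', GLOBSTAR, <minimatch matcher for '*.js'>]
//   PROCESS: prefix = 'src' (the leading string parts), remain = [GLOBSTAR, matcher]
//   remain[0] is GLOBSTAR, so the globstar branch readdirs 'src' and recurses into each
//   non-dot child, testing the tail both with and without the globstar segment.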
|
||
|
||
module.exports = glob
|
||
|
||
var fs = __nccwpck_require__(5747)
|
||
var rp = __nccwpck_require__(6904)
|
||
var minimatch = __nccwpck_require__(7066)
|
||
var Minimatch = minimatch.Minimatch
|
||
var inherits = __nccwpck_require__(3753)
|
||
var EE = __nccwpck_require__(8614).EventEmitter
|
||
var path = __nccwpck_require__(5622)
|
||
var assert = __nccwpck_require__(2357)
|
||
var isAbsolute = __nccwpck_require__(7342)
|
||
var globSync = __nccwpck_require__(2363)
|
||
var common = __nccwpck_require__(1256)
|
||
var setopts = common.setopts
|
||
var ownProp = common.ownProp
|
||
var inflight = __nccwpck_require__(9963)
|
||
var util = __nccwpck_require__(1669)
|
||
var childrenIgnored = common.childrenIgnored
|
||
var isIgnored = common.isIgnored
|
||
|
||
var once = __nccwpck_require__(2940)
|
||
|
||
function glob (pattern, options, cb) {
|
||
if (typeof options === 'function') cb = options, options = {}
|
||
if (!options) options = {}
|
||
|
||
if (options.sync) {
|
||
if (cb)
|
||
throw new TypeError('callback provided to sync glob')
|
||
return globSync(pattern, options)
|
||
}
|
||
|
||
return new Glob(pattern, options, cb)
|
||
}
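
// Usage sketch of the exported API (the options shown are handled by setopts above):
//   glob('src/**/*.js', { nodir: true }, function (er, files) { /* files: matched paths */ });
//   var files = glob.sync('src/**/*.js', { dot: true });   // synchronous variant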
|
||
|
||
glob.sync = globSync
|
||
var GlobSync = glob.GlobSync = globSync.GlobSync
|
||
|
||
// old api surface
|
||
glob.glob = glob
|
||
|
||
function extend (origin, add) {
|
||
if (add === null || typeof add !== 'object') {
|
||
return origin
|
||
}
|
||
|
||
var keys = Object.keys(add)
|
||
var i = keys.length
|
||
while (i--) {
|
||
origin[keys[i]] = add[keys[i]]
|
||
}
|
||
return origin
|
||
}
|
||
|
||
glob.hasMagic = function (pattern, options_) {
|
||
var options = extend({}, options_)
|
||
options.noprocess = true
|
||
|
||
var g = new Glob(pattern, options)
|
||
var set = g.minimatch.set
|
||
|
||
if (!pattern)
|
||
return false
|
||
|
||
if (set.length > 1)
|
||
return true
|
||
|
||
for (var j = 0; j < set[0].length; j++) {
|
||
if (typeof set[0][j] !== 'string')
|
||
return true
|
||
}
|
||
|
||
return false
|
||
}
|
||
|
||
glob.Glob = Glob
|
||
inherits(Glob, EE)
|
||
function Glob (pattern, options, cb) {
|
||
if (typeof options === 'function') {
|
||
cb = options
|
||
options = null
|
||
}
|
||
|
||
if (options && options.sync) {
|
||
if (cb)
|
||
throw new TypeError('callback provided to sync glob')
|
||
return new GlobSync(pattern, options)
|
||
}
|
||
|
||
if (!(this instanceof Glob))
|
||
return new Glob(pattern, options, cb)
|
||
|
||
setopts(this, pattern, options)
|
||
this._didRealPath = false
|
||
|
||
// process each pattern in the minimatch set
|
||
var n = this.minimatch.set.length
|
||
|
||
// The matches are stored as {<filename>: true,...} so that
|
||
// duplicates are automagically pruned.
|
||
// Later, we do an Object.keys() on these.
|
||
// Keep them as a list so we can fill in when nonull is set.
|
||
this.matches = new Array(n)
|
||
|
||
if (typeof cb === 'function') {
|
||
cb = once(cb)
|
||
this.on('error', cb)
|
||
this.on('end', function (matches) {
|
||
cb(null, matches)
|
||
})
|
||
}
|
||
|
||
var self = this
|
||
this._processing = 0
|
||
|
||
this._emitQueue = []
|
||
this._processQueue = []
|
||
this.paused = false
|
||
|
||
if (this.noprocess)
|
||
return this
|
||
|
||
if (n === 0)
|
||
return done()
|
||
|
||
var sync = true
|
||
for (var i = 0; i < n; i ++) {
|
||
this._process(this.minimatch.set[i], i, false, done)
|
||
}
|
||
sync = false
|
||
|
||
function done () {
|
||
--self._processing
|
||
if (self._processing <= 0) {
|
||
if (sync) {
|
||
process.nextTick(function () {
|
||
self._finish()
|
||
})
|
||
} else {
|
||
self._finish()
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
Glob.prototype._finish = function () {
|
||
assert(this instanceof Glob)
|
||
if (this.aborted)
|
||
return
|
||
|
||
if (this.realpath && !this._didRealpath)
|
||
return this._realpath()
|
||
|
||
common.finish(this)
|
||
this.emit('end', this.found)
|
||
}
|
||
|
||
Glob.prototype._realpath = function () {
|
||
if (this._didRealpath)
|
||
return
|
||
|
||
this._didRealpath = true
|
||
|
||
var n = this.matches.length
|
||
if (n === 0)
|
||
return this._finish()
|
||
|
||
var self = this
|
||
for (var i = 0; i < this.matches.length; i++)
|
||
this._realpathSet(i, next)
|
||
|
||
function next () {
|
||
if (--n === 0)
|
||
self._finish()
|
||
}
|
||
}
|
||
|
||
Glob.prototype._realpathSet = function (index, cb) {
|
||
var matchset = this.matches[index]
|
||
if (!matchset)
|
||
return cb()
|
||
|
||
var found = Object.keys(matchset)
|
||
var self = this
|
||
var n = found.length
|
||
|
||
if (n === 0)
|
||
return cb()
|
||
|
||
var set = this.matches[index] = Object.create(null)
|
||
found.forEach(function (p, i) {
|
||
// If there's a problem with the stat, then it means that
|
||
// one or more of the links in the realpath couldn't be
|
||
// resolved. just return the abs value in that case.
|
||
p = self._makeAbs(p)
|
||
rp.realpath(p, self.realpathCache, function (er, real) {
|
||
if (!er)
|
||
set[real] = true
|
||
else if (er.syscall === 'stat')
|
||
set[p] = true
|
||
else
|
||
self.emit('error', er) // srsly wtf right here
|
||
|
||
if (--n === 0) {
|
||
self.matches[index] = set
|
||
cb()
|
||
}
|
||
})
|
||
})
|
||
}
|
||
|
||
Glob.prototype._mark = function (p) {
|
||
return common.mark(this, p)
|
||
}
|
||
|
||
Glob.prototype._makeAbs = function (f) {
|
||
return common.makeAbs(this, f)
|
||
}
|
||
|
||
Glob.prototype.abort = function () {
|
||
this.aborted = true
|
||
this.emit('abort')
|
||
}
|
||
|
||
Glob.prototype.pause = function () {
|
||
if (!this.paused) {
|
||
this.paused = true
|
||
this.emit('pause')
|
||
}
|
||
}
|
||
|
||
Glob.prototype.resume = function () {
|
||
if (this.paused) {
|
||
this.emit('resume')
|
||
this.paused = false
|
||
if (this._emitQueue.length) {
|
||
var eq = this._emitQueue.slice(0)
|
||
this._emitQueue.length = 0
|
||
for (var i = 0; i < eq.length; i ++) {
|
||
var e = eq[i]
|
||
this._emitMatch(e[0], e[1])
|
||
}
|
||
}
|
||
if (this._processQueue.length) {
|
||
var pq = this._processQueue.slice(0)
|
||
this._processQueue.length = 0
|
||
for (var i = 0; i < pq.length; i ++) {
|
||
var p = pq[i]
|
||
this._processing--
|
||
this._process(p[0], p[1], p[2], p[3])
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
|
||
assert(this instanceof Glob)
|
||
assert(typeof cb === 'function')
|
||
|
||
if (this.aborted)
|
||
return
|
||
|
||
this._processing++
|
||
if (this.paused) {
|
||
this._processQueue.push([pattern, index, inGlobStar, cb])
|
||
return
|
||
}
|
||
|
||
//console.error('PROCESS %d', this._processing, pattern)
|
||
|
||
// Get the first [n] parts of pattern that are all strings.
|
||
var n = 0
|
||
while (typeof pattern[n] === 'string') {
|
||
n ++
|
||
}
|
||
// now n is the index of the first one that is *not* a string.
|
||
|
||
// see if there's anything else
|
||
var prefix
|
||
switch (n) {
|
||
// if not, then this is rather simple
|
||
case pattern.length:
|
||
this._processSimple(pattern.join('/'), index, cb)
|
||
return
|
||
|
||
case 0:
|
||
// pattern *starts* with some non-trivial item.
|
||
// going to readdir(cwd), but not include the prefix in matches.
|
||
prefix = null
|
||
break
|
||
|
||
default:
|
||
// pattern has some string bits in the front.
|
||
// whatever it starts with, whether that's 'absolute' like /foo/bar,
|
||
// or 'relative' like '../baz'
|
||
prefix = pattern.slice(0, n).join('/')
|
||
break
|
||
}
|
||
|
||
var remain = pattern.slice(n)
|
||
|
||
// get the list of entries.
|
||
var read
|
||
if (prefix === null)
|
||
read = '.'
|
||
else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
|
||
if (!prefix || !isAbsolute(prefix))
|
||
prefix = '/' + prefix
|
||
read = prefix
|
||
} else
|
||
read = prefix
|
||
|
||
var abs = this._makeAbs(read)
|
||
|
||
//if ignored, skip _processing
|
||
if (childrenIgnored(this, read))
|
||
return cb()
|
||
|
||
var isGlobStar = remain[0] === minimatch.GLOBSTAR
|
||
if (isGlobStar)
|
||
this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
|
||
else
|
||
this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
|
||
}
|
||
|
||
Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
|
||
var self = this
|
||
this._readdir(abs, inGlobStar, function (er, entries) {
|
||
return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
|
||
})
|
||
}

Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {

  // if the abs isn't a dir, then nothing can match!
  if (!entries)
    return cb()

  // It will only match dot entries if it starts with a dot, or if
  // dot is set. Stuff like @(.foo|.bar) isn't allowed.
  var pn = remain[0]
  var negate = !!this.minimatch.negate
  var rawGlob = pn._glob
  var dotOk = this.dot || rawGlob.charAt(0) === '.'

  var matchedEntries = []
  for (var i = 0; i < entries.length; i++) {
    var e = entries[i]
    if (e.charAt(0) !== '.' || dotOk) {
      var m
      if (negate && !prefix) {
        m = !e.match(pn)
      } else {
        m = e.match(pn)
      }
      if (m)
        matchedEntries.push(e)
    }
  }

  //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)

  var len = matchedEntries.length
  // If there are no matched entries, then nothing matches.
  if (len === 0)
    return cb()

  // if this is the last remaining pattern bit, then no need for
  // an additional stat *unless* the user has specified mark or
  // stat explicitly. We know they exist, since readdir returned
  // them.

  if (remain.length === 1 && !this.mark && !this.stat) {
    if (!this.matches[index])
      this.matches[index] = Object.create(null)

    for (var i = 0; i < len; i ++) {
      var e = matchedEntries[i]
      if (prefix) {
        if (prefix !== '/')
          e = prefix + '/' + e
        else
          e = prefix + e
      }

      if (e.charAt(0) === '/' && !this.nomount) {
        e = path.join(this.root, e)
      }
      this._emitMatch(index, e)
    }
    // This was the last one, and no stats were needed
    return cb()
  }

  // now test all matched entries as stand-ins for that part
  // of the pattern.
  remain.shift()
  for (var i = 0; i < len; i ++) {
    var e = matchedEntries[i]
    var newPattern
    if (prefix) {
      if (prefix !== '/')
        e = prefix + '/' + e
      else
        e = prefix + e
    }
    this._process([e].concat(remain), index, inGlobStar, cb)
  }
  cb()
}

Glob.prototype._emitMatch = function (index, e) {
  if (this.aborted)
    return

  if (isIgnored(this, e))
    return

  if (this.paused) {
    this._emitQueue.push([index, e])
    return
  }

  var abs = isAbsolute(e) ? e : this._makeAbs(e)

  if (this.mark)
    e = this._mark(e)

  if (this.absolute)
    e = abs

  if (this.matches[index][e])
    return

  if (this.nodir) {
    var c = this.cache[abs]
    if (c === 'DIR' || Array.isArray(c))
      return
  }

  this.matches[index][e] = true

  var st = this.statCache[abs]
  if (st)
    this.emit('stat', e, st)

  this.emit('match', e)
}

Glob.prototype._readdirInGlobStar = function (abs, cb) {
  if (this.aborted)
    return

  // follow all symlinked directories forever
  // just proceed as if this is a non-globstar situation
  if (this.follow)
    return this._readdir(abs, false, cb)

  var lstatkey = 'lstat\0' + abs
  var self = this
  var lstatcb = inflight(lstatkey, lstatcb_)

  if (lstatcb)
    fs.lstat(abs, lstatcb)

  function lstatcb_ (er, lstat) {
    if (er && er.code === 'ENOENT')
      return cb()

    var isSym = lstat && lstat.isSymbolicLink()
    self.symlinks[abs] = isSym

    // If it's not a symlink or a dir, then it's definitely a regular file.
    // don't bother doing a readdir in that case.
    if (!isSym && lstat && !lstat.isDirectory()) {
      self.cache[abs] = 'FILE'
      cb()
    } else
      self._readdir(abs, false, cb)
  }
}

Glob.prototype._readdir = function (abs, inGlobStar, cb) {
  if (this.aborted)
    return

  cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
  if (!cb)
    return

  //console.error('RD %j %j', +inGlobStar, abs)
  if (inGlobStar && !ownProp(this.symlinks, abs))
    return this._readdirInGlobStar(abs, cb)

  if (ownProp(this.cache, abs)) {
    var c = this.cache[abs]
    if (!c || c === 'FILE')
      return cb()

    if (Array.isArray(c))
      return cb(null, c)
  }

  var self = this
  fs.readdir(abs, readdirCb(this, abs, cb))
}

function readdirCb (self, abs, cb) {
  return function (er, entries) {
    if (er)
      self._readdirError(abs, er, cb)
    else
      self._readdirEntries(abs, entries, cb)
  }
}

Glob.prototype._readdirEntries = function (abs, entries, cb) {
  if (this.aborted)
    return

  // if we haven't asked to stat everything, then just
  // assume that everything in there exists, so we can avoid
  // having to stat it a second time.
  if (!this.mark && !this.stat) {
    for (var i = 0; i < entries.length; i ++) {
      var e = entries[i]
      if (abs === '/')
        e = abs + e
      else
        e = abs + '/' + e
      this.cache[e] = true
    }
  }

  this.cache[abs] = entries
  return cb(null, entries)
}

Glob.prototype._readdirError = function (f, er, cb) {
  if (this.aborted)
    return

  // handle errors, and cache the information
  switch (er.code) {
    case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
    case 'ENOTDIR': // totally normal. means it *does* exist.
      var abs = this._makeAbs(f)
      this.cache[abs] = 'FILE'
      if (abs === this.cwdAbs) {
        var error = new Error(er.code + ' invalid cwd ' + this.cwd)
        error.path = this.cwd
        error.code = er.code
        this.emit('error', error)
        this.abort()
      }
      break

    case 'ENOENT': // not terribly unusual
    case 'ELOOP':
    case 'ENAMETOOLONG':
    case 'UNKNOWN':
      this.cache[this._makeAbs(f)] = false
      break

    default: // some unusual error. Treat as failure.
      this.cache[this._makeAbs(f)] = false
      if (this.strict) {
        this.emit('error', er)
        // If the error is handled, then we abort
        // if not, we threw out of here
        this.abort()
      }
      if (!this.silent)
        console.error('glob error', er)
      break
  }

  return cb()
}

Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
  var self = this
  this._readdir(abs, inGlobStar, function (er, entries) {
    self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
  })
}


Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
  //console.error('pgs2', prefix, remain[0], entries)

  // no entries means not a dir, so it can never have matches
  // foo.txt/** doesn't match foo.txt
  if (!entries)
    return cb()

  // test without the globstar, and with every child both below
  // and replacing the globstar.
  var remainWithoutGlobStar = remain.slice(1)
  var gspref = prefix ? [ prefix ] : []
  var noGlobStar = gspref.concat(remainWithoutGlobStar)

  // the noGlobStar pattern exits the inGlobStar state
  this._process(noGlobStar, index, false, cb)

  var isSym = this.symlinks[abs]
  var len = entries.length

  // If it's a symlink, and we're in a globstar, then stop
  if (isSym && inGlobStar)
    return cb()

  for (var i = 0; i < len; i++) {
    var e = entries[i]
    if (e.charAt(0) === '.' && !this.dot)
      continue

    // these two cases enter the inGlobStar state
    var instead = gspref.concat(entries[i], remainWithoutGlobStar)
    this._process(instead, index, true, cb)

    var below = gspref.concat(entries[i], remain)
    this._process(below, index, true, cb)
  }

  cb()
}
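// Illustrative note (added comment, not part of the bundled node-glob source):
// for a pattern like 'src/**/*.js', _processGlobStar2 above queues 'src/*.js'
// (the globstar removed), and for every non-dot child `d` of the directory it
// queues both 'src/d/*.js' (the child replacing the globstar) and
// 'src/d/**/*.js' (the child below the globstar), which is how '**' recurses.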

Glob.prototype._processSimple = function (prefix, index, cb) {
  // XXX review this. Shouldn't it be doing the mounting etc
  // before doing stat? kinda weird?
  var self = this
  this._stat(prefix, function (er, exists) {
    self._processSimple2(prefix, index, er, exists, cb)
  })
}
Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {

  //console.error('ps2', prefix, exists)

  if (!this.matches[index])
    this.matches[index] = Object.create(null)

  // If it doesn't exist, then just mark the lack of results
  if (!exists)
    return cb()

  if (prefix && isAbsolute(prefix) && !this.nomount) {
    var trail = /[\/\\]$/.test(prefix)
    if (prefix.charAt(0) === '/') {
      prefix = path.join(this.root, prefix)
    } else {
      prefix = path.resolve(this.root, prefix)
      if (trail)
        prefix += '/'
    }
  }

  if (process.platform === 'win32')
    prefix = prefix.replace(/\\/g, '/')

  // Mark this as a match
  this._emitMatch(index, prefix)
  cb()
}

// Returns either 'DIR', 'FILE', or false
Glob.prototype._stat = function (f, cb) {
  var abs = this._makeAbs(f)
  var needDir = f.slice(-1) === '/'

  if (f.length > this.maxLength)
    return cb()

  if (!this.stat && ownProp(this.cache, abs)) {
    var c = this.cache[abs]

    if (Array.isArray(c))
      c = 'DIR'

    // It exists, but maybe not how we need it
    if (!needDir || c === 'DIR')
      return cb(null, c)

    if (needDir && c === 'FILE')
      return cb()

    // otherwise we have to stat, because maybe c=true
    // if we know it exists, but not what it is.
  }

  var exists
  var stat = this.statCache[abs]
  if (stat !== undefined) {
    if (stat === false)
      return cb(null, stat)
    else {
      var type = stat.isDirectory() ? 'DIR' : 'FILE'
      if (needDir && type === 'FILE')
        return cb()
      else
        return cb(null, type, stat)
    }
  }

  var self = this
  var statcb = inflight('stat\0' + abs, lstatcb_)
  if (statcb)
    fs.lstat(abs, statcb)

  function lstatcb_ (er, lstat) {
    if (lstat && lstat.isSymbolicLink()) {
      // If it's a symlink, then treat it as the target, unless
      // the target does not exist, then treat it as a file.
      return fs.stat(abs, function (er, stat) {
        if (er)
          self._stat2(f, abs, null, lstat, cb)
        else
          self._stat2(f, abs, er, stat, cb)
      })
    } else {
      self._stat2(f, abs, er, lstat, cb)
    }
  }
}

Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
  if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
    this.statCache[abs] = false
    return cb()
  }

  var needDir = f.slice(-1) === '/'
  this.statCache[abs] = stat

  if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
    return cb(null, false, stat)

  var c = true
  if (stat)
    c = stat.isDirectory() ? 'DIR' : 'FILE'
  this.cache[abs] = this.cache[abs] || c

  if (needDir && c === 'FILE')
    return cb()

  return cb(null, c, stat)
}


/***/ }),
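// Usage sketch (added comment, not part of the bundle): the module above is
// node-glob's async entry point, roughly
//   glob('**/*.txt', { nodir: true }, function (er, files) { /* ... */ })
// while module 2363 below provides the synchronous GlobSync counterpart.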

/***/ 2363:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

module.exports = globSync
globSync.GlobSync = GlobSync

var fs = __nccwpck_require__(5747)
var rp = __nccwpck_require__(6904)
var minimatch = __nccwpck_require__(7066)
var Minimatch = minimatch.Minimatch
var Glob = __nccwpck_require__(4285).Glob
var util = __nccwpck_require__(1669)
var path = __nccwpck_require__(5622)
var assert = __nccwpck_require__(2357)
var isAbsolute = __nccwpck_require__(7342)
var common = __nccwpck_require__(1256)
var setopts = common.setopts
var ownProp = common.ownProp
var childrenIgnored = common.childrenIgnored
var isIgnored = common.isIgnored

function globSync (pattern, options) {
  if (typeof options === 'function' || arguments.length === 3)
    throw new TypeError('callback provided to sync glob\n'+
                        'See: https://github.com/isaacs/node-glob/issues/167')

  return new GlobSync(pattern, options).found
}

function GlobSync (pattern, options) {
  if (!pattern)
    throw new Error('must provide pattern')

  if (typeof options === 'function' || arguments.length === 3)
    throw new TypeError('callback provided to sync glob\n'+
                        'See: https://github.com/isaacs/node-glob/issues/167')

  if (!(this instanceof GlobSync))
    return new GlobSync(pattern, options)

  setopts(this, pattern, options)

  if (this.noprocess)
    return this

  var n = this.minimatch.set.length
  this.matches = new Array(n)
  for (var i = 0; i < n; i ++) {
    this._process(this.minimatch.set[i], i, false)
  }
  this._finish()
}

GlobSync.prototype._finish = function () {
  assert(this instanceof GlobSync)
  if (this.realpath) {
    var self = this
    this.matches.forEach(function (matchset, index) {
      var set = self.matches[index] = Object.create(null)
      for (var p in matchset) {
        try {
          p = self._makeAbs(p)
          var real = rp.realpathSync(p, self.realpathCache)
          set[real] = true
        } catch (er) {
          if (er.syscall === 'stat')
            set[self._makeAbs(p)] = true
          else
            throw er
        }
      }
    })
  }
  common.finish(this)
}
|
||
|
||
|
||
GlobSync.prototype._process = function (pattern, index, inGlobStar) {
|
||
assert(this instanceof GlobSync)
|
||
|
||
// Get the first [n] parts of pattern that are all strings.
|
||
var n = 0
|
||
while (typeof pattern[n] === 'string') {
|
||
n ++
|
||
}
|
||
// now n is the index of the first one that is *not* a string.
|
||
|
||
// See if there's anything else
|
||
var prefix
|
||
switch (n) {
|
||
// if not, then this is rather simple
|
||
case pattern.length:
|
||
this._processSimple(pattern.join('/'), index)
|
||
return
|
||
|
||
case 0:
|
||
// pattern *starts* with some non-trivial item.
|
||
// going to readdir(cwd), but not include the prefix in matches.
|
||
prefix = null
|
||
break
|
||
|
||
default:
|
||
// pattern has some string bits in the front.
|
||
// whatever it starts with, whether that's 'absolute' like /foo/bar,
|
||
// or 'relative' like '../baz'
|
||
prefix = pattern.slice(0, n).join('/')
|
||
break
|
||
}
|
||
|
||
var remain = pattern.slice(n)
|
||
|
||
// get the list of entries.
|
||
var read
|
||
if (prefix === null)
|
||
read = '.'
|
||
else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
|
||
if (!prefix || !isAbsolute(prefix))
|
||
prefix = '/' + prefix
|
||
read = prefix
|
||
} else
|
||
read = prefix
|
||
|
||
var abs = this._makeAbs(read)
|
||
|
||
//if ignored, skip processing
|
||
if (childrenIgnored(this, read))
|
||
return
|
||
|
||
var isGlobStar = remain[0] === minimatch.GLOBSTAR
|
||
if (isGlobStar)
|
||
this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)
|
||
else
|
||
this._processReaddir(prefix, read, abs, remain, index, inGlobStar)
|
||
}
|
||
|
||
|
||
GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
|
||
var entries = this._readdir(abs, inGlobStar)
|
||
|
||
// if the abs isn't a dir, then nothing can match!
|
||
if (!entries)
|
||
return
|
||
|
||
// It will only match dot entries if it starts with a dot, or if
|
||
// dot is set. Stuff like @(.foo|.bar) isn't allowed.
|
||
var pn = remain[0]
|
||
var negate = !!this.minimatch.negate
|
||
var rawGlob = pn._glob
|
||
var dotOk = this.dot || rawGlob.charAt(0) === '.'
|
||
|
||
var matchedEntries = []
|
||
for (var i = 0; i < entries.length; i++) {
|
||
var e = entries[i]
|
||
if (e.charAt(0) !== '.' || dotOk) {
|
||
var m
|
||
if (negate && !prefix) {
|
||
m = !e.match(pn)
|
||
} else {
|
||
m = e.match(pn)
|
||
}
|
||
if (m)
|
||
matchedEntries.push(e)
|
||
}
|
||
}
|
||
|
||
var len = matchedEntries.length
|
||
// If there are no matched entries, then nothing matches.
|
||
if (len === 0)
|
||
return
|
||
|
||
// if this is the last remaining pattern bit, then no need for
|
||
// an additional stat *unless* the user has specified mark or
|
||
// stat explicitly. We know they exist, since readdir returned
|
||
// them.
|
||
|
||
if (remain.length === 1 && !this.mark && !this.stat) {
|
||
if (!this.matches[index])
|
||
this.matches[index] = Object.create(null)
|
||
|
||
for (var i = 0; i < len; i ++) {
|
||
var e = matchedEntries[i]
|
||
if (prefix) {
|
||
if (prefix.slice(-1) !== '/')
|
||
e = prefix + '/' + e
|
||
else
|
||
e = prefix + e
|
||
}
|
||
|
||
if (e.charAt(0) === '/' && !this.nomount) {
|
||
e = path.join(this.root, e)
|
||
}
|
||
this._emitMatch(index, e)
|
||
}
|
||
// This was the last one, and no stats were needed
|
||
return
|
||
}
|
||
|
||
// now test all matched entries as stand-ins for that part
|
||
// of the pattern.
|
||
remain.shift()
|
||
for (var i = 0; i < len; i ++) {
|
||
var e = matchedEntries[i]
|
||
var newPattern
|
||
if (prefix)
|
||
newPattern = [prefix, e]
|
||
else
|
||
newPattern = [e]
|
||
this._process(newPattern.concat(remain), index, inGlobStar)
|
||
}
|
||
}
|
||
|
||
|
||
GlobSync.prototype._emitMatch = function (index, e) {
|
||
if (isIgnored(this, e))
|
||
return
|
||
|
||
var abs = this._makeAbs(e)
|
||
|
||
if (this.mark)
|
||
e = this._mark(e)
|
||
|
||
if (this.absolute) {
|
||
e = abs
|
||
}
|
||
|
||
if (this.matches[index][e])
|
||
return
|
||
|
||
if (this.nodir) {
|
||
var c = this.cache[abs]
|
||
if (c === 'DIR' || Array.isArray(c))
|
||
return
|
||
}
|
||
|
||
this.matches[index][e] = true
|
||
|
||
if (this.stat)
|
||
this._stat(e)
|
||
}
|
||
|
||
|
||
GlobSync.prototype._readdirInGlobStar = function (abs) {
|
||
// follow all symlinked directories forever
|
||
// just proceed as if this is a non-globstar situation
|
||
if (this.follow)
|
||
return this._readdir(abs, false)
|
||
|
||
var entries
|
||
var lstat
|
||
var stat
|
||
try {
|
||
lstat = fs.lstatSync(abs)
|
||
} catch (er) {
|
||
if (er.code === 'ENOENT') {
|
||
// lstat failed, doesn't exist
|
||
return null
|
||
}
|
||
}
|
||
|
||
var isSym = lstat && lstat.isSymbolicLink()
|
||
this.symlinks[abs] = isSym
|
||
|
||
// If it's not a symlink or a dir, then it's definitely a regular file.
|
||
// don't bother doing a readdir in that case.
|
||
if (!isSym && lstat && !lstat.isDirectory())
|
||
this.cache[abs] = 'FILE'
|
||
else
|
||
entries = this._readdir(abs, false)
|
||
|
||
return entries
|
||
}
|
||
|
||
GlobSync.prototype._readdir = function (abs, inGlobStar) {
|
||
var entries
|
||
|
||
if (inGlobStar && !ownProp(this.symlinks, abs))
|
||
return this._readdirInGlobStar(abs)
|
||
|
||
if (ownProp(this.cache, abs)) {
|
||
var c = this.cache[abs]
|
||
if (!c || c === 'FILE')
|
||
return null
|
||
|
||
if (Array.isArray(c))
|
||
return c
|
||
}
|
||
|
||
try {
|
||
return this._readdirEntries(abs, fs.readdirSync(abs))
|
||
} catch (er) {
|
||
this._readdirError(abs, er)
|
||
return null
|
||
}
|
||
}
|
||
|
||
GlobSync.prototype._readdirEntries = function (abs, entries) {
|
||
// if we haven't asked to stat everything, then just
|
||
// assume that everything in there exists, so we can avoid
|
||
// having to stat it a second time.
|
||
if (!this.mark && !this.stat) {
|
||
for (var i = 0; i < entries.length; i ++) {
|
||
var e = entries[i]
|
||
if (abs === '/')
|
||
e = abs + e
|
||
else
|
||
e = abs + '/' + e
|
||
this.cache[e] = true
|
||
}
|
||
}
|
||
|
||
this.cache[abs] = entries
|
||
|
||
// mark and cache dir-ness
|
||
return entries
|
||
}
|
||
|
||
GlobSync.prototype._readdirError = function (f, er) {
|
||
// handle errors, and cache the information
|
||
switch (er.code) {
|
||
case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
|
||
case 'ENOTDIR': // totally normal. means it *does* exist.
|
||
var abs = this._makeAbs(f)
|
||
this.cache[abs] = 'FILE'
|
||
if (abs === this.cwdAbs) {
|
||
var error = new Error(er.code + ' invalid cwd ' + this.cwd)
|
||
error.path = this.cwd
|
||
error.code = er.code
|
||
throw error
|
||
}
|
||
break
|
||
|
||
case 'ENOENT': // not terribly unusual
|
||
case 'ELOOP':
|
||
case 'ENAMETOOLONG':
|
||
case 'UNKNOWN':
|
||
this.cache[this._makeAbs(f)] = false
|
||
break
|
||
|
||
default: // some unusual error. Treat as failure.
|
||
this.cache[this._makeAbs(f)] = false
|
||
if (this.strict)
|
||
throw er
|
||
if (!this.silent)
|
||
console.error('glob error', er)
|
||
break
|
||
}
|
||
}
|
||
|
||
GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {
|
||
|
||
var entries = this._readdir(abs, inGlobStar)
|
||
|
||
// no entries means not a dir, so it can never have matches
|
||
// foo.txt/** doesn't match foo.txt
|
||
if (!entries)
|
||
return
|
||
|
||
// test without the globstar, and with every child both below
|
||
// and replacing the globstar.
|
||
var remainWithoutGlobStar = remain.slice(1)
|
||
var gspref = prefix ? [ prefix ] : []
|
||
var noGlobStar = gspref.concat(remainWithoutGlobStar)
|
||
|
||
// the noGlobStar pattern exits the inGlobStar state
|
||
this._process(noGlobStar, index, false)
|
||
|
||
var len = entries.length
|
||
var isSym = this.symlinks[abs]
|
||
|
||
// If it's a symlink, and we're in a globstar, then stop
|
||
if (isSym && inGlobStar)
|
||
return
|
||
|
||
for (var i = 0; i < len; i++) {
|
||
var e = entries[i]
|
||
if (e.charAt(0) === '.' && !this.dot)
|
||
continue
|
||
|
||
// these two cases enter the inGlobStar state
|
||
var instead = gspref.concat(entries[i], remainWithoutGlobStar)
|
||
this._process(instead, index, true)
|
||
|
||
var below = gspref.concat(entries[i], remain)
|
||
this._process(below, index, true)
|
||
}
|
||
}
|
||
|
||
GlobSync.prototype._processSimple = function (prefix, index) {
|
||
// XXX review this. Shouldn't it be doing the mounting etc
|
||
// before doing stat? kinda weird?
|
||
var exists = this._stat(prefix)
|
||
|
||
if (!this.matches[index])
|
||
this.matches[index] = Object.create(null)
|
||
|
||
// If it doesn't exist, then just mark the lack of results
|
||
if (!exists)
|
||
return
|
||
|
||
if (prefix && isAbsolute(prefix) && !this.nomount) {
|
||
var trail = /[\/\\]$/.test(prefix)
|
||
if (prefix.charAt(0) === '/') {
|
||
prefix = path.join(this.root, prefix)
|
||
} else {
|
||
prefix = path.resolve(this.root, prefix)
|
||
if (trail)
|
||
prefix += '/'
|
||
}
|
||
}
|
||
|
||
if (process.platform === 'win32')
|
||
prefix = prefix.replace(/\\/g, '/')
|
||
|
||
// Mark this as a match
|
||
this._emitMatch(index, prefix)
|
||
}
|
||
|
||
// Returns either 'DIR', 'FILE', or false
|
||
GlobSync.prototype._stat = function (f) {
|
||
var abs = this._makeAbs(f)
|
||
var needDir = f.slice(-1) === '/'
|
||
|
||
if (f.length > this.maxLength)
|
||
return false
|
||
|
||
if (!this.stat && ownProp(this.cache, abs)) {
|
||
var c = this.cache[abs]
|
||
|
||
if (Array.isArray(c))
|
||
c = 'DIR'
|
||
|
||
// It exists, but maybe not how we need it
|
||
if (!needDir || c === 'DIR')
|
||
return c
|
||
|
||
if (needDir && c === 'FILE')
|
||
return false
|
||
|
||
// otherwise we have to stat, because maybe c=true
|
||
// if we know it exists, but not what it is.
|
||
}
|
||
|
||
var exists
|
||
var stat = this.statCache[abs]
|
||
if (!stat) {
|
||
var lstat
|
||
try {
|
||
lstat = fs.lstatSync(abs)
|
||
} catch (er) {
|
||
if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
|
||
this.statCache[abs] = false
|
||
return false
|
||
}
|
||
}
|
||
|
||
if (lstat && lstat.isSymbolicLink()) {
|
||
try {
|
||
stat = fs.statSync(abs)
|
||
} catch (er) {
|
||
stat = lstat
|
||
}
|
||
} else {
|
||
stat = lstat
|
||
}
|
||
}
|
||
|
||
this.statCache[abs] = stat
|
||
|
||
var c = true
|
||
if (stat)
|
||
c = stat.isDirectory() ? 'DIR' : 'FILE'
|
||
|
||
this.cache[abs] = this.cache[abs] || c
|
||
|
||
if (needDir && c === 'FILE')
|
||
return false
|
||
|
||
return c
|
||
}
|
||
|
||
GlobSync.prototype._mark = function (p) {
|
||
return common.mark(this, p)
|
||
}
|
||
|
||
GlobSync.prototype._makeAbs = function (f) {
|
||
return common.makeAbs(this, f)
|
||
}
|
||
|
||
|
||
/***/ }),

/***/ 1567:
/***/ ((module) => {

"use strict";


module.exports = clone

var getPrototypeOf = Object.getPrototypeOf || function (obj) {
  return obj.__proto__
}

function clone (obj) {
  if (obj === null || typeof obj !== 'object')
    return obj

  if (obj instanceof Object)
    var copy = { __proto__: getPrototypeOf(obj) }
  else
    var copy = Object.create(null)

  Object.getOwnPropertyNames(obj).forEach(function (key) {
    Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key))
  })

  return copy
}


/***/ }),
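// Note (added comment, not in the original graceful-fs source): module 3224
// below uses this clone() helper as patch(clone(fs)), so graceful-fs patches a
// property-descriptor copy of the built-in fs module rather than mutating fs
// directly, unless TEST_GRACEFUL_FS_GLOBAL_PATCH is set.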
|
||
|
||
/***/ 3224:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
var fs = __nccwpck_require__(5747)
|
||
var polyfills = __nccwpck_require__(2709)
|
||
var legacy = __nccwpck_require__(760)
|
||
var clone = __nccwpck_require__(1567)
|
||
|
||
var util = __nccwpck_require__(1669)
|
||
|
||
/* istanbul ignore next - node 0.x polyfill */
|
||
var gracefulQueue
|
||
var previousSymbol
|
||
|
||
/* istanbul ignore else - node 0.x polyfill */
|
||
if (typeof Symbol === 'function' && typeof Symbol.for === 'function') {
|
||
gracefulQueue = Symbol.for('graceful-fs.queue')
|
||
// This is used in testing by future versions
|
||
previousSymbol = Symbol.for('graceful-fs.previous')
|
||
} else {
|
||
gracefulQueue = '___graceful-fs.queue'
|
||
previousSymbol = '___graceful-fs.previous'
|
||
}
|
||
|
||
function noop () {}
|
||
|
||
function publishQueue(context, queue) {
|
||
Object.defineProperty(context, gracefulQueue, {
|
||
get: function() {
|
||
return queue
|
||
}
|
||
})
|
||
}
|
||
|
||
var debug = noop
|
||
if (util.debuglog)
|
||
debug = util.debuglog('gfs4')
|
||
else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || ''))
|
||
debug = function() {
|
||
var m = util.format.apply(util, arguments)
|
||
m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ')
|
||
console.error(m)
|
||
}
|
||
|
||
// Once time initialization
|
||
if (!fs[gracefulQueue]) {
|
||
// This queue can be shared by multiple loaded instances
|
||
var queue = global[gracefulQueue] || []
|
||
publishQueue(fs, queue)
|
||
|
||
// Patch fs.close/closeSync to shared queue version, because we need
|
||
// to retry() whenever a close happens *anywhere* in the program.
|
||
// This is essential when multiple graceful-fs instances are
|
||
// in play at the same time.
|
||
fs.close = (function (fs$close) {
|
||
function close (fd, cb) {
|
||
return fs$close.call(fs, fd, function (err) {
|
||
// This function uses the graceful-fs shared queue
|
||
if (!err) {
|
||
retry()
|
||
}
|
||
|
||
if (typeof cb === 'function')
|
||
cb.apply(this, arguments)
|
||
})
|
||
}
|
||
|
||
Object.defineProperty(close, previousSymbol, {
|
||
value: fs$close
|
||
})
|
||
return close
|
||
})(fs.close)
|
||
|
||
fs.closeSync = (function (fs$closeSync) {
|
||
function closeSync (fd) {
|
||
// This function uses the graceful-fs shared queue
|
||
fs$closeSync.apply(fs, arguments)
|
||
retry()
|
||
}
|
||
|
||
Object.defineProperty(closeSync, previousSymbol, {
|
||
value: fs$closeSync
|
||
})
|
||
return closeSync
|
||
})(fs.closeSync)
|
||
|
||
if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) {
|
||
process.on('exit', function() {
|
||
debug(fs[gracefulQueue])
|
||
__nccwpck_require__(2357).equal(fs[gracefulQueue].length, 0)
|
||
})
|
||
}
|
||
}
|
||
|
||
if (!global[gracefulQueue]) {
|
||
publishQueue(global, fs[gracefulQueue]);
|
||
}
|
||
|
||
module.exports = patch(clone(fs))
|
||
if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) {
|
||
module.exports = patch(fs)
|
||
fs.__patched = true;
|
||
}
|
||
|
||
function patch (fs) {
|
||
// Everything that references the open() function needs to be in here
|
||
polyfills(fs)
|
||
fs.gracefulify = patch
|
||
|
||
fs.createReadStream = createReadStream
|
||
fs.createWriteStream = createWriteStream
|
||
var fs$readFile = fs.readFile
|
||
fs.readFile = readFile
|
||
function readFile (path, options, cb) {
|
||
if (typeof options === 'function')
|
||
cb = options, options = null
|
||
|
||
return go$readFile(path, options, cb)
|
||
|
||
function go$readFile (path, options, cb) {
|
||
return fs$readFile(path, options, function (err) {
|
||
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
|
||
enqueue([go$readFile, [path, options, cb]])
|
||
else {
|
||
if (typeof cb === 'function')
|
||
cb.apply(this, arguments)
|
||
retry()
|
||
}
|
||
})
|
||
}
|
||
}
|
||
|
||
var fs$writeFile = fs.writeFile
|
||
fs.writeFile = writeFile
|
||
function writeFile (path, data, options, cb) {
|
||
if (typeof options === 'function')
|
||
cb = options, options = null
|
||
|
||
return go$writeFile(path, data, options, cb)
|
||
|
||
function go$writeFile (path, data, options, cb) {
|
||
return fs$writeFile(path, data, options, function (err) {
|
||
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
|
||
enqueue([go$writeFile, [path, data, options, cb]])
|
||
else {
|
||
if (typeof cb === 'function')
|
||
cb.apply(this, arguments)
|
||
retry()
|
||
}
|
||
})
|
||
}
|
||
}
|
||
|
||
var fs$appendFile = fs.appendFile
|
||
if (fs$appendFile)
|
||
fs.appendFile = appendFile
|
||
function appendFile (path, data, options, cb) {
|
||
if (typeof options === 'function')
|
||
cb = options, options = null
|
||
|
||
return go$appendFile(path, data, options, cb)
|
||
|
||
function go$appendFile (path, data, options, cb) {
|
||
return fs$appendFile(path, data, options, function (err) {
|
||
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
|
||
enqueue([go$appendFile, [path, data, options, cb]])
|
||
else {
|
||
if (typeof cb === 'function')
|
||
cb.apply(this, arguments)
|
||
retry()
|
||
}
|
||
})
|
||
}
|
||
}
|
||
|
||
var fs$copyFile = fs.copyFile
|
||
if (fs$copyFile)
|
||
fs.copyFile = copyFile
|
||
function copyFile (src, dest, flags, cb) {
|
||
if (typeof flags === 'function') {
|
||
cb = flags
|
||
flags = 0
|
||
}
|
||
return fs$copyFile(src, dest, flags, function (err) {
|
||
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
|
||
enqueue([fs$copyFile, [src, dest, flags, cb]])
|
||
else {
|
||
if (typeof cb === 'function')
|
||
cb.apply(this, arguments)
|
||
retry()
|
||
}
|
||
})
|
||
}
|
||
|
||
var fs$readdir = fs.readdir
|
||
fs.readdir = readdir
|
||
function readdir (path, options, cb) {
|
||
var args = [path]
|
||
if (typeof options !== 'function') {
|
||
args.push(options)
|
||
} else {
|
||
cb = options
|
||
}
|
||
args.push(go$readdir$cb)
|
||
|
||
return go$readdir(args)
|
||
|
||
function go$readdir$cb (err, files) {
|
||
if (files && files.sort)
|
||
files.sort()
|
||
|
||
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
|
||
enqueue([go$readdir, [args]])
|
||
|
||
else {
|
||
if (typeof cb === 'function')
|
||
cb.apply(this, arguments)
|
||
retry()
|
||
}
|
||
}
|
||
}
|
||
|
||
function go$readdir (args) {
|
||
return fs$readdir.apply(fs, args)
|
||
}
|
||
|
||
if (process.version.substr(0, 4) === 'v0.8') {
|
||
var legStreams = legacy(fs)
|
||
ReadStream = legStreams.ReadStream
|
||
WriteStream = legStreams.WriteStream
|
||
}
|
||
|
||
var fs$ReadStream = fs.ReadStream
|
||
if (fs$ReadStream) {
|
||
ReadStream.prototype = Object.create(fs$ReadStream.prototype)
|
||
ReadStream.prototype.open = ReadStream$open
|
||
}
|
||
|
||
var fs$WriteStream = fs.WriteStream
|
||
if (fs$WriteStream) {
|
||
WriteStream.prototype = Object.create(fs$WriteStream.prototype)
|
||
WriteStream.prototype.open = WriteStream$open
|
||
}
|
||
|
||
Object.defineProperty(fs, 'ReadStream', {
|
||
get: function () {
|
||
return ReadStream
|
||
},
|
||
set: function (val) {
|
||
ReadStream = val
|
||
},
|
||
enumerable: true,
|
||
configurable: true
|
||
})
|
||
Object.defineProperty(fs, 'WriteStream', {
|
||
get: function () {
|
||
return WriteStream
|
||
},
|
||
set: function (val) {
|
||
WriteStream = val
|
||
},
|
||
enumerable: true,
|
||
configurable: true
|
||
})
|
||
|
||
// legacy names
|
||
var FileReadStream = ReadStream
|
||
Object.defineProperty(fs, 'FileReadStream', {
|
||
get: function () {
|
||
return FileReadStream
|
||
},
|
||
set: function (val) {
|
||
FileReadStream = val
|
||
},
|
||
enumerable: true,
|
||
configurable: true
|
||
})
|
||
var FileWriteStream = WriteStream
|
||
Object.defineProperty(fs, 'FileWriteStream', {
|
||
get: function () {
|
||
return FileWriteStream
|
||
},
|
||
set: function (val) {
|
||
FileWriteStream = val
|
||
},
|
||
enumerable: true,
|
||
configurable: true
|
||
})
|
||
|
||
function ReadStream (path, options) {
|
||
if (this instanceof ReadStream)
|
||
return fs$ReadStream.apply(this, arguments), this
|
||
else
|
||
return ReadStream.apply(Object.create(ReadStream.prototype), arguments)
|
||
}
|
||
|
||
function ReadStream$open () {
|
||
var that = this
|
||
open(that.path, that.flags, that.mode, function (err, fd) {
|
||
if (err) {
|
||
if (that.autoClose)
|
||
that.destroy()
|
||
|
||
that.emit('error', err)
|
||
} else {
|
||
that.fd = fd
|
||
that.emit('open', fd)
|
||
that.read()
|
||
}
|
||
})
|
||
}
|
||
|
||
function WriteStream (path, options) {
|
||
if (this instanceof WriteStream)
|
||
return fs$WriteStream.apply(this, arguments), this
|
||
else
|
||
return WriteStream.apply(Object.create(WriteStream.prototype), arguments)
|
||
}
|
||
|
||
function WriteStream$open () {
|
||
var that = this
|
||
open(that.path, that.flags, that.mode, function (err, fd) {
|
||
if (err) {
|
||
that.destroy()
|
||
that.emit('error', err)
|
||
} else {
|
||
that.fd = fd
|
||
that.emit('open', fd)
|
||
}
|
||
})
|
||
}
|
||
|
||
function createReadStream (path, options) {
|
||
return new fs.ReadStream(path, options)
|
||
}
|
||
|
||
function createWriteStream (path, options) {
|
||
return new fs.WriteStream(path, options)
|
||
}
|
||
|
||
var fs$open = fs.open
|
||
fs.open = open
|
||
function open (path, flags, mode, cb) {
|
||
if (typeof mode === 'function')
|
||
cb = mode, mode = null
|
||
|
||
return go$open(path, flags, mode, cb)
|
||
|
||
function go$open (path, flags, mode, cb) {
|
||
return fs$open(path, flags, mode, function (err, fd) {
|
||
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
|
||
enqueue([go$open, [path, flags, mode, cb]])
|
||
else {
|
||
if (typeof cb === 'function')
|
||
cb.apply(this, arguments)
|
||
retry()
|
||
}
|
||
})
|
||
}
|
||
}
|
||
|
||
return fs
|
||
}
|
||
|
||
function enqueue (elem) {
|
||
debug('ENQUEUE', elem[0].name, elem[1])
|
||
fs[gracefulQueue].push(elem)
|
||
}
|
||
|
||
function retry () {
|
||
var elem = fs[gracefulQueue].shift()
|
||
if (elem) {
|
||
debug('RETRY', elem[0].name, elem[1])
|
||
elem[0].apply(null, elem[1])
|
||
}
|
||
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 760:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
var Stream = __nccwpck_require__(2413).Stream
|
||
|
||
module.exports = legacy
|
||
|
||
function legacy (fs) {
|
||
return {
|
||
ReadStream: ReadStream,
|
||
WriteStream: WriteStream
|
||
}
|
||
|
||
function ReadStream (path, options) {
|
||
if (!(this instanceof ReadStream)) return new ReadStream(path, options);
|
||
|
||
Stream.call(this);
|
||
|
||
var self = this;
|
||
|
||
this.path = path;
|
||
this.fd = null;
|
||
this.readable = true;
|
||
this.paused = false;
|
||
|
||
this.flags = 'r';
|
||
this.mode = 438; /*=0666*/
|
||
this.bufferSize = 64 * 1024;
|
||
|
||
options = options || {};
|
||
|
||
// Mixin options into this
|
||
var keys = Object.keys(options);
|
||
for (var index = 0, length = keys.length; index < length; index++) {
|
||
var key = keys[index];
|
||
this[key] = options[key];
|
||
}
|
||
|
||
if (this.encoding) this.setEncoding(this.encoding);
|
||
|
||
if (this.start !== undefined) {
|
||
if ('number' !== typeof this.start) {
|
||
throw TypeError('start must be a Number');
|
||
}
|
||
if (this.end === undefined) {
|
||
this.end = Infinity;
|
||
} else if ('number' !== typeof this.end) {
|
||
throw TypeError('end must be a Number');
|
||
}
|
||
|
||
if (this.start > this.end) {
|
||
throw new Error('start must be <= end');
|
||
}
|
||
|
||
this.pos = this.start;
|
||
}
|
||
|
||
if (this.fd !== null) {
|
||
process.nextTick(function() {
|
||
self._read();
|
||
});
|
||
return;
|
||
}
|
||
|
||
fs.open(this.path, this.flags, this.mode, function (err, fd) {
|
||
if (err) {
|
||
self.emit('error', err);
|
||
self.readable = false;
|
||
return;
|
||
}
|
||
|
||
self.fd = fd;
|
||
self.emit('open', fd);
|
||
self._read();
|
||
})
|
||
}
|
||
|
||
function WriteStream (path, options) {
|
||
if (!(this instanceof WriteStream)) return new WriteStream(path, options);
|
||
|
||
Stream.call(this);
|
||
|
||
this.path = path;
|
||
this.fd = null;
|
||
this.writable = true;
|
||
|
||
this.flags = 'w';
|
||
this.encoding = 'binary';
|
||
this.mode = 438; /*=0666*/
|
||
this.bytesWritten = 0;
|
||
|
||
options = options || {};
|
||
|
||
// Mixin options into this
|
||
var keys = Object.keys(options);
|
||
for (var index = 0, length = keys.length; index < length; index++) {
|
||
var key = keys[index];
|
||
this[key] = options[key];
|
||
}
|
||
|
||
if (this.start !== undefined) {
|
||
if ('number' !== typeof this.start) {
|
||
throw TypeError('start must be a Number');
|
||
}
|
||
if (this.start < 0) {
|
||
throw new Error('start must be >= zero');
|
||
}
|
||
|
||
this.pos = this.start;
|
||
}
|
||
|
||
this.busy = false;
|
||
this._queue = [];
|
||
|
||
if (this.fd === null) {
|
||
this._open = fs.open;
|
||
this._queue.push([this._open, this.path, this.flags, this.mode, undefined]);
|
||
this.flush();
|
||
}
|
||
}
|
||
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 2709:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
var constants = __nccwpck_require__(7619)
|
||
|
||
var origCwd = process.cwd
|
||
var cwd = null
|
||
|
||
var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform
|
||
|
||
process.cwd = function() {
|
||
if (!cwd)
|
||
cwd = origCwd.call(process)
|
||
return cwd
|
||
}
|
||
try {
|
||
process.cwd()
|
||
} catch (er) {}
|
||
|
||
// This check is needed until node.js 12 is required
|
||
if (typeof process.chdir === 'function') {
|
||
var chdir = process.chdir
|
||
process.chdir = function (d) {
|
||
cwd = null
|
||
chdir.call(process, d)
|
||
}
|
||
if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir)
|
||
}
|
||
|
||
module.exports = patch
|
||
|
||
function patch (fs) {
|
||
// (re-)implement some things that are known busted or missing.
|
||
|
||
// lchmod, broken prior to 0.6.2
|
||
// back-port the fix here.
|
||
if (constants.hasOwnProperty('O_SYMLINK') &&
|
||
process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
|
||
patchLchmod(fs)
|
||
}
|
||
|
||
// lutimes implementation, or no-op
|
||
if (!fs.lutimes) {
|
||
patchLutimes(fs)
|
||
}
|
||
|
||
// https://github.com/isaacs/node-graceful-fs/issues/4
|
||
// Chown should not fail on einval or eperm if non-root.
|
||
// It should not fail on enosys ever, as this just indicates
|
||
// that a fs doesn't support the intended operation.
|
||
|
||
fs.chown = chownFix(fs.chown)
|
||
fs.fchown = chownFix(fs.fchown)
|
||
fs.lchown = chownFix(fs.lchown)
|
||
|
||
fs.chmod = chmodFix(fs.chmod)
|
||
fs.fchmod = chmodFix(fs.fchmod)
|
||
fs.lchmod = chmodFix(fs.lchmod)
|
||
|
||
fs.chownSync = chownFixSync(fs.chownSync)
|
||
fs.fchownSync = chownFixSync(fs.fchownSync)
|
||
fs.lchownSync = chownFixSync(fs.lchownSync)
|
||
|
||
fs.chmodSync = chmodFixSync(fs.chmodSync)
|
||
fs.fchmodSync = chmodFixSync(fs.fchmodSync)
|
||
fs.lchmodSync = chmodFixSync(fs.lchmodSync)
|
||
|
||
fs.stat = statFix(fs.stat)
|
||
fs.fstat = statFix(fs.fstat)
|
||
fs.lstat = statFix(fs.lstat)
|
||
|
||
fs.statSync = statFixSync(fs.statSync)
|
||
fs.fstatSync = statFixSync(fs.fstatSync)
|
||
fs.lstatSync = statFixSync(fs.lstatSync)
|
||
|
||
// if lchmod/lchown do not exist, then make them no-ops
|
||
if (!fs.lchmod) {
|
||
fs.lchmod = function (path, mode, cb) {
|
||
if (cb) process.nextTick(cb)
|
||
}
|
||
fs.lchmodSync = function () {}
|
||
}
|
||
if (!fs.lchown) {
|
||
fs.lchown = function (path, uid, gid, cb) {
|
||
if (cb) process.nextTick(cb)
|
||
}
|
||
fs.lchownSync = function () {}
|
||
}
|
||
|
||
// on Windows, A/V software can lock the directory, causing this
|
||
// to fail with an EACCES or EPERM if the directory contains newly
|
||
// created files. Try again on failure, for up to 60 seconds.
|
||
|
||
// Set the timeout this long because some Windows Anti-Virus, such as Parity
|
||
// bit9, may lock files for up to a minute, causing npm package install
|
||
// failures. Also, take care to yield the scheduler. Windows scheduling gives
|
||
// CPU to a busy looping process, which can cause the program causing the lock
|
||
// contention to be starved of CPU by node, so the contention doesn't resolve.
|
||
if (platform === "win32") {
|
||
fs.rename = (function (fs$rename) { return function (from, to, cb) {
|
||
var start = Date.now()
|
||
var backoff = 0;
|
||
fs$rename(from, to, function CB (er) {
|
||
if (er
|
||
&& (er.code === "EACCES" || er.code === "EPERM")
|
||
&& Date.now() - start < 60000) {
|
||
setTimeout(function() {
|
||
fs.stat(to, function (stater, st) {
|
||
if (stater && stater.code === "ENOENT")
|
||
fs$rename(from, to, CB);
|
||
else
|
||
cb(er)
|
||
})
|
||
}, backoff)
|
||
if (backoff < 100)
|
||
backoff += 10;
|
||
return;
|
||
}
|
||
if (cb) cb(er)
|
||
})
|
||
}})(fs.rename)
|
||
}
|
||
|
||
// if read() returns EAGAIN, then just try it again.
|
||
fs.read = (function (fs$read) {
|
||
function read (fd, buffer, offset, length, position, callback_) {
|
||
var callback
|
||
if (callback_ && typeof callback_ === 'function') {
|
||
var eagCounter = 0
|
||
callback = function (er, _, __) {
|
||
if (er && er.code === 'EAGAIN' && eagCounter < 10) {
|
||
eagCounter ++
|
||
return fs$read.call(fs, fd, buffer, offset, length, position, callback)
|
||
}
|
||
callback_.apply(this, arguments)
|
||
}
|
||
}
|
||
return fs$read.call(fs, fd, buffer, offset, length, position, callback)
|
||
}
|
||
|
||
// This ensures `util.promisify` works as it does for native `fs.read`.
|
||
if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read)
|
||
return read
|
||
})(fs.read)
|
||
|
||
fs.readSync = (function (fs$readSync) { return function (fd, buffer, offset, length, position) {
|
||
var eagCounter = 0
|
||
while (true) {
|
||
try {
|
||
return fs$readSync.call(fs, fd, buffer, offset, length, position)
|
||
} catch (er) {
|
||
if (er.code === 'EAGAIN' && eagCounter < 10) {
|
||
eagCounter ++
|
||
continue
|
||
}
|
||
throw er
|
||
}
|
||
}
|
||
}})(fs.readSync)
|
||
|
||
function patchLchmod (fs) {
|
||
fs.lchmod = function (path, mode, callback) {
|
||
fs.open( path
|
||
, constants.O_WRONLY | constants.O_SYMLINK
|
||
, mode
|
||
, function (err, fd) {
|
||
if (err) {
|
||
if (callback) callback(err)
|
||
return
|
||
}
|
||
// prefer to return the chmod error, if one occurs,
|
||
// but still try to close, and report closing errors if they occur.
|
||
fs.fchmod(fd, mode, function (err) {
|
||
fs.close(fd, function(err2) {
|
||
if (callback) callback(err || err2)
|
||
})
|
||
})
|
||
})
|
||
}
|
||
|
||
fs.lchmodSync = function (path, mode) {
|
||
var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
|
||
|
||
// prefer to return the chmod error, if one occurs,
|
||
// but still try to close, and report closing errors if they occur.
|
||
var threw = true
|
||
var ret
|
||
try {
|
||
ret = fs.fchmodSync(fd, mode)
|
||
threw = false
|
||
} finally {
|
||
if (threw) {
|
||
try {
|
||
fs.closeSync(fd)
|
||
} catch (er) {}
|
||
} else {
|
||
fs.closeSync(fd)
|
||
}
|
||
}
|
||
return ret
|
||
}
|
||
}
|
||
|
||
function patchLutimes (fs) {
|
||
if (constants.hasOwnProperty("O_SYMLINK")) {
|
||
fs.lutimes = function (path, at, mt, cb) {
|
||
fs.open(path, constants.O_SYMLINK, function (er, fd) {
|
||
if (er) {
|
||
if (cb) cb(er)
|
||
return
|
||
}
|
||
fs.futimes(fd, at, mt, function (er) {
|
||
fs.close(fd, function (er2) {
|
||
if (cb) cb(er || er2)
|
||
})
|
||
})
|
||
})
|
||
}
|
||
|
||
fs.lutimesSync = function (path, at, mt) {
|
||
var fd = fs.openSync(path, constants.O_SYMLINK)
|
||
var ret
|
||
var threw = true
|
||
try {
|
||
ret = fs.futimesSync(fd, at, mt)
|
||
threw = false
|
||
} finally {
|
||
if (threw) {
|
||
try {
|
||
fs.closeSync(fd)
|
||
} catch (er) {}
|
||
} else {
|
||
fs.closeSync(fd)
|
||
}
|
||
}
|
||
return ret
|
||
}
|
||
|
||
} else {
|
||
fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) }
|
||
fs.lutimesSync = function () {}
|
||
}
|
||
}
|
||
|
||
function chmodFix (orig) {
|
||
if (!orig) return orig
|
||
return function (target, mode, cb) {
|
||
return orig.call(fs, target, mode, function (er) {
|
||
if (chownErOk(er)) er = null
|
||
if (cb) cb.apply(this, arguments)
|
||
})
|
||
}
|
||
}
|
||
|
||
function chmodFixSync (orig) {
|
||
if (!orig) return orig
|
||
return function (target, mode) {
|
||
try {
|
||
return orig.call(fs, target, mode)
|
||
} catch (er) {
|
||
if (!chownErOk(er)) throw er
|
||
}
|
||
}
|
||
}
|
||
|
||
|
||
function chownFix (orig) {
|
||
if (!orig) return orig
|
||
return function (target, uid, gid, cb) {
|
||
return orig.call(fs, target, uid, gid, function (er) {
|
||
if (chownErOk(er)) er = null
|
||
if (cb) cb.apply(this, arguments)
|
||
})
|
||
}
|
||
}
|
||
|
||
function chownFixSync (orig) {
|
||
if (!orig) return orig
|
||
return function (target, uid, gid) {
|
||
try {
|
||
return orig.call(fs, target, uid, gid)
|
||
} catch (er) {
|
||
if (!chownErOk(er)) throw er
|
||
}
|
||
}
|
||
}
|
||
|
||
function statFix (orig) {
|
||
if (!orig) return orig
|
||
// Older versions of Node erroneously returned signed integers for
|
||
// uid + gid.
|
||
return function (target, options, cb) {
|
||
if (typeof options === 'function') {
|
||
cb = options
|
||
options = null
|
||
}
|
||
function callback (er, stats) {
|
||
if (stats) {
|
||
if (stats.uid < 0) stats.uid += 0x100000000
|
||
if (stats.gid < 0) stats.gid += 0x100000000
|
||
}
|
||
if (cb) cb.apply(this, arguments)
|
||
}
|
||
return options ? orig.call(fs, target, options, callback)
|
||
: orig.call(fs, target, callback)
|
||
}
|
||
}
|
||
|
||
function statFixSync (orig) {
|
||
if (!orig) return orig
|
||
// Older versions of Node erroneously returned signed integers for
|
||
// uid + gid.
|
||
return function (target, options) {
|
||
var stats = options ? orig.call(fs, target, options)
|
||
: orig.call(fs, target)
|
||
if (stats.uid < 0) stats.uid += 0x100000000
|
||
if (stats.gid < 0) stats.gid += 0x100000000
|
||
return stats;
|
||
}
|
||
}
|
||
|
||
// ENOSYS means that the fs doesn't support the op. Just ignore
|
||
// that, because it doesn't matter.
|
||
//
|
||
// if there's no getuid, or if getuid() is something other
|
||
// than 0, and the error is EINVAL or EPERM, then just ignore
|
||
// it.
|
||
//
|
||
// This specific case is a silent failure in cp, install, tar,
|
||
// and most other unix tools that manage permissions.
|
||
//
|
||
// When running as root, or if other types of errors are
|
||
// encountered, then it's strict.
|
||
function chownErOk (er) {
|
||
if (!er)
|
||
return true
|
||
|
||
if (er.code === "ENOSYS")
|
||
return true
|
||
|
||
var nonroot = !process.getuid || process.getuid() !== 0
|
||
if (nonroot) {
|
||
if (er.code === "EINVAL" || er.code === "EPERM")
|
||
return true
|
||
}
|
||
|
||
return false
|
||
}
|
||
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 8190:
|
||
/***/ (function(module, exports, __nccwpck_require__) {
|
||
|
||
/* module decorator */ module = __nccwpck_require__.nmd(module);
|
||
/*! https://mths.be/he v1.2.0 by @mathias | MIT license */
|
||
;(function(root) {
|
||
|
||
// Detect free variables `exports`.
|
||
var freeExports = true && exports;
|
||
|
||
// Detect free variable `module`.
|
||
var freeModule = true && module &&
|
||
module.exports == freeExports && module;
|
||
|
||
// Detect free variable `global`, from Node.js or Browserified code,
|
||
// and use it as `root`.
|
||
var freeGlobal = typeof global == 'object' && global;
|
||
if (freeGlobal.global === freeGlobal || freeGlobal.window === freeGlobal) {
|
||
root = freeGlobal;
|
||
}
|
||
|
||
/*--------------------------------------------------------------------------*/
|
||
|
||
// All astral symbols.
|
||
var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g;
|
||
// All ASCII symbols (not just printable ASCII) except those listed in the
|
||
// first column of the overrides table.
|
||
// https://html.spec.whatwg.org/multipage/syntax.html#table-charref-overrides
|
||
var regexAsciiWhitelist = /[\x01-\x7F]/g;
|
||
// All BMP symbols that are not ASCII newlines, printable ASCII symbols, or
|
||
// code points listed in the first column of the overrides table on
|
||
// https://html.spec.whatwg.org/multipage/syntax.html#table-charref-overrides.
|
||
var regexBmpWhitelist = /[\x01-\t\x0B\f\x0E-\x1F\x7F\x81\x8D\x8F\x90\x9D\xA0-\uFFFF]/g;
|
||
|
||
var regexEncodeNonAscii = /<\u20D2|=\u20E5|>\u20D2|\u205F\u200A|\u219D\u0338|\u2202\u0338|\u2220\u20D2|\u2229\uFE00|\u222A\uFE00|\u223C\u20D2|\u223D\u0331|\u223E\u0333|\u2242\u0338|\u224B\u0338|\u224D\u20D2|\u224E\u0338|\u224F\u0338|\u2250\u0338|\u2261\u20E5|\u2264\u20D2|\u2265\u20D2|\u2266\u0338|\u2267\u0338|\u2268\uFE00|\u2269\uFE00|\u226A\u0338|\u226A\u20D2|\u226B\u0338|\u226B\u20D2|\u227F\u0338|\u2282\u20D2|\u2283\u20D2|\u228A\uFE00|\u228B\uFE00|\u228F\u0338|\u2290\u0338|\u2293\uFE00|\u2294\uFE00|\u22B4\u20D2|\u22B5\u20D2|\u22D8\u0338|\u22D9\u0338|\u22DA\uFE00|\u22DB\uFE00|\u22F5\u0338|\u22F9\u0338|\u2933\u0338|\u29CF\u0338|\u29D0\u0338|\u2A6D\u0338|\u2A70\u0338|\u2A7D\u0338|\u2A7E\u0338|\u2AA1\u0338|\u2AA2\u0338|\u2AAC\uFE00|\u2AAD\uFE00|\u2AAF\u0338|\u2AB0\u0338|\u2AC5\u0338|\u2AC6\u0338|\u2ACB\uFE00|\u2ACC\uFE00|\u2AFD\u20E5|[\xA0-\u0113\u0116-\u0122\u0124-\u012B\u012E-\u014D\u0150-\u017E\u0192\u01B5\u01F5\u0237\u02C6\u02C7\u02D8-\u02DD\u0311\u0391-\u03A1\u03A3-\u03A9\u03B1-\u03C9\u03D1\u03D2\u03D5\u03D6\u03DC\u03DD\u03F0\u03F1\u03F5\u03F6\u0401-\u040C\u040E-\u044F\u0451-\u045C\u045E\u045F\u2002-\u2005\u2007-\u2010\u2013-\u2016\u2018-\u201A\u201C-\u201E\u2020-\u2022\u2025\u2026\u2030-\u2035\u2039\u203A\u203E\u2041\u2043\u2044\u204F\u2057\u205F-\u2063\u20AC\u20DB\u20DC\u2102\u2105\u210A-\u2113\u2115-\u211E\u2122\u2124\u2127-\u2129\u212C\u212D\u212F-\u2131\u2133-\u2138\u2145-\u2148\u2153-\u215E\u2190-\u219B\u219D-\u21A7\u21A9-\u21AE\u21B0-\u21B3\u21B5-\u21B7\u21BA-\u21DB\u21DD\u21E4\u21E5\u21F5\u21FD-\u2205\u2207-\u2209\u220B\u220C\u220F-\u2214\u2216-\u2218\u221A\u221D-\u2238\u223A-\u2257\u2259\u225A\u225C\u225F-\u2262\u2264-\u228B\u228D-\u229B\u229D-\u22A5\u22A7-\u22B0\u22B2-\u22BB\u22BD-\u22DB\u22DE-\u22E3\u22E6-\u22F7\u22F9-\u22FE\u2305\u2306\u2308-\u2310\u2312\u2313\u2315\u2316\u231C-\u231F\u2322\u2323\u232D\u232E\u2336\u233D\u233F\u237C\u23B0\u23B1\u23B4-\u23B6\u23DC-\u23DF\u23E2\u23E7\u2423\u24C8\u2500\u2502\u250C\u2510\u2514\u2518\u251C\u2524\u252C\u2534\u253C\u2550-\u256C\u2580\u2584\u2588\u2591-\u2593\u25A1\u25AA\u25AB\u25AD\u25AE\u25B1\u25B3-\u25B5\u25B8\u25B9\u25BD-\u25BF\u25C2\u25C3\u25CA\u25CB\u25EC\u25EF\u25F8-\u25FC\u2605\u2606\u260E\u2640\u2642\u2660\u2663\u2665\u2666\u266A\u266D-\u266F\u2713\u2717\u2720\u2736\u2758\u2772\u2773\u27C8\u27C9\u27E6-\u27ED\u27F5-\u27FA\u27FC\u27FF\u2902-\u2905\u290C-\u2913\u2916\u2919-\u2920\u2923-\u292A\u2933\u2935-\u2939\u293C\u293D\u2945\u2948-\u294B\u294E-\u2976\u2978\u2979\u297B-\u297F\u2985\u2986\u298B-\u2996\u299A\u299C\u299D\u29A4-\u29B7\u29B9\u29BB\u29BC\u29BE-\u29C5\u29C9\u29CD-\u29D0\u29DC-\u29DE\u29E3-\u29E5\u29EB\u29F4\u29F6\u2A00-\u2A02\u2A04\u2A06\u2A0C\u2A0D\u2A10-\u2A17\u2A22-\u2A27\u2A29\u2A2A\u2A2D-\u2A31\u2A33-\u2A3C\u2A3F\u2A40\u2A42-\u2A4D\u2A50\u2A53-\u2A58\u2A5A-\u2A5D\u2A5F\u2A66\u2A6A\u2A6D-\u2A75\u2A77-\u2A9A\u2A9D-\u2AA2\u2AA4-\u2AB0\u2AB3-\u2AC8\u2ACB\u2ACC\u2ACF-\u2ADB\u2AE4\u2AE6-\u2AE9\u2AEB-\u2AF3\u2AFD\uFB00-\uFB04]|\uD835[\uDC9C\uDC9E\uDC9F\uDCA2\uDCA5\uDCA6\uDCA9-\uDCAC\uDCAE-\uDCB9\uDCBB\uDCBD-\uDCC3\uDCC5-\uDCCF\uDD04\uDD05\uDD07-\uDD0A\uDD0D-\uDD14\uDD16-\uDD1C\uDD1E-\uDD39\uDD3B-\uDD3E\uDD40-\uDD44\uDD46\uDD4A-\uDD50\uDD52-\uDD6B]/g;
|
||
var encodeMap = {'\xAD':'shy','\u200C':'zwnj','\u200D':'zwj','\u200E':'lrm','\u2063':'ic','\u2062':'it','\u2061':'af','\u200F':'rlm','\u200B':'ZeroWidthSpace','\u2060':'NoBreak','\u0311':'DownBreve','\u20DB':'tdot','\u20DC':'DotDot','\t':'Tab','\n':'NewLine','\u2008':'puncsp','\u205F':'MediumSpace','\u2009':'thinsp','\u200A':'hairsp','\u2004':'emsp13','\u2002':'ensp','\u2005':'emsp14','\u2003':'emsp','\u2007':'numsp','\xA0':'nbsp','\u205F\u200A':'ThickSpace','\u203E':'oline','_':'lowbar','\u2010':'dash','\u2013':'ndash','\u2014':'mdash','\u2015':'horbar',',':'comma',';':'semi','\u204F':'bsemi',':':'colon','\u2A74':'Colone','!':'excl','\xA1':'iexcl','?':'quest','\xBF':'iquest','.':'period','\u2025':'nldr','\u2026':'mldr','\xB7':'middot','\'':'apos','\u2018':'lsquo','\u2019':'rsquo','\u201A':'sbquo','\u2039':'lsaquo','\u203A':'rsaquo','"':'quot','\u201C':'ldquo','\u201D':'rdquo','\u201E':'bdquo','\xAB':'laquo','\xBB':'raquo','(':'lpar',')':'rpar','[':'lsqb',']':'rsqb','{':'lcub','}':'rcub','\u2308':'lceil','\u2309':'rceil','\u230A':'lfloor','\u230B':'rfloor','\u2985':'lopar','\u2986':'ropar','\u298B':'lbrke','\u298C':'rbrke','\u298D':'lbrkslu','\u298E':'rbrksld','\u298F':'lbrksld','\u2990':'rbrkslu','\u2991':'langd','\u2992':'rangd','\u2993':'lparlt','\u2994':'rpargt','\u2995':'gtlPar','\u2996':'ltrPar','\u27E6':'lobrk','\u27E7':'robrk','\u27E8':'lang','\u27E9':'rang','\u27EA':'Lang','\u27EB':'Rang','\u27EC':'loang','\u27ED':'roang','\u2772':'lbbrk','\u2773':'rbbrk','\u2016':'Vert','\xA7':'sect','\xB6':'para','@':'commat','*':'ast','/':'sol','undefined':null,'&':'amp','#':'num','%':'percnt','\u2030':'permil','\u2031':'pertenk','\u2020':'dagger','\u2021':'Dagger','\u2022':'bull','\u2043':'hybull','\u2032':'prime','\u2033':'Prime','\u2034':'tprime','\u2057':'qprime','\u2035':'bprime','\u2041':'caret','`':'grave','\xB4':'acute','\u02DC':'tilde','^':'Hat','\xAF':'macr','\u02D8':'breve','\u02D9':'dot','\xA8':'die','\u02DA':'ring','\u02DD':'dblac','\xB8':'cedil','\u02DB':'ogon','\u02C6':'circ','\u02C7':'caron','\xB0':'deg','\xA9':'copy','\xAE':'reg','\u2117':'copysr','\u2118':'wp','\u211E':'rx','\u2127':'mho','\u2129':'iiota','\u2190':'larr','\u219A':'nlarr','\u2192':'rarr','\u219B':'nrarr','\u2191':'uarr','\u2193':'darr','\u2194':'harr','\u21AE':'nharr','\u2195':'varr','\u2196':'nwarr','\u2197':'nearr','\u2198':'searr','\u2199':'swarr','\u219D':'rarrw','\u219D\u0338':'nrarrw','\u219E':'Larr','\u219F':'Uarr','\u21A0':'Rarr','\u21A1':'Darr','\u21A2':'larrtl','\u21A3':'rarrtl','\u21A4':'mapstoleft','\u21A5':'mapstoup','\u21A6':'map','\u21A7':'mapstodown','\u21A9':'larrhk','\u21AA':'rarrhk','\u21AB':'larrlp','\u21AC':'rarrlp','\u21AD':'harrw','\u21B0':'lsh','\u21B1':'rsh','\u21B2':'ldsh','\u21B3':'rdsh','\u21B5':'crarr','\u21B6':'cularr','\u21B7':'curarr','\u21BA':'olarr','\u21BB':'orarr','\u21BC':'lharu','\u21BD':'lhard','\u21BE':'uharr','\u21BF':'uharl','\u21C0':'rharu','\u21C1':'rhard','\u21C2':'dharr','\u21C3':'dharl','\u21C4':'rlarr','\u21C5':'udarr','\u21C6':'lrarr','\u21C7':'llarr','\u21C8':'uuarr','\u21C9':'rrarr','\u21CA':'ddarr','\u21CB':'lrhar','\u21CC':'rlhar','\u21D0':'lArr','\u21CD':'nlArr','\u21D1':'uArr','\u21D2':'rArr','\u21CF':'nrArr','\u21D3':'dArr','\u21D4':'iff','\u21CE':'nhArr','\u21D5':'vArr','\u21D6':'nwArr','\u21D7':'neArr','\u21D8':'seArr','\u21D9':'swArr','\u21DA':'lAarr','\u21DB':'rAarr','\u21DD':'zigrarr','\u21E4':'larrb','\u21E5':'rarrb','\u21F5':'duarr','\u21FD':'loarr','\u21FE':'roarr','\u21FF':'hoarr','\u2200':'forall','\u2201':'comp','\u2202':'part','\u2202\u0338':'np
art','\u2203':'exist','\u2204':'nexist','\u2205':'empty','\u2207':'Del','\u2208':'in','\u2209':'notin','\u220B':'ni','\u220C':'notni','\u03F6':'bepsi','\u220F':'prod','\u2210':'coprod','\u2211':'sum','+':'plus','\xB1':'pm','\xF7':'div','\xD7':'times','<':'lt','\u226E':'nlt','<\u20D2':'nvlt','=':'equals','\u2260':'ne','=\u20E5':'bne','\u2A75':'Equal','>':'gt','\u226F':'ngt','>\u20D2':'nvgt','\xAC':'not','|':'vert','\xA6':'brvbar','\u2212':'minus','\u2213':'mp','\u2214':'plusdo','\u2044':'frasl','\u2216':'setmn','\u2217':'lowast','\u2218':'compfn','\u221A':'Sqrt','\u221D':'prop','\u221E':'infin','\u221F':'angrt','\u2220':'ang','\u2220\u20D2':'nang','\u2221':'angmsd','\u2222':'angsph','\u2223':'mid','\u2224':'nmid','\u2225':'par','\u2226':'npar','\u2227':'and','\u2228':'or','\u2229':'cap','\u2229\uFE00':'caps','\u222A':'cup','\u222A\uFE00':'cups','\u222B':'int','\u222C':'Int','\u222D':'tint','\u2A0C':'qint','\u222E':'oint','\u222F':'Conint','\u2230':'Cconint','\u2231':'cwint','\u2232':'cwconint','\u2233':'awconint','\u2234':'there4','\u2235':'becaus','\u2236':'ratio','\u2237':'Colon','\u2238':'minusd','\u223A':'mDDot','\u223B':'homtht','\u223C':'sim','\u2241':'nsim','\u223C\u20D2':'nvsim','\u223D':'bsim','\u223D\u0331':'race','\u223E':'ac','\u223E\u0333':'acE','\u223F':'acd','\u2240':'wr','\u2242':'esim','\u2242\u0338':'nesim','\u2243':'sime','\u2244':'nsime','\u2245':'cong','\u2247':'ncong','\u2246':'simne','\u2248':'ap','\u2249':'nap','\u224A':'ape','\u224B':'apid','\u224B\u0338':'napid','\u224C':'bcong','\u224D':'CupCap','\u226D':'NotCupCap','\u224D\u20D2':'nvap','\u224E':'bump','\u224E\u0338':'nbump','\u224F':'bumpe','\u224F\u0338':'nbumpe','\u2250':'doteq','\u2250\u0338':'nedot','\u2251':'eDot','\u2252':'efDot','\u2253':'erDot','\u2254':'colone','\u2255':'ecolon','\u2256':'ecir','\u2257':'cire','\u2259':'wedgeq','\u225A':'veeeq','\u225C':'trie','\u225F':'equest','\u2261':'equiv','\u2262':'nequiv','\u2261\u20E5':'bnequiv','\u2264':'le','\u2270':'nle','\u2264\u20D2':'nvle','\u2265':'ge','\u2271':'nge','\u2265\u20D2':'nvge','\u2266':'lE','\u2266\u0338':'nlE','\u2267':'gE','\u2267\u0338':'ngE','\u2268\uFE00':'lvnE','\u2268':'lnE','\u2269':'gnE','\u2269\uFE00':'gvnE','\u226A':'ll','\u226A\u0338':'nLtv','\u226A\u20D2':'nLt','\u226B':'gg','\u226B\u0338':'nGtv','\u226B\u20D2':'nGt','\u226C':'twixt','\u2272':'lsim','\u2274':'nlsim','\u2273':'gsim','\u2275':'ngsim','\u2276':'lg','\u2278':'ntlg','\u2277':'gl','\u2279':'ntgl','\u227A':'pr','\u2280':'npr','\u227B':'sc','\u2281':'nsc','\u227C':'prcue','\u22E0':'nprcue','\u227D':'sccue','\u22E1':'nsccue','\u227E':'prsim','\u227F':'scsim','\u227F\u0338':'NotSucceedsTilde','\u2282':'sub','\u2284':'nsub','\u2282\u20D2':'vnsub','\u2283':'sup','\u2285':'nsup','\u2283\u20D2':'vnsup','\u2286':'sube','\u2288':'nsube','\u2287':'supe','\u2289':'nsupe','\u228A\uFE00':'vsubne','\u228A':'subne','\u228B\uFE00':'vsupne','\u228B':'supne','\u228D':'cupdot','\u228E':'uplus','\u228F':'sqsub','\u228F\u0338':'NotSquareSubset','\u2290':'sqsup','\u2290\u0338':'NotSquareSuperset','\u2291':'sqsube','\u22E2':'nsqsube','\u2292':'sqsupe','\u22E3':'nsqsupe','\u2293':'sqcap','\u2293\uFE00':'sqcaps','\u2294':'sqcup','\u2294\uFE00':'sqcups','\u2295':'oplus','\u2296':'ominus','\u2297':'otimes','\u2298':'osol','\u2299':'odot','\u229A':'ocir','\u229B':'oast','\u229D':'odash','\u229E':'plusb','\u229F':'minusb','\u22A0':'timesb','\u22A1':'sdotb','\u22A2':'vdash','\u22AC':'nvdash','\u22A3':'dashv','\u22A4':'top','\u22A5':'bot','\u22A7':'models','\u22A8':'vDash','\u22AD':'nvDash','\u22A9':'V
dash','\u22AE':'nVdash','\u22AA':'Vvdash','\u22AB':'VDash','\u22AF':'nVDash','\u22B0':'prurel','\u22B2':'vltri','\u22EA':'nltri','\u22B3':'vrtri','\u22EB':'nrtri','\u22B4':'ltrie','\u22EC':'nltrie','\u22B4\u20D2':'nvltrie','\u22B5':'rtrie','\u22ED':'nrtrie','\u22B5\u20D2':'nvrtrie','\u22B6':'origof','\u22B7':'imof','\u22B8':'mumap','\u22B9':'hercon','\u22BA':'intcal','\u22BB':'veebar','\u22BD':'barvee','\u22BE':'angrtvb','\u22BF':'lrtri','\u22C0':'Wedge','\u22C1':'Vee','\u22C2':'xcap','\u22C3':'xcup','\u22C4':'diam','\u22C5':'sdot','\u22C6':'Star','\u22C7':'divonx','\u22C8':'bowtie','\u22C9':'ltimes','\u22CA':'rtimes','\u22CB':'lthree','\u22CC':'rthree','\u22CD':'bsime','\u22CE':'cuvee','\u22CF':'cuwed','\u22D0':'Sub','\u22D1':'Sup','\u22D2':'Cap','\u22D3':'Cup','\u22D4':'fork','\u22D5':'epar','\u22D6':'ltdot','\u22D7':'gtdot','\u22D8':'Ll','\u22D8\u0338':'nLl','\u22D9':'Gg','\u22D9\u0338':'nGg','\u22DA\uFE00':'lesg','\u22DA':'leg','\u22DB':'gel','\u22DB\uFE00':'gesl','\u22DE':'cuepr','\u22DF':'cuesc','\u22E6':'lnsim','\u22E7':'gnsim','\u22E8':'prnsim','\u22E9':'scnsim','\u22EE':'vellip','\u22EF':'ctdot','\u22F0':'utdot','\u22F1':'dtdot','\u22F2':'disin','\u22F3':'isinsv','\u22F4':'isins','\u22F5':'isindot','\u22F5\u0338':'notindot','\u22F6':'notinvc','\u22F7':'notinvb','\u22F9':'isinE','\u22F9\u0338':'notinE','\u22FA':'nisd','\u22FB':'xnis','\u22FC':'nis','\u22FD':'notnivc','\u22FE':'notnivb','\u2305':'barwed','\u2306':'Barwed','\u230C':'drcrop','\u230D':'dlcrop','\u230E':'urcrop','\u230F':'ulcrop','\u2310':'bnot','\u2312':'profline','\u2313':'profsurf','\u2315':'telrec','\u2316':'target','\u231C':'ulcorn','\u231D':'urcorn','\u231E':'dlcorn','\u231F':'drcorn','\u2322':'frown','\u2323':'smile','\u232D':'cylcty','\u232E':'profalar','\u2336':'topbot','\u233D':'ovbar','\u233F':'solbar','\u237C':'angzarr','\u23B0':'lmoust','\u23B1':'rmoust','\u23B4':'tbrk','\u23B5':'bbrk','\u23B6':'bbrktbrk','\u23DC':'OverParenthesis','\u23DD':'UnderParenthesis','\u23DE':'OverBrace','\u23DF':'UnderBrace','\u23E2':'trpezium','\u23E7':'elinters','\u2423':'blank','\u2500':'boxh','\u2502':'boxv','\u250C':'boxdr','\u2510':'boxdl','\u2514':'boxur','\u2518':'boxul','\u251C':'boxvr','\u2524':'boxvl','\u252C':'boxhd','\u2534':'boxhu','\u253C':'boxvh','\u2550':'boxH','\u2551':'boxV','\u2552':'boxdR','\u2553':'boxDr','\u2554':'boxDR','\u2555':'boxdL','\u2556':'boxDl','\u2557':'boxDL','\u2558':'boxuR','\u2559':'boxUr','\u255A':'boxUR','\u255B':'boxuL','\u255C':'boxUl','\u255D':'boxUL','\u255E':'boxvR','\u255F':'boxVr','\u2560':'boxVR','\u2561':'boxvL','\u2562':'boxVl','\u2563':'boxVL','\u2564':'boxHd','\u2565':'boxhD','\u2566':'boxHD','\u2567':'boxHu','\u2568':'boxhU','\u2569':'boxHU','\u256A':'boxvH','\u256B':'boxVh','\u256C':'boxVH','\u2580':'uhblk','\u2584':'lhblk','\u2588':'block','\u2591':'blk14','\u2592':'blk12','\u2593':'blk34','\u25A1':'squ','\u25AA':'squf','\u25AB':'EmptyVerySmallSquare','\u25AD':'rect','\u25AE':'marker','\u25B1':'fltns','\u25B3':'xutri','\u25B4':'utrif','\u25B5':'utri','\u25B8':'rtrif','\u25B9':'rtri','\u25BD':'xdtri','\u25BE':'dtrif','\u25BF':'dtri','\u25C2':'ltrif','\u25C3':'ltri','\u25CA':'loz','\u25CB':'cir','\u25EC':'tridot','\u25EF':'xcirc','\u25F8':'ultri','\u25F9':'urtri','\u25FA':'lltri','\u25FB':'EmptySmallSquare','\u25FC':'FilledSmallSquare','\u2605':'starf','\u2606':'star','\u260E':'phone','\u2640':'female','\u2642':'male','\u2660':'spades','\u2663':'clubs','\u2665':'hearts','\u2666':'diams','\u266A':'sung','\u2713':'check','\u2717':'cross','\u2720':'malt','\u2736':'sext','\u2758':'Ve
rticalSeparator','\u27C8':'bsolhsub','\u27C9':'suphsol','\u27F5':'xlarr','\u27F6':'xrarr','\u27F7':'xharr','\u27F8':'xlArr','\u27F9':'xrArr','\u27FA':'xhArr','\u27FC':'xmap','\u27FF':'dzigrarr','\u2902':'nvlArr','\u2903':'nvrArr','\u2904':'nvHarr','\u2905':'Map','\u290C':'lbarr','\u290D':'rbarr','\u290E':'lBarr','\u290F':'rBarr','\u2910':'RBarr','\u2911':'DDotrahd','\u2912':'UpArrowBar','\u2913':'DownArrowBar','\u2916':'Rarrtl','\u2919':'latail','\u291A':'ratail','\u291B':'lAtail','\u291C':'rAtail','\u291D':'larrfs','\u291E':'rarrfs','\u291F':'larrbfs','\u2920':'rarrbfs','\u2923':'nwarhk','\u2924':'nearhk','\u2925':'searhk','\u2926':'swarhk','\u2927':'nwnear','\u2928':'toea','\u2929':'tosa','\u292A':'swnwar','\u2933':'rarrc','\u2933\u0338':'nrarrc','\u2935':'cudarrr','\u2936':'ldca','\u2937':'rdca','\u2938':'cudarrl','\u2939':'larrpl','\u293C':'curarrm','\u293D':'cularrp','\u2945':'rarrpl','\u2948':'harrcir','\u2949':'Uarrocir','\u294A':'lurdshar','\u294B':'ldrushar','\u294E':'LeftRightVector','\u294F':'RightUpDownVector','\u2950':'DownLeftRightVector','\u2951':'LeftUpDownVector','\u2952':'LeftVectorBar','\u2953':'RightVectorBar','\u2954':'RightUpVectorBar','\u2955':'RightDownVectorBar','\u2956':'DownLeftVectorBar','\u2957':'DownRightVectorBar','\u2958':'LeftUpVectorBar','\u2959':'LeftDownVectorBar','\u295A':'LeftTeeVector','\u295B':'RightTeeVector','\u295C':'RightUpTeeVector','\u295D':'RightDownTeeVector','\u295E':'DownLeftTeeVector','\u295F':'DownRightTeeVector','\u2960':'LeftUpTeeVector','\u2961':'LeftDownTeeVector','\u2962':'lHar','\u2963':'uHar','\u2964':'rHar','\u2965':'dHar','\u2966':'luruhar','\u2967':'ldrdhar','\u2968':'ruluhar','\u2969':'rdldhar','\u296A':'lharul','\u296B':'llhard','\u296C':'rharul','\u296D':'lrhard','\u296E':'udhar','\u296F':'duhar','\u2970':'RoundImplies','\u2971':'erarr','\u2972':'simrarr','\u2973':'larrsim','\u2974':'rarrsim','\u2975':'rarrap','\u2976':'ltlarr','\u2978':'gtrarr','\u2979':'subrarr','\u297B':'suplarr','\u297C':'lfisht','\u297D':'rfisht','\u297E':'ufisht','\u297F':'dfisht','\u299A':'vzigzag','\u299C':'vangrt','\u299D':'angrtvbd','\u29A4':'ange','\u29A5':'range','\u29A6':'dwangle','\u29A7':'uwangle','\u29A8':'angmsdaa','\u29A9':'angmsdab','\u29AA':'angmsdac','\u29AB':'angmsdad','\u29AC':'angmsdae','\u29AD':'angmsdaf','\u29AE':'angmsdag','\u29AF':'angmsdah','\u29B0':'bemptyv','\u29B1':'demptyv','\u29B2':'cemptyv','\u29B3':'raemptyv','\u29B4':'laemptyv','\u29B5':'ohbar','\u29B6':'omid','\u29B7':'opar','\u29B9':'operp','\u29BB':'olcross','\u29BC':'odsold','\u29BE':'olcir','\u29BF':'ofcir','\u29C0':'olt','\u29C1':'ogt','\u29C2':'cirscir','\u29C3':'cirE','\u29C4':'solb','\u29C5':'bsolb','\u29C9':'boxbox','\u29CD':'trisb','\u29CE':'rtriltri','\u29CF':'LeftTriangleBar','\u29CF\u0338':'NotLeftTriangleBar','\u29D0':'RightTriangleBar','\u29D0\u0338':'NotRightTriangleBar','\u29DC':'iinfin','\u29DD':'infintie','\u29DE':'nvinfin','\u29E3':'eparsl','\u29E4':'smeparsl','\u29E5':'eqvparsl','\u29EB':'lozf','\u29F4':'RuleDelayed','\u29F6':'dsol','\u2A00':'xodot','\u2A01':'xoplus','\u2A02':'xotime','\u2A04':'xuplus','\u2A06':'xsqcup','\u2A0D':'fpartint','\u2A10':'cirfnint','\u2A11':'awint','\u2A12':'rppolint','\u2A13':'scpolint','\u2A14':'npolint','\u2A15':'pointint','\u2A16':'quatint','\u2A17':'intlarhk','\u2A22':'pluscir','\u2A23':'plusacir','\u2A24':'simplus','\u2A25':'plusdu','\u2A26':'plussim','\u2A27':'plustwo','\u2A29':'mcomma','\u2A2A':'minusdu','\u2A2D':'loplus','\u2A2E':'roplus','\u2A2F':'Cross','\u2A30':'timesd','\u2A31':'timesbar','\u2A33':'smashp','\u2A
34':'lotimes','\u2A35':'rotimes','\u2A36':'otimesas','\u2A37':'Otimes','\u2A38':'odiv','\u2A39':'triplus','\u2A3A':'triminus','\u2A3B':'tritime','\u2A3C':'iprod','\u2A3F':'amalg','\u2A40':'capdot','\u2A42':'ncup','\u2A43':'ncap','\u2A44':'capand','\u2A45':'cupor','\u2A46':'cupcap','\u2A47':'capcup','\u2A48':'cupbrcap','\u2A49':'capbrcup','\u2A4A':'cupcup','\u2A4B':'capcap','\u2A4C':'ccups','\u2A4D':'ccaps','\u2A50':'ccupssm','\u2A53':'And','\u2A54':'Or','\u2A55':'andand','\u2A56':'oror','\u2A57':'orslope','\u2A58':'andslope','\u2A5A':'andv','\u2A5B':'orv','\u2A5C':'andd','\u2A5D':'ord','\u2A5F':'wedbar','\u2A66':'sdote','\u2A6A':'simdot','\u2A6D':'congdot','\u2A6D\u0338':'ncongdot','\u2A6E':'easter','\u2A6F':'apacir','\u2A70':'apE','\u2A70\u0338':'napE','\u2A71':'eplus','\u2A72':'pluse','\u2A73':'Esim','\u2A77':'eDDot','\u2A78':'equivDD','\u2A79':'ltcir','\u2A7A':'gtcir','\u2A7B':'ltquest','\u2A7C':'gtquest','\u2A7D':'les','\u2A7D\u0338':'nles','\u2A7E':'ges','\u2A7E\u0338':'nges','\u2A7F':'lesdot','\u2A80':'gesdot','\u2A81':'lesdoto','\u2A82':'gesdoto','\u2A83':'lesdotor','\u2A84':'gesdotol','\u2A85':'lap','\u2A86':'gap','\u2A87':'lne','\u2A88':'gne','\u2A89':'lnap','\u2A8A':'gnap','\u2A8B':'lEg','\u2A8C':'gEl','\u2A8D':'lsime','\u2A8E':'gsime','\u2A8F':'lsimg','\u2A90':'gsiml','\u2A91':'lgE','\u2A92':'glE','\u2A93':'lesges','\u2A94':'gesles','\u2A95':'els','\u2A96':'egs','\u2A97':'elsdot','\u2A98':'egsdot','\u2A99':'el','\u2A9A':'eg','\u2A9D':'siml','\u2A9E':'simg','\u2A9F':'simlE','\u2AA0':'simgE','\u2AA1':'LessLess','\u2AA1\u0338':'NotNestedLessLess','\u2AA2':'GreaterGreater','\u2AA2\u0338':'NotNestedGreaterGreater','\u2AA4':'glj','\u2AA5':'gla','\u2AA6':'ltcc','\u2AA7':'gtcc','\u2AA8':'lescc','\u2AA9':'gescc','\u2AAA':'smt','\u2AAB':'lat','\u2AAC':'smte','\u2AAC\uFE00':'smtes','\u2AAD':'late','\u2AAD\uFE00':'lates','\u2AAE':'bumpE','\u2AAF':'pre','\u2AAF\u0338':'npre','\u2AB0':'sce','\u2AB0\u0338':'nsce','\u2AB3':'prE','\u2AB4':'scE','\u2AB5':'prnE','\u2AB6':'scnE','\u2AB7':'prap','\u2AB8':'scap','\u2AB9':'prnap','\u2ABA':'scnap','\u2ABB':'Pr','\u2ABC':'Sc','\u2ABD':'subdot','\u2ABE':'supdot','\u2ABF':'subplus','\u2AC0':'supplus','\u2AC1':'submult','\u2AC2':'supmult','\u2AC3':'subedot','\u2AC4':'supedot','\u2AC5':'subE','\u2AC5\u0338':'nsubE','\u2AC6':'supE','\u2AC6\u0338':'nsupE','\u2AC7':'subsim','\u2AC8':'supsim','\u2ACB\uFE00':'vsubnE','\u2ACB':'subnE','\u2ACC\uFE00':'vsupnE','\u2ACC':'supnE','\u2ACF':'csub','\u2AD0':'csup','\u2AD1':'csube','\u2AD2':'csupe','\u2AD3':'subsup','\u2AD4':'supsub','\u2AD5':'subsub','\u2AD6':'supsup','\u2AD7':'suphsub','\u2AD8':'supdsub','\u2AD9':'forkv','\u2ADA':'topfork','\u2ADB':'mlcp','\u2AE4':'Dashv','\u2AE6':'Vdashl','\u2AE7':'Barv','\u2AE8':'vBar','\u2AE9':'vBarv','\u2AEB':'Vbar','\u2AEC':'Not','\u2AED':'bNot','\u2AEE':'rnmid','\u2AEF':'cirmid','\u2AF0':'midcir','\u2AF1':'topcir','\u2AF2':'nhpar','\u2AF3':'parsim','\u2AFD':'parsl','\u2AFD\u20E5':'nparsl','\u266D':'flat','\u266E':'natur','\u266F':'sharp','\xA4':'curren','\xA2':'cent','$':'dollar','\xA3':'pound','\xA5':'yen','\u20AC':'euro','\xB9':'sup1','\xBD':'half','\u2153':'frac13','\xBC':'frac14','\u2155':'frac15','\u2159':'frac16','\u215B':'frac18','\xB2':'sup2','\u2154':'frac23','\u2156':'frac25','\xB3':'sup3','\xBE':'frac34','\u2157':'frac35','\u215C':'frac38','\u2158':'frac45','\u215A':'frac56','\u215D':'frac58','\u215E':'frac78','\uD835\uDCB6':'ascr','\uD835\uDD52':'aopf','\uD835\uDD1E':'afr','\uD835\uDD38':'Aopf','\uD835\uDD04':'Afr','\uD835\uDC9C':'Ascr','\xAA':'ordf','\xE1':'aacute','\x
C1':'Aacute','\xE0':'agrave','\xC0':'Agrave','\u0103':'abreve','\u0102':'Abreve','\xE2':'acirc','\xC2':'Acirc','\xE5':'aring','\xC5':'angst','\xE4':'auml','\xC4':'Auml','\xE3':'atilde','\xC3':'Atilde','\u0105':'aogon','\u0104':'Aogon','\u0101':'amacr','\u0100':'Amacr','\xE6':'aelig','\xC6':'AElig','\uD835\uDCB7':'bscr','\uD835\uDD53':'bopf','\uD835\uDD1F':'bfr','\uD835\uDD39':'Bopf','\u212C':'Bscr','\uD835\uDD05':'Bfr','\uD835\uDD20':'cfr','\uD835\uDCB8':'cscr','\uD835\uDD54':'copf','\u212D':'Cfr','\uD835\uDC9E':'Cscr','\u2102':'Copf','\u0107':'cacute','\u0106':'Cacute','\u0109':'ccirc','\u0108':'Ccirc','\u010D':'ccaron','\u010C':'Ccaron','\u010B':'cdot','\u010A':'Cdot','\xE7':'ccedil','\xC7':'Ccedil','\u2105':'incare','\uD835\uDD21':'dfr','\u2146':'dd','\uD835\uDD55':'dopf','\uD835\uDCB9':'dscr','\uD835\uDC9F':'Dscr','\uD835\uDD07':'Dfr','\u2145':'DD','\uD835\uDD3B':'Dopf','\u010F':'dcaron','\u010E':'Dcaron','\u0111':'dstrok','\u0110':'Dstrok','\xF0':'eth','\xD0':'ETH','\u2147':'ee','\u212F':'escr','\uD835\uDD22':'efr','\uD835\uDD56':'eopf','\u2130':'Escr','\uD835\uDD08':'Efr','\uD835\uDD3C':'Eopf','\xE9':'eacute','\xC9':'Eacute','\xE8':'egrave','\xC8':'Egrave','\xEA':'ecirc','\xCA':'Ecirc','\u011B':'ecaron','\u011A':'Ecaron','\xEB':'euml','\xCB':'Euml','\u0117':'edot','\u0116':'Edot','\u0119':'eogon','\u0118':'Eogon','\u0113':'emacr','\u0112':'Emacr','\uD835\uDD23':'ffr','\uD835\uDD57':'fopf','\uD835\uDCBB':'fscr','\uD835\uDD09':'Ffr','\uD835\uDD3D':'Fopf','\u2131':'Fscr','\uFB00':'fflig','\uFB03':'ffilig','\uFB04':'ffllig','\uFB01':'filig','fj':'fjlig','\uFB02':'fllig','\u0192':'fnof','\u210A':'gscr','\uD835\uDD58':'gopf','\uD835\uDD24':'gfr','\uD835\uDCA2':'Gscr','\uD835\uDD3E':'Gopf','\uD835\uDD0A':'Gfr','\u01F5':'gacute','\u011F':'gbreve','\u011E':'Gbreve','\u011D':'gcirc','\u011C':'Gcirc','\u0121':'gdot','\u0120':'Gdot','\u0122':'Gcedil','\uD835\uDD25':'hfr','\u210E':'planckh','\uD835\uDCBD':'hscr','\uD835\uDD59':'hopf','\u210B':'Hscr','\u210C':'Hfr','\u210D':'Hopf','\u0125':'hcirc','\u0124':'Hcirc','\u210F':'hbar','\u0127':'hstrok','\u0126':'Hstrok','\uD835\uDD5A':'iopf','\uD835\uDD26':'ifr','\uD835\uDCBE':'iscr','\u2148':'ii','\uD835\uDD40':'Iopf','\u2110':'Iscr','\u2111':'Im','\xED':'iacute','\xCD':'Iacute','\xEC':'igrave','\xCC':'Igrave','\xEE':'icirc','\xCE':'Icirc','\xEF':'iuml','\xCF':'Iuml','\u0129':'itilde','\u0128':'Itilde','\u0130':'Idot','\u012F':'iogon','\u012E':'Iogon','\u012B':'imacr','\u012A':'Imacr','\u0133':'ijlig','\u0132':'IJlig','\u0131':'imath','\uD835\uDCBF':'jscr','\uD835\uDD5B':'jopf','\uD835\uDD27':'jfr','\uD835\uDCA5':'Jscr','\uD835\uDD0D':'Jfr','\uD835\uDD41':'Jopf','\u0135':'jcirc','\u0134':'Jcirc','\u0237':'jmath','\uD835\uDD5C':'kopf','\uD835\uDCC0':'kscr','\uD835\uDD28':'kfr','\uD835\uDCA6':'Kscr','\uD835\uDD42':'Kopf','\uD835\uDD0E':'Kfr','\u0137':'kcedil','\u0136':'Kcedil','\uD835\uDD29':'lfr','\uD835\uDCC1':'lscr','\u2113':'ell','\uD835\uDD5D':'lopf','\u2112':'Lscr','\uD835\uDD0F':'Lfr','\uD835\uDD43':'Lopf','\u013A':'lacute','\u0139':'Lacute','\u013E':'lcaron','\u013D':'Lcaron','\u013C':'lcedil','\u013B':'Lcedil','\u0142':'lstrok','\u0141':'Lstrok','\u0140':'lmidot','\u013F':'Lmidot','\uD835\uDD2A':'mfr','\uD835\uDD5E':'mopf','\uD835\uDCC2':'mscr','\uD835\uDD10':'Mfr','\uD835\uDD44':'Mopf','\u2133':'Mscr','\uD835\uDD2B':'nfr','\uD835\uDD5F':'nopf','\uD835\uDCC3':'nscr','\u2115':'Nopf','\uD835\uDCA9':'Nscr','\uD835\uDD11':'Nfr','\u0144':'nacute','\u0143':'Nacute','\u0148':'ncaron','\u0147':'Ncaron','\xF1':'ntilde','\xD1':'Ntilde','\u0146':'ncedil','
\u0145':'Ncedil','\u2116':'numero','\u014B':'eng','\u014A':'ENG','\uD835\uDD60':'oopf','\uD835\uDD2C':'ofr','\u2134':'oscr','\uD835\uDCAA':'Oscr','\uD835\uDD12':'Ofr','\uD835\uDD46':'Oopf','\xBA':'ordm','\xF3':'oacute','\xD3':'Oacute','\xF2':'ograve','\xD2':'Ograve','\xF4':'ocirc','\xD4':'Ocirc','\xF6':'ouml','\xD6':'Ouml','\u0151':'odblac','\u0150':'Odblac','\xF5':'otilde','\xD5':'Otilde','\xF8':'oslash','\xD8':'Oslash','\u014D':'omacr','\u014C':'Omacr','\u0153':'oelig','\u0152':'OElig','\uD835\uDD2D':'pfr','\uD835\uDCC5':'pscr','\uD835\uDD61':'popf','\u2119':'Popf','\uD835\uDD13':'Pfr','\uD835\uDCAB':'Pscr','\uD835\uDD62':'qopf','\uD835\uDD2E':'qfr','\uD835\uDCC6':'qscr','\uD835\uDCAC':'Qscr','\uD835\uDD14':'Qfr','\u211A':'Qopf','\u0138':'kgreen','\uD835\uDD2F':'rfr','\uD835\uDD63':'ropf','\uD835\uDCC7':'rscr','\u211B':'Rscr','\u211C':'Re','\u211D':'Ropf','\u0155':'racute','\u0154':'Racute','\u0159':'rcaron','\u0158':'Rcaron','\u0157':'rcedil','\u0156':'Rcedil','\uD835\uDD64':'sopf','\uD835\uDCC8':'sscr','\uD835\uDD30':'sfr','\uD835\uDD4A':'Sopf','\uD835\uDD16':'Sfr','\uD835\uDCAE':'Sscr','\u24C8':'oS','\u015B':'sacute','\u015A':'Sacute','\u015D':'scirc','\u015C':'Scirc','\u0161':'scaron','\u0160':'Scaron','\u015F':'scedil','\u015E':'Scedil','\xDF':'szlig','\uD835\uDD31':'tfr','\uD835\uDCC9':'tscr','\uD835\uDD65':'topf','\uD835\uDCAF':'Tscr','\uD835\uDD17':'Tfr','\uD835\uDD4B':'Topf','\u0165':'tcaron','\u0164':'Tcaron','\u0163':'tcedil','\u0162':'Tcedil','\u2122':'trade','\u0167':'tstrok','\u0166':'Tstrok','\uD835\uDCCA':'uscr','\uD835\uDD66':'uopf','\uD835\uDD32':'ufr','\uD835\uDD4C':'Uopf','\uD835\uDD18':'Ufr','\uD835\uDCB0':'Uscr','\xFA':'uacute','\xDA':'Uacute','\xF9':'ugrave','\xD9':'Ugrave','\u016D':'ubreve','\u016C':'Ubreve','\xFB':'ucirc','\xDB':'Ucirc','\u016F':'uring','\u016E':'Uring','\xFC':'uuml','\xDC':'Uuml','\u0171':'udblac','\u0170':'Udblac','\u0169':'utilde','\u0168':'Utilde','\u0173':'uogon','\u0172':'Uogon','\u016B':'umacr','\u016A':'Umacr','\uD835\uDD33':'vfr','\uD835\uDD67':'vopf','\uD835\uDCCB':'vscr','\uD835\uDD19':'Vfr','\uD835\uDD4D':'Vopf','\uD835\uDCB1':'Vscr','\uD835\uDD68':'wopf','\uD835\uDCCC':'wscr','\uD835\uDD34':'wfr','\uD835\uDCB2':'Wscr','\uD835\uDD4E':'Wopf','\uD835\uDD1A':'Wfr','\u0175':'wcirc','\u0174':'Wcirc','\uD835\uDD35':'xfr','\uD835\uDCCD':'xscr','\uD835\uDD69':'xopf','\uD835\uDD4F':'Xopf','\uD835\uDD1B':'Xfr','\uD835\uDCB3':'Xscr','\uD835\uDD36':'yfr','\uD835\uDCCE':'yscr','\uD835\uDD6A':'yopf','\uD835\uDCB4':'Yscr','\uD835\uDD1C':'Yfr','\uD835\uDD50':'Yopf','\xFD':'yacute','\xDD':'Yacute','\u0177':'ycirc','\u0176':'Ycirc','\xFF':'yuml','\u0178':'Yuml','\uD835\uDCCF':'zscr','\uD835\uDD37':'zfr','\uD835\uDD6B':'zopf','\u2128':'Zfr','\u2124':'Zopf','\uD835\uDCB5':'Zscr','\u017A':'zacute','\u0179':'Zacute','\u017E':'zcaron','\u017D':'Zcaron','\u017C':'zdot','\u017B':'Zdot','\u01B5':'imped','\xFE':'thorn','\xDE':'THORN','\u0149':'napos','\u03B1':'alpha','\u0391':'Alpha','\u03B2':'beta','\u0392':'Beta','\u03B3':'gamma','\u0393':'Gamma','\u03B4':'delta','\u0394':'Delta','\u03B5':'epsi','\u03F5':'epsiv','\u0395':'Epsilon','\u03DD':'gammad','\u03DC':'Gammad','\u03B6':'zeta','\u0396':'Zeta','\u03B7':'eta','\u0397':'Eta','\u03B8':'theta','\u03D1':'thetav','\u0398':'Theta','\u03B9':'iota','\u0399':'Iota','\u03BA':'kappa','\u03F0':'kappav','\u039A':'Kappa','\u03BB':'lambda','\u039B':'Lambda','\u03BC':'mu','\xB5':'micro','\u039C':'Mu','\u03BD':'nu','\u039D':'Nu','\u03BE':'xi','\u039E':'Xi','\u03BF':'omicron','\u039F':'Omicron','\u03C0':'pi','\u03D6':'piv','
\u03A0':'Pi','\u03C1':'rho','\u03F1':'rhov','\u03A1':'Rho','\u03C3':'sigma','\u03A3':'Sigma','\u03C2':'sigmaf','\u03C4':'tau','\u03A4':'Tau','\u03C5':'upsi','\u03A5':'Upsilon','\u03D2':'Upsi','\u03C6':'phi','\u03D5':'phiv','\u03A6':'Phi','\u03C7':'chi','\u03A7':'Chi','\u03C8':'psi','\u03A8':'Psi','\u03C9':'omega','\u03A9':'ohm','\u0430':'acy','\u0410':'Acy','\u0431':'bcy','\u0411':'Bcy','\u0432':'vcy','\u0412':'Vcy','\u0433':'gcy','\u0413':'Gcy','\u0453':'gjcy','\u0403':'GJcy','\u0434':'dcy','\u0414':'Dcy','\u0452':'djcy','\u0402':'DJcy','\u0435':'iecy','\u0415':'IEcy','\u0451':'iocy','\u0401':'IOcy','\u0454':'jukcy','\u0404':'Jukcy','\u0436':'zhcy','\u0416':'ZHcy','\u0437':'zcy','\u0417':'Zcy','\u0455':'dscy','\u0405':'DScy','\u0438':'icy','\u0418':'Icy','\u0456':'iukcy','\u0406':'Iukcy','\u0457':'yicy','\u0407':'YIcy','\u0439':'jcy','\u0419':'Jcy','\u0458':'jsercy','\u0408':'Jsercy','\u043A':'kcy','\u041A':'Kcy','\u045C':'kjcy','\u040C':'KJcy','\u043B':'lcy','\u041B':'Lcy','\u0459':'ljcy','\u0409':'LJcy','\u043C':'mcy','\u041C':'Mcy','\u043D':'ncy','\u041D':'Ncy','\u045A':'njcy','\u040A':'NJcy','\u043E':'ocy','\u041E':'Ocy','\u043F':'pcy','\u041F':'Pcy','\u0440':'rcy','\u0420':'Rcy','\u0441':'scy','\u0421':'Scy','\u0442':'tcy','\u0422':'Tcy','\u045B':'tshcy','\u040B':'TSHcy','\u0443':'ucy','\u0423':'Ucy','\u045E':'ubrcy','\u040E':'Ubrcy','\u0444':'fcy','\u0424':'Fcy','\u0445':'khcy','\u0425':'KHcy','\u0446':'tscy','\u0426':'TScy','\u0447':'chcy','\u0427':'CHcy','\u045F':'dzcy','\u040F':'DZcy','\u0448':'shcy','\u0428':'SHcy','\u0449':'shchcy','\u0429':'SHCHcy','\u044A':'hardcy','\u042A':'HARDcy','\u044B':'ycy','\u042B':'Ycy','\u044C':'softcy','\u042C':'SOFTcy','\u044D':'ecy','\u042D':'Ecy','\u044E':'yucy','\u042E':'YUcy','\u044F':'yacy','\u042F':'YAcy','\u2135':'aleph','\u2136':'beth','\u2137':'gimel','\u2138':'daleth'};
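// The table above maps characters to entity names for encoding. The regex and
// map below appear to be the escape table of the inlined `he` entity
// encoder/decoder (an inference from the shape of these tables): `regexEscape`
// matches every character that must always be escaped in HTML output, and
// `escapeMap` gives the character reference each one is replaced with.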
var regexEscape = /["&'<>`]/g;
var escapeMap = {
'"': '&quot;',
'&': '&amp;',
'\'': '&#x27;',
'<': '&lt;',
// See https://mathiasbynens.be/notes/ambiguous-ampersands: in HTML, the
// following is not strictly necessary unless it’s part of a tag or an
// unquoted attribute value. We’re only escaping it to support those
// situations, and for XML support.
'>': '&gt;',
// In Internet Explorer ≤ 8, the backtick character can be used
// to break out of (un)quoted attribute values or HTML comments.
// See http://html5sec.org/#102, http://html5sec.org/#108, and
// http://html5sec.org/#133.
'`': '&#x60;'
};
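// A minimal sketch of how the pair above is typically combined (illustrative
// only; the real replace callback lives elsewhere in this bundle):
//
//   var escaped = 'x < y & z'.replace(regexEscape, function(character) {
//     return escapeMap[character];
//   });
//   // escaped === 'x &lt; y &amp; z'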
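// The next two patterns look like strict-mode validation helpers:
// `regexInvalidEntity` flags numeric character references whose digit
// sequence is malformed, and `regexInvalidRawCodePoint` flags code points
// (control characters, noncharacters, lone surrogates) that must not appear
// literally in well-formed output.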
var regexInvalidEntity = /&#(?:[xX][^a-fA-F0-9]|[^0-9xX])/;
var regexInvalidRawCodePoint = /[\0-\x08\x0B\x0E-\x1F\x7F-\x9F\uFDD0-\uFDEF\uFFFE\uFFFF]|[\uD83F\uD87F\uD8BF\uD8FF\uD93F\uD97F\uD9BF\uD9FF\uDA3F\uDA7F\uDABF\uDAFF\uDB3F\uDB7F\uDBBF\uDBFF][\uDFFE\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/;
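// `regexDecode` below is one large alternation over everything the decoder
// recognizes: named references ending in `;`, the legacy named references
// that are also accepted without a trailing `;`, and decimal / hexadecimal
// numeric references. It is presumably consumed by a single String#replace
// pass elsewhere in this bundle.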
var regexDecode = /&(CounterClockwiseContourIntegral|DoubleLongLeftRightArrow|ClockwiseContourIntegral|NotNestedGreaterGreater|NotSquareSupersetEqual|DiacriticalDoubleAcute|NotRightTriangleEqual|NotSucceedsSlantEqual|NotPrecedesSlantEqual|CloseCurlyDoubleQuote|NegativeVeryThinSpace|DoubleContourIntegral|FilledVerySmallSquare|CapitalDifferentialD|OpenCurlyDoubleQuote|EmptyVerySmallSquare|NestedGreaterGreater|DoubleLongRightArrow|NotLeftTriangleEqual|NotGreaterSlantEqual|ReverseUpEquilibrium|DoubleLeftRightArrow|NotSquareSubsetEqual|NotDoubleVerticalBar|RightArrowLeftArrow|NotGreaterFullEqual|NotRightTriangleBar|SquareSupersetEqual|DownLeftRightVector|DoubleLongLeftArrow|leftrightsquigarrow|LeftArrowRightArrow|NegativeMediumSpace|blacktriangleright|RightDownVectorBar|PrecedesSlantEqual|RightDoubleBracket|SucceedsSlantEqual|NotLeftTriangleBar|RightTriangleEqual|SquareIntersection|RightDownTeeVector|ReverseEquilibrium|NegativeThickSpace|longleftrightarrow|Longleftrightarrow|LongLeftRightArrow|DownRightTeeVector|DownRightVectorBar|GreaterSlantEqual|SquareSubsetEqual|LeftDownVectorBar|LeftDoubleBracket|VerticalSeparator|rightleftharpoons|NotGreaterGreater|NotSquareSuperset|blacktriangleleft|blacktriangledown|NegativeThinSpace|LeftDownTeeVector|NotLessSlantEqual|leftrightharpoons|DoubleUpDownArrow|DoubleVerticalBar|LeftTriangleEqual|FilledSmallSquare|twoheadrightarrow|NotNestedLessLess|DownLeftTeeVector|DownLeftVectorBar|RightAngleBracket|NotTildeFullEqual|NotReverseElement|RightUpDownVector|DiacriticalTilde|NotSucceedsTilde|circlearrowright|NotPrecedesEqual|rightharpoondown|DoubleRightArrow|NotSucceedsEqual|NonBreakingSpace|NotRightTriangle|LessEqualGreater|RightUpTeeVector|LeftAngleBracket|GreaterFullEqual|DownArrowUpArrow|RightUpVectorBar|twoheadleftarrow|GreaterEqualLess|downharpoonright|RightTriangleBar|ntrianglerighteq|NotSupersetEqual|LeftUpDownVector|DiacriticalAcute|rightrightarrows|vartriangleright|UpArrowDownArrow|DiacriticalGrave|UnderParenthesis|EmptySmallSquare|LeftUpVectorBar|leftrightarrows|DownRightVector|downharpoonleft|trianglerighteq|ShortRightArrow|OverParenthesis|DoubleLeftArrow|DoubleDownArrow|NotSquareSubset|bigtriangledown|ntrianglelefteq|UpperRightArrow|curvearrowright|vartriangleleft|NotLeftTriangle|nleftrightarrow|LowerRightArrow|NotHumpDownHump|NotGreaterTilde|rightthreetimes|LeftUpTeeVector|NotGreaterEqual|straightepsilon|LeftTriangleBar|rightsquigarrow|ContourIntegral|rightleftarrows|CloseCurlyQuote|RightDownVector|LeftRightVector|nLeftrightarrow|leftharpoondown|circlearrowleft|SquareSuperset|OpenCurlyQuote|hookrightarrow|HorizontalLine|DiacriticalDot|NotLessGreater|ntriangleright|DoubleRightTee|InvisibleComma|InvisibleTimes|LowerLeftArrow|DownLeftVector|NotSubsetEqual|curvearrowleft|trianglelefteq|NotVerticalBar|TildeFullEqual|downdownarrows|NotGreaterLess|RightTeeVector|ZeroWidthSpace|looparrowright|LongRightArrow|doublebarwedge|ShortLeftArrow|ShortDownArrow|RightVectorBar|GreaterGreater|ReverseElement|rightharpoonup|LessSlantEqual|leftthreetimes|upharpoonright|rightarrowtail|LeftDownVector|Longrightarrow|NestedLessLess|UpperLeftArrow|nshortparallel|leftleftarrows|leftrightarrow|Leftrightarrow|LeftRightArrow|longrightarrow|upharpoonleft|RightArrowBar|ApplyFunction|LeftTeeVector|leftarrowtail|NotEqualTilde|varsubsetneqq|varsupsetneqq|RightTeeArrow|SucceedsEqual|SucceedsTilde|LeftVectorBar|SupersetEqual|hookleftarrow|DifferentialD|VerticalTilde|VeryThinSpace|blacktriangle|bigtriangleup|LessFullEqual|divideontimes|leftharpoonup|UpEquilibrium|ntriangleleft|RightTriangl
e|measuredangle|shortparallel|longleftarrow|Longleftarrow|LongLeftArrow|DoubleLeftTee|Poincareplane|PrecedesEqual|triangleright|DoubleUpArrow|RightUpVector|fallingdotseq|looparrowleft|PrecedesTilde|NotTildeEqual|NotTildeTilde|smallsetminus|Proportional|triangleleft|triangledown|UnderBracket|NotHumpEqual|exponentiale|ExponentialE|NotLessTilde|HilbertSpace|RightCeiling|blacklozenge|varsupsetneq|HumpDownHump|GreaterEqual|VerticalLine|LeftTeeArrow|NotLessEqual|DownTeeArrow|LeftTriangle|varsubsetneq|Intersection|NotCongruent|DownArrowBar|LeftUpVector|LeftArrowBar|risingdotseq|GreaterTilde|RoundImplies|SquareSubset|ShortUpArrow|NotSuperset|quaternions|precnapprox|backepsilon|preccurlyeq|OverBracket|blacksquare|MediumSpace|VerticalBar|circledcirc|circleddash|CircleMinus|CircleTimes|LessGreater|curlyeqprec|curlyeqsucc|diamondsuit|UpDownArrow|Updownarrow|RuleDelayed|Rrightarrow|updownarrow|RightVector|nRightarrow|nrightarrow|eqslantless|LeftCeiling|Equilibrium|SmallCircle|expectation|NotSucceeds|thickapprox|GreaterLess|SquareUnion|NotPrecedes|NotLessLess|straightphi|succnapprox|succcurlyeq|SubsetEqual|sqsupseteq|Proportion|Laplacetrf|ImaginaryI|supsetneqq|NotGreater|gtreqqless|NotElement|ThickSpace|TildeEqual|TildeTilde|Fouriertrf|rmoustache|EqualTilde|eqslantgtr|UnderBrace|LeftVector|UpArrowBar|nLeftarrow|nsubseteqq|subsetneqq|nsupseteqq|nleftarrow|succapprox|lessapprox|UpTeeArrow|upuparrows|curlywedge|lesseqqgtr|varepsilon|varnothing|RightFloor|complement|CirclePlus|sqsubseteq|Lleftarrow|circledast|RightArrow|Rightarrow|rightarrow|lmoustache|Bernoullis|precapprox|mapstoleft|mapstodown|longmapsto|dotsquare|downarrow|DoubleDot|nsubseteq|supsetneq|leftarrow|nsupseteq|subsetneq|ThinSpace|ngeqslant|subseteqq|HumpEqual|NotSubset|triangleq|NotCupCap|lesseqgtr|heartsuit|TripleDot|Leftarrow|Coproduct|Congruent|varpropto|complexes|gvertneqq|LeftArrow|LessTilde|supseteqq|MinusPlus|CircleDot|nleqslant|NotExists|gtreqless|nparallel|UnionPlus|LeftFloor|checkmark|CenterDot|centerdot|Mellintrf|gtrapprox|bigotimes|OverBrace|spadesuit|therefore|pitchfork|rationals|PlusMinus|Backslash|Therefore|DownBreve|backsimeq|backprime|DownArrow|nshortmid|Downarrow|lvertneqq|eqvparsl|imagline|imagpart|infintie|integers|Integral|intercal|LessLess|Uarrocir|intlarhk|sqsupset|angmsdaf|sqsubset|llcorner|vartheta|cupbrcap|lnapprox|Superset|SuchThat|succnsim|succneqq|angmsdag|biguplus|curlyvee|trpezium|Succeeds|NotTilde|bigwedge|angmsdah|angrtvbd|triminus|cwconint|fpartint|lrcorner|smeparsl|subseteq|urcorner|lurdshar|laemptyv|DDotrahd|approxeq|ldrushar|awconint|mapstoup|backcong|shortmid|triangle|geqslant|gesdotol|timesbar|circledR|circledS|setminus|multimap|naturals|scpolint|ncongdot|RightTee|boxminus|gnapprox|boxtimes|andslope|thicksim|angmsdaa|varsigma|cirfnint|rtriltri|angmsdab|rppolint|angmsdac|barwedge|drbkarow|clubsuit|thetasym|bsolhsub|capbrcup|dzigrarr|doteqdot|DotEqual|dotminus|UnderBar|NotEqual|realpart|otimesas|ulcorner|hksearow|hkswarow|parallel|PartialD|elinters|emptyset|plusacir|bbrktbrk|angmsdad|pointint|bigoplus|angmsdae|Precedes|bigsqcup|varkappa|notindot|supseteq|precneqq|precnsim|profalar|profline|profsurf|leqslant|lesdotor|raemptyv|subplus|notnivb|notnivc|subrarr|zigrarr|vzigzag|submult|subedot|Element|between|cirscir|larrbfs|larrsim|lotimes|lbrksld|lbrkslu|lozenge|ldrdhar|dbkarow|bigcirc|epsilon|simrarr|simplus|ltquest|Epsilon|luruhar|gtquest|maltese|npolint|eqcolon|npreceq|bigodot|ddagger|gtrless|bnequiv|harrcir|ddotseq|equivDD|backsim|demptyv|nsqsube|nsqsupe|Upsilon|nsubset|upsilon|minusdu|nsucceq|swarrow|nsups
et|coloneq|searrow|boxplus|napprox|natural|asympeq|alefsym|congdot|nearrow|bigstar|diamond|supplus|tritime|LeftTee|nvinfin|triplus|NewLine|nvltrie|nvrtrie|nwarrow|nexists|Diamond|ruluhar|Implies|supmult|angzarr|suplarr|suphsub|questeq|because|digamma|Because|olcross|bemptyv|omicron|Omicron|rotimes|NoBreak|intprod|angrtvb|orderof|uwangle|suphsol|lesdoto|orslope|DownTee|realine|cudarrl|rdldhar|OverBar|supedot|lessdot|supdsub|topfork|succsim|rbrkslu|rbrksld|pertenk|cudarrr|isindot|planckh|lessgtr|pluscir|gesdoto|plussim|plustwo|lesssim|cularrp|rarrsim|Cayleys|notinva|notinvb|notinvc|UpArrow|Uparrow|uparrow|NotLess|dwangle|precsim|Product|curarrm|Cconint|dotplus|rarrbfs|ccupssm|Cedilla|cemptyv|notniva|quatint|frac35|frac38|frac45|frac56|frac58|frac78|tridot|xoplus|gacute|gammad|Gammad|lfisht|lfloor|bigcup|sqsupe|gbreve|Gbreve|lharul|sqsube|sqcups|Gcedil|apacir|llhard|lmidot|Lmidot|lmoust|andand|sqcaps|approx|Abreve|spades|circeq|tprime|divide|topcir|Assign|topbot|gesdot|divonx|xuplus|timesd|gesles|atilde|solbar|SOFTcy|loplus|timesb|lowast|lowbar|dlcorn|dlcrop|softcy|dollar|lparlt|thksim|lrhard|Atilde|lsaquo|smashp|bigvee|thinsp|wreath|bkarow|lsquor|lstrok|Lstrok|lthree|ltimes|ltlarr|DotDot|simdot|ltrPar|weierp|xsqcup|angmsd|sigmav|sigmaf|zeetrf|Zcaron|zcaron|mapsto|vsupne|thetav|cirmid|marker|mcomma|Zacute|vsubnE|there4|gtlPar|vsubne|bottom|gtrarr|SHCHcy|shchcy|midast|midcir|middot|minusb|minusd|gtrdot|bowtie|sfrown|mnplus|models|colone|seswar|Colone|mstpos|searhk|gtrsim|nacute|Nacute|boxbox|telrec|hairsp|Tcedil|nbumpe|scnsim|ncaron|Ncaron|ncedil|Ncedil|hamilt|Scedil|nearhk|hardcy|HARDcy|tcedil|Tcaron|commat|nequiv|nesear|tcaron|target|hearts|nexist|varrho|scedil|Scaron|scaron|hellip|Sacute|sacute|hercon|swnwar|compfn|rtimes|rthree|rsquor|rsaquo|zacute|wedgeq|homtht|barvee|barwed|Barwed|rpargt|horbar|conint|swarhk|roplus|nltrie|hslash|hstrok|Hstrok|rmoust|Conint|bprime|hybull|hyphen|iacute|Iacute|supsup|supsub|supsim|varphi|coprod|brvbar|agrave|Supset|supset|igrave|Igrave|notinE|Agrave|iiiint|iinfin|copysr|wedbar|Verbar|vangrt|becaus|incare|verbar|inodot|bullet|drcorn|intcal|drcrop|cularr|vellip|Utilde|bumpeq|cupcap|dstrok|Dstrok|CupCap|cupcup|cupdot|eacute|Eacute|supdot|iquest|easter|ecaron|Ecaron|ecolon|isinsv|utilde|itilde|Itilde|curarr|succeq|Bumpeq|cacute|ulcrop|nparsl|Cacute|nprcue|egrave|Egrave|nrarrc|nrarrw|subsup|subsub|nrtrie|jsercy|nsccue|Jsercy|kappav|kcedil|Kcedil|subsim|ulcorn|nsimeq|egsdot|veebar|kgreen|capand|elsdot|Subset|subset|curren|aacute|lacute|Lacute|emptyv|ntilde|Ntilde|lagran|lambda|Lambda|capcap|Ugrave|langle|subdot|emsp13|numero|emsp14|nvdash|nvDash|nVdash|nVDash|ugrave|ufisht|nvHarr|larrfs|nvlArr|larrhk|larrlp|larrpl|nvrArr|Udblac|nwarhk|larrtl|nwnear|oacute|Oacute|latail|lAtail|sstarf|lbrace|odblac|Odblac|lbrack|udblac|odsold|eparsl|lcaron|Lcaron|ograve|Ograve|lcedil|Lcedil|Aacute|ssmile|ssetmn|squarf|ldquor|capcup|ominus|cylcty|rharul|eqcirc|dagger|rfloor|rfisht|Dagger|daleth|equals|origof|capdot|equest|dcaron|Dcaron|rdquor|oslash|Oslash|otilde|Otilde|otimes|Otimes|urcrop|Ubreve|ubreve|Yacute|Uacute|uacute|Rcedil|rcedil|urcorn|parsim|Rcaron|Vdashl|rcaron|Tstrok|percnt|period|permil|Exists|yacute|rbrack|rbrace|phmmat|ccaron|Ccaron|planck|ccedil|plankv|tstrok|female|plusdo|plusdu|ffilig|plusmn|ffllig|Ccedil|rAtail|dfisht|bernou|ratail|Rarrtl|rarrtl|angsph|rarrpl|rarrlp|rarrhk|xwedge|xotime|forall|ForAll|Vvdash|vsupnE|preceq|bigcap|frac12|frac13|frac14|primes|rarrfs|prnsim|frac15|Square|frac16|square|lesdot|frac18|frac23|propto|prurel|rarrap|rangle|puncsp|frac25|Racut
e|qprime|racute|lesges|frac34|abreve|AElig|eqsim|utdot|setmn|urtri|Equal|Uring|seArr|uring|searr|dashv|Dashv|mumap|nabla|iogon|Iogon|sdote|sdotb|scsim|napid|napos|equiv|natur|Acirc|dblac|erarr|nbump|iprod|erDot|ucirc|awint|esdot|angrt|ncong|isinE|scnap|Scirc|scirc|ndash|isins|Ubrcy|nearr|neArr|isinv|nedot|ubrcy|acute|Ycirc|iukcy|Iukcy|xutri|nesim|caret|jcirc|Jcirc|caron|twixt|ddarr|sccue|exist|jmath|sbquo|ngeqq|angst|ccaps|lceil|ngsim|UpTee|delta|Delta|rtrif|nharr|nhArr|nhpar|rtrie|jukcy|Jukcy|kappa|rsquo|Kappa|nlarr|nlArr|TSHcy|rrarr|aogon|Aogon|fflig|xrarr|tshcy|ccirc|nleqq|filig|upsih|nless|dharl|nlsim|fjlig|ropar|nltri|dharr|robrk|roarr|fllig|fltns|roang|rnmid|subnE|subne|lAarr|trisb|Ccirc|acirc|ccups|blank|VDash|forkv|Vdash|langd|cedil|blk12|blk14|laquo|strns|diams|notin|vDash|larrb|blk34|block|disin|uplus|vdash|vBarv|aelig|starf|Wedge|check|xrArr|lates|lbarr|lBarr|notni|lbbrk|bcong|frasl|lbrke|frown|vrtri|vprop|vnsup|gamma|Gamma|wedge|xodot|bdquo|srarr|doteq|ldquo|boxdl|boxdL|gcirc|Gcirc|boxDl|boxDL|boxdr|boxdR|boxDr|TRADE|trade|rlhar|boxDR|vnsub|npart|vltri|rlarr|boxhd|boxhD|nprec|gescc|nrarr|nrArr|boxHd|boxHD|boxhu|boxhU|nrtri|boxHu|clubs|boxHU|times|colon|Colon|gimel|xlArr|Tilde|nsime|tilde|nsmid|nspar|THORN|thorn|xlarr|nsube|nsubE|thkap|xhArr|comma|nsucc|boxul|boxuL|nsupe|nsupE|gneqq|gnsim|boxUl|boxUL|grave|boxur|boxuR|boxUr|boxUR|lescc|angle|bepsi|boxvh|varpi|boxvH|numsp|Theta|gsime|gsiml|theta|boxVh|boxVH|boxvl|gtcir|gtdot|boxvL|boxVl|boxVL|crarr|cross|Cross|nvsim|boxvr|nwarr|nwArr|sqsup|dtdot|Uogon|lhard|lharu|dtrif|ocirc|Ocirc|lhblk|duarr|odash|sqsub|Hacek|sqcup|llarr|duhar|oelig|OElig|ofcir|boxvR|uogon|lltri|boxVr|csube|uuarr|ohbar|csupe|ctdot|olarr|olcir|harrw|oline|sqcap|omacr|Omacr|omega|Omega|boxVR|aleph|lneqq|lnsim|loang|loarr|rharu|lobrk|hcirc|operp|oplus|rhard|Hcirc|orarr|Union|order|ecirc|Ecirc|cuepr|szlig|cuesc|breve|reals|eDDot|Breve|hoarr|lopar|utrif|rdquo|Umacr|umacr|efDot|swArr|ultri|alpha|rceil|ovbar|swarr|Wcirc|wcirc|smtes|smile|bsemi|lrarr|aring|parsl|lrhar|bsime|uhblk|lrtri|cupor|Aring|uharr|uharl|slarr|rbrke|bsolb|lsime|rbbrk|RBarr|lsimg|phone|rBarr|rbarr|icirc|lsquo|Icirc|emacr|Emacr|ratio|simne|plusb|simlE|simgE|simeq|pluse|ltcir|ltdot|empty|xharr|xdtri|iexcl|Alpha|ltrie|rarrw|pound|ltrif|xcirc|bumpe|prcue|bumpE|asymp|amacr|cuvee|Sigma|sigma|iiint|udhar|iiota|ijlig|IJlig|supnE|imacr|Imacr|prime|Prime|image|prnap|eogon|Eogon|rarrc|mdash|mDDot|cuwed|imath|supne|imped|Amacr|udarr|prsim|micro|rarrb|cwint|raquo|infin|eplus|range|rangd|Ucirc|radic|minus|amalg|veeeq|rAarr|epsiv|ycirc|quest|sharp|quot|zwnj|Qscr|race|qscr|Qopf|qopf|qint|rang|Rang|Zscr|zscr|Zopf|zopf|rarr|rArr|Rarr|Pscr|pscr|prop|prod|prnE|prec|ZHcy|zhcy|prap|Zeta|zeta|Popf|popf|Zdot|plus|zdot|Yuml|yuml|phiv|YUcy|yucy|Yscr|yscr|perp|Yopf|yopf|part|para|YIcy|Ouml|rcub|yicy|YAcy|rdca|ouml|osol|Oscr|rdsh|yacy|real|oscr|xvee|andd|rect|andv|Xscr|oror|ordm|ordf|xscr|ange|aopf|Aopf|rHar|Xopf|opar|Oopf|xopf|xnis|rhov|oopf|omid|xmap|oint|apid|apos|ogon|ascr|Ascr|odot|odiv|xcup|xcap|ocir|oast|nvlt|nvle|nvgt|nvge|nvap|Wscr|wscr|auml|ntlg|ntgl|nsup|nsub|nsim|Nscr|nscr|nsce|Wopf|ring|npre|wopf|npar|Auml|Barv|bbrk|Nopf|nopf|nmid|nLtv|beta|ropf|Ropf|Beta|beth|nles|rpar|nleq|bnot|bNot|nldr|NJcy|rscr|Rscr|Vscr|vscr|rsqb|njcy|bopf|nisd|Bopf|rtri|Vopf|nGtv|ngtr|vopf|boxh|boxH|boxv|nges|ngeq|boxV|bscr|scap|Bscr|bsim|Vert|vert|bsol|bull|bump|caps|cdot|ncup|scnE|ncap|nbsp|napE|Cdot|cent|sdot|Vbar|nang|vBar|chcy|Mscr|mscr|sect|semi|CHcy|Mopf|mopf|sext|circ|cire|mldr|mlcp|cirE|comp|shcy|SHcy|vArr|varr|cong|copf|Copf|copy|C
OPY|malt|male|macr|lvnE|cscr|ltri|sime|ltcc|simg|Cscr|siml|csub|Uuml|lsqb|lsim|uuml|csup|Lscr|lscr|utri|smid|lpar|cups|smte|lozf|darr|Lopf|Uscr|solb|lopf|sopf|Sopf|lneq|uscr|spar|dArr|lnap|Darr|dash|Sqrt|LJcy|ljcy|lHar|dHar|Upsi|upsi|diam|lesg|djcy|DJcy|leqq|dopf|Dopf|dscr|Dscr|dscy|ldsh|ldca|squf|DScy|sscr|Sscr|dsol|lcub|late|star|Star|Uopf|Larr|lArr|larr|uopf|dtri|dzcy|sube|subE|Lang|lang|Kscr|kscr|Kopf|kopf|KJcy|kjcy|KHcy|khcy|DZcy|ecir|edot|eDot|Jscr|jscr|succ|Jopf|jopf|Edot|uHar|emsp|ensp|Iuml|iuml|eopf|isin|Iscr|iscr|Eopf|epar|sung|epsi|escr|sup1|sup2|sup3|Iota|iota|supe|supE|Iopf|iopf|IOcy|iocy|Escr|esim|Esim|imof|Uarr|QUOT|uArr|uarr|euml|IEcy|iecy|Idot|Euml|euro|excl|Hscr|hscr|Hopf|hopf|TScy|tscy|Tscr|hbar|tscr|flat|tbrk|fnof|hArr|harr|half|fopf|Fopf|tdot|gvnE|fork|trie|gtcc|fscr|Fscr|gdot|gsim|Gscr|gscr|Gopf|gopf|gneq|Gdot|tosa|gnap|Topf|topf|geqq|toea|GJcy|gjcy|tint|gesl|mid|Sfr|ggg|top|ges|gla|glE|glj|geq|gne|gEl|gel|gnE|Gcy|gcy|gap|Tfr|tfr|Tcy|tcy|Hat|Tau|Ffr|tau|Tab|hfr|Hfr|ffr|Fcy|fcy|icy|Icy|iff|ETH|eth|ifr|Ifr|Eta|eta|int|Int|Sup|sup|ucy|Ucy|Sum|sum|jcy|ENG|ufr|Ufr|eng|Jcy|jfr|els|ell|egs|Efr|efr|Jfr|uml|kcy|Kcy|Ecy|ecy|kfr|Kfr|lap|Sub|sub|lat|lcy|Lcy|leg|Dot|dot|lEg|leq|les|squ|div|die|lfr|Lfr|lgE|Dfr|dfr|Del|deg|Dcy|dcy|lne|lnE|sol|loz|smt|Cup|lrm|cup|lsh|Lsh|sim|shy|map|Map|mcy|Mcy|mfr|Mfr|mho|gfr|Gfr|sfr|cir|Chi|chi|nap|Cfr|vcy|Vcy|cfr|Scy|scy|ncy|Ncy|vee|Vee|Cap|cap|nfr|scE|sce|Nfr|nge|ngE|nGg|vfr|Vfr|ngt|bot|nGt|nis|niv|Rsh|rsh|nle|nlE|bne|Bfr|bfr|nLl|nlt|nLt|Bcy|bcy|not|Not|rlm|wfr|Wfr|npr|nsc|num|ocy|ast|Ocy|ofr|xfr|Xfr|Ofr|ogt|ohm|apE|olt|Rho|ape|rho|Rfr|rfr|ord|REG|ang|reg|orv|And|and|AMP|Rcy|amp|Afr|ycy|Ycy|yen|yfr|Yfr|rcy|par|pcy|Pcy|pfr|Pfr|phi|Phi|afr|Acy|acy|zcy|Zcy|piv|acE|acd|zfr|Zfr|pre|prE|psi|Psi|qfr|Qfr|zwj|Or|ge|Gg|gt|gg|el|oS|lt|Lt|LT|Re|lg|gl|eg|ne|Im|it|le|DD|wp|wr|nu|Nu|dd|lE|Sc|sc|pi|Pi|ee|af|ll|Ll|rx|gE|xi|pm|Xi|ic|pr|Pr|in|ni|mp|mu|ac|Mu|or|ap|Gt|GT|ii);|&(Aacute|Agrave|Atilde|Ccedil|Eacute|Egrave|Iacute|Igrave|Ntilde|Oacute|Ograve|Oslash|Otilde|Uacute|Ugrave|Yacute|aacute|agrave|atilde|brvbar|ccedil|curren|divide|eacute|egrave|frac12|frac14|frac34|iacute|igrave|iquest|middot|ntilde|oacute|ograve|oslash|otilde|plusmn|uacute|ugrave|yacute|AElig|Acirc|Aring|Ecirc|Icirc|Ocirc|THORN|Ucirc|acirc|acute|aelig|aring|cedil|ecirc|icirc|iexcl|laquo|micro|ocirc|pound|raquo|szlig|thorn|times|ucirc|Auml|COPY|Euml|Iuml|Ouml|QUOT|Uuml|auml|cent|copy|euml|iuml|macr|nbsp|ordf|ordm|ouml|para|quot|sect|sup1|sup2|sup3|uuml|yuml|AMP|ETH|REG|amp|deg|eth|not|reg|shy|uml|yen|GT|LT|gt|lt)(?!;)([=a-zA-Z0-9]?)|&#([0-9]+)(;?)|&#[xX]([a-fA-F0-9]+)(;?)|&([0-9a-zA-Z]+)/g;
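// `decodeMap` maps entity names (without the leading `&` and trailing `;`)
// back to the text they stand for; several entries expand to more than one
// code unit (surrogate pairs for astral characters, or a base character plus
// a combining mark).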
var decodeMap = {'aacute':'\xE1','Aacute':'\xC1','abreve':'\u0103','Abreve':'\u0102','ac':'\u223E','acd':'\u223F','acE':'\u223E\u0333','acirc':'\xE2','Acirc':'\xC2','acute':'\xB4','acy':'\u0430','Acy':'\u0410','aelig':'\xE6','AElig':'\xC6','af':'\u2061','afr':'\uD835\uDD1E','Afr':'\uD835\uDD04','agrave':'\xE0','Agrave':'\xC0','alefsym':'\u2135','aleph':'\u2135','alpha':'\u03B1','Alpha':'\u0391','amacr':'\u0101','Amacr':'\u0100','amalg':'\u2A3F','amp':'&','AMP':'&','and':'\u2227','And':'\u2A53','andand':'\u2A55','andd':'\u2A5C','andslope':'\u2A58','andv':'\u2A5A','ang':'\u2220','ange':'\u29A4','angle':'\u2220','angmsd':'\u2221','angmsdaa':'\u29A8','angmsdab':'\u29A9','angmsdac':'\u29AA','angmsdad':'\u29AB','angmsdae':'\u29AC','angmsdaf':'\u29AD','angmsdag':'\u29AE','angmsdah':'\u29AF','angrt':'\u221F','angrtvb':'\u22BE','angrtvbd':'\u299D','angsph':'\u2222','angst':'\xC5','angzarr':'\u237C','aogon':'\u0105','Aogon':'\u0104','aopf':'\uD835\uDD52','Aopf':'\uD835\uDD38','ap':'\u2248','apacir':'\u2A6F','ape':'\u224A','apE':'\u2A70','apid':'\u224B','apos':'\'','ApplyFunction':'\u2061','approx':'\u2248','approxeq':'\u224A','aring':'\xE5','Aring':'\xC5','ascr':'\uD835\uDCB6','Ascr':'\uD835\uDC9C','Assign':'\u2254','ast':'*','asymp':'\u2248','asympeq':'\u224D','atilde':'\xE3','Atilde':'\xC3','auml':'\xE4','Auml':'\xC4','awconint':'\u2233','awint':'\u2A11','backcong':'\u224C','backepsilon':'\u03F6','backprime':'\u2035','backsim':'\u223D','backsimeq':'\u22CD','Backslash':'\u2216','Barv':'\u2AE7','barvee':'\u22BD','barwed':'\u2305','Barwed':'\u2306','barwedge':'\u2305','bbrk':'\u23B5','bbrktbrk':'\u23B6','bcong':'\u224C','bcy':'\u0431','Bcy':'\u0411','bdquo':'\u201E','becaus':'\u2235','because':'\u2235','Because':'\u2235','bemptyv':'\u29B0','bepsi':'\u03F6','bernou':'\u212C','Bernoullis':'\u212C','beta':'\u03B2','Beta':'\u0392','beth':'\u2136','between':'\u226C','bfr':'\uD835\uDD1F','Bfr':'\uD835\uDD05','bigcap':'\u22C2','bigcirc':'\u25EF','bigcup':'\u22C3','bigodot':'\u2A00','bigoplus':'\u2A01','bigotimes':'\u2A02','bigsqcup':'\u2A06','bigstar':'\u2605','bigtriangledown':'\u25BD','bigtriangleup':'\u25B3','biguplus':'\u2A04','bigvee':'\u22C1','bigwedge':'\u22C0','bkarow':'\u290D','blacklozenge':'\u29EB','blacksquare':'\u25AA','blacktriangle':'\u25B4','blacktriangledown':'\u25BE','blacktriangleleft':'\u25C2','blacktriangleright':'\u25B8','blank':'\u2423','blk12':'\u2592','blk14':'\u2591','blk34':'\u2593','block':'\u2588','bne':'=\u20E5','bnequiv':'\u2261\u20E5','bnot':'\u2310','bNot':'\u2AED','bopf':'\uD835\uDD53','Bopf':'\uD835\uDD39','bot':'\u22A5','bottom':'\u22A5','bowtie':'\u22C8','boxbox':'\u29C9','boxdl':'\u2510','boxdL':'\u2555','boxDl':'\u2556','boxDL':'\u2557','boxdr':'\u250C','boxdR':'\u2552','boxDr':'\u2553','boxDR':'\u2554','boxh':'\u2500','boxH':'\u2550','boxhd':'\u252C','boxhD':'\u2565','boxHd':'\u2564','boxHD':'\u2566','boxhu':'\u2534','boxhU':'\u2568','boxHu':'\u2567','boxHU':'\u2569','boxminus':'\u229F','boxplus':'\u229E','boxtimes':'\u22A0','boxul':'\u2518','boxuL':'\u255B','boxUl':'\u255C','boxUL':'\u255D','boxur':'\u2514','boxuR':'\u2558','boxUr':'\u2559','boxUR':'\u255A','boxv':'\u2502','boxV':'\u2551','boxvh':'\u253C','boxvH':'\u256A','boxVh':'\u256B','boxVH':'\u256C','boxvl':'\u2524','boxvL':'\u2561','boxVl':'\u2562','boxVL':'\u2563','boxvr':'\u251C','boxvR':'\u255E','boxVr':'\u255F','boxVR':'\u2560','bprime':'\u2035','breve':'\u02D8','Breve':'\u02D8','brvbar':'\xA6','bscr':'\uD835\uDCB7','Bscr':'\u212C','bsemi':'\u204F','bsim':'\u223D','bsime':'\u22CD','bsol':'\\','bsolb':'\u29C
5','bsolhsub':'\u27C8','bull':'\u2022','bullet':'\u2022','bump':'\u224E','bumpe':'\u224F','bumpE':'\u2AAE','bumpeq':'\u224F','Bumpeq':'\u224E','cacute':'\u0107','Cacute':'\u0106','cap':'\u2229','Cap':'\u22D2','capand':'\u2A44','capbrcup':'\u2A49','capcap':'\u2A4B','capcup':'\u2A47','capdot':'\u2A40','CapitalDifferentialD':'\u2145','caps':'\u2229\uFE00','caret':'\u2041','caron':'\u02C7','Cayleys':'\u212D','ccaps':'\u2A4D','ccaron':'\u010D','Ccaron':'\u010C','ccedil':'\xE7','Ccedil':'\xC7','ccirc':'\u0109','Ccirc':'\u0108','Cconint':'\u2230','ccups':'\u2A4C','ccupssm':'\u2A50','cdot':'\u010B','Cdot':'\u010A','cedil':'\xB8','Cedilla':'\xB8','cemptyv':'\u29B2','cent':'\xA2','centerdot':'\xB7','CenterDot':'\xB7','cfr':'\uD835\uDD20','Cfr':'\u212D','chcy':'\u0447','CHcy':'\u0427','check':'\u2713','checkmark':'\u2713','chi':'\u03C7','Chi':'\u03A7','cir':'\u25CB','circ':'\u02C6','circeq':'\u2257','circlearrowleft':'\u21BA','circlearrowright':'\u21BB','circledast':'\u229B','circledcirc':'\u229A','circleddash':'\u229D','CircleDot':'\u2299','circledR':'\xAE','circledS':'\u24C8','CircleMinus':'\u2296','CirclePlus':'\u2295','CircleTimes':'\u2297','cire':'\u2257','cirE':'\u29C3','cirfnint':'\u2A10','cirmid':'\u2AEF','cirscir':'\u29C2','ClockwiseContourIntegral':'\u2232','CloseCurlyDoubleQuote':'\u201D','CloseCurlyQuote':'\u2019','clubs':'\u2663','clubsuit':'\u2663','colon':':','Colon':'\u2237','colone':'\u2254','Colone':'\u2A74','coloneq':'\u2254','comma':',','commat':'@','comp':'\u2201','compfn':'\u2218','complement':'\u2201','complexes':'\u2102','cong':'\u2245','congdot':'\u2A6D','Congruent':'\u2261','conint':'\u222E','Conint':'\u222F','ContourIntegral':'\u222E','copf':'\uD835\uDD54','Copf':'\u2102','coprod':'\u2210','Coproduct':'\u2210','copy':'\xA9','COPY':'\xA9','copysr':'\u2117','CounterClockwiseContourIntegral':'\u2233','crarr':'\u21B5','cross':'\u2717','Cross':'\u2A2F','cscr':'\uD835\uDCB8','Cscr':'\uD835\uDC9E','csub':'\u2ACF','csube':'\u2AD1','csup':'\u2AD0','csupe':'\u2AD2','ctdot':'\u22EF','cudarrl':'\u2938','cudarrr':'\u2935','cuepr':'\u22DE','cuesc':'\u22DF','cularr':'\u21B6','cularrp':'\u293D','cup':'\u222A','Cup':'\u22D3','cupbrcap':'\u2A48','cupcap':'\u2A46','CupCap':'\u224D','cupcup':'\u2A4A','cupdot':'\u228D','cupor':'\u2A45','cups':'\u222A\uFE00','curarr':'\u21B7','curarrm':'\u293C','curlyeqprec':'\u22DE','curlyeqsucc':'\u22DF','curlyvee':'\u22CE','curlywedge':'\u22CF','curren':'\xA4','curvearrowleft':'\u21B6','curvearrowright':'\u21B7','cuvee':'\u22CE','cuwed':'\u22CF','cwconint':'\u2232','cwint':'\u2231','cylcty':'\u232D','dagger':'\u2020','Dagger':'\u2021','daleth':'\u2138','darr':'\u2193','dArr':'\u21D3','Darr':'\u21A1','dash':'\u2010','dashv':'\u22A3','Dashv':'\u2AE4','dbkarow':'\u290F','dblac':'\u02DD','dcaron':'\u010F','Dcaron':'\u010E','dcy':'\u0434','Dcy':'\u0414','dd':'\u2146','DD':'\u2145','ddagger':'\u2021','ddarr':'\u21CA','DDotrahd':'\u2911','ddotseq':'\u2A77','deg':'\xB0','Del':'\u2207','delta':'\u03B4','Delta':'\u0394','demptyv':'\u29B1','dfisht':'\u297F','dfr':'\uD835\uDD21','Dfr':'\uD835\uDD07','dHar':'\u2965','dharl':'\u21C3','dharr':'\u21C2','DiacriticalAcute':'\xB4','DiacriticalDot':'\u02D9','DiacriticalDoubleAcute':'\u02DD','DiacriticalGrave':'`','DiacriticalTilde':'\u02DC','diam':'\u22C4','diamond':'\u22C4','Diamond':'\u22C4','diamondsuit':'\u2666','diams':'\u2666','die':'\xA8','DifferentialD':'\u2146','digamma':'\u03DD','disin':'\u22F2','div':'\xF7','divide':'\xF7','divideontimes':'\u22C7','divonx':'\u22C7','djcy':'\u0452','DJcy':'\u0402','dlcorn':'\u231E','dlc
rop':'\u230D','dollar':'$','dopf':'\uD835\uDD55','Dopf':'\uD835\uDD3B','dot':'\u02D9','Dot':'\xA8','DotDot':'\u20DC','doteq':'\u2250','doteqdot':'\u2251','DotEqual':'\u2250','dotminus':'\u2238','dotplus':'\u2214','dotsquare':'\u22A1','doublebarwedge':'\u2306','DoubleContourIntegral':'\u222F','DoubleDot':'\xA8','DoubleDownArrow':'\u21D3','DoubleLeftArrow':'\u21D0','DoubleLeftRightArrow':'\u21D4','DoubleLeftTee':'\u2AE4','DoubleLongLeftArrow':'\u27F8','DoubleLongLeftRightArrow':'\u27FA','DoubleLongRightArrow':'\u27F9','DoubleRightArrow':'\u21D2','DoubleRightTee':'\u22A8','DoubleUpArrow':'\u21D1','DoubleUpDownArrow':'\u21D5','DoubleVerticalBar':'\u2225','downarrow':'\u2193','Downarrow':'\u21D3','DownArrow':'\u2193','DownArrowBar':'\u2913','DownArrowUpArrow':'\u21F5','DownBreve':'\u0311','downdownarrows':'\u21CA','downharpoonleft':'\u21C3','downharpoonright':'\u21C2','DownLeftRightVector':'\u2950','DownLeftTeeVector':'\u295E','DownLeftVector':'\u21BD','DownLeftVectorBar':'\u2956','DownRightTeeVector':'\u295F','DownRightVector':'\u21C1','DownRightVectorBar':'\u2957','DownTee':'\u22A4','DownTeeArrow':'\u21A7','drbkarow':'\u2910','drcorn':'\u231F','drcrop':'\u230C','dscr':'\uD835\uDCB9','Dscr':'\uD835\uDC9F','dscy':'\u0455','DScy':'\u0405','dsol':'\u29F6','dstrok':'\u0111','Dstrok':'\u0110','dtdot':'\u22F1','dtri':'\u25BF','dtrif':'\u25BE','duarr':'\u21F5','duhar':'\u296F','dwangle':'\u29A6','dzcy':'\u045F','DZcy':'\u040F','dzigrarr':'\u27FF','eacute':'\xE9','Eacute':'\xC9','easter':'\u2A6E','ecaron':'\u011B','Ecaron':'\u011A','ecir':'\u2256','ecirc':'\xEA','Ecirc':'\xCA','ecolon':'\u2255','ecy':'\u044D','Ecy':'\u042D','eDDot':'\u2A77','edot':'\u0117','eDot':'\u2251','Edot':'\u0116','ee':'\u2147','efDot':'\u2252','efr':'\uD835\uDD22','Efr':'\uD835\uDD08','eg':'\u2A9A','egrave':'\xE8','Egrave':'\xC8','egs':'\u2A96','egsdot':'\u2A98','el':'\u2A99','Element':'\u2208','elinters':'\u23E7','ell':'\u2113','els':'\u2A95','elsdot':'\u2A97','emacr':'\u0113','Emacr':'\u0112','empty':'\u2205','emptyset':'\u2205','EmptySmallSquare':'\u25FB','emptyv':'\u2205','EmptyVerySmallSquare':'\u25AB','emsp':'\u2003','emsp13':'\u2004','emsp14':'\u2005','eng':'\u014B','ENG':'\u014A','ensp':'\u2002','eogon':'\u0119','Eogon':'\u0118','eopf':'\uD835\uDD56','Eopf':'\uD835\uDD3C','epar':'\u22D5','eparsl':'\u29E3','eplus':'\u2A71','epsi':'\u03B5','epsilon':'\u03B5','Epsilon':'\u0395','epsiv':'\u03F5','eqcirc':'\u2256','eqcolon':'\u2255','eqsim':'\u2242','eqslantgtr':'\u2A96','eqslantless':'\u2A95','Equal':'\u2A75','equals':'=','EqualTilde':'\u2242','equest':'\u225F','Equilibrium':'\u21CC','equiv':'\u2261','equivDD':'\u2A78','eqvparsl':'\u29E5','erarr':'\u2971','erDot':'\u2253','escr':'\u212F','Escr':'\u2130','esdot':'\u2250','esim':'\u2242','Esim':'\u2A73','eta':'\u03B7','Eta':'\u0397','eth':'\xF0','ETH':'\xD0','euml':'\xEB','Euml':'\xCB','euro':'\u20AC','excl':'!','exist':'\u2203','Exists':'\u2203','expectation':'\u2130','exponentiale':'\u2147','ExponentialE':'\u2147','fallingdotseq':'\u2252','fcy':'\u0444','Fcy':'\u0424','female':'\u2640','ffilig':'\uFB03','fflig':'\uFB00','ffllig':'\uFB04','ffr':'\uD835\uDD23','Ffr':'\uD835\uDD09','filig':'\uFB01','FilledSmallSquare':'\u25FC','FilledVerySmallSquare':'\u25AA','fjlig':'fj','flat':'\u266D','fllig':'\uFB02','fltns':'\u25B1','fnof':'\u0192','fopf':'\uD835\uDD57','Fopf':'\uD835\uDD3D','forall':'\u2200','ForAll':'\u2200','fork':'\u22D4','forkv':'\u2AD9','Fouriertrf':'\u2131','fpartint':'\u2A0D','frac12':'\xBD','frac13':'\u2153','frac14':'\xBC','frac15':'\u2155','frac16':'\u2159','fr
ac18':'\u215B','frac23':'\u2154','frac25':'\u2156','frac34':'\xBE','frac35':'\u2157','frac38':'\u215C','frac45':'\u2158','frac56':'\u215A','frac58':'\u215D','frac78':'\u215E','frasl':'\u2044','frown':'\u2322','fscr':'\uD835\uDCBB','Fscr':'\u2131','gacute':'\u01F5','gamma':'\u03B3','Gamma':'\u0393','gammad':'\u03DD','Gammad':'\u03DC','gap':'\u2A86','gbreve':'\u011F','Gbreve':'\u011E','Gcedil':'\u0122','gcirc':'\u011D','Gcirc':'\u011C','gcy':'\u0433','Gcy':'\u0413','gdot':'\u0121','Gdot':'\u0120','ge':'\u2265','gE':'\u2267','gel':'\u22DB','gEl':'\u2A8C','geq':'\u2265','geqq':'\u2267','geqslant':'\u2A7E','ges':'\u2A7E','gescc':'\u2AA9','gesdot':'\u2A80','gesdoto':'\u2A82','gesdotol':'\u2A84','gesl':'\u22DB\uFE00','gesles':'\u2A94','gfr':'\uD835\uDD24','Gfr':'\uD835\uDD0A','gg':'\u226B','Gg':'\u22D9','ggg':'\u22D9','gimel':'\u2137','gjcy':'\u0453','GJcy':'\u0403','gl':'\u2277','gla':'\u2AA5','glE':'\u2A92','glj':'\u2AA4','gnap':'\u2A8A','gnapprox':'\u2A8A','gne':'\u2A88','gnE':'\u2269','gneq':'\u2A88','gneqq':'\u2269','gnsim':'\u22E7','gopf':'\uD835\uDD58','Gopf':'\uD835\uDD3E','grave':'`','GreaterEqual':'\u2265','GreaterEqualLess':'\u22DB','GreaterFullEqual':'\u2267','GreaterGreater':'\u2AA2','GreaterLess':'\u2277','GreaterSlantEqual':'\u2A7E','GreaterTilde':'\u2273','gscr':'\u210A','Gscr':'\uD835\uDCA2','gsim':'\u2273','gsime':'\u2A8E','gsiml':'\u2A90','gt':'>','Gt':'\u226B','GT':'>','gtcc':'\u2AA7','gtcir':'\u2A7A','gtdot':'\u22D7','gtlPar':'\u2995','gtquest':'\u2A7C','gtrapprox':'\u2A86','gtrarr':'\u2978','gtrdot':'\u22D7','gtreqless':'\u22DB','gtreqqless':'\u2A8C','gtrless':'\u2277','gtrsim':'\u2273','gvertneqq':'\u2269\uFE00','gvnE':'\u2269\uFE00','Hacek':'\u02C7','hairsp':'\u200A','half':'\xBD','hamilt':'\u210B','hardcy':'\u044A','HARDcy':'\u042A','harr':'\u2194','hArr':'\u21D4','harrcir':'\u2948','harrw':'\u21AD','Hat':'^','hbar':'\u210F','hcirc':'\u0125','Hcirc':'\u0124','hearts':'\u2665','heartsuit':'\u2665','hellip':'\u2026','hercon':'\u22B9','hfr':'\uD835\uDD25','Hfr':'\u210C','HilbertSpace':'\u210B','hksearow':'\u2925','hkswarow':'\u2926','hoarr':'\u21FF','homtht':'\u223B','hookleftarrow':'\u21A9','hookrightarrow':'\u21AA','hopf':'\uD835\uDD59','Hopf':'\u210D','horbar':'\u2015','HorizontalLine':'\u2500','hscr':'\uD835\uDCBD','Hscr':'\u210B','hslash':'\u210F','hstrok':'\u0127','Hstrok':'\u0126','HumpDownHump':'\u224E','HumpEqual':'\u224F','hybull':'\u2043','hyphen':'\u2010','iacute':'\xED','Iacute':'\xCD','ic':'\u2063','icirc':'\xEE','Icirc':'\xCE','icy':'\u0438','Icy':'\u0418','Idot':'\u0130','iecy':'\u0435','IEcy':'\u0415','iexcl':'\xA1','iff':'\u21D4','ifr':'\uD835\uDD26','Ifr':'\u2111','igrave':'\xEC','Igrave':'\xCC','ii':'\u2148','iiiint':'\u2A0C','iiint':'\u222D','iinfin':'\u29DC','iiota':'\u2129','ijlig':'\u0133','IJlig':'\u0132','Im':'\u2111','imacr':'\u012B','Imacr':'\u012A','image':'\u2111','ImaginaryI':'\u2148','imagline':'\u2110','imagpart':'\u2111','imath':'\u0131','imof':'\u22B7','imped':'\u01B5','Implies':'\u21D2','in':'\u2208','incare':'\u2105','infin':'\u221E','infintie':'\u29DD','inodot':'\u0131','int':'\u222B','Int':'\u222C','intcal':'\u22BA','integers':'\u2124','Integral':'\u222B','intercal':'\u22BA','Intersection':'\u22C2','intlarhk':'\u2A17','intprod':'\u2A3C','InvisibleComma':'\u2063','InvisibleTimes':'\u2062','iocy':'\u0451','IOcy':'\u0401','iogon':'\u012F','Iogon':'\u012E','iopf':'\uD835\uDD5A','Iopf':'\uD835\uDD40','iota':'\u03B9','Iota':'\u0399','iprod':'\u2A3C','iquest':'\xBF','iscr':'\uD835\uDCBE','Iscr':'\u2110','isin':'\u2208','isindot':'\u22F5','isinE
':'\u22F9','isins':'\u22F4','isinsv':'\u22F3','isinv':'\u2208','it':'\u2062','itilde':'\u0129','Itilde':'\u0128','iukcy':'\u0456','Iukcy':'\u0406','iuml':'\xEF','Iuml':'\xCF','jcirc':'\u0135','Jcirc':'\u0134','jcy':'\u0439','Jcy':'\u0419','jfr':'\uD835\uDD27','Jfr':'\uD835\uDD0D','jmath':'\u0237','jopf':'\uD835\uDD5B','Jopf':'\uD835\uDD41','jscr':'\uD835\uDCBF','Jscr':'\uD835\uDCA5','jsercy':'\u0458','Jsercy':'\u0408','jukcy':'\u0454','Jukcy':'\u0404','kappa':'\u03BA','Kappa':'\u039A','kappav':'\u03F0','kcedil':'\u0137','Kcedil':'\u0136','kcy':'\u043A','Kcy':'\u041A','kfr':'\uD835\uDD28','Kfr':'\uD835\uDD0E','kgreen':'\u0138','khcy':'\u0445','KHcy':'\u0425','kjcy':'\u045C','KJcy':'\u040C','kopf':'\uD835\uDD5C','Kopf':'\uD835\uDD42','kscr':'\uD835\uDCC0','Kscr':'\uD835\uDCA6','lAarr':'\u21DA','lacute':'\u013A','Lacute':'\u0139','laemptyv':'\u29B4','lagran':'\u2112','lambda':'\u03BB','Lambda':'\u039B','lang':'\u27E8','Lang':'\u27EA','langd':'\u2991','langle':'\u27E8','lap':'\u2A85','Laplacetrf':'\u2112','laquo':'\xAB','larr':'\u2190','lArr':'\u21D0','Larr':'\u219E','larrb':'\u21E4','larrbfs':'\u291F','larrfs':'\u291D','larrhk':'\u21A9','larrlp':'\u21AB','larrpl':'\u2939','larrsim':'\u2973','larrtl':'\u21A2','lat':'\u2AAB','latail':'\u2919','lAtail':'\u291B','late':'\u2AAD','lates':'\u2AAD\uFE00','lbarr':'\u290C','lBarr':'\u290E','lbbrk':'\u2772','lbrace':'{','lbrack':'[','lbrke':'\u298B','lbrksld':'\u298F','lbrkslu':'\u298D','lcaron':'\u013E','Lcaron':'\u013D','lcedil':'\u013C','Lcedil':'\u013B','lceil':'\u2308','lcub':'{','lcy':'\u043B','Lcy':'\u041B','ldca':'\u2936','ldquo':'\u201C','ldquor':'\u201E','ldrdhar':'\u2967','ldrushar':'\u294B','ldsh':'\u21B2','le':'\u2264','lE':'\u2266','LeftAngleBracket':'\u27E8','leftarrow':'\u2190','Leftarrow':'\u21D0','LeftArrow':'\u2190','LeftArrowBar':'\u21E4','LeftArrowRightArrow':'\u21C6','leftarrowtail':'\u21A2','LeftCeiling':'\u2308','LeftDoubleBracket':'\u27E6','LeftDownTeeVector':'\u2961','LeftDownVector':'\u21C3','LeftDownVectorBar':'\u2959','LeftFloor':'\u230A','leftharpoondown':'\u21BD','leftharpoonup':'\u21BC','leftleftarrows':'\u21C7','leftrightarrow':'\u2194','Leftrightarrow':'\u21D4','LeftRightArrow':'\u2194','leftrightarrows':'\u21C6','leftrightharpoons':'\u21CB','leftrightsquigarrow':'\u21AD','LeftRightVector':'\u294E','LeftTee':'\u22A3','LeftTeeArrow':'\u21A4','LeftTeeVector':'\u295A','leftthreetimes':'\u22CB','LeftTriangle':'\u22B2','LeftTriangleBar':'\u29CF','LeftTriangleEqual':'\u22B4','LeftUpDownVector':'\u2951','LeftUpTeeVector':'\u2960','LeftUpVector':'\u21BF','LeftUpVectorBar':'\u2958','LeftVector':'\u21BC','LeftVectorBar':'\u2952','leg':'\u22DA','lEg':'\u2A8B','leq':'\u2264','leqq':'\u2266','leqslant':'\u2A7D','les':'\u2A7D','lescc':'\u2AA8','lesdot':'\u2A7F','lesdoto':'\u2A81','lesdotor':'\u2A83','lesg':'\u22DA\uFE00','lesges':'\u2A93','lessapprox':'\u2A85','lessdot':'\u22D6','lesseqgtr':'\u22DA','lesseqqgtr':'\u2A8B','LessEqualGreater':'\u22DA','LessFullEqual':'\u2266','LessGreater':'\u2276','lessgtr':'\u2276','LessLess':'\u2AA1','lesssim':'\u2272','LessSlantEqual':'\u2A7D','LessTilde':'\u2272','lfisht':'\u297C','lfloor':'\u230A','lfr':'\uD835\uDD29','Lfr':'\uD835\uDD0F','lg':'\u2276','lgE':'\u2A91','lHar':'\u2962','lhard':'\u21BD','lharu':'\u21BC','lharul':'\u296A','lhblk':'\u2584','ljcy':'\u0459','LJcy':'\u0409','ll':'\u226A','Ll':'\u22D8','llarr':'\u21C7','llcorner':'\u231E','Lleftarrow':'\u21DA','llhard':'\u296B','lltri':'\u25FA','lmidot':'\u0140','Lmidot':'\u013F','lmoust':'\u23B0','lmoustache':'\u23B0','lnap':'\u2A89','lnapp
rox':'\u2A89','lne':'\u2A87','lnE':'\u2268','lneq':'\u2A87','lneqq':'\u2268','lnsim':'\u22E6','loang':'\u27EC','loarr':'\u21FD','lobrk':'\u27E6','longleftarrow':'\u27F5','Longleftarrow':'\u27F8','LongLeftArrow':'\u27F5','longleftrightarrow':'\u27F7','Longleftrightarrow':'\u27FA','LongLeftRightArrow':'\u27F7','longmapsto':'\u27FC','longrightarrow':'\u27F6','Longrightarrow':'\u27F9','LongRightArrow':'\u27F6','looparrowleft':'\u21AB','looparrowright':'\u21AC','lopar':'\u2985','lopf':'\uD835\uDD5D','Lopf':'\uD835\uDD43','loplus':'\u2A2D','lotimes':'\u2A34','lowast':'\u2217','lowbar':'_','LowerLeftArrow':'\u2199','LowerRightArrow':'\u2198','loz':'\u25CA','lozenge':'\u25CA','lozf':'\u29EB','lpar':'(','lparlt':'\u2993','lrarr':'\u21C6','lrcorner':'\u231F','lrhar':'\u21CB','lrhard':'\u296D','lrm':'\u200E','lrtri':'\u22BF','lsaquo':'\u2039','lscr':'\uD835\uDCC1','Lscr':'\u2112','lsh':'\u21B0','Lsh':'\u21B0','lsim':'\u2272','lsime':'\u2A8D','lsimg':'\u2A8F','lsqb':'[','lsquo':'\u2018','lsquor':'\u201A','lstrok':'\u0142','Lstrok':'\u0141','lt':'<','Lt':'\u226A','LT':'<','ltcc':'\u2AA6','ltcir':'\u2A79','ltdot':'\u22D6','lthree':'\u22CB','ltimes':'\u22C9','ltlarr':'\u2976','ltquest':'\u2A7B','ltri':'\u25C3','ltrie':'\u22B4','ltrif':'\u25C2','ltrPar':'\u2996','lurdshar':'\u294A','luruhar':'\u2966','lvertneqq':'\u2268\uFE00','lvnE':'\u2268\uFE00','macr':'\xAF','male':'\u2642','malt':'\u2720','maltese':'\u2720','map':'\u21A6','Map':'\u2905','mapsto':'\u21A6','mapstodown':'\u21A7','mapstoleft':'\u21A4','mapstoup':'\u21A5','marker':'\u25AE','mcomma':'\u2A29','mcy':'\u043C','Mcy':'\u041C','mdash':'\u2014','mDDot':'\u223A','measuredangle':'\u2221','MediumSpace':'\u205F','Mellintrf':'\u2133','mfr':'\uD835\uDD2A','Mfr':'\uD835\uDD10','mho':'\u2127','micro':'\xB5','mid':'\u2223','midast':'*','midcir':'\u2AF0','middot':'\xB7','minus':'\u2212','minusb':'\u229F','minusd':'\u2238','minusdu':'\u2A2A','MinusPlus':'\u2213','mlcp':'\u2ADB','mldr':'\u2026','mnplus':'\u2213','models':'\u22A7','mopf':'\uD835\uDD5E','Mopf':'\uD835\uDD44','mp':'\u2213','mscr':'\uD835\uDCC2','Mscr':'\u2133','mstpos':'\u223E','mu':'\u03BC','Mu':'\u039C','multimap':'\u22B8','mumap':'\u22B8','nabla':'\u2207','nacute':'\u0144','Nacute':'\u0143','nang':'\u2220\u20D2','nap':'\u2249','napE':'\u2A70\u0338','napid':'\u224B\u0338','napos':'\u0149','napprox':'\u2249','natur':'\u266E','natural':'\u266E','naturals':'\u2115','nbsp':'\xA0','nbump':'\u224E\u0338','nbumpe':'\u224F\u0338','ncap':'\u2A43','ncaron':'\u0148','Ncaron':'\u0147','ncedil':'\u0146','Ncedil':'\u0145','ncong':'\u2247','ncongdot':'\u2A6D\u0338','ncup':'\u2A42','ncy':'\u043D','Ncy':'\u041D','ndash':'\u2013','ne':'\u2260','nearhk':'\u2924','nearr':'\u2197','neArr':'\u21D7','nearrow':'\u2197','nedot':'\u2250\u0338','NegativeMediumSpace':'\u200B','NegativeThickSpace':'\u200B','NegativeThinSpace':'\u200B','NegativeVeryThinSpace':'\u200B','nequiv':'\u2262','nesear':'\u2928','nesim':'\u2242\u0338','NestedGreaterGreater':'\u226B','NestedLessLess':'\u226A','NewLine':'\n','nexist':'\u2204','nexists':'\u2204','nfr':'\uD835\uDD2B','Nfr':'\uD835\uDD11','nge':'\u2271','ngE':'\u2267\u0338','ngeq':'\u2271','ngeqq':'\u2267\u0338','ngeqslant':'\u2A7E\u0338','nges':'\u2A7E\u0338','nGg':'\u22D9\u0338','ngsim':'\u2275','ngt':'\u226F','nGt':'\u226B\u20D2','ngtr':'\u226F','nGtv':'\u226B\u0338','nharr':'\u21AE','nhArr':'\u21CE','nhpar':'\u2AF2','ni':'\u220B','nis':'\u22FC','nisd':'\u22FA','niv':'\u220B','njcy':'\u045A','NJcy':'\u040A','nlarr':'\u219A','nlArr':'\u21CD','nldr':'\u2025','nle':'\u2270','nlE':'\u226
6\u0338','nleftarrow':'\u219A','nLeftarrow':'\u21CD','nleftrightarrow':'\u21AE','nLeftrightarrow':'\u21CE','nleq':'\u2270','nleqq':'\u2266\u0338','nleqslant':'\u2A7D\u0338','nles':'\u2A7D\u0338','nless':'\u226E','nLl':'\u22D8\u0338','nlsim':'\u2274','nlt':'\u226E','nLt':'\u226A\u20D2','nltri':'\u22EA','nltrie':'\u22EC','nLtv':'\u226A\u0338','nmid':'\u2224','NoBreak':'\u2060','NonBreakingSpace':'\xA0','nopf':'\uD835\uDD5F','Nopf':'\u2115','not':'\xAC','Not':'\u2AEC','NotCongruent':'\u2262','NotCupCap':'\u226D','NotDoubleVerticalBar':'\u2226','NotElement':'\u2209','NotEqual':'\u2260','NotEqualTilde':'\u2242\u0338','NotExists':'\u2204','NotGreater':'\u226F','NotGreaterEqual':'\u2271','NotGreaterFullEqual':'\u2267\u0338','NotGreaterGreater':'\u226B\u0338','NotGreaterLess':'\u2279','NotGreaterSlantEqual':'\u2A7E\u0338','NotGreaterTilde':'\u2275','NotHumpDownHump':'\u224E\u0338','NotHumpEqual':'\u224F\u0338','notin':'\u2209','notindot':'\u22F5\u0338','notinE':'\u22F9\u0338','notinva':'\u2209','notinvb':'\u22F7','notinvc':'\u22F6','NotLeftTriangle':'\u22EA','NotLeftTriangleBar':'\u29CF\u0338','NotLeftTriangleEqual':'\u22EC','NotLess':'\u226E','NotLessEqual':'\u2270','NotLessGreater':'\u2278','NotLessLess':'\u226A\u0338','NotLessSlantEqual':'\u2A7D\u0338','NotLessTilde':'\u2274','NotNestedGreaterGreater':'\u2AA2\u0338','NotNestedLessLess':'\u2AA1\u0338','notni':'\u220C','notniva':'\u220C','notnivb':'\u22FE','notnivc':'\u22FD','NotPrecedes':'\u2280','NotPrecedesEqual':'\u2AAF\u0338','NotPrecedesSlantEqual':'\u22E0','NotReverseElement':'\u220C','NotRightTriangle':'\u22EB','NotRightTriangleBar':'\u29D0\u0338','NotRightTriangleEqual':'\u22ED','NotSquareSubset':'\u228F\u0338','NotSquareSubsetEqual':'\u22E2','NotSquareSuperset':'\u2290\u0338','NotSquareSupersetEqual':'\u22E3','NotSubset':'\u2282\u20D2','NotSubsetEqual':'\u2288','NotSucceeds':'\u2281','NotSucceedsEqual':'\u2AB0\u0338','NotSucceedsSlantEqual':'\u22E1','NotSucceedsTilde':'\u227F\u0338','NotSuperset':'\u2283\u20D2','NotSupersetEqual':'\u2289','NotTilde':'\u2241','NotTildeEqual':'\u2244','NotTildeFullEqual':'\u2247','NotTildeTilde':'\u2249','NotVerticalBar':'\u2224','npar':'\u2226','nparallel':'\u2226','nparsl':'\u2AFD\u20E5','npart':'\u2202\u0338','npolint':'\u2A14','npr':'\u2280','nprcue':'\u22E0','npre':'\u2AAF\u0338','nprec':'\u2280','npreceq':'\u2AAF\u0338','nrarr':'\u219B','nrArr':'\u21CF','nrarrc':'\u2933\u0338','nrarrw':'\u219D\u0338','nrightarrow':'\u219B','nRightarrow':'\u21CF','nrtri':'\u22EB','nrtrie':'\u22ED','nsc':'\u2281','nsccue':'\u22E1','nsce':'\u2AB0\u0338','nscr':'\uD835\uDCC3','Nscr':'\uD835\uDCA9','nshortmid':'\u2224','nshortparallel':'\u2226','nsim':'\u2241','nsime':'\u2244','nsimeq':'\u2244','nsmid':'\u2224','nspar':'\u2226','nsqsube':'\u22E2','nsqsupe':'\u22E3','nsub':'\u2284','nsube':'\u2288','nsubE':'\u2AC5\u0338','nsubset':'\u2282\u20D2','nsubseteq':'\u2288','nsubseteqq':'\u2AC5\u0338','nsucc':'\u2281','nsucceq':'\u2AB0\u0338','nsup':'\u2285','nsupe':'\u2289','nsupE':'\u2AC6\u0338','nsupset':'\u2283\u20D2','nsupseteq':'\u2289','nsupseteqq':'\u2AC6\u0338','ntgl':'\u2279','ntilde':'\xF1','Ntilde':'\xD1','ntlg':'\u2278','ntriangleleft':'\u22EA','ntrianglelefteq':'\u22EC','ntriangleright':'\u22EB','ntrianglerighteq':'\u22ED','nu':'\u03BD','Nu':'\u039D','num':'#','numero':'\u2116','numsp':'\u2007','nvap':'\u224D\u20D2','nvdash':'\u22AC','nvDash':'\u22AD','nVdash':'\u22AE','nVDash':'\u22AF','nvge':'\u2265\u20D2','nvgt':'>\u20D2','nvHarr':'\u2904','nvinfin':'\u29DE','nvlArr':'\u2902','nvle':'\u2264\u20D2','nvlt':'<\u20D2'
,'nvltrie':'\u22B4\u20D2','nvrArr':'\u2903','nvrtrie':'\u22B5\u20D2','nvsim':'\u223C\u20D2','nwarhk':'\u2923','nwarr':'\u2196','nwArr':'\u21D6','nwarrow':'\u2196','nwnear':'\u2927','oacute':'\xF3','Oacute':'\xD3','oast':'\u229B','ocir':'\u229A','ocirc':'\xF4','Ocirc':'\xD4','ocy':'\u043E','Ocy':'\u041E','odash':'\u229D','odblac':'\u0151','Odblac':'\u0150','odiv':'\u2A38','odot':'\u2299','odsold':'\u29BC','oelig':'\u0153','OElig':'\u0152','ofcir':'\u29BF','ofr':'\uD835\uDD2C','Ofr':'\uD835\uDD12','ogon':'\u02DB','ograve':'\xF2','Ograve':'\xD2','ogt':'\u29C1','ohbar':'\u29B5','ohm':'\u03A9','oint':'\u222E','olarr':'\u21BA','olcir':'\u29BE','olcross':'\u29BB','oline':'\u203E','olt':'\u29C0','omacr':'\u014D','Omacr':'\u014C','omega':'\u03C9','Omega':'\u03A9','omicron':'\u03BF','Omicron':'\u039F','omid':'\u29B6','ominus':'\u2296','oopf':'\uD835\uDD60','Oopf':'\uD835\uDD46','opar':'\u29B7','OpenCurlyDoubleQuote':'\u201C','OpenCurlyQuote':'\u2018','operp':'\u29B9','oplus':'\u2295','or':'\u2228','Or':'\u2A54','orarr':'\u21BB','ord':'\u2A5D','order':'\u2134','orderof':'\u2134','ordf':'\xAA','ordm':'\xBA','origof':'\u22B6','oror':'\u2A56','orslope':'\u2A57','orv':'\u2A5B','oS':'\u24C8','oscr':'\u2134','Oscr':'\uD835\uDCAA','oslash':'\xF8','Oslash':'\xD8','osol':'\u2298','otilde':'\xF5','Otilde':'\xD5','otimes':'\u2297','Otimes':'\u2A37','otimesas':'\u2A36','ouml':'\xF6','Ouml':'\xD6','ovbar':'\u233D','OverBar':'\u203E','OverBrace':'\u23DE','OverBracket':'\u23B4','OverParenthesis':'\u23DC','par':'\u2225','para':'\xB6','parallel':'\u2225','parsim':'\u2AF3','parsl':'\u2AFD','part':'\u2202','PartialD':'\u2202','pcy':'\u043F','Pcy':'\u041F','percnt':'%','period':'.','permil':'\u2030','perp':'\u22A5','pertenk':'\u2031','pfr':'\uD835\uDD2D','Pfr':'\uD835\uDD13','phi':'\u03C6','Phi':'\u03A6','phiv':'\u03D5','phmmat':'\u2133','phone':'\u260E','pi':'\u03C0','Pi':'\u03A0','pitchfork':'\u22D4','piv':'\u03D6','planck':'\u210F','planckh':'\u210E','plankv':'\u210F','plus':'+','plusacir':'\u2A23','plusb':'\u229E','pluscir':'\u2A22','plusdo':'\u2214','plusdu':'\u2A25','pluse':'\u2A72','PlusMinus':'\xB1','plusmn':'\xB1','plussim':'\u2A26','plustwo':'\u2A27','pm':'\xB1','Poincareplane':'\u210C','pointint':'\u2A15','popf':'\uD835\uDD61','Popf':'\u2119','pound':'\xA3','pr':'\u227A','Pr':'\u2ABB','prap':'\u2AB7','prcue':'\u227C','pre':'\u2AAF','prE':'\u2AB3','prec':'\u227A','precapprox':'\u2AB7','preccurlyeq':'\u227C','Precedes':'\u227A','PrecedesEqual':'\u2AAF','PrecedesSlantEqual':'\u227C','PrecedesTilde':'\u227E','preceq':'\u2AAF','precnapprox':'\u2AB9','precneqq':'\u2AB5','precnsim':'\u22E8','precsim':'\u227E','prime':'\u2032','Prime':'\u2033','primes':'\u2119','prnap':'\u2AB9','prnE':'\u2AB5','prnsim':'\u22E8','prod':'\u220F','Product':'\u220F','profalar':'\u232E','profline':'\u2312','profsurf':'\u2313','prop':'\u221D','Proportion':'\u2237','Proportional':'\u221D','propto':'\u221D','prsim':'\u227E','prurel':'\u22B0','pscr':'\uD835\uDCC5','Pscr':'\uD835\uDCAB','psi':'\u03C8','Psi':'\u03A8','puncsp':'\u2008','qfr':'\uD835\uDD2E','Qfr':'\uD835\uDD14','qint':'\u2A0C','qopf':'\uD835\uDD62','Qopf':'\u211A','qprime':'\u2057','qscr':'\uD835\uDCC6','Qscr':'\uD835\uDCAC','quaternions':'\u210D','quatint':'\u2A16','quest':'?','questeq':'\u225F','quot':'"','QUOT':'"','rAarr':'\u21DB','race':'\u223D\u0331','racute':'\u0155','Racute':'\u0154','radic':'\u221A','raemptyv':'\u29B3','rang':'\u27E9','Rang':'\u27EB','rangd':'\u2992','range':'\u29A5','rangle':'\u27E9','raquo':'\xBB','rarr':'\u2192','rArr':'\u21D2','Rarr':'\u21A0','rarrap'
:'\u2975','rarrb':'\u21E5','rarrbfs':'\u2920','rarrc':'\u2933','rarrfs':'\u291E','rarrhk':'\u21AA','rarrlp':'\u21AC','rarrpl':'\u2945','rarrsim':'\u2974','rarrtl':'\u21A3','Rarrtl':'\u2916','rarrw':'\u219D','ratail':'\u291A','rAtail':'\u291C','ratio':'\u2236','rationals':'\u211A','rbarr':'\u290D','rBarr':'\u290F','RBarr':'\u2910','rbbrk':'\u2773','rbrace':'}','rbrack':']','rbrke':'\u298C','rbrksld':'\u298E','rbrkslu':'\u2990','rcaron':'\u0159','Rcaron':'\u0158','rcedil':'\u0157','Rcedil':'\u0156','rceil':'\u2309','rcub':'}','rcy':'\u0440','Rcy':'\u0420','rdca':'\u2937','rdldhar':'\u2969','rdquo':'\u201D','rdquor':'\u201D','rdsh':'\u21B3','Re':'\u211C','real':'\u211C','realine':'\u211B','realpart':'\u211C','reals':'\u211D','rect':'\u25AD','reg':'\xAE','REG':'\xAE','ReverseElement':'\u220B','ReverseEquilibrium':'\u21CB','ReverseUpEquilibrium':'\u296F','rfisht':'\u297D','rfloor':'\u230B','rfr':'\uD835\uDD2F','Rfr':'\u211C','rHar':'\u2964','rhard':'\u21C1','rharu':'\u21C0','rharul':'\u296C','rho':'\u03C1','Rho':'\u03A1','rhov':'\u03F1','RightAngleBracket':'\u27E9','rightarrow':'\u2192','Rightarrow':'\u21D2','RightArrow':'\u2192','RightArrowBar':'\u21E5','RightArrowLeftArrow':'\u21C4','rightarrowtail':'\u21A3','RightCeiling':'\u2309','RightDoubleBracket':'\u27E7','RightDownTeeVector':'\u295D','RightDownVector':'\u21C2','RightDownVectorBar':'\u2955','RightFloor':'\u230B','rightharpoondown':'\u21C1','rightharpoonup':'\u21C0','rightleftarrows':'\u21C4','rightleftharpoons':'\u21CC','rightrightarrows':'\u21C9','rightsquigarrow':'\u219D','RightTee':'\u22A2','RightTeeArrow':'\u21A6','RightTeeVector':'\u295B','rightthreetimes':'\u22CC','RightTriangle':'\u22B3','RightTriangleBar':'\u29D0','RightTriangleEqual':'\u22B5','RightUpDownVector':'\u294F','RightUpTeeVector':'\u295C','RightUpVector':'\u21BE','RightUpVectorBar':'\u2954','RightVector':'\u21C0','RightVectorBar':'\u2953','ring':'\u02DA','risingdotseq':'\u2253','rlarr':'\u21C4','rlhar':'\u21CC','rlm':'\u200F','rmoust':'\u23B1','rmoustache':'\u23B1','rnmid':'\u2AEE','roang':'\u27ED','roarr':'\u21FE','robrk':'\u27E7','ropar':'\u2986','ropf':'\uD835\uDD63','Ropf':'\u211D','roplus':'\u2A2E','rotimes':'\u2A35','RoundImplies':'\u2970','rpar':')','rpargt':'\u2994','rppolint':'\u2A12','rrarr':'\u21C9','Rrightarrow':'\u21DB','rsaquo':'\u203A','rscr':'\uD835\uDCC7','Rscr':'\u211B','rsh':'\u21B1','Rsh':'\u21B1','rsqb':']','rsquo':'\u2019','rsquor':'\u2019','rthree':'\u22CC','rtimes':'\u22CA','rtri':'\u25B9','rtrie':'\u22B5','rtrif':'\u25B8','rtriltri':'\u29CE','RuleDelayed':'\u29F4','ruluhar':'\u2968','rx':'\u211E','sacute':'\u015B','Sacute':'\u015A','sbquo':'\u201A','sc':'\u227B','Sc':'\u2ABC','scap':'\u2AB8','scaron':'\u0161','Scaron':'\u0160','sccue':'\u227D','sce':'\u2AB0','scE':'\u2AB4','scedil':'\u015F','Scedil':'\u015E','scirc':'\u015D','Scirc':'\u015C','scnap':'\u2ABA','scnE':'\u2AB6','scnsim':'\u22E9','scpolint':'\u2A13','scsim':'\u227F','scy':'\u0441','Scy':'\u0421','sdot':'\u22C5','sdotb':'\u22A1','sdote':'\u2A66','searhk':'\u2925','searr':'\u2198','seArr':'\u21D8','searrow':'\u2198','sect':'\xA7','semi':';','seswar':'\u2929','setminus':'\u2216','setmn':'\u2216','sext':'\u2736','sfr':'\uD835\uDD30','Sfr':'\uD835\uDD16','sfrown':'\u2322','sharp':'\u266F','shchcy':'\u0449','SHCHcy':'\u0429','shcy':'\u0448','SHcy':'\u0428','ShortDownArrow':'\u2193','ShortLeftArrow':'\u2190','shortmid':'\u2223','shortparallel':'\u2225','ShortRightArrow':'\u2192','ShortUpArrow':'\u2191','shy':'\xAD','sigma':'\u03C3','Sigma':'\u03A3','sigmaf':'\u03C2','sigmav':'\u03C2','sim'
:'\u223C','simdot':'\u2A6A','sime':'\u2243','simeq':'\u2243','simg':'\u2A9E','simgE':'\u2AA0','siml':'\u2A9D','simlE':'\u2A9F','simne':'\u2246','simplus':'\u2A24','simrarr':'\u2972','slarr':'\u2190','SmallCircle':'\u2218','smallsetminus':'\u2216','smashp':'\u2A33','smeparsl':'\u29E4','smid':'\u2223','smile':'\u2323','smt':'\u2AAA','smte':'\u2AAC','smtes':'\u2AAC\uFE00','softcy':'\u044C','SOFTcy':'\u042C','sol':'/','solb':'\u29C4','solbar':'\u233F','sopf':'\uD835\uDD64','Sopf':'\uD835\uDD4A','spades':'\u2660','spadesuit':'\u2660','spar':'\u2225','sqcap':'\u2293','sqcaps':'\u2293\uFE00','sqcup':'\u2294','sqcups':'\u2294\uFE00','Sqrt':'\u221A','sqsub':'\u228F','sqsube':'\u2291','sqsubset':'\u228F','sqsubseteq':'\u2291','sqsup':'\u2290','sqsupe':'\u2292','sqsupset':'\u2290','sqsupseteq':'\u2292','squ':'\u25A1','square':'\u25A1','Square':'\u25A1','SquareIntersection':'\u2293','SquareSubset':'\u228F','SquareSubsetEqual':'\u2291','SquareSuperset':'\u2290','SquareSupersetEqual':'\u2292','SquareUnion':'\u2294','squarf':'\u25AA','squf':'\u25AA','srarr':'\u2192','sscr':'\uD835\uDCC8','Sscr':'\uD835\uDCAE','ssetmn':'\u2216','ssmile':'\u2323','sstarf':'\u22C6','star':'\u2606','Star':'\u22C6','starf':'\u2605','straightepsilon':'\u03F5','straightphi':'\u03D5','strns':'\xAF','sub':'\u2282','Sub':'\u22D0','subdot':'\u2ABD','sube':'\u2286','subE':'\u2AC5','subedot':'\u2AC3','submult':'\u2AC1','subne':'\u228A','subnE':'\u2ACB','subplus':'\u2ABF','subrarr':'\u2979','subset':'\u2282','Subset':'\u22D0','subseteq':'\u2286','subseteqq':'\u2AC5','SubsetEqual':'\u2286','subsetneq':'\u228A','subsetneqq':'\u2ACB','subsim':'\u2AC7','subsub':'\u2AD5','subsup':'\u2AD3','succ':'\u227B','succapprox':'\u2AB8','succcurlyeq':'\u227D','Succeeds':'\u227B','SucceedsEqual':'\u2AB0','SucceedsSlantEqual':'\u227D','SucceedsTilde':'\u227F','succeq':'\u2AB0','succnapprox':'\u2ABA','succneqq':'\u2AB6','succnsim':'\u22E9','succsim':'\u227F','SuchThat':'\u220B','sum':'\u2211','Sum':'\u2211','sung':'\u266A','sup':'\u2283','Sup':'\u22D1','sup1':'\xB9','sup2':'\xB2','sup3':'\xB3','supdot':'\u2ABE','supdsub':'\u2AD8','supe':'\u2287','supE':'\u2AC6','supedot':'\u2AC4','Superset':'\u2283','SupersetEqual':'\u2287','suphsol':'\u27C9','suphsub':'\u2AD7','suplarr':'\u297B','supmult':'\u2AC2','supne':'\u228B','supnE':'\u2ACC','supplus':'\u2AC0','supset':'\u2283','Supset':'\u22D1','supseteq':'\u2287','supseteqq':'\u2AC6','supsetneq':'\u228B','supsetneqq':'\u2ACC','supsim':'\u2AC8','supsub':'\u2AD4','supsup':'\u2AD6','swarhk':'\u2926','swarr':'\u2199','swArr':'\u21D9','swarrow':'\u2199','swnwar':'\u292A','szlig':'\xDF','Tab':'\t','target':'\u2316','tau':'\u03C4','Tau':'\u03A4','tbrk':'\u23B4','tcaron':'\u0165','Tcaron':'\u0164','tcedil':'\u0163','Tcedil':'\u0162','tcy':'\u0442','Tcy':'\u0422','tdot':'\u20DB','telrec':'\u2315','tfr':'\uD835\uDD31','Tfr':'\uD835\uDD17','there4':'\u2234','therefore':'\u2234','Therefore':'\u2234','theta':'\u03B8','Theta':'\u0398','thetasym':'\u03D1','thetav':'\u03D1','thickapprox':'\u2248','thicksim':'\u223C','ThickSpace':'\u205F\u200A','thinsp':'\u2009','ThinSpace':'\u2009','thkap':'\u2248','thksim':'\u223C','thorn':'\xFE','THORN':'\xDE','tilde':'\u02DC','Tilde':'\u223C','TildeEqual':'\u2243','TildeFullEqual':'\u2245','TildeTilde':'\u2248','times':'\xD7','timesb':'\u22A0','timesbar':'\u2A31','timesd':'\u2A30','tint':'\u222D','toea':'\u2928','top':'\u22A4','topbot':'\u2336','topcir':'\u2AF1','topf':'\uD835\uDD65','Topf':'\uD835\uDD4B','topfork':'\u2ADA','tosa':'\u2929','tprime':'\u2034','trade':'\u2122','TRADE':'\u2122',
'triangle':'\u25B5','triangledown':'\u25BF','triangleleft':'\u25C3','trianglelefteq':'\u22B4','triangleq':'\u225C','triangleright':'\u25B9','trianglerighteq':'\u22B5','tridot':'\u25EC','trie':'\u225C','triminus':'\u2A3A','TripleDot':'\u20DB','triplus':'\u2A39','trisb':'\u29CD','tritime':'\u2A3B','trpezium':'\u23E2','tscr':'\uD835\uDCC9','Tscr':'\uD835\uDCAF','tscy':'\u0446','TScy':'\u0426','tshcy':'\u045B','TSHcy':'\u040B','tstrok':'\u0167','Tstrok':'\u0166','twixt':'\u226C','twoheadleftarrow':'\u219E','twoheadrightarrow':'\u21A0','uacute':'\xFA','Uacute':'\xDA','uarr':'\u2191','uArr':'\u21D1','Uarr':'\u219F','Uarrocir':'\u2949','ubrcy':'\u045E','Ubrcy':'\u040E','ubreve':'\u016D','Ubreve':'\u016C','ucirc':'\xFB','Ucirc':'\xDB','ucy':'\u0443','Ucy':'\u0423','udarr':'\u21C5','udblac':'\u0171','Udblac':'\u0170','udhar':'\u296E','ufisht':'\u297E','ufr':'\uD835\uDD32','Ufr':'\uD835\uDD18','ugrave':'\xF9','Ugrave':'\xD9','uHar':'\u2963','uharl':'\u21BF','uharr':'\u21BE','uhblk':'\u2580','ulcorn':'\u231C','ulcorner':'\u231C','ulcrop':'\u230F','ultri':'\u25F8','umacr':'\u016B','Umacr':'\u016A','uml':'\xA8','UnderBar':'_','UnderBrace':'\u23DF','UnderBracket':'\u23B5','UnderParenthesis':'\u23DD','Union':'\u22C3','UnionPlus':'\u228E','uogon':'\u0173','Uogon':'\u0172','uopf':'\uD835\uDD66','Uopf':'\uD835\uDD4C','uparrow':'\u2191','Uparrow':'\u21D1','UpArrow':'\u2191','UpArrowBar':'\u2912','UpArrowDownArrow':'\u21C5','updownarrow':'\u2195','Updownarrow':'\u21D5','UpDownArrow':'\u2195','UpEquilibrium':'\u296E','upharpoonleft':'\u21BF','upharpoonright':'\u21BE','uplus':'\u228E','UpperLeftArrow':'\u2196','UpperRightArrow':'\u2197','upsi':'\u03C5','Upsi':'\u03D2','upsih':'\u03D2','upsilon':'\u03C5','Upsilon':'\u03A5','UpTee':'\u22A5','UpTeeArrow':'\u21A5','upuparrows':'\u21C8','urcorn':'\u231D','urcorner':'\u231D','urcrop':'\u230E','uring':'\u016F','Uring':'\u016E','urtri':'\u25F9','uscr':'\uD835\uDCCA','Uscr':'\uD835\uDCB0','utdot':'\u22F0','utilde':'\u0169','Utilde':'\u0168','utri':'\u25B5','utrif':'\u25B4','uuarr':'\u21C8','uuml':'\xFC','Uuml':'\xDC','uwangle':'\u29A7','vangrt':'\u299C','varepsilon':'\u03F5','varkappa':'\u03F0','varnothing':'\u2205','varphi':'\u03D5','varpi':'\u03D6','varpropto':'\u221D','varr':'\u2195','vArr':'\u21D5','varrho':'\u03F1','varsigma':'\u03C2','varsubsetneq':'\u228A\uFE00','varsubsetneqq':'\u2ACB\uFE00','varsupsetneq':'\u228B\uFE00','varsupsetneqq':'\u2ACC\uFE00','vartheta':'\u03D1','vartriangleleft':'\u22B2','vartriangleright':'\u22B3','vBar':'\u2AE8','Vbar':'\u2AEB','vBarv':'\u2AE9','vcy':'\u0432','Vcy':'\u0412','vdash':'\u22A2','vDash':'\u22A8','Vdash':'\u22A9','VDash':'\u22AB','Vdashl':'\u2AE6','vee':'\u2228','Vee':'\u22C1','veebar':'\u22BB','veeeq':'\u225A','vellip':'\u22EE','verbar':'|','Verbar':'\u2016','vert':'|','Vert':'\u2016','VerticalBar':'\u2223','VerticalLine':'|','VerticalSeparator':'\u2758','VerticalTilde':'\u2240','VeryThinSpace':'\u200A','vfr':'\uD835\uDD33','Vfr':'\uD835\uDD19','vltri':'\u22B2','vnsub':'\u2282\u20D2','vnsup':'\u2283\u20D2','vopf':'\uD835\uDD67','Vopf':'\uD835\uDD4D','vprop':'\u221D','vrtri':'\u22B3','vscr':'\uD835\uDCCB','Vscr':'\uD835\uDCB1','vsubne':'\u228A\uFE00','vsubnE':'\u2ACB\uFE00','vsupne':'\u228B\uFE00','vsupnE':'\u2ACC\uFE00','Vvdash':'\u22AA','vzigzag':'\u299A','wcirc':'\u0175','Wcirc':'\u0174','wedbar':'\u2A5F','wedge':'\u2227','Wedge':'\u22C0','wedgeq':'\u2259','weierp':'\u2118','wfr':'\uD835\uDD34','Wfr':'\uD835\uDD1A','wopf':'\uD835\uDD68','Wopf':'\uD835\uDD4E','wp':'\u2118','wr':'\u2240','wreath':'\u2240','wscr':'\uD835\uD
CCC','Wscr':'\uD835\uDCB2','xcap':'\u22C2','xcirc':'\u25EF','xcup':'\u22C3','xdtri':'\u25BD','xfr':'\uD835\uDD35','Xfr':'\uD835\uDD1B','xharr':'\u27F7','xhArr':'\u27FA','xi':'\u03BE','Xi':'\u039E','xlarr':'\u27F5','xlArr':'\u27F8','xmap':'\u27FC','xnis':'\u22FB','xodot':'\u2A00','xopf':'\uD835\uDD69','Xopf':'\uD835\uDD4F','xoplus':'\u2A01','xotime':'\u2A02','xrarr':'\u27F6','xrArr':'\u27F9','xscr':'\uD835\uDCCD','Xscr':'\uD835\uDCB3','xsqcup':'\u2A06','xuplus':'\u2A04','xutri':'\u25B3','xvee':'\u22C1','xwedge':'\u22C0','yacute':'\xFD','Yacute':'\xDD','yacy':'\u044F','YAcy':'\u042F','ycirc':'\u0177','Ycirc':'\u0176','ycy':'\u044B','Ycy':'\u042B','yen':'\xA5','yfr':'\uD835\uDD36','Yfr':'\uD835\uDD1C','yicy':'\u0457','YIcy':'\u0407','yopf':'\uD835\uDD6A','Yopf':'\uD835\uDD50','yscr':'\uD835\uDCCE','Yscr':'\uD835\uDCB4','yucy':'\u044E','YUcy':'\u042E','yuml':'\xFF','Yuml':'\u0178','zacute':'\u017A','Zacute':'\u0179','zcaron':'\u017E','Zcaron':'\u017D','zcy':'\u0437','Zcy':'\u0417','zdot':'\u017C','Zdot':'\u017B','zeetrf':'\u2128','ZeroWidthSpace':'\u200B','zeta':'\u03B6','Zeta':'\u0396','zfr':'\uD835\uDD37','Zfr':'\u2128','zhcy':'\u0436','ZHcy':'\u0416','zigrarr':'\u21DD','zopf':'\uD835\uDD6B','Zopf':'\u2124','zscr':'\uD835\uDCCF','Zscr':'\uD835\uDCB5','zwj':'\u200D','zwnj':'\u200C'};
var decodeMapLegacy = {'aacute':'\xE1','Aacute':'\xC1','acirc':'\xE2','Acirc':'\xC2','acute':'\xB4','aelig':'\xE6','AElig':'\xC6','agrave':'\xE0','Agrave':'\xC0','amp':'&','AMP':'&','aring':'\xE5','Aring':'\xC5','atilde':'\xE3','Atilde':'\xC3','auml':'\xE4','Auml':'\xC4','brvbar':'\xA6','ccedil':'\xE7','Ccedil':'\xC7','cedil':'\xB8','cent':'\xA2','copy':'\xA9','COPY':'\xA9','curren':'\xA4','deg':'\xB0','divide':'\xF7','eacute':'\xE9','Eacute':'\xC9','ecirc':'\xEA','Ecirc':'\xCA','egrave':'\xE8','Egrave':'\xC8','eth':'\xF0','ETH':'\xD0','euml':'\xEB','Euml':'\xCB','frac12':'\xBD','frac14':'\xBC','frac34':'\xBE','gt':'>','GT':'>','iacute':'\xED','Iacute':'\xCD','icirc':'\xEE','Icirc':'\xCE','iexcl':'\xA1','igrave':'\xEC','Igrave':'\xCC','iquest':'\xBF','iuml':'\xEF','Iuml':'\xCF','laquo':'\xAB','lt':'<','LT':'<','macr':'\xAF','micro':'\xB5','middot':'\xB7','nbsp':'\xA0','not':'\xAC','ntilde':'\xF1','Ntilde':'\xD1','oacute':'\xF3','Oacute':'\xD3','ocirc':'\xF4','Ocirc':'\xD4','ograve':'\xF2','Ograve':'\xD2','ordf':'\xAA','ordm':'\xBA','oslash':'\xF8','Oslash':'\xD8','otilde':'\xF5','Otilde':'\xD5','ouml':'\xF6','Ouml':'\xD6','para':'\xB6','plusmn':'\xB1','pound':'\xA3','quot':'"','QUOT':'"','raquo':'\xBB','reg':'\xAE','REG':'\xAE','sect':'\xA7','shy':'\xAD','sup1':'\xB9','sup2':'\xB2','sup3':'\xB3','szlig':'\xDF','thorn':'\xFE','THORN':'\xDE','times':'\xD7','uacute':'\xFA','Uacute':'\xDA','ucirc':'\xFB','Ucirc':'\xDB','ugrave':'\xF9','Ugrave':'\xD9','uml':'\xA8','uuml':'\xFC','Uuml':'\xDC','yacute':'\xFD','Yacute':'\xDD','yen':'\xA5','yuml':'\xFF'};
var decodeMapNumeric = {'0':'\uFFFD','128':'\u20AC','130':'\u201A','131':'\u0192','132':'\u201E','133':'\u2026','134':'\u2020','135':'\u2021','136':'\u02C6','137':'\u2030','138':'\u0160','139':'\u2039','140':'\u0152','142':'\u017D','145':'\u2018','146':'\u2019','147':'\u201C','148':'\u201D','149':'\u2022','150':'\u2013','151':'\u2014','152':'\u02DC','153':'\u2122','154':'\u0161','155':'\u203A','156':'\u0153','158':'\u017E','159':'\u0178'};
var invalidReferenceCodePoints = [1,2,3,4,5,6,7,8,11,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,64976,64977,64978,64979,64980,64981,64982,64983,64984,64985,64986,64987,64988,64989,64990,64991,64992,64993,64994,64995,64996,64997,64998,64999,65000,65001,65002,65003,65004,65005,65006,65007,65534,65535,131070,131071,196606,196607,262142,262143,327678,327679,393214,393215,458750,458751,524286,524287,589822,589823,655358,655359,720894,720895,786430,786431,851966,851967,917502,917503,983038,983039,1048574,1048575,1114110,1114111];

/*--------------------------------------------------------------------------*/

var stringFromCharCode = String.fromCharCode;

var object = {};
var hasOwnProperty = object.hasOwnProperty;
var has = function(object, propertyName) {
	return hasOwnProperty.call(object, propertyName);
};

var contains = function(array, value) {
	var index = -1;
	var length = array.length;
	while (++index < length) {
		if (array[index] == value) {
			return true;
		}
	}
	return false;
};

var merge = function(options, defaults) {
	if (!options) {
		return defaults;
	}
	var result = {};
	var key;
	for (key in defaults) {
		// A `hasOwnProperty` check is not needed here, since only recognized
		// option names are used anyway. Any others are ignored.
		result[key] = has(options, key) ? options[key] : defaults[key];
	}
	return result;
};

// Modified version of `ucs2encode`; see https://mths.be/punycode.
var codePointToSymbol = function(codePoint, strict) {
	var output = '';
	if ((codePoint >= 0xD800 && codePoint <= 0xDFFF) || codePoint > 0x10FFFF) {
		// See issue #4:
		// “Otherwise, if the number is in the range 0xD800 to 0xDFFF or is
		// greater than 0x10FFFF, then this is a parse error. Return a U+FFFD
		// REPLACEMENT CHARACTER.”
		if (strict) {
			parseError('character reference outside the permissible Unicode range');
		}
		return '\uFFFD';
	}
	if (has(decodeMapNumeric, codePoint)) {
		if (strict) {
			parseError('disallowed character reference');
		}
		return decodeMapNumeric[codePoint];
	}
	if (strict && contains(invalidReferenceCodePoints, codePoint)) {
		parseError('disallowed character reference');
	}
	if (codePoint > 0xFFFF) {
		codePoint -= 0x10000;
		output += stringFromCharCode(codePoint >>> 10 & 0x3FF | 0xD800);
		codePoint = 0xDC00 | codePoint & 0x3FF;
	}
	output += stringFromCharCode(codePoint);
	return output;
};
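// Worked example (illustrative sketch, not part of the library): for an astral code
// point such as 0x1D306, the branch above emits a surrogate pair:
//   codePoint = 0x1D306 - 0x10000                 // 0xD306
//   high      = (0xD306 >>> 10 & 0x3FF) | 0xD800  // 0xD834
//   low       = 0xDC00 | (0xD306 & 0x3FF)         // 0xDF06
// so codePointToSymbol(0x1D306, false) returns '\uD834\uDF06'.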

var hexEscape = function(codePoint) {
	return '&#x' + codePoint.toString(16).toUpperCase() + ';';
};

var decEscape = function(codePoint) {
	return '&#' + codePoint + ';';
};

var parseError = function(message) {
	throw Error('Parse error: ' + message);
};

/*--------------------------------------------------------------------------*/
|
||
|
||
var encode = function(string, options) {
|
||
options = merge(options, encode.options);
|
||
var strict = options.strict;
|
||
if (strict && regexInvalidRawCodePoint.test(string)) {
|
||
parseError('forbidden code point');
|
||
}
|
||
var encodeEverything = options.encodeEverything;
|
||
var useNamedReferences = options.useNamedReferences;
|
||
var allowUnsafeSymbols = options.allowUnsafeSymbols;
|
||
var escapeCodePoint = options.decimal ? decEscape : hexEscape;
|
||
|
||
var escapeBmpSymbol = function(symbol) {
|
||
return escapeCodePoint(symbol.charCodeAt(0));
|
||
};
|
||
|
||
if (encodeEverything) {
|
||
// Encode ASCII symbols.
|
||
string = string.replace(regexAsciiWhitelist, function(symbol) {
|
||
// Use named references if requested & possible.
|
||
if (useNamedReferences && has(encodeMap, symbol)) {
|
||
return '&' + encodeMap[symbol] + ';';
|
||
}
|
||
return escapeBmpSymbol(symbol);
|
||
});
|
||
// Shorten a few escapes that represent two symbols, of which at least one
|
||
// is within the ASCII range.
|
||
if (useNamedReferences) {
|
||
string = string
|
||
.replace(/&gt;\u20D2/g, '&nvgt;')
.replace(/&lt;\u20D2/g, '&nvlt;')
.replace(/&#x66;&#x6A;/g, '&fjlig;');
|
||
}
|
||
// Encode non-ASCII symbols.
|
||
if (useNamedReferences) {
|
||
// Encode non-ASCII symbols that can be replaced with a named reference.
|
||
string = string.replace(regexEncodeNonAscii, function(string) {
|
||
// Note: there is no need to check `has(encodeMap, string)` here.
|
||
return '&' + encodeMap[string] + ';';
|
||
});
|
||
}
|
||
// Note: any remaining non-ASCII symbols are handled outside of the `if`.
|
||
} else if (useNamedReferences) {
|
||
// Apply named character references.
|
||
// Encode `<>"'&` using named character references.
|
||
if (!allowUnsafeSymbols) {
|
||
string = string.replace(regexEscape, function(string) {
|
||
return '&' + encodeMap[string] + ';'; // no need to check `has()` here
|
||
});
|
||
}
|
||
// Shorten escapes that represent two symbols, of which at least one is
|
||
// `<>"'&`.
|
||
string = string
|
||
.replace(/&gt;\u20D2/g, '&nvgt;')
.replace(/&lt;\u20D2/g, '&nvlt;');
|
||
// Encode non-ASCII symbols that can be replaced with a named reference.
|
||
string = string.replace(regexEncodeNonAscii, function(string) {
|
||
// Note: there is no need to check `has(encodeMap, string)` here.
|
||
return '&' + encodeMap[string] + ';';
|
||
});
|
||
} else if (!allowUnsafeSymbols) {
|
||
// Encode `<>"'&` using hexadecimal escapes, now that they’re not handled
|
||
// using named character references.
|
||
string = string.replace(regexEscape, escapeBmpSymbol);
|
||
}
|
||
return string
|
||
// Encode astral symbols.
|
||
.replace(regexAstralSymbols, function($0) {
|
||
// https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae
|
||
var high = $0.charCodeAt(0);
|
||
var low = $0.charCodeAt(1);
|
||
var codePoint = (high - 0xD800) * 0x400 + low - 0xDC00 + 0x10000;
|
||
return escapeCodePoint(codePoint);
|
||
})
|
||
// Encode any remaining BMP symbols that are not printable ASCII symbols
|
||
// using a hexadecimal escape.
|
||
.replace(regexBmpWhitelist, escapeBmpSymbol);
|
||
};
|
||
// Expose default options (so they can be overridden globally).
|
||
encode.options = {
|
||
'allowUnsafeSymbols': false,
|
||
'encodeEverything': false,
|
||
'strict': false,
|
||
'useNamedReferences': false,
|
||
'decimal' : false
|
||
};
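// Example usage sketch (input strings are illustrative; `encode` is the function above):
//   encode('foo \xA9 bar \u2260 baz');
//   // → 'foo &#xA9; bar &#x2260; baz'
//   encode('foo \xA9 bar', { useNamedReferences: true });
//   // → 'foo &copy; bar'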
|
||
|
||
var decode = function(html, options) {
|
||
options = merge(options, decode.options);
|
||
var strict = options.strict;
|
||
if (strict && regexInvalidEntity.test(html)) {
|
||
parseError('malformed character reference');
|
||
}
|
||
return html.replace(regexDecode, function($0, $1, $2, $3, $4, $5, $6, $7, $8) {
|
||
var codePoint;
|
||
var semicolon;
|
||
var decDigits;
|
||
var hexDigits;
|
||
var reference;
|
||
var next;
|
||
|
||
if ($1) {
|
||
reference = $1;
|
||
// Note: there is no need to check `has(decodeMap, reference)`.
|
||
return decodeMap[reference];
|
||
}
|
||
|
||
if ($2) {
|
||
// Decode named character references without trailing `;`, e.g. `&amp`.
|
||
// This is only a parse error if it gets converted to `&`, or if it is
|
||
// followed by `=` in an attribute context.
|
||
reference = $2;
|
||
next = $3;
|
||
if (next && options.isAttributeValue) {
|
||
if (strict && next == '=') {
|
||
parseError('`&` did not start a character reference');
|
||
}
|
||
return $0;
|
||
} else {
|
||
if (strict) {
|
||
parseError(
|
||
'named character reference was not terminated by a semicolon'
|
||
);
|
||
}
|
||
// Note: there is no need to check `has(decodeMapLegacy, reference)`.
|
||
return decodeMapLegacy[reference] + (next || '');
|
||
}
|
||
}
|
||
|
||
if ($4) {
|
||
// Decode decimal escapes, e.g. `&#119558;`.
|
||
decDigits = $4;
|
||
semicolon = $5;
|
||
if (strict && !semicolon) {
|
||
parseError('character reference was not terminated by a semicolon');
|
||
}
|
||
codePoint = parseInt(decDigits, 10);
|
||
return codePointToSymbol(codePoint, strict);
|
||
}
|
||
|
||
if ($6) {
|
||
// Decode hexadecimal escapes, e.g. `&#x1D306;`.
|
||
hexDigits = $6;
|
||
semicolon = $7;
|
||
if (strict && !semicolon) {
|
||
parseError('character reference was not terminated by a semicolon');
|
||
}
|
||
codePoint = parseInt(hexDigits, 16);
|
||
return codePointToSymbol(codePoint, strict);
|
||
}
|
||
|
||
// If we’re still here, `if ($7)` is implied; it’s an ambiguous
|
||
// ampersand for sure. https://mths.be/notes/ambiguous-ampersands
|
||
if (strict) {
|
||
parseError(
|
||
'named character reference was not terminated by a semicolon'
|
||
);
|
||
}
|
||
return $0;
|
||
});
|
||
};
|
||
// Expose default options (so they can be overridden globally).
|
||
decode.options = {
|
||
'isAttributeValue': false,
|
||
'strict': false
|
||
};
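// Example usage sketch (`decode` is the function above):
//   decode('foo &copy; bar &ne; baz');  // → 'foo \xA9 bar \u2260 baz'
//   decode('&amp', { strict: true });   // throws: reference not terminated by a semicolon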
|
||
|
||
var escape = function(string) {
|
||
return string.replace(regexEscape, function($0) {
|
||
// Note: there is no need to check `has(escapeMap, $0)` here.
|
||
return escapeMap[$0];
|
||
});
|
||
};
|
||
|
||
/*--------------------------------------------------------------------------*/
|
||
|
||
var he = {
|
||
'version': '1.2.0',
|
||
'encode': encode,
|
||
'decode': decode,
|
||
'escape': escape,
|
||
'unescape': decode
|
||
};
|
||
|
||
// Some AMD build optimizers, like r.js, check for specific condition patterns
|
||
// like the following:
|
||
if (
|
||
typeof define == 'function' &&
|
||
typeof define.amd == 'object' &&
|
||
define.amd
|
||
) {
|
||
define(function() {
|
||
return he;
|
||
});
|
||
} else if (freeExports && !freeExports.nodeType) {
|
||
if (freeModule) { // in Node.js, io.js, or RingoJS v0.8.0+
|
||
freeModule.exports = he;
|
||
} else { // in Narwhal or RingoJS v0.7.0-
|
||
for (var key in he) {
|
||
has(he, key) && (freeExports[key] = he[key]);
|
||
}
|
||
}
|
||
} else { // in Rhino or a web browser
|
||
root.he = he;
|
||
}
|
||
|
||
}(this));
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 5679:
/***/ ((module) => {

function sequence(...methods) {
    if (methods.length === 0) {
        throw new Error("Failed creating sequence: No functions provided");
    }
    return function __executeSequence(...args) {
        let result = args;
        const _this = this;
        while (methods.length > 0) {
            const method = methods.shift();
            result = [method.apply(_this, result)];
        }
        return result[0];
    };
}

module.exports = {
    sequence
};
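// Illustrative usage sketch (hypothetical functions): `sequence` pipes each return
// value into the next method. Note that `methods` is drained by `shift()`, so each
// composed function is built for a single invocation (HotPatcher.get below creates a
// fresh one per call).
//   const addOne = x => x + 1;
//   const double = x => x * 2;
//   sequence(addOne, double)(3); // → 8, i.e. double(addOne(3))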
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 3839:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
const { sequence } = __nccwpck_require__(5679);
|
||
|
||
const HOT_PATCHER_TYPE = "@@HOTPATCHER";
|
||
const NOOP = () => {};
|
||
|
||
function createNewItem(method) {
|
||
return {
|
||
original: method,
|
||
methods: [method],
|
||
final: false
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Hot patching manager class
|
||
*/
|
||
class HotPatcher {
|
||
constructor() {
|
||
this._configuration = {
|
||
registry: {},
|
||
getEmptyAction: "null"
|
||
};
|
||
this.__type__ = HOT_PATCHER_TYPE;
|
||
}
|
||
|
||
/**
|
||
* Configuration object reference
|
||
* @type {Object}
|
||
* @memberof HotPatcher
|
||
* @readonly
|
||
*/
|
||
get configuration() {
|
||
return this._configuration;
|
||
}
|
||
|
||
/**
|
||
* The action to take when a non-set method is requested
|
||
* Possible values: null/throw
|
||
* @type {String}
|
||
* @memberof HotPatcher
|
||
*/
|
||
get getEmptyAction() {
|
||
return this.configuration.getEmptyAction;
|
||
}
|
||
|
||
set getEmptyAction(newAction) {
|
||
this.configuration.getEmptyAction = newAction;
|
||
}
|
||
|
||
/**
|
||
* Control another hot-patcher instance
|
||
* Force the remote instance to use patched methods from calling instance
|
||
* @param {HotPatcher} target The target instance to control
|
||
* @param {Boolean=} allowTargetOverrides Allow the target to override patched methods on
|
||
* the controller (default is false)
|
||
* @memberof HotPatcher
|
||
* @returns {HotPatcher} Returns self
|
||
* @throws {Error} Throws if the target is invalid
|
||
*/
|
||
control(target, allowTargetOverrides = false) {
|
||
if (!target || target.__type__ !== HOT_PATCHER_TYPE) {
|
||
throw new Error(
|
||
"Failed taking control of target HotPatcher instance: Invalid type or object"
|
||
);
|
||
}
|
||
Object.keys(target.configuration.registry).forEach(foreignKey => {
|
||
if (this.configuration.registry.hasOwnProperty(foreignKey)) {
|
||
if (allowTargetOverrides) {
|
||
this.configuration.registry[foreignKey] = Object.assign(
|
||
{},
|
||
target.configuration.registry[foreignKey]
|
||
);
|
||
}
|
||
} else {
|
||
this.configuration.registry[foreignKey] = Object.assign(
|
||
{},
|
||
target.configuration.registry[foreignKey]
|
||
);
|
||
}
|
||
});
|
||
target._configuration = this.configuration;
|
||
return this;
|
||
}
|
||
|
||
/**
|
||
* Execute a patched method
|
||
* @param {String} key The method key
|
||
* @param {...*} args Arguments to pass to the method (optional)
|
||
* @memberof HotPatcher
|
||
* @see HotPatcher#get
|
||
* @returns {*} The output of the called method
|
||
*/
|
||
execute(key, ...args) {
|
||
const method = this.get(key) || NOOP;
|
||
return method(...args);
|
||
}
|
||
|
||
/**
|
||
* Get a method for a key
|
||
* @param {String} key The method key
|
||
* @returns {Function|null} Returns the requested function or null if the function
|
||
* does not exist and the host is configured to return null (and not throw)
|
||
* @memberof HotPatcher
|
||
* @throws {Error} Throws if the configuration specifies to throw and the method
|
||
* does not exist
|
||
* @throws {Error} Throws if the `getEmptyAction` value is invalid
|
||
*/
|
||
get(key) {
|
||
const item = this.configuration.registry[key];
|
||
if (!item) {
|
||
switch (this.getEmptyAction) {
|
||
case "null":
|
||
return null;
|
||
case "throw":
|
||
throw new Error(
|
||
`Failed handling method request: No method provided for override: ${key}`
|
||
);
|
||
default:
|
||
throw new Error(
|
||
`Failed handling request which resulted in an empty method: Invalid empty-action specified: ${
|
||
this.getEmptyAction
|
||
}`
|
||
);
|
||
}
|
||
}
|
||
return sequence(...item.methods);
|
||
}
|
||
|
||
/**
|
||
* Check if a method has been patched
|
||
* @param {String} key The function key
|
||
* @returns {Boolean} True if already patched
|
||
* @memberof HotPatcher
|
||
*/
|
||
isPatched(key) {
|
||
return !!this.configuration.registry[key];
|
||
}
|
||
|
||
/**
|
||
* @typedef {Object} PatchOptions
|
||
* @property {Boolean=} chain - Whether or not to allow chaining execution. Chained
|
||
* execution allows for attaching multiple callbacks to a key, where the callbacks
|
||
* will be executed in order of when they were patched (oldest to newest), the
|
||
* values being passed from one method to another.
|
||
*/
|
||
|
||
/**
|
||
* Patch a method name
|
||
* @param {String} key The method key to patch
|
||
* @param {Function} method The function to set
|
||
* @param {PatchOptions=} options Patch options
|
||
* @memberof HotPatcher
|
||
* @returns {HotPatcher} Returns self
|
||
*/
|
||
patch(key, method, { chain = false } = {}) {
|
||
if (this.configuration.registry[key] && this.configuration.registry[key].final) {
|
||
throw new Error(`Failed patching '${key}': Method marked as being final`);
|
||
}
|
||
if (typeof method !== "function") {
|
||
throw new Error(`Failed patching '${key}': Provided method is not a function`);
|
||
}
|
||
if (chain) {
|
||
// Add new method to the chain
|
||
if (!this.configuration.registry[key]) {
|
||
// New key, create item
|
||
this.configuration.registry[key] = createNewItem(method);
|
||
} else {
|
||
// Existing, push the method
|
||
this.configuration.registry[key].methods.push(method);
|
||
}
|
||
} else {
|
||
// Replace the original
|
||
if (this.isPatched(key)) {
|
||
const { original } = this.configuration.registry[key];
|
||
this.configuration.registry[key] = Object.assign(createNewItem(method), {
|
||
original
|
||
});
|
||
} else {
|
||
this.configuration.registry[key] = createNewItem(method);
|
||
}
|
||
}
|
||
return this;
|
||
}
|
||
|
||
/**
|
||
* Patch a method inline, execute it and return the value
|
||
* Used for patching contents of functions. This method will not apply a patched
|
||
* function if it has already been patched, allowing for external overrides to
|
||
* function. It also means that the function is cached so that it is not
|
||
* instantiated every time the outer function is invoked.
|
||
* @param {String} key The function key to use
|
||
* @param {Function} method The function to patch (once, only if not patched)
|
||
* @param {...*} args Arguments to pass to the function
|
||
* @returns {*} The output of the patched function
|
||
* @memberof HotPatcher
|
||
* @example
|
||
* function mySpecialFunction(a, b) {
|
||
* return hotPatcher.patchInline("func", (a, b) => {
|
||
* return a + b;
|
||
* }, a, b);
|
||
* }
|
||
*/
|
||
patchInline(key, method, ...args) {
|
||
if (!this.isPatched(key)) {
|
||
this.patch(key, method);
|
||
}
|
||
return this.execute(key, ...args);
|
||
}
|
||
|
||
/**
|
||
* Patch a method (or methods) in sequential-mode
|
||
* See `patch()` with the option `chain: true`
|
||
* @see patch
|
||
* @param {String} key The key to patch
|
||
* @param {...Function} methods The methods to patch
|
||
* @returns {HotPatcher} Returns self
|
||
* @memberof HotPatcher
|
||
*/
|
||
plugin(key, ...methods) {
|
||
methods.forEach(method => {
|
||
this.patch(key, method, { chain: true });
|
||
});
|
||
return this;
|
||
}
|
||
|
||
/**
|
||
* Restore a patched method if it has been overridden
|
||
* @param {String} key The method key
|
||
* @memberof HotPatcher
|
||
*/
|
||
restore(key) {
|
||
if (!this.isPatched(key)) {
|
||
throw new Error(`Failed restoring method: No method present for key: ${key}`);
|
||
} else if (typeof this.configuration.registry[key].original !== "function") {
|
||
throw new Error(
|
||
`Failed restoring method: Original method not found or of invalid type for key: ${key}`
|
||
);
|
||
}
|
||
this.configuration.registry[key].methods = [this.configuration.registry[key].original];
|
||
}
|
||
|
||
/**
|
||
* Set a method as being final
|
||
* This sets a method as having been finally overridden. Attempts at overriding
|
||
* again will fail with an error.
|
||
* @param {String} key The key to make final
|
||
* @memberof HotPatcher
|
||
* @returns {HotPatcher} Returns self
|
||
*/
|
||
setFinal(key) {
|
||
if (!this.configuration.registry.hasOwnProperty(key)) {
|
||
throw new Error(`Failed marking '${key}' as final: No method found for key`);
|
||
}
|
||
this.configuration.registry[key].final = true;
|
||
return this;
|
||
}
|
||
}
|
||
|
||
module.exports = HotPatcher;
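// Illustrative usage sketch (hypothetical key and functions):
//   const patcher = new HotPatcher();
//   patcher.patch("getText", () => "base");                 // register/override
//   patcher.plugin("getText", text => text.toUpperCase());  // chain a follow-up
//   patcher.execute("getText");                             // → "BASE"
//   patcher.isPatched("getText");                           // → true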
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 9963:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

var wrappy = __nccwpck_require__(682)
var reqs = Object.create(null)
var once = __nccwpck_require__(2940)

module.exports = wrappy(inflight)

function inflight (key, cb) {
  if (reqs[key]) {
    reqs[key].push(cb)
    return null
  } else {
    reqs[key] = [cb]
    return makeres(key)
  }
}

function makeres (key) {
  return once(function RES () {
    var cbs = reqs[key]
    var len = cbs.length
    var args = slice(arguments)

    // XXX It's somewhat ambiguous whether a new callback added in this
    // pass should be queued for later execution if something in the
    // list of callbacks throws, or if it should just be discarded.
    // However, it's such an edge case that it hardly matters, and either
    // choice is likely as surprising as the other.
    // As it happens, we do go ahead and schedule it for later execution.
    try {
      for (var i = 0; i < len; i++) {
        cbs[i].apply(null, args)
      }
    } finally {
      if (cbs.length > len) {
        // added more in the interim.
        // de-zalgo, just in case, but don't call again.
        cbs.splice(0, len)
        process.nextTick(function () {
          RES.apply(null, args)
        })
      } else {
        delete reqs[key]
      }
    }
  })
}

function slice (args) {
  var length = args.length
  var array = []

  for (var i = 0; i < length; i++) array[i] = args[i]
  return array
}
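// Illustrative usage sketch (hypothetical key, callbacks and doRequest helper): callers
// that share a key are queued, and only the first caller gets a function to invoke:
//   var first = inflight('GET /users', function (err, users) { /* caller one */ })
//   var second = inflight('GET /users', function (err, users) { /* caller two */ })
//   // `first` is a wrapped callback; `second` is null, so caller two just waits.
//   doRequest('/users', first) // completing `first` fans the result out to both callbacks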
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 3753:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

try {
  var util = __nccwpck_require__(1669);
  /* istanbul ignore next */
  if (typeof util.inherits !== 'function') throw '';
  module.exports = util.inherits;
} catch (e) {
  /* istanbul ignore next */
  module.exports = __nccwpck_require__(8181);
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 8181:
|
||
/***/ ((module) => {

if (typeof Object.create === 'function') {
  // implementation from standard node.js 'util' module
  module.exports = function inherits(ctor, superCtor) {
    if (superCtor) {
      ctor.super_ = superCtor
      ctor.prototype = Object.create(superCtor.prototype, {
        constructor: {
          value: ctor,
          enumerable: false,
          writable: true,
          configurable: true
        }
      })
    }
  };
} else {
  // old school shim for old browsers
  module.exports = function inherits(ctor, superCtor) {
    if (superCtor) {
      ctor.super_ = superCtor
      var TempCtor = function () {}
      TempCtor.prototype = superCtor.prototype
      ctor.prototype = new TempCtor()
      ctor.prototype.constructor = ctor
    }
  }
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 3125:
|
||
/***/ ((module) => {

/*!
 * Determine if an object is a Buffer
 *
 * @author Feross Aboukhadijeh <https://feross.org>
 * @license MIT
 */

// The _isBuffer check is for Safari 5-7 support, because it's missing
// Object.prototype.constructor. Remove this eventually
module.exports = function (obj) {
  return obj != null && (isBuffer(obj) || isSlowBuffer(obj) || !!obj._isBuffer)
}

function isBuffer (obj) {
  return !!obj.constructor && typeof obj.constructor.isBuffer === 'function' && obj.constructor.isBuffer(obj)
}

// For Node v0.10 support. Remove this eventually.
function isSlowBuffer (obj) {
  return typeof obj.readFloatLE === 'function' && typeof obj.slice === 'function' && isBuffer(obj.slice(0, 0))
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 6426:
|
||
/***/ ((module) => {

var toString = {}.toString;

module.exports = Array.isArray || function (arr) {
  return toString.call(arr) == '[object Array]';
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 205:
|
||
/***/ ((__unused_webpack_module, exports) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.isError = exports.inherit = exports.assertError = void 0;
function assertError(err) {
    if (!isError(err)) {
        throw new Error("Parameter was not an error");
    }
}
exports.assertError = assertError;
function inherit(ctor, superCtor) {
    ctor.super_ = superCtor;
    ctor.prototype = Object.create(superCtor.prototype, {
        constructor: {
            value: ctor,
            enumerable: false,
            writable: true,
            configurable: true
        }
    });
}
exports.inherit = inherit;
function isError(err) {
    return objectToString(err) === "[object Error]" || err instanceof Error;
}
exports.isError = isError;
function objectToString(obj) {
    return Object.prototype.toString.call(obj);
}
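// Illustrative checks (sketch):
//   isError(new TypeError("boom")); // → true
//   isError({ message: "boom" });   // → false
//   assertError({});                // throws "Parameter was not an error"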
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 7546:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||
if (k2 === undefined) k2 = k;
|
||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||
}) : (function(o, m, k, k2) {
|
||
if (k2 === undefined) k2 = k;
|
||
o[k2] = m[k];
|
||
}));
|
||
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
||
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.Layerr = void 0;
|
||
var layerr_1 = __nccwpck_require__(3227);
|
||
Object.defineProperty(exports, "Layerr", ({ enumerable: true, get: function () { return layerr_1.Layerr; } }));
|
||
__exportStar(__nccwpck_require__(380), exports);
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 3227:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.Layerr = void 0;
|
||
const error_1 = __nccwpck_require__(205);
|
||
const tools_1 = __nccwpck_require__(1631);
|
||
function Layerr(errorOptionsOrMessage, messageText) {
|
||
const args = [...arguments];
|
||
if (this instanceof Layerr === false) {
|
||
throw new Error("Cannot invoke 'Layerr' like a function: It must be called with 'new'");
|
||
}
|
||
const { options, shortMessage } = tools_1.parseArguments(args);
|
||
this.name = "Layerr";
|
||
if (options.name && typeof options.name === "string") {
|
||
this.name = options.name;
|
||
}
|
||
let message = shortMessage;
|
||
if (options.cause) {
|
||
Object.defineProperty(this, "_cause", { value: options.cause });
|
||
message = `${message}: ${options.cause.message}`;
|
||
}
|
||
this.message = message;
|
||
Object.defineProperty(this, "_info", { value: {} });
|
||
if (options.info && typeof options.info === "object") {
|
||
Object.assign(this._info, options.info);
|
||
}
|
||
Error.call(this, message);
|
||
if (Error.captureStackTrace) {
|
||
const ctor = options.constructorOpt || this.constructor;
|
||
Error.captureStackTrace(this, ctor);
|
||
}
|
||
return this;
|
||
}
|
||
exports.Layerr = Layerr;
|
||
error_1.inherit(Layerr, Error);
|
||
Layerr.prototype.cause = function _getCause() {
|
||
return Layerr.cause(this) || undefined;
|
||
};
|
||
Layerr.prototype.toString = function _toString() {
|
||
let output = this.name || this.constructor.name || this.constructor.prototype.name;
|
||
if (this.message) {
|
||
output = `${output}: ${this.message}`;
|
||
}
|
||
return output;
|
||
};
|
||
Layerr.cause = function __getCause(err) {
|
||
error_1.assertError(err);
|
||
return error_1.isError(err._cause) ? err._cause : null;
|
||
};
|
||
Layerr.fullStack = function __getFullStack(err) {
|
||
error_1.assertError(err);
|
||
const cause = Layerr.cause(err);
|
||
if (cause) {
|
||
return `${err.stack}\ncaused by: ${Layerr.fullStack(cause)}`;
|
||
}
|
||
return err.stack;
|
||
};
|
||
Layerr.info = function __getInfo(err) {
|
||
error_1.assertError(err);
|
||
const output = {};
|
||
const cause = Layerr.cause(err);
|
||
if (cause) {
|
||
Object.assign(output, Layerr.info(cause));
|
||
}
|
||
if (err._info) {
|
||
Object.assign(output, err._info);
|
||
}
|
||
return output;
|
||
};
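// Illustrative usage sketch (hypothetical caller; `badInput` stands in for real data):
//   try {
//       JSON.parse(badInput);
//   } catch (err) {
//       const wrapped = new Layerr({ cause: err, info: { source: "config" } }, "Failed reading config");
//       Layerr.info(wrapped);      // → { source: "config" }
//       Layerr.fullStack(wrapped); // → wrapped stack, then "caused by: " + original stack
//   }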
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 1631:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.parseArguments = void 0;
const error_1 = __nccwpck_require__(205);
function parseArguments(args) {
    let options, shortMessage = "";
    if (args.length === 0) {
        options = {};
    }
    else if (error_1.isError(args[0])) {
        options = {
            cause: args[0]
        };
        shortMessage = args.slice(1).join(" ") || "";
    }
    else if (args[0] && typeof args[0] === "object") {
        options = Object.assign({}, args[0]);
        shortMessage = args.slice(1).join(" ") || "";
    }
    else if (typeof args[0] === "string") {
        options = {};
        shortMessage = args.join(" ") || "";
    }
    else {
        throw new Error("Invalid arguments passed to Layerr");
    }
    return {
        options,
        shortMessage
    };
}
exports.parseArguments = parseArguments;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 380:
|
||
/***/ ((__unused_webpack_module, exports) => {
|
||
|
||
"use strict";
|
||
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 4556:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
var util = __nccwpck_require__(1669);
|
||
var PassThrough = __nccwpck_require__(7775);
|
||
|
||
module.exports = {
|
||
Readable: Readable,
|
||
Writable: Writable
|
||
};
|
||
|
||
util.inherits(Readable, PassThrough);
|
||
util.inherits(Writable, PassThrough);
|
||
|
||
// Patch the given method of instance so that the callback
|
||
// is executed once, before the actual method is called the
|
||
// first time.
|
||
function beforeFirstCall(instance, method, callback) {
|
||
instance[method] = function() {
|
||
delete instance[method];
|
||
callback.apply(this, arguments);
|
||
return this[method].apply(this, arguments);
|
||
};
|
||
}
|
||
|
||
function Readable(fn, options) {
|
||
if (!(this instanceof Readable))
|
||
return new Readable(fn, options);
|
||
|
||
PassThrough.call(this, options);
|
||
|
||
beforeFirstCall(this, '_read', function() {
|
||
var source = fn.call(this, options);
|
||
var emit = this.emit.bind(this, 'error');
|
||
source.on('error', emit);
|
||
source.pipe(this);
|
||
});
|
||
|
||
this.emit('readable');
|
||
}
|
||
|
||
function Writable(fn, options) {
|
||
if (!(this instanceof Writable))
|
||
return new Writable(fn, options);
|
||
|
||
PassThrough.call(this, options);
|
||
|
||
beforeFirstCall(this, '_write', function() {
|
||
var destination = fn.call(this, options);
|
||
var emit = this.emit.bind(this, 'error');
|
||
destination.on('error', emit);
|
||
this.pipe(destination);
|
||
});
|
||
|
||
this.emit('writable');
|
||
}
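// Illustrative usage sketch (hypothetical file path; `fs` would come from require('fs')):
// the wrapper defers creating the underlying stream until it is first read from.
//   var lazy = Readable(function () {
//     return fs.createReadStream('/tmp/example.log');
//   });
//   lazy.pipe(process.stdout);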
|
||
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 415:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
||
// a duplex stream is just a stream that is both readable and writable.
|
||
// Since JS doesn't have multiple prototypal inheritance, this class
|
||
// prototypally inherits from Readable, and then parasitically from
|
||
// Writable.
|
||
|
||
|
||
|
||
/*<replacement>*/
|
||
|
||
var pna = __nccwpck_require__(9029);
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
var objectKeys = Object.keys || function (obj) {
|
||
var keys = [];
|
||
for (var key in obj) {
|
||
keys.push(key);
|
||
}return keys;
|
||
};
|
||
/*</replacement>*/
|
||
|
||
module.exports = Duplex;
|
||
|
||
/*<replacement>*/
|
||
var util = Object.create(__nccwpck_require__(9300));
|
||
util.inherits = __nccwpck_require__(3753);
|
||
/*</replacement>*/
|
||
|
||
var Readable = __nccwpck_require__(3307);
|
||
var Writable = __nccwpck_require__(5104);
|
||
|
||
util.inherits(Duplex, Readable);
|
||
|
||
{
|
||
// avoid scope creep, the keys array can then be collected
|
||
var keys = objectKeys(Writable.prototype);
|
||
for (var v = 0; v < keys.length; v++) {
|
||
var method = keys[v];
|
||
if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
|
||
}
|
||
}
|
||
|
||
function Duplex(options) {
|
||
if (!(this instanceof Duplex)) return new Duplex(options);
|
||
|
||
Readable.call(this, options);
|
||
Writable.call(this, options);
|
||
|
||
if (options && options.readable === false) this.readable = false;
|
||
|
||
if (options && options.writable === false) this.writable = false;
|
||
|
||
this.allowHalfOpen = true;
|
||
if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;
|
||
|
||
this.once('end', onend);
|
||
}
|
||
|
||
Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function () {
|
||
return this._writableState.highWaterMark;
|
||
}
|
||
});
|
||
|
||
// the no-half-open enforcer
|
||
function onend() {
|
||
// if we allow half-open state, or if the writable side ended,
|
||
// then we're ok.
|
||
if (this.allowHalfOpen || this._writableState.ended) return;
|
||
|
||
// no more data can be written.
|
||
// But allow more writes to happen in this tick.
|
||
pna.nextTick(onEndNT, this);
|
||
}
|
||
|
||
function onEndNT(self) {
|
||
self.end();
|
||
}
|
||
|
||
Object.defineProperty(Duplex.prototype, 'destroyed', {
|
||
get: function () {
|
||
if (this._readableState === undefined || this._writableState === undefined) {
|
||
return false;
|
||
}
|
||
return this._readableState.destroyed && this._writableState.destroyed;
|
||
},
|
||
set: function (value) {
|
||
// we ignore the value if the stream
|
||
// has not been initialized yet
|
||
if (this._readableState === undefined || this._writableState === undefined) {
|
||
return;
|
||
}
|
||
|
||
// backward compatibility, the user is explicitly
|
||
// managing destroyed
|
||
this._readableState.destroyed = value;
|
||
this._writableState.destroyed = value;
|
||
}
|
||
});
|
||
|
||
Duplex.prototype._destroy = function (err, cb) {
|
||
this.push(null);
|
||
this.end();
|
||
|
||
pna.nextTick(cb, err);
|
||
};
|
||
|
||
/***/ }),
|
||
|
||
/***/ 9412:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
||
// a passthrough stream.
|
||
// basically just the most minimal sort of Transform stream.
|
||
// Every written chunk gets output as-is.
|
||
|
||
|
||
|
||
module.exports = PassThrough;
|
||
|
||
var Transform = __nccwpck_require__(3533);
|
||
|
||
/*<replacement>*/
|
||
var util = Object.create(__nccwpck_require__(9300));
|
||
util.inherits = __nccwpck_require__(3753);
|
||
/*</replacement>*/
|
||
|
||
util.inherits(PassThrough, Transform);
|
||
|
||
function PassThrough(options) {
|
||
if (!(this instanceof PassThrough)) return new PassThrough(options);
|
||
|
||
Transform.call(this, options);
|
||
}
|
||
|
||
PassThrough.prototype._transform = function (chunk, encoding, cb) {
|
||
cb(null, chunk);
|
||
};
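// Illustrative sketch only, never invoked by this bundle: because _transform
// above just echoes each chunk, a PassThrough works as a tap for observing
// piped data without altering it. `source` and `onChunk` are hypothetical
// arguments supplied by a caller.
function examplePassThroughTap(source, onChunk) {
  var tap = new PassThrough();
  tap.on('data', onChunk);
  return source.pipe(tap);
}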
|
||
|
||
/***/ }),
|
||
|
||
/***/ 3307:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
||
|
||
|
||
/*<replacement>*/
|
||
|
||
var pna = __nccwpck_require__(9029);
|
||
/*</replacement>*/
|
||
|
||
module.exports = Readable;
|
||
|
||
/*<replacement>*/
|
||
var isArray = __nccwpck_require__(6426);
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
var Duplex;
|
||
/*</replacement>*/
|
||
|
||
Readable.ReadableState = ReadableState;
|
||
|
||
/*<replacement>*/
|
||
var EE = __nccwpck_require__(8614).EventEmitter;
|
||
|
||
var EElistenerCount = function (emitter, type) {
|
||
return emitter.listeners(type).length;
|
||
};
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
var Stream = __nccwpck_require__(3661);
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
|
||
var Buffer = __nccwpck_require__(1534).Buffer;
|
||
var OurUint8Array = global.Uint8Array || function () {};
|
||
function _uint8ArrayToBuffer(chunk) {
|
||
return Buffer.from(chunk);
|
||
}
|
||
function _isUint8Array(obj) {
|
||
return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
|
||
}
|
||
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
var util = Object.create(__nccwpck_require__(9300));
|
||
util.inherits = __nccwpck_require__(3753);
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
var debugUtil = __nccwpck_require__(1669);
|
||
var debug = void 0;
|
||
if (debugUtil && debugUtil.debuglog) {
|
||
debug = debugUtil.debuglog('stream');
|
||
} else {
|
||
debug = function () {};
|
||
}
|
||
/*</replacement>*/
|
||
|
||
var BufferList = __nccwpck_require__(765);
|
||
var destroyImpl = __nccwpck_require__(7174);
|
||
var StringDecoder;
|
||
|
||
util.inherits(Readable, Stream);
|
||
|
||
var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
|
||
|
||
function prependListener(emitter, event, fn) {
|
||
// Sadly this is not cacheable as some libraries bundle their own
|
||
// event emitter implementation with them.
|
||
if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);
|
||
|
||
// This is a hack to make sure that our error handler is attached before any
|
||
// userland ones. NEVER DO THIS. This is here only because this code needs
|
||
// to continue to work with older versions of Node.js that do not include
|
||
// the prependListener() method. The goal is to eventually remove this hack.
|
||
if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];
|
||
}
|
||
|
||
function ReadableState(options, stream) {
|
||
Duplex = Duplex || __nccwpck_require__(415);
|
||
|
||
options = options || {};
|
||
|
||
// Duplex streams are both readable and writable, but share
|
||
// the same options object.
|
||
// However, some cases require setting options to different
|
||
// values for the readable and the writable sides of the duplex stream.
|
||
// These options can be provided separately as readableXXX and writableXXX.
|
||
var isDuplex = stream instanceof Duplex;
|
||
|
||
// object stream flag. Used to make read(n) ignore n and to
|
||
// make all the buffer merging and length checks go away
|
||
this.objectMode = !!options.objectMode;
|
||
|
||
if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;
|
||
|
||
// the point at which it stops calling _read() to fill the buffer
|
||
// Note: 0 is a valid value, means "don't call _read preemptively ever"
|
||
var hwm = options.highWaterMark;
|
||
var readableHwm = options.readableHighWaterMark;
|
||
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
|
||
|
||
if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm;
|
||
|
||
// cast to ints.
|
||
this.highWaterMark = Math.floor(this.highWaterMark);
|
||
|
||
// A linked list is used to store data chunks instead of an array because the
|
||
// linked list can remove elements from the beginning faster than
|
||
// array.shift()
|
||
this.buffer = new BufferList();
|
||
this.length = 0;
|
||
this.pipes = null;
|
||
this.pipesCount = 0;
|
||
this.flowing = null;
|
||
this.ended = false;
|
||
this.endEmitted = false;
|
||
this.reading = false;
|
||
|
||
// a flag to be able to tell if the event 'readable'/'data' is emitted
|
||
// immediately, or on a later tick. We set this to true at first, because
|
||
// any actions that shouldn't happen until "later" should generally also
|
||
// not happen before the first read call.
|
||
this.sync = true;
|
||
|
||
// whenever we return null, then we set a flag to say
|
||
// that we're awaiting a 'readable' event emission.
|
||
this.needReadable = false;
|
||
this.emittedReadable = false;
|
||
this.readableListening = false;
|
||
this.resumeScheduled = false;
|
||
|
||
// has it been destroyed
|
||
this.destroyed = false;
|
||
|
||
// Crypto is kind of old and crusty. Historically, its default string
|
||
// encoding is 'binary' so we have to make this configurable.
|
||
// Everything else in the universe uses 'utf8', though.
|
||
this.defaultEncoding = options.defaultEncoding || 'utf8';
|
||
|
||
// the number of writers that are awaiting a drain event in .pipe()s
|
||
this.awaitDrain = 0;
|
||
|
||
// if true, a maybeReadMore has been scheduled
|
||
this.readingMore = false;
|
||
|
||
this.decoder = null;
|
||
this.encoding = null;
|
||
if (options.encoding) {
|
||
if (!StringDecoder) StringDecoder = __nccwpck_require__(781)/* .StringDecoder */ .s;
|
||
this.decoder = new StringDecoder(options.encoding);
|
||
this.encoding = options.encoding;
|
||
}
|
||
}
|
||
|
||
function Readable(options) {
|
||
Duplex = Duplex || __nccwpck_require__(415);
|
||
|
||
if (!(this instanceof Readable)) return new Readable(options);
|
||
|
||
this._readableState = new ReadableState(options, this);
|
||
|
||
// legacy
|
||
this.readable = true;
|
||
|
||
if (options) {
|
||
if (typeof options.read === 'function') this._read = options.read;
|
||
|
||
if (typeof options.destroy === 'function') this._destroy = options.destroy;
|
||
}
|
||
|
||
Stream.call(this);
|
||
}
|
||
|
||
Object.defineProperty(Readable.prototype, 'destroyed', {
|
||
get: function () {
|
||
if (this._readableState === undefined) {
|
||
return false;
|
||
}
|
||
return this._readableState.destroyed;
|
||
},
|
||
set: function (value) {
|
||
// we ignore the value if the stream
|
||
// has not been initialized yet
|
||
if (!this._readableState) {
|
||
return;
|
||
}
|
||
|
||
// backward compatibility, the user is explicitly
|
||
// managing destroyed
|
||
this._readableState.destroyed = value;
|
||
}
|
||
});
|
||
|
||
Readable.prototype.destroy = destroyImpl.destroy;
|
||
Readable.prototype._undestroy = destroyImpl.undestroy;
|
||
Readable.prototype._destroy = function (err, cb) {
|
||
this.push(null);
|
||
cb(err);
|
||
};
|
||
|
||
// Manually shove something into the read() buffer.
|
||
// This returns true if the highWaterMark has not been hit yet,
|
||
// similar to how Writable.write() returns true if you should
|
||
// write() some more.
|
||
Readable.prototype.push = function (chunk, encoding) {
|
||
var state = this._readableState;
|
||
var skipChunkCheck;
|
||
|
||
if (!state.objectMode) {
|
||
if (typeof chunk === 'string') {
|
||
encoding = encoding || state.defaultEncoding;
|
||
if (encoding !== state.encoding) {
|
||
chunk = Buffer.from(chunk, encoding);
|
||
encoding = '';
|
||
}
|
||
skipChunkCheck = true;
|
||
}
|
||
} else {
|
||
skipChunkCheck = true;
|
||
}
|
||
|
||
return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
|
||
};
|
||
|
||
// Unshift should *always* be something directly out of read()
|
||
Readable.prototype.unshift = function (chunk) {
|
||
return readableAddChunk(this, chunk, null, true, false);
|
||
};
|
||
|
||
function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
|
||
var state = stream._readableState;
|
||
if (chunk === null) {
|
||
state.reading = false;
|
||
onEofChunk(stream, state);
|
||
} else {
|
||
var er;
|
||
if (!skipChunkCheck) er = chunkInvalid(state, chunk);
|
||
if (er) {
|
||
stream.emit('error', er);
|
||
} else if (state.objectMode || chunk && chunk.length > 0) {
|
||
if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {
|
||
chunk = _uint8ArrayToBuffer(chunk);
|
||
}
|
||
|
||
if (addToFront) {
|
||
if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true);
|
||
} else if (state.ended) {
|
||
stream.emit('error', new Error('stream.push() after EOF'));
|
||
} else {
|
||
state.reading = false;
|
||
if (state.decoder && !encoding) {
|
||
chunk = state.decoder.write(chunk);
|
||
if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);
|
||
} else {
|
||
addChunk(stream, state, chunk, false);
|
||
}
|
||
}
|
||
} else if (!addToFront) {
|
||
state.reading = false;
|
||
}
|
||
}
|
||
|
||
return needMoreData(state);
|
||
}
|
||
|
||
function addChunk(stream, state, chunk, addToFront) {
|
||
if (state.flowing && state.length === 0 && !state.sync) {
|
||
stream.emit('data', chunk);
|
||
stream.read(0);
|
||
} else {
|
||
// update the buffer info.
|
||
state.length += state.objectMode ? 1 : chunk.length;
|
||
if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
|
||
|
||
if (state.needReadable) emitReadable(stream);
|
||
}
|
||
maybeReadMore(stream, state);
|
||
}
|
||
|
||
function chunkInvalid(state, chunk) {
|
||
var er;
|
||
if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
|
||
er = new TypeError('Invalid non-string/buffer chunk');
|
||
}
|
||
return er;
|
||
}
|
||
|
||
// if it's past the high water mark, we can push in some more.
|
||
// Also, if we have no data yet, we can stand some
|
||
// more bytes. This is to work around cases where hwm=0,
|
||
// such as the repl. Also, if the push() triggered a
|
||
// readable event, and the user called read(largeNumber) such that
|
||
// needReadable was set, then we ought to push more, so that another
|
||
// 'readable' event will be triggered.
|
||
function needMoreData(state) {
|
||
return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);
|
||
}
|
||
|
||
Readable.prototype.isPaused = function () {
|
||
return this._readableState.flowing === false;
|
||
};
|
||
|
||
// backwards compatibility.
|
||
Readable.prototype.setEncoding = function (enc) {
|
||
if (!StringDecoder) StringDecoder = __nccwpck_require__(781)/* .StringDecoder */ .s;
|
||
this._readableState.decoder = new StringDecoder(enc);
|
||
this._readableState.encoding = enc;
|
||
return this;
|
||
};
|
||
|
||
// Don't raise the hwm > 8MB
|
||
var MAX_HWM = 0x800000;
|
||
function computeNewHighWaterMark(n) {
|
||
if (n >= MAX_HWM) {
|
||
n = MAX_HWM;
|
||
} else {
|
||
// Get the next highest power of 2 to prevent increasing hwm excessively in
|
||
// tiny amounts
|
||
n--;
|
||
n |= n >>> 1;
|
||
n |= n >>> 2;
|
||
n |= n >>> 4;
|
||
n |= n >>> 8;
|
||
n |= n >>> 16;
|
||
n++;
|
||
}
|
||
return n;
|
||
}
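// Worked example for the rounding above (comment only): with n = 125000,
// n-1 = 124999 and the shift/OR steps set every bit below the highest set bit,
// giving 131071; n++ then yields 131072, the next power of two (128 KiB).
// Requests at or above MAX_HWM (0x800000 = 8 MiB) are clamped to MAX_HWM.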
|
||
|
||
// This function is designed to be inlinable, so please take care when making
|
||
// changes to the function body.
|
||
function howMuchToRead(n, state) {
|
||
if (n <= 0 || state.length === 0 && state.ended) return 0;
|
||
if (state.objectMode) return 1;
|
||
if (n !== n) {
|
||
// Only flow one buffer at a time
|
||
if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;
|
||
}
|
||
// If we're asking for more than the current hwm, then raise the hwm.
|
||
if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
|
||
if (n <= state.length) return n;
|
||
// Don't have enough
|
||
if (!state.ended) {
|
||
state.needReadable = true;
|
||
return 0;
|
||
}
|
||
return state.length;
|
||
}
|
||
|
||
// you can override either this method, or the async _read(n) below.
|
||
Readable.prototype.read = function (n) {
|
||
debug('read', n);
|
||
n = parseInt(n, 10);
|
||
var state = this._readableState;
|
||
var nOrig = n;
|
||
|
||
if (n !== 0) state.emittedReadable = false;
|
||
|
||
// if we're doing read(0) to trigger a readable event, but we
|
||
// already have a bunch of data in the buffer, then just trigger
|
||
// the 'readable' event and move on.
|
||
if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {
|
||
debug('read: emitReadable', state.length, state.ended);
|
||
if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
|
||
return null;
|
||
}
|
||
|
||
n = howMuchToRead(n, state);
|
||
|
||
// if we've ended, and we're now clear, then finish it up.
|
||
if (n === 0 && state.ended) {
|
||
if (state.length === 0) endReadable(this);
|
||
return null;
|
||
}
|
||
|
||
// All the actual chunk generation logic needs to be
|
||
// *below* the call to _read. The reason is that in certain
|
||
// synthetic stream cases, such as passthrough streams, _read
|
||
// may be a completely synchronous operation which may change
|
||
// the state of the read buffer, providing enough data when
|
||
// before there was *not* enough.
|
||
//
|
||
// So, the steps are:
|
||
// 1. Figure out what the state of things will be after we do
|
||
// a read from the buffer.
|
||
//
|
||
// 2. If that resulting state will trigger a _read, then call _read.
|
||
// Note that this may be asynchronous, or synchronous. Yes, it is
|
||
// deeply ugly to write APIs this way, but that still doesn't mean
|
||
// that the Readable class should behave improperly, as streams are
|
||
// designed to be sync/async agnostic.
|
||
// Take note if the _read call is sync or async (ie, if the read call
|
||
// has returned yet), so that we know whether or not it's safe to emit
|
||
// 'readable' etc.
|
||
//
|
||
// 3. Actually pull the requested chunks out of the buffer and return.
|
||
|
||
// if we need a readable event, then we need to do some reading.
|
||
var doRead = state.needReadable;
|
||
debug('need readable', doRead);
|
||
|
||
// if we currently have less than the highWaterMark, then also read some
|
||
if (state.length === 0 || state.length - n < state.highWaterMark) {
|
||
doRead = true;
|
||
debug('length less than watermark', doRead);
|
||
}
|
||
|
||
// however, if we've ended, then there's no point, and if we're already
|
||
// reading, then it's unnecessary.
|
||
if (state.ended || state.reading) {
|
||
doRead = false;
|
||
debug('reading or ended', doRead);
|
||
} else if (doRead) {
|
||
debug('do read');
|
||
state.reading = true;
|
||
state.sync = true;
|
||
// if the length is currently zero, then we *need* a readable event.
|
||
if (state.length === 0) state.needReadable = true;
|
||
// call internal read method
|
||
this._read(state.highWaterMark);
|
||
state.sync = false;
|
||
// If _read pushed data synchronously, then `reading` will be false,
|
||
// and we need to re-evaluate how much data we can return to the user.
|
||
if (!state.reading) n = howMuchToRead(nOrig, state);
|
||
}
|
||
|
||
var ret;
|
||
if (n > 0) ret = fromList(n, state);else ret = null;
|
||
|
||
if (ret === null) {
|
||
state.needReadable = true;
|
||
n = 0;
|
||
} else {
|
||
state.length -= n;
|
||
}
|
||
|
||
if (state.length === 0) {
|
||
// If we have nothing in the buffer, then we want to know
|
||
// as soon as we *do* get something into the buffer.
|
||
if (!state.ended) state.needReadable = true;
|
||
|
||
// If we tried to read() past the EOF, then emit end on the next tick.
|
||
if (nOrig !== n && state.ended) endReadable(this);
|
||
}
|
||
|
||
if (ret !== null) this.emit('data', ret);
|
||
|
||
return ret;
|
||
};
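// Illustrative sketch only, never invoked by this bundle: the paused-mode
// consumption pattern the comments above describe -- wait for 'readable',
// then drain the buffer with read() until it returns null. `readable` and
// `onChunk` are hypothetical arguments supplied by a caller.
function exampleReadableLoop(readable, onChunk) {
  readable.on('readable', function () {
    var chunk;
    while ((chunk = readable.read()) !== null) {
      onChunk(chunk);
    }
  });
}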
|
||
|
||
function onEofChunk(stream, state) {
|
||
if (state.ended) return;
|
||
if (state.decoder) {
|
||
var chunk = state.decoder.end();
|
||
if (chunk && chunk.length) {
|
||
state.buffer.push(chunk);
|
||
state.length += state.objectMode ? 1 : chunk.length;
|
||
}
|
||
}
|
||
state.ended = true;
|
||
|
||
// emit 'readable' now to make sure it gets picked up.
|
||
emitReadable(stream);
|
||
}
|
||
|
||
// Don't emit readable right away in sync mode, because this can trigger
|
||
// another read() call => stack overflow. This way, it might trigger
|
||
// a nextTick recursion warning, but that's not so bad.
|
||
function emitReadable(stream) {
|
||
var state = stream._readableState;
|
||
state.needReadable = false;
|
||
if (!state.emittedReadable) {
|
||
debug('emitReadable', state.flowing);
|
||
state.emittedReadable = true;
|
||
if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream);
|
||
}
|
||
}
|
||
|
||
function emitReadable_(stream) {
|
||
debug('emit readable');
|
||
stream.emit('readable');
|
||
flow(stream);
|
||
}
|
||
|
||
// at this point, the user has presumably seen the 'readable' event,
|
||
// and called read() to consume some data. that may have triggered
|
||
// in turn another _read(n) call, in which case reading = true if
|
||
// it's in progress.
|
||
// However, if we're not ended, or reading, and the length < hwm,
|
||
// then go ahead and try to read some more preemptively.
|
||
function maybeReadMore(stream, state) {
|
||
if (!state.readingMore) {
|
||
state.readingMore = true;
|
||
pna.nextTick(maybeReadMore_, stream, state);
|
||
}
|
||
}
|
||
|
||
function maybeReadMore_(stream, state) {
|
||
var len = state.length;
|
||
while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {
|
||
debug('maybeReadMore read 0');
|
||
stream.read(0);
|
||
if (len === state.length)
|
||
// didn't get any data, stop spinning.
|
||
break;else len = state.length;
|
||
}
|
||
state.readingMore = false;
|
||
}
|
||
|
||
// abstract method. to be overridden in specific implementation classes.
|
||
// call cb(er, data) where data is <= n in length.
|
||
// for virtual (non-string, non-buffer) streams, "length" is somewhat
|
||
// arbitrary, and perhaps not very meaningful.
|
||
Readable.prototype._read = function (n) {
|
||
this.emit('error', new Error('_read() is not implemented'));
|
||
};
|
||
|
||
Readable.prototype.pipe = function (dest, pipeOpts) {
|
||
var src = this;
|
||
var state = this._readableState;
|
||
|
||
switch (state.pipesCount) {
|
||
case 0:
|
||
state.pipes = dest;
|
||
break;
|
||
case 1:
|
||
state.pipes = [state.pipes, dest];
|
||
break;
|
||
default:
|
||
state.pipes.push(dest);
|
||
break;
|
||
}
|
||
state.pipesCount += 1;
|
||
debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
|
||
|
||
var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
|
||
|
||
var endFn = doEnd ? onend : unpipe;
|
||
if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn);
|
||
|
||
dest.on('unpipe', onunpipe);
|
||
function onunpipe(readable, unpipeInfo) {
|
||
debug('onunpipe');
|
||
if (readable === src) {
|
||
if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
|
||
unpipeInfo.hasUnpiped = true;
|
||
cleanup();
|
||
}
|
||
}
|
||
}
|
||
|
||
function onend() {
|
||
debug('onend');
|
||
dest.end();
|
||
}
|
||
|
||
// when the dest drains, it reduces the awaitDrain counter
|
||
// on the source. This would be more elegant with a .once()
|
||
// handler in flow(), but adding and removing repeatedly is
|
||
// too slow.
|
||
var ondrain = pipeOnDrain(src);
|
||
dest.on('drain', ondrain);
|
||
|
||
var cleanedUp = false;
|
||
function cleanup() {
|
||
debug('cleanup');
|
||
// cleanup event handlers once the pipe is broken
|
||
dest.removeListener('close', onclose);
|
||
dest.removeListener('finish', onfinish);
|
||
dest.removeListener('drain', ondrain);
|
||
dest.removeListener('error', onerror);
|
||
dest.removeListener('unpipe', onunpipe);
|
||
src.removeListener('end', onend);
|
||
src.removeListener('end', unpipe);
|
||
src.removeListener('data', ondata);
|
||
|
||
cleanedUp = true;
|
||
|
||
// if the reader is waiting for a drain event from this
|
||
// specific writer, then it would cause it to never start
|
||
// flowing again.
|
||
// So, if this is awaiting a drain, then we just call it now.
|
||
// If we don't know, then assume that we are waiting for one.
|
||
if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
|
||
}
|
||
|
||
// If the user pushes more data while we're writing to dest then we'll end up
|
||
// in ondata again. However, we only want to increase awaitDrain once because
|
||
// dest will only emit one 'drain' event for the multiple writes.
|
||
// => Introduce a guard on increasing awaitDrain.
|
||
var increasedAwaitDrain = false;
|
||
src.on('data', ondata);
|
||
function ondata(chunk) {
|
||
debug('ondata');
|
||
increasedAwaitDrain = false;
|
||
var ret = dest.write(chunk);
|
||
if (false === ret && !increasedAwaitDrain) {
|
||
// If the user unpiped during `dest.write()`, it is possible
|
||
// to get stuck in a permanently paused state if that write
|
||
// also returned false.
|
||
// => Check whether `dest` is still a piping destination.
|
||
if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
|
||
debug('false write response, pause', src._readableState.awaitDrain);
|
||
src._readableState.awaitDrain++;
|
||
increasedAwaitDrain = true;
|
||
}
|
||
src.pause();
|
||
}
|
||
}
|
||
|
||
// if the dest has an error, then stop piping into it.
|
||
// however, don't suppress the throwing behavior for this.
|
||
function onerror(er) {
|
||
debug('onerror', er);
|
||
unpipe();
|
||
dest.removeListener('error', onerror);
|
||
if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);
|
||
}
|
||
|
||
// Make sure our error handler is attached before userland ones.
|
||
prependListener(dest, 'error', onerror);
|
||
|
||
// Both close and finish should trigger unpipe, but only once.
|
||
function onclose() {
|
||
dest.removeListener('finish', onfinish);
|
||
unpipe();
|
||
}
|
||
dest.once('close', onclose);
|
||
function onfinish() {
|
||
debug('onfinish');
|
||
dest.removeListener('close', onclose);
|
||
unpipe();
|
||
}
|
||
dest.once('finish', onfinish);
|
||
|
||
function unpipe() {
|
||
debug('unpipe');
|
||
src.unpipe(dest);
|
||
}
|
||
|
||
// tell the dest that it's being piped to
|
||
dest.emit('pipe', src);
|
||
|
||
// start the flow if it hasn't been started already.
|
||
if (!state.flowing) {
|
||
debug('pipe resume');
|
||
src.resume();
|
||
}
|
||
|
||
return dest;
|
||
};
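// Illustrative sketch only, never invoked by this bundle: pipe() above wires
// the 'data'/'drain' handlers so back-pressure is automatic -- when
// dest.write() returns false the source pauses and awaitDrain is bumped, and
// the 'drain' handler resumes the flow. `source` and `dest` are hypothetical.
function examplePipe(source, dest) {
  return source.pipe(dest, { end: true });
}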
|
||
|
||
function pipeOnDrain(src) {
|
||
return function () {
|
||
var state = src._readableState;
|
||
debug('pipeOnDrain', state.awaitDrain);
|
||
if (state.awaitDrain) state.awaitDrain--;
|
||
if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
|
||
state.flowing = true;
|
||
flow(src);
|
||
}
|
||
};
|
||
}
|
||
|
||
Readable.prototype.unpipe = function (dest) {
|
||
var state = this._readableState;
|
||
var unpipeInfo = { hasUnpiped: false };
|
||
|
||
// if we're not piping anywhere, then do nothing.
|
||
if (state.pipesCount === 0) return this;
|
||
|
||
// just one destination. most common case.
|
||
if (state.pipesCount === 1) {
|
||
// passed in one, but it's not the right one.
|
||
if (dest && dest !== state.pipes) return this;
|
||
|
||
if (!dest) dest = state.pipes;
|
||
|
||
// got a match.
|
||
state.pipes = null;
|
||
state.pipesCount = 0;
|
||
state.flowing = false;
|
||
if (dest) dest.emit('unpipe', this, unpipeInfo);
|
||
return this;
|
||
}
|
||
|
||
// slow case. multiple pipe destinations.
|
||
|
||
if (!dest) {
|
||
// remove all.
|
||
var dests = state.pipes;
|
||
var len = state.pipesCount;
|
||
state.pipes = null;
|
||
state.pipesCount = 0;
|
||
state.flowing = false;
|
||
|
||
for (var i = 0; i < len; i++) {
|
||
dests[i].emit('unpipe', this, unpipeInfo);
|
||
}return this;
|
||
}
|
||
|
||
// try to find the right one.
|
||
var index = indexOf(state.pipes, dest);
|
||
if (index === -1) return this;
|
||
|
||
state.pipes.splice(index, 1);
|
||
state.pipesCount -= 1;
|
||
if (state.pipesCount === 1) state.pipes = state.pipes[0];
|
||
|
||
dest.emit('unpipe', this, unpipeInfo);
|
||
|
||
return this;
|
||
};
|
||
|
||
// set up data events if they are asked for
|
||
// Ensure readable listeners eventually get something
|
||
Readable.prototype.on = function (ev, fn) {
|
||
var res = Stream.prototype.on.call(this, ev, fn);
|
||
|
||
if (ev === 'data') {
|
||
// Start flowing on next tick if stream isn't explicitly paused
|
||
if (this._readableState.flowing !== false) this.resume();
|
||
} else if (ev === 'readable') {
|
||
var state = this._readableState;
|
||
if (!state.endEmitted && !state.readableListening) {
|
||
state.readableListening = state.needReadable = true;
|
||
state.emittedReadable = false;
|
||
if (!state.reading) {
|
||
pna.nextTick(nReadingNextTick, this);
|
||
} else if (state.length) {
|
||
emitReadable(this);
|
||
}
|
||
}
|
||
}
|
||
|
||
return res;
|
||
};
|
||
Readable.prototype.addListener = Readable.prototype.on;
|
||
|
||
function nReadingNextTick(self) {
|
||
debug('readable nexttick read 0');
|
||
self.read(0);
|
||
}
|
||
|
||
// pause() and resume() are remnants of the legacy readable stream API
|
||
// If the user uses them, then switch into old mode.
|
||
Readable.prototype.resume = function () {
|
||
var state = this._readableState;
|
||
if (!state.flowing) {
|
||
debug('resume');
|
||
state.flowing = true;
|
||
resume(this, state);
|
||
}
|
||
return this;
|
||
};
|
||
|
||
function resume(stream, state) {
|
||
if (!state.resumeScheduled) {
|
||
state.resumeScheduled = true;
|
||
pna.nextTick(resume_, stream, state);
|
||
}
|
||
}
|
||
|
||
function resume_(stream, state) {
|
||
if (!state.reading) {
|
||
debug('resume read 0');
|
||
stream.read(0);
|
||
}
|
||
|
||
state.resumeScheduled = false;
|
||
state.awaitDrain = 0;
|
||
stream.emit('resume');
|
||
flow(stream);
|
||
if (state.flowing && !state.reading) stream.read(0);
|
||
}
|
||
|
||
Readable.prototype.pause = function () {
|
||
debug('call pause flowing=%j', this._readableState.flowing);
|
||
if (false !== this._readableState.flowing) {
|
||
debug('pause');
|
||
this._readableState.flowing = false;
|
||
this.emit('pause');
|
||
}
|
||
return this;
|
||
};
|
||
|
||
function flow(stream) {
|
||
var state = stream._readableState;
|
||
debug('flow', state.flowing);
|
||
while (state.flowing && stream.read() !== null) {}
|
||
}
|
||
|
||
// wrap an old-style stream as the async data source.
|
||
// This is *not* part of the readable stream interface.
|
||
// It is an ugly unfortunate mess of history.
|
||
Readable.prototype.wrap = function (stream) {
|
||
var _this = this;
|
||
|
||
var state = this._readableState;
|
||
var paused = false;
|
||
|
||
stream.on('end', function () {
|
||
debug('wrapped end');
|
||
if (state.decoder && !state.ended) {
|
||
var chunk = state.decoder.end();
|
||
if (chunk && chunk.length) _this.push(chunk);
|
||
}
|
||
|
||
_this.push(null);
|
||
});
|
||
|
||
stream.on('data', function (chunk) {
|
||
debug('wrapped data');
|
||
if (state.decoder) chunk = state.decoder.write(chunk);
|
||
|
||
// don't skip over falsy values in objectMode
|
||
if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
|
||
|
||
var ret = _this.push(chunk);
|
||
if (!ret) {
|
||
paused = true;
|
||
stream.pause();
|
||
}
|
||
});
|
||
|
||
// proxy all the other methods.
|
||
// important when wrapping filters and duplexes.
|
||
for (var i in stream) {
|
||
if (this[i] === undefined && typeof stream[i] === 'function') {
|
||
this[i] = function (method) {
|
||
return function () {
|
||
return stream[method].apply(stream, arguments);
|
||
};
|
||
}(i);
|
||
}
|
||
}
|
||
|
||
// proxy certain important events.
|
||
for (var n = 0; n < kProxyEvents.length; n++) {
|
||
stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
|
||
}
|
||
|
||
// when we try to consume some more bytes, simply unpause the
|
||
// underlying stream.
|
||
this._read = function (n) {
|
||
debug('wrapped _read', n);
|
||
if (paused) {
|
||
paused = false;
|
||
stream.resume();
|
||
}
|
||
};
|
||
|
||
return this;
|
||
};
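// Illustrative sketch only, never invoked by this bundle: wrap() adapts an
// old-style stream that only emits 'data'/'end' so it can be consumed with
// read() or pipe(). `oldStream` is a hypothetical legacy stream.
function exampleWrap(oldStream) {
  var modern = new Readable({ highWaterMark: 16 * 1024 });
  return modern.wrap(oldStream);
}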
|
||
|
||
Object.defineProperty(Readable.prototype, 'readableHighWaterMark', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function () {
|
||
return this._readableState.highWaterMark;
|
||
}
|
||
});
|
||
|
||
// exposed for testing purposes only.
|
||
Readable._fromList = fromList;
|
||
|
||
// Pluck off n bytes from an array of buffers.
|
||
// Length is the combined lengths of all the buffers in the list.
|
||
// This function is designed to be inlinable, so please take care when making
|
||
// changes to the function body.
|
||
function fromList(n, state) {
|
||
// nothing buffered
|
||
if (state.length === 0) return null;
|
||
|
||
var ret;
|
||
if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
|
||
// read it all, truncate the list
|
||
if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length);
|
||
state.buffer.clear();
|
||
} else {
|
||
// read part of list
|
||
ret = fromListPartial(n, state.buffer, state.decoder);
|
||
}
|
||
|
||
return ret;
|
||
}
|
||
|
||
// Extracts only enough buffered data to satisfy the amount requested.
|
||
// This function is designed to be inlinable, so please take care when making
|
||
// changes to the function body.
|
||
function fromListPartial(n, list, hasStrings) {
|
||
var ret;
|
||
if (n < list.head.data.length) {
|
||
// slice is the same for buffers and strings
|
||
ret = list.head.data.slice(0, n);
|
||
list.head.data = list.head.data.slice(n);
|
||
} else if (n === list.head.data.length) {
|
||
// first chunk is a perfect match
|
||
ret = list.shift();
|
||
} else {
|
||
// result spans more than one buffer
|
||
ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list);
|
||
}
|
||
return ret;
|
||
}
|
||
|
||
// Copies a specified amount of characters from the list of buffered data
|
||
// chunks.
|
||
// This function is designed to be inlinable, so please take care when making
|
||
// changes to the function body.
|
||
function copyFromBufferString(n, list) {
|
||
var p = list.head;
|
||
var c = 1;
|
||
var ret = p.data;
|
||
n -= ret.length;
|
||
while (p = p.next) {
|
||
var str = p.data;
|
||
var nb = n > str.length ? str.length : n;
|
||
if (nb === str.length) ret += str;else ret += str.slice(0, n);
|
||
n -= nb;
|
||
if (n === 0) {
|
||
if (nb === str.length) {
|
||
++c;
|
||
if (p.next) list.head = p.next;else list.head = list.tail = null;
|
||
} else {
|
||
list.head = p;
|
||
p.data = str.slice(nb);
|
||
}
|
||
break;
|
||
}
|
||
++c;
|
||
}
|
||
list.length -= c;
|
||
return ret;
|
||
}
|
||
|
||
// Copies a specified amount of bytes from the list of buffered data chunks.
|
||
// This function is designed to be inlinable, so please take care when making
|
||
// changes to the function body.
|
||
function copyFromBuffer(n, list) {
|
||
var ret = Buffer.allocUnsafe(n);
|
||
var p = list.head;
|
||
var c = 1;
|
||
p.data.copy(ret);
|
||
n -= p.data.length;
|
||
while (p = p.next) {
|
||
var buf = p.data;
|
||
var nb = n > buf.length ? buf.length : n;
|
||
buf.copy(ret, ret.length - n, 0, nb);
|
||
n -= nb;
|
||
if (n === 0) {
|
||
if (nb === buf.length) {
|
||
++c;
|
||
if (p.next) list.head = p.next;else list.head = list.tail = null;
|
||
} else {
|
||
list.head = p;
|
||
p.data = buf.slice(nb);
|
||
}
|
||
break;
|
||
}
|
||
++c;
|
||
}
|
||
list.length -= c;
|
||
return ret;
|
||
}
|
||
|
||
function endReadable(stream) {
|
||
var state = stream._readableState;
|
||
|
||
// If we get here before consuming all the bytes, then that is a
|
||
// bug in node. Should never happen.
|
||
if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream');
|
||
|
||
if (!state.endEmitted) {
|
||
state.ended = true;
|
||
pna.nextTick(endReadableNT, state, stream);
|
||
}
|
||
}
|
||
|
||
function endReadableNT(state, stream) {
|
||
// Check that we didn't get one last unshift.
|
||
if (!state.endEmitted && state.length === 0) {
|
||
state.endEmitted = true;
|
||
stream.readable = false;
|
||
stream.emit('end');
|
||
}
|
||
}
|
||
|
||
function indexOf(xs, x) {
|
||
for (var i = 0, l = xs.length; i < l; i++) {
|
||
if (xs[i] === x) return i;
|
||
}
|
||
return -1;
|
||
}
|
||
|
||
/***/ }),
|
||
|
||
/***/ 3533:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
||
// a transform stream is a readable/writable stream where you do
|
||
// something with the data. Sometimes it's called a "filter",
|
||
// but that's not a great name for it, since that implies a thing where
|
||
// some bits pass through, and others are simply ignored. (That would
|
||
// be a valid example of a transform, of course.)
|
||
//
|
||
// While the output is causally related to the input, it's not a
|
||
// necessarily symmetric or synchronous transformation. For example,
|
||
// a zlib stream might take multiple plain-text writes(), and then
|
||
// emit a single compressed chunk some time in the future.
|
||
//
|
||
// Here's how this works:
|
||
//
|
||
// The Transform stream has all the aspects of the readable and writable
|
||
// stream classes. When you write(chunk), that calls _write(chunk,cb)
|
||
// internally, and returns false if there's a lot of pending writes
|
||
// buffered up. When you call read(), that calls _read(n) until
|
||
// there's enough pending readable data buffered up.
|
||
//
|
||
// In a transform stream, the written data is placed in a buffer. When
|
||
// _read(n) is called, it transforms the queued up data, calling the
|
||
// buffered _write cb's as it consumes chunks. If consuming a single
|
||
// written chunk would result in multiple output chunks, then the first
|
||
// outputted bit calls the readcb, and subsequent chunks just go into
|
||
// the read buffer, and will cause it to emit 'readable' if necessary.
|
||
//
|
||
// This way, back-pressure is actually determined by the reading side,
|
||
// since _read has to be called to start processing a new chunk. However,
|
||
// a pathological inflate type of transform can cause excessive buffering
|
||
// here. For example, imagine a stream where every byte of input is
|
||
// interpreted as an integer from 0-255, and then results in that many
|
||
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
|
||
// 1kb of data being output. In this case, you could write a very small
|
||
// amount of input, and end up with a very large amount of output. In
|
||
// such a pathological inflating mechanism, there'd be no way to tell
|
||
// the system to stop doing the transform. A single 4MB write could
|
||
// cause the system to run out of memory.
|
||
//
|
||
// However, even in such a pathological case, only a single written chunk
|
||
// would be consumed, and then the rest would wait (un-transformed) until
|
||
// the results of the previous transformed chunk were consumed.
|
||
|
||
|
||
|
||
module.exports = Transform;
|
||
|
||
var Duplex = __nccwpck_require__(415);
|
||
|
||
/*<replacement>*/
|
||
var util = Object.create(__nccwpck_require__(9300));
|
||
util.inherits = __nccwpck_require__(3753);
|
||
/*</replacement>*/
|
||
|
||
util.inherits(Transform, Duplex);
|
||
|
||
function afterTransform(er, data) {
|
||
var ts = this._transformState;
|
||
ts.transforming = false;
|
||
|
||
var cb = ts.writecb;
|
||
|
||
if (!cb) {
|
||
return this.emit('error', new Error('write callback called multiple times'));
|
||
}
|
||
|
||
ts.writechunk = null;
|
||
ts.writecb = null;
|
||
|
||
if (data != null) // single equals check for both `null` and `undefined`
|
||
this.push(data);
|
||
|
||
cb(er);
|
||
|
||
var rs = this._readableState;
|
||
rs.reading = false;
|
||
if (rs.needReadable || rs.length < rs.highWaterMark) {
|
||
this._read(rs.highWaterMark);
|
||
}
|
||
}
|
||
|
||
function Transform(options) {
|
||
if (!(this instanceof Transform)) return new Transform(options);
|
||
|
||
Duplex.call(this, options);
|
||
|
||
this._transformState = {
|
||
afterTransform: afterTransform.bind(this),
|
||
needTransform: false,
|
||
transforming: false,
|
||
writecb: null,
|
||
writechunk: null,
|
||
writeencoding: null
|
||
};
|
||
|
||
// start out asking for a readable event once data is transformed.
|
||
this._readableState.needReadable = true;
|
||
|
||
// we have implemented the _read method, and done the other things
|
||
// that Readable wants before the first _read call, so unset the
|
||
// sync guard flag.
|
||
this._readableState.sync = false;
|
||
|
||
if (options) {
|
||
if (typeof options.transform === 'function') this._transform = options.transform;
|
||
|
||
if (typeof options.flush === 'function') this._flush = options.flush;
|
||
}
|
||
|
||
// When the writable side finishes, then flush out anything remaining.
|
||
this.on('prefinish', prefinish);
|
||
}
|
||
|
||
function prefinish() {
|
||
var _this = this;
|
||
|
||
if (typeof this._flush === 'function') {
|
||
this._flush(function (er, data) {
|
||
done(_this, er, data);
|
||
});
|
||
} else {
|
||
done(this, null, null);
|
||
}
|
||
}
|
||
|
||
Transform.prototype.push = function (chunk, encoding) {
|
||
this._transformState.needTransform = false;
|
||
return Duplex.prototype.push.call(this, chunk, encoding);
|
||
};
|
||
|
||
// This is the part where you do stuff!
|
||
// override this function in implementation classes.
|
||
// 'chunk' is an input chunk.
|
||
//
|
||
// Call `push(newChunk)` to pass along transformed output
|
||
// to the readable side. You may call 'push' zero or more times.
|
||
//
|
||
// Call `cb(err)` when you are done with this chunk. If you pass
|
||
// an error, then that'll put the hurt on the whole operation. If you
|
||
// never call cb(), then you'll never get another chunk.
|
||
Transform.prototype._transform = function (chunk, encoding, cb) {
|
||
throw new Error('_transform() is not implemented');
|
||
};
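// Illustrative sketch only, never invoked by this bundle: instead of
// subclassing, _transform can be supplied through the options object, as the
// constructor above supports. This hypothetical transform upper-cases each
// chunk's text before pushing it downstream.
function exampleUpperCaseTransform() {
  return new Transform({
    transform: function (chunk, encoding, cb) {
      cb(null, chunk.toString().toUpperCase());
    }
  });
}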
|
||
|
||
Transform.prototype._write = function (chunk, encoding, cb) {
|
||
var ts = this._transformState;
|
||
ts.writecb = cb;
|
||
ts.writechunk = chunk;
|
||
ts.writeencoding = encoding;
|
||
if (!ts.transforming) {
|
||
var rs = this._readableState;
|
||
if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
|
||
}
|
||
};
|
||
|
||
// Doesn't matter what the args are here.
|
||
// _transform does all the work.
|
||
// That we got here means that the readable side wants more data.
|
||
Transform.prototype._read = function (n) {
|
||
var ts = this._transformState;
|
||
|
||
if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
|
||
ts.transforming = true;
|
||
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
|
||
} else {
|
||
// mark that we need a transform, so that any data that comes in
|
||
// will get processed, now that we've asked for it.
|
||
ts.needTransform = true;
|
||
}
|
||
};
|
||
|
||
Transform.prototype._destroy = function (err, cb) {
|
||
var _this2 = this;
|
||
|
||
Duplex.prototype._destroy.call(this, err, function (err2) {
|
||
cb(err2);
|
||
_this2.emit('close');
|
||
});
|
||
};
|
||
|
||
function done(stream, er, data) {
|
||
if (er) return stream.emit('error', er);
|
||
|
||
if (data != null) // single equals check for both `null` and `undefined`
|
||
stream.push(data);
|
||
|
||
// if there's nothing in the write buffer, then that means
|
||
// that nothing more will ever be provided
|
||
if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0');
|
||
|
||
if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming');
|
||
|
||
return stream.push(null);
|
||
}
|
||
|
||
/***/ }),
|
||
|
||
/***/ 5104:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
||
// A bit simpler than readable streams.
|
||
// Implement an async ._write(chunk, encoding, cb), and it'll handle all
|
||
// the drain event emission and buffering.
|
||
|
||
|
||
|
||
/*<replacement>*/
|
||
|
||
var pna = __nccwpck_require__(9029);
|
||
/*</replacement>*/
|
||
|
||
module.exports = Writable;
|
||
|
||
/* <replacement> */
|
||
function WriteReq(chunk, encoding, cb) {
|
||
this.chunk = chunk;
|
||
this.encoding = encoding;
|
||
this.callback = cb;
|
||
this.next = null;
|
||
}
|
||
|
||
// It looks like a linked list, but it is not;
|
||
// there will be only 2 of these for each stream
|
||
function CorkedRequest(state) {
|
||
var _this = this;
|
||
|
||
this.next = null;
|
||
this.entry = null;
|
||
this.finish = function () {
|
||
onCorkedFinish(_this, state);
|
||
};
|
||
}
|
||
/* </replacement> */
|
||
|
||
/*<replacement>*/
|
||
var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick;
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
var Duplex;
|
||
/*</replacement>*/
|
||
|
||
Writable.WritableState = WritableState;
|
||
|
||
/*<replacement>*/
|
||
var util = Object.create(__nccwpck_require__(9300));
|
||
util.inherits = __nccwpck_require__(3753);
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
var internalUtil = {
|
||
deprecate: __nccwpck_require__(2053)
|
||
};
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
var Stream = __nccwpck_require__(3661);
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
|
||
var Buffer = __nccwpck_require__(1534).Buffer;
|
||
var OurUint8Array = global.Uint8Array || function () {};
|
||
function _uint8ArrayToBuffer(chunk) {
|
||
return Buffer.from(chunk);
|
||
}
|
||
function _isUint8Array(obj) {
|
||
return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
|
||
}
|
||
|
||
/*</replacement>*/
|
||
|
||
var destroyImpl = __nccwpck_require__(7174);
|
||
|
||
util.inherits(Writable, Stream);
|
||
|
||
function nop() {}
|
||
|
||
function WritableState(options, stream) {
|
||
Duplex = Duplex || __nccwpck_require__(415);
|
||
|
||
options = options || {};
|
||
|
||
// Duplex streams are both readable and writable, but share
|
||
// the same options object.
|
||
// However, some cases require setting options to different
|
||
// values for the readable and the writable sides of the duplex stream.
|
||
// These options can be provided separately as readableXXX and writableXXX.
|
||
var isDuplex = stream instanceof Duplex;
|
||
|
||
// object stream flag to indicate whether or not this stream
|
||
// contains buffers or objects.
|
||
this.objectMode = !!options.objectMode;
|
||
|
||
if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
|
||
|
||
// the point at which write() starts returning false
|
||
// Note: 0 is a valid value, means that we always return false if
|
||
// the entire buffer is not flushed immediately on write()
|
||
var hwm = options.highWaterMark;
|
||
var writableHwm = options.writableHighWaterMark;
|
||
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
|
||
|
||
if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm;
|
||
|
||
// cast to ints.
|
||
this.highWaterMark = Math.floor(this.highWaterMark);
|
||
|
||
// if _final has been called
|
||
this.finalCalled = false;
|
||
|
||
// drain event flag.
|
||
this.needDrain = false;
|
||
// at the start of calling end()
|
||
this.ending = false;
|
||
// when end() has been called, and returned
|
||
this.ended = false;
|
||
// when 'finish' is emitted
|
||
this.finished = false;
|
||
|
||
// has it been destroyed
|
||
this.destroyed = false;
|
||
|
||
// should we decode strings into buffers before passing to _write?
|
||
// this is here so that some node-core streams can optimize string
|
||
// handling at a lower level.
|
||
var noDecode = options.decodeStrings === false;
|
||
this.decodeStrings = !noDecode;
|
||
|
||
// Crypto is kind of old and crusty. Historically, its default string
|
||
// encoding is 'binary' so we have to make this configurable.
|
||
// Everything else in the universe uses 'utf8', though.
|
||
this.defaultEncoding = options.defaultEncoding || 'utf8';
|
||
|
||
// not an actual buffer we keep track of, but a measurement
|
||
// of how much we're waiting to get pushed to some underlying
|
||
// socket or file.
|
||
this.length = 0;
|
||
|
||
// a flag to see when we're in the middle of a write.
|
||
this.writing = false;
|
||
|
||
// when true all writes will be buffered until .uncork() call
|
||
this.corked = 0;
|
||
|
||
// a flag to be able to tell if the onwrite cb is called immediately,
|
||
// or on a later tick. We set this to true at first, because any
|
||
// actions that shouldn't happen until "later" should generally also
|
||
// not happen before the first write call.
|
||
this.sync = true;
|
||
|
||
// a flag to know if we're processing previously buffered items, which
|
||
// may call the _write() callback in the same tick, so that we don't
|
||
// end up in an overlapped onwrite situation.
|
||
this.bufferProcessing = false;
|
||
|
||
// the callback that's passed to _write(chunk,cb)
|
||
this.onwrite = function (er) {
|
||
onwrite(stream, er);
|
||
};
|
||
|
||
// the callback that the user supplies to write(chunk,encoding,cb)
|
||
this.writecb = null;
|
||
|
||
// the amount that is being written when _write is called.
|
||
this.writelen = 0;
|
||
|
||
this.bufferedRequest = null;
|
||
this.lastBufferedRequest = null;
|
||
|
||
// number of pending user-supplied write callbacks
|
||
// this must be 0 before 'finish' can be emitted
|
||
this.pendingcb = 0;
|
||
|
||
// emit prefinish if the only thing we're waiting for is _write cbs
|
||
// This is relevant for synchronous Transform streams
|
||
this.prefinished = false;
|
||
|
||
// True if the error was already emitted and should not be thrown again
|
||
this.errorEmitted = false;
|
||
|
||
// count buffered requests
|
||
this.bufferedRequestCount = 0;
|
||
|
||
// allocate the first CorkedRequest, there is always
|
||
// one allocated and free to use, and we maintain at most two
|
||
this.corkedRequestsFree = new CorkedRequest(this);
|
||
}
|
||
|
||
WritableState.prototype.getBuffer = function getBuffer() {
|
||
var current = this.bufferedRequest;
|
||
var out = [];
|
||
while (current) {
|
||
out.push(current);
|
||
current = current.next;
|
||
}
|
||
return out;
|
||
};
|
||
|
||
(function () {
|
||
try {
|
||
Object.defineProperty(WritableState.prototype, 'buffer', {
|
||
get: internalUtil.deprecate(function () {
|
||
return this.getBuffer();
|
||
}, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
|
||
});
|
||
} catch (_) {}
|
||
})();
|
||
|
||
// Test _writableState for inheritance to account for Duplex streams,
|
||
// whose prototype chain only points to Readable.
|
||
var realHasInstance;
|
||
if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
|
||
realHasInstance = Function.prototype[Symbol.hasInstance];
|
||
Object.defineProperty(Writable, Symbol.hasInstance, {
|
||
value: function (object) {
|
||
if (realHasInstance.call(this, object)) return true;
|
||
if (this !== Writable) return false;
|
||
|
||
return object && object._writableState instanceof WritableState;
|
||
}
|
||
});
|
||
} else {
|
||
realHasInstance = function (object) {
|
||
return object instanceof this;
|
||
};
|
||
}
|
||
|
||
function Writable(options) {
|
||
Duplex = Duplex || __nccwpck_require__(415);
|
||
|
||
// Writable ctor is applied to Duplexes, too.
|
||
// `realHasInstance` is necessary because using plain `instanceof`
|
||
// would return false, as no `_writableState` property is attached.
|
||
|
||
// Trying to use the custom `instanceof` for Writable here will also break the
|
||
// Node.js LazyTransform implementation, which has a non-trivial getter for
|
||
// `_writableState` that would lead to infinite recursion.
|
||
if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) {
|
||
return new Writable(options);
|
||
}
|
||
|
||
this._writableState = new WritableState(options, this);
|
||
|
||
// legacy.
|
||
this.writable = true;
|
||
|
||
if (options) {
|
||
if (typeof options.write === 'function') this._write = options.write;
|
||
|
||
if (typeof options.writev === 'function') this._writev = options.writev;
|
||
|
||
if (typeof options.destroy === 'function') this._destroy = options.destroy;
|
||
|
||
if (typeof options.final === 'function') this._final = options.final;
|
||
}
|
||
|
||
Stream.call(this);
|
||
}
|
||
|
||
// Otherwise people can pipe Writable streams, which is just wrong.
|
||
Writable.prototype.pipe = function () {
|
||
this.emit('error', new Error('Cannot pipe, not readable'));
|
||
};
|
||
|
||
function writeAfterEnd(stream, cb) {
|
||
var er = new Error('write after end');
|
||
// TODO: defer error events consistently everywhere, not just the cb
|
||
stream.emit('error', er);
|
||
pna.nextTick(cb, er);
|
||
}
|
||
|
||
// Checks that a user-supplied chunk is valid, especially for the particular
|
||
// mode the stream is in. Currently this means that `null` is never accepted
|
||
// and undefined/non-string values are only allowed in object mode.
|
||
function validChunk(stream, state, chunk, cb) {
|
||
var valid = true;
|
||
var er = false;
|
||
|
||
if (chunk === null) {
|
||
er = new TypeError('May not write null values to stream');
|
||
} else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
|
||
er = new TypeError('Invalid non-string/buffer chunk');
|
||
}
|
||
if (er) {
|
||
stream.emit('error', er);
|
||
pna.nextTick(cb, er);
|
||
valid = false;
|
||
}
|
||
return valid;
|
||
}
|
||
|
||
Writable.prototype.write = function (chunk, encoding, cb) {
|
||
var state = this._writableState;
|
||
var ret = false;
|
||
var isBuf = !state.objectMode && _isUint8Array(chunk);
|
||
|
||
if (isBuf && !Buffer.isBuffer(chunk)) {
|
||
chunk = _uint8ArrayToBuffer(chunk);
|
||
}
|
||
|
||
if (typeof encoding === 'function') {
|
||
cb = encoding;
|
||
encoding = null;
|
||
}
|
||
|
||
if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
|
||
|
||
if (typeof cb !== 'function') cb = nop;
|
||
|
||
if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
|
||
state.pendingcb++;
|
||
ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
|
||
}
|
||
|
||
return ret;
|
||
};
|
||
|
||
Writable.prototype.cork = function () {
|
||
var state = this._writableState;
|
||
|
||
state.corked++;
|
||
};
|
||
|
||
Writable.prototype.uncork = function () {
|
||
var state = this._writableState;
|
||
|
||
if (state.corked) {
|
||
state.corked--;
|
||
|
||
if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
|
||
}
|
||
};
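// Illustrative sketch only, never invoked by this bundle: cork() increments
// state.corked so writes are buffered, and uncork() flushes them as one batch,
// letting several small writes reach _writev()/_write() together. `dest` is a
// hypothetical writable stream.
function exampleCorkedBatch(dest) {
  dest.cork();
  dest.write('header\n');
  dest.write('body\n');
  process.nextTick(function () {
    dest.uncork();
  });
}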
|
||
|
||
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
|
||
// node::ParseEncoding() requires lower case.
|
||
if (typeof encoding === 'string') encoding = encoding.toLowerCase();
|
||
if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding);
|
||
this._writableState.defaultEncoding = encoding;
|
||
return this;
|
||
};
|
||
|
||
function decodeChunk(state, chunk, encoding) {
|
||
if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
|
||
chunk = Buffer.from(chunk, encoding);
|
||
}
|
||
return chunk;
|
||
}
|
||
|
||
Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function () {
|
||
return this._writableState.highWaterMark;
|
||
}
|
||
});
|
||
|
||
// if we're already writing something, then just put this
|
||
// in the queue, and wait our turn. Otherwise, call _write
|
||
// If we return false, then we need a drain event, so set that flag.
|
||
function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
|
||
if (!isBuf) {
|
||
var newChunk = decodeChunk(state, chunk, encoding);
|
||
if (chunk !== newChunk) {
|
||
isBuf = true;
|
||
encoding = 'buffer';
|
||
chunk = newChunk;
|
||
}
|
||
}
|
||
var len = state.objectMode ? 1 : chunk.length;
|
||
|
||
state.length += len;
|
||
|
||
var ret = state.length < state.highWaterMark;
|
||
// we must ensure that previous needDrain will not be reset to false.
|
||
if (!ret) state.needDrain = true;
|
||
|
||
if (state.writing || state.corked) {
|
||
var last = state.lastBufferedRequest;
|
||
state.lastBufferedRequest = {
|
||
chunk: chunk,
|
||
encoding: encoding,
|
||
isBuf: isBuf,
|
||
callback: cb,
|
||
next: null
|
||
};
|
||
if (last) {
|
||
last.next = state.lastBufferedRequest;
|
||
} else {
|
||
state.bufferedRequest = state.lastBufferedRequest;
|
||
}
|
||
state.bufferedRequestCount += 1;
|
||
} else {
|
||
doWrite(stream, state, false, len, chunk, encoding, cb);
|
||
}
|
||
|
||
return ret;
|
||
}
|
||
|
||
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
|
||
state.writelen = len;
|
||
state.writecb = cb;
|
||
state.writing = true;
|
||
state.sync = true;
|
||
if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
|
||
state.sync = false;
|
||
}
|
||
|
||
function onwriteError(stream, state, sync, er, cb) {
|
||
--state.pendingcb;
|
||
|
||
if (sync) {
|
||
// defer the callback if we are being called synchronously
|
||
// to avoid piling up things on the stack
|
||
pna.nextTick(cb, er);
|
||
// this can emit finish, and it will always happen
|
||
// after error
|
||
pna.nextTick(finishMaybe, stream, state);
|
||
stream._writableState.errorEmitted = true;
|
||
stream.emit('error', er);
|
||
} else {
|
||
// the caller expect this to happen before if
|
||
// it is async
|
||
cb(er);
|
||
stream._writableState.errorEmitted = true;
|
||
stream.emit('error', er);
|
||
// this can emit finish, but finish must
|
||
// always follow error
|
||
finishMaybe(stream, state);
|
||
}
|
||
}
|
||
|
||
function onwriteStateUpdate(state) {
|
||
state.writing = false;
|
||
state.writecb = null;
|
||
state.length -= state.writelen;
|
||
state.writelen = 0;
|
||
}
|
||
|
||
function onwrite(stream, er) {
|
||
var state = stream._writableState;
|
||
var sync = state.sync;
|
||
var cb = state.writecb;
|
||
|
||
onwriteStateUpdate(state);
|
||
|
||
if (er) onwriteError(stream, state, sync, er, cb);else {
|
||
// Check if we're actually ready to finish, but don't emit yet
|
||
var finished = needFinish(state);
|
||
|
||
if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
|
||
clearBuffer(stream, state);
|
||
}
|
||
|
||
if (sync) {
|
||
/*<replacement>*/
|
||
asyncWrite(afterWrite, stream, state, finished, cb);
|
||
/*</replacement>*/
|
||
} else {
|
||
afterWrite(stream, state, finished, cb);
|
||
}
|
||
}
|
||
}
|
||
|
||
function afterWrite(stream, state, finished, cb) {
|
||
if (!finished) onwriteDrain(stream, state);
|
||
state.pendingcb--;
|
||
cb();
|
||
finishMaybe(stream, state);
|
||
}
|
||
|
||
// Must force callback to be called on nextTick, so that we don't
|
||
// emit 'drain' before the write() consumer gets the 'false' return
|
||
// value, and has a chance to attach a 'drain' listener.
|
||
function onwriteDrain(stream, state) {
|
||
if (state.length === 0 && state.needDrain) {
|
||
state.needDrain = false;
|
||
stream.emit('drain');
|
||
}
|
||
}
|
||
|
||
// if there's something in the buffer waiting, then process it
|
||
function clearBuffer(stream, state) {
|
||
state.bufferProcessing = true;
|
||
var entry = state.bufferedRequest;
|
||
|
||
if (stream._writev && entry && entry.next) {
|
||
// Fast case, write everything using _writev()
|
||
var l = state.bufferedRequestCount;
|
||
var buffer = new Array(l);
|
||
var holder = state.corkedRequestsFree;
|
||
holder.entry = entry;
|
||
|
||
var count = 0;
|
||
var allBuffers = true;
|
||
while (entry) {
|
||
buffer[count] = entry;
|
||
if (!entry.isBuf) allBuffers = false;
|
||
entry = entry.next;
|
||
count += 1;
|
||
}
|
||
buffer.allBuffers = allBuffers;
|
||
|
||
doWrite(stream, state, true, state.length, buffer, '', holder.finish);
|
||
|
||
// doWrite is almost always async, defer these to save a bit of time
|
||
// as the hot path ends with doWrite
|
||
state.pendingcb++;
|
||
state.lastBufferedRequest = null;
|
||
if (holder.next) {
|
||
state.corkedRequestsFree = holder.next;
|
||
holder.next = null;
|
||
} else {
|
||
state.corkedRequestsFree = new CorkedRequest(state);
|
||
}
|
||
state.bufferedRequestCount = 0;
|
||
} else {
|
||
// Slow case, write chunks one-by-one
|
||
while (entry) {
|
||
var chunk = entry.chunk;
|
||
var encoding = entry.encoding;
|
||
var cb = entry.callback;
|
||
var len = state.objectMode ? 1 : chunk.length;
|
||
|
||
doWrite(stream, state, false, len, chunk, encoding, cb);
|
||
entry = entry.next;
|
||
state.bufferedRequestCount--;
|
||
// if we didn't call the onwrite immediately, then
|
||
// it means that we need to wait until it does.
|
||
// also, that means that the chunk and cb are currently
|
||
// being processed, so move the buffer counter past them.
|
||
if (state.writing) {
|
||
break;
|
||
}
|
||
}
|
||
|
||
if (entry === null) state.lastBufferedRequest = null;
|
||
}
|
||
|
||
state.bufferedRequest = entry;
|
||
state.bufferProcessing = false;
|
||
}
|
||
|
||
Writable.prototype._write = function (chunk, encoding, cb) {
|
||
cb(new Error('_write() is not implemented'));
|
||
};
|
||
|
||
Writable.prototype._writev = null;
|
||
|
||
Writable.prototype.end = function (chunk, encoding, cb) {
|
||
var state = this._writableState;
|
||
|
||
if (typeof chunk === 'function') {
|
||
cb = chunk;
|
||
chunk = null;
|
||
encoding = null;
|
||
} else if (typeof encoding === 'function') {
|
||
cb = encoding;
|
||
encoding = null;
|
||
}
|
||
|
||
if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);
|
||
|
||
// .end() fully uncorks
|
||
if (state.corked) {
|
||
state.corked = 1;
|
||
this.uncork();
|
||
}
|
||
|
||
// ignore unnecessary end() calls.
|
||
if (!state.ending && !state.finished) endWritable(this, state, cb);
|
||
};
|
||
|
||
function needFinish(state) {
|
||
return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
|
||
}
|
||
function callFinal(stream, state) {
|
||
stream._final(function (err) {
|
||
state.pendingcb--;
|
||
if (err) {
|
||
stream.emit('error', err);
|
||
}
|
||
state.prefinished = true;
|
||
stream.emit('prefinish');
|
||
finishMaybe(stream, state);
|
||
});
|
||
}
|
||
function prefinish(stream, state) {
|
||
if (!state.prefinished && !state.finalCalled) {
|
||
if (typeof stream._final === 'function') {
|
||
state.pendingcb++;
|
||
state.finalCalled = true;
|
||
pna.nextTick(callFinal, stream, state);
|
||
} else {
|
||
state.prefinished = true;
|
||
stream.emit('prefinish');
|
||
}
|
||
}
|
||
}
|
||
|
||
function finishMaybe(stream, state) {
|
||
var need = needFinish(state);
|
||
if (need) {
|
||
prefinish(stream, state);
|
||
if (state.pendingcb === 0) {
|
||
state.finished = true;
|
||
stream.emit('finish');
|
||
}
|
||
}
|
||
return need;
|
||
}
|
||
|
||
function endWritable(stream, state, cb) {
|
||
state.ending = true;
|
||
finishMaybe(stream, state);
|
||
if (cb) {
|
||
if (state.finished) pna.nextTick(cb);else stream.once('finish', cb);
|
||
}
|
||
state.ended = true;
|
||
stream.writable = false;
|
||
}
|
||
|
||
function onCorkedFinish(corkReq, state, err) {
|
||
var entry = corkReq.entry;
|
||
corkReq.entry = null;
|
||
while (entry) {
|
||
var cb = entry.callback;
|
||
state.pendingcb--;
|
||
cb(err);
|
||
entry = entry.next;
|
||
}
|
||
if (state.corkedRequestsFree) {
|
||
state.corkedRequestsFree.next = corkReq;
|
||
} else {
|
||
state.corkedRequestsFree = corkReq;
|
||
}
|
||
}
|
||
|
||
Object.defineProperty(Writable.prototype, 'destroyed', {
|
||
get: function () {
|
||
if (this._writableState === undefined) {
|
||
return false;
|
||
}
|
||
return this._writableState.destroyed;
|
||
},
|
||
set: function (value) {
|
||
// we ignore the value if the stream
|
||
// has not been initialized yet
|
||
if (!this._writableState) {
|
||
return;
|
||
}
|
||
|
||
// backward compatibility, the user is explicitly
|
||
// managing destroyed
|
||
this._writableState.destroyed = value;
|
||
}
|
||
});
|
||
|
||
Writable.prototype.destroy = destroyImpl.destroy;
|
||
Writable.prototype._undestroy = destroyImpl.undestroy;
|
||
Writable.prototype._destroy = function (err, cb) {
|
||
this.end();
|
||
cb(err);
|
||
};
|
||
|
||
/***/ }),
|
||
|
||
/***/ 765:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
|
||
|
||
var Buffer = __nccwpck_require__(1534).Buffer;
|
||
var util = __nccwpck_require__(1669);
|
||
|
||
function copyBuffer(src, target, offset) {
|
||
src.copy(target, offset);
|
||
}
|
||
|
||
module.exports = function () {
|
||
function BufferList() {
|
||
_classCallCheck(this, BufferList);
|
||
|
||
this.head = null;
|
||
this.tail = null;
|
||
this.length = 0;
|
||
}
|
||
|
||
BufferList.prototype.push = function push(v) {
|
||
var entry = { data: v, next: null };
|
||
if (this.length > 0) this.tail.next = entry;else this.head = entry;
|
||
this.tail = entry;
|
||
++this.length;
|
||
};
|
||
|
||
BufferList.prototype.unshift = function unshift(v) {
|
||
var entry = { data: v, next: this.head };
|
||
if (this.length === 0) this.tail = entry;
|
||
this.head = entry;
|
||
++this.length;
|
||
};
|
||
|
||
BufferList.prototype.shift = function shift() {
|
||
if (this.length === 0) return;
|
||
var ret = this.head.data;
|
||
if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
|
||
--this.length;
|
||
return ret;
|
||
};
|
||
|
||
BufferList.prototype.clear = function clear() {
|
||
this.head = this.tail = null;
|
||
this.length = 0;
|
||
};
|
||
|
||
BufferList.prototype.join = function join(s) {
|
||
if (this.length === 0) return '';
|
||
var p = this.head;
|
||
var ret = '' + p.data;
|
||
while (p = p.next) {
|
||
ret += s + p.data;
|
||
}return ret;
|
||
};
|
||
|
||
BufferList.prototype.concat = function concat(n) {
|
||
if (this.length === 0) return Buffer.alloc(0);
|
||
if (this.length === 1) return this.head.data;
|
||
var ret = Buffer.allocUnsafe(n >>> 0);
|
||
var p = this.head;
|
||
var i = 0;
|
||
while (p) {
|
||
copyBuffer(p.data, ret, i);
|
||
i += p.data.length;
|
||
p = p.next;
|
||
}
|
||
return ret;
|
||
};
|
||
|
||
return BufferList;
|
||
}();
|
||
|
||
if (util && util.inspect && util.inspect.custom) {
|
||
module.exports.prototype[util.inspect.custom] = function () {
|
||
var obj = util.inspect({ length: this.length });
|
||
return this.constructor.name + ' ' + obj;
|
||
};
|
||
}
|
||
|
||
/***/ }),
|
||
|
||
/***/ 7174:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
/*<replacement>*/
|
||
|
||
var pna = __nccwpck_require__(9029);
|
||
/*</replacement>*/
|
||
|
||
// undocumented cb() API, needed for core, not for public API
|
||
function destroy(err, cb) {
|
||
var _this = this;
|
||
|
||
var readableDestroyed = this._readableState && this._readableState.destroyed;
|
||
var writableDestroyed = this._writableState && this._writableState.destroyed;
|
||
|
||
if (readableDestroyed || writableDestroyed) {
|
||
if (cb) {
|
||
cb(err);
|
||
} else if (err && (!this._writableState || !this._writableState.errorEmitted)) {
|
||
pna.nextTick(emitErrorNT, this, err);
|
||
}
|
||
return this;
|
||
}
|
||
|
||
// we set destroyed to true before firing error callbacks in order
|
||
// to make it re-entrance safe in case destroy() is called within callbacks
|
||
|
||
if (this._readableState) {
|
||
this._readableState.destroyed = true;
|
||
}
|
||
|
||
// if this is a duplex stream mark the writable part as destroyed as well
|
||
if (this._writableState) {
|
||
this._writableState.destroyed = true;
|
||
}
|
||
|
||
this._destroy(err || null, function (err) {
|
||
if (!cb && err) {
|
||
pna.nextTick(emitErrorNT, _this, err);
|
||
if (_this._writableState) {
|
||
_this._writableState.errorEmitted = true;
|
||
}
|
||
} else if (cb) {
|
||
cb(err);
|
||
}
|
||
});
|
||
|
||
return this;
|
||
}
|
||
|
||
function undestroy() {
|
||
if (this._readableState) {
|
||
this._readableState.destroyed = false;
|
||
this._readableState.reading = false;
|
||
this._readableState.ended = false;
|
||
this._readableState.endEmitted = false;
|
||
}
|
||
|
||
if (this._writableState) {
|
||
this._writableState.destroyed = false;
|
||
this._writableState.ended = false;
|
||
this._writableState.ending = false;
|
||
this._writableState.finished = false;
|
||
this._writableState.errorEmitted = false;
|
||
}
|
||
}
|
||
|
||
function emitErrorNT(self, err) {
|
||
self.emit('error', err);
|
||
}
|
||
|
||
module.exports = {
|
||
destroy: destroy,
|
||
undestroy: undestroy
|
||
};
|
||
|
||
/***/ }),
|
||
|
||
/***/ 3661:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
module.exports = __nccwpck_require__(2413);
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 7775:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
module.exports = __nccwpck_require__(5806).PassThrough
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 5806:
|
||
/***/ ((module, exports, __nccwpck_require__) => {
|
||
|
||
var Stream = __nccwpck_require__(2413);
|
||
if (process.env.READABLE_STREAM === 'disable' && Stream) {
|
||
module.exports = Stream;
|
||
exports = module.exports = Stream.Readable;
|
||
exports.Readable = Stream.Readable;
|
||
exports.Writable = Stream.Writable;
|
||
exports.Duplex = Stream.Duplex;
|
||
exports.Transform = Stream.Transform;
|
||
exports.PassThrough = Stream.PassThrough;
|
||
exports.Stream = Stream;
|
||
} else {
|
||
exports = module.exports = __nccwpck_require__(3307);
|
||
exports.Stream = Stream || exports;
|
||
exports.Readable = exports;
|
||
exports.Writable = __nccwpck_require__(5104);
|
||
exports.Duplex = __nccwpck_require__(415);
|
||
exports.Transform = __nccwpck_require__(3533);
|
||
exports.PassThrough = __nccwpck_require__(9412);
|
||
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 1301:
|
||
/***/ ((module) => {
|
||
|
||
/**
|
||
* lodash (Custom Build) <https://lodash.com/>
|
||
* Build: `lodash modularize exports="npm" -o ./`
|
||
* Copyright jQuery Foundation and other contributors <https://jquery.org/>
|
||
* Released under MIT license <https://lodash.com/license>
|
||
* Based on Underscore.js 1.8.3 <http://underscorejs.org/LICENSE>
|
||
* Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
|
||
*/
|
||
|
||
/** Used as references for various `Number` constants. */
|
||
var MAX_SAFE_INTEGER = 9007199254740991;
|
||
|
||
/** `Object#toString` result references. */
|
||
var argsTag = '[object Arguments]',
|
||
funcTag = '[object Function]',
|
||
genTag = '[object GeneratorFunction]';
|
||
|
||
/** Used to detect unsigned integer values. */
|
||
var reIsUint = /^(?:0|[1-9]\d*)$/;
|
||
|
||
/**
|
||
* A faster alternative to `Function#apply`, this function invokes `func`
|
||
* with the `this` binding of `thisArg` and the arguments of `args`.
|
||
*
|
||
* @private
|
||
* @param {Function} func The function to invoke.
|
||
* @param {*} thisArg The `this` binding of `func`.
|
||
* @param {Array} args The arguments to invoke `func` with.
|
||
* @returns {*} Returns the result of `func`.
|
||
*/
|
||
function apply(func, thisArg, args) {
|
||
switch (args.length) {
|
||
case 0: return func.call(thisArg);
|
||
case 1: return func.call(thisArg, args[0]);
|
||
case 2: return func.call(thisArg, args[0], args[1]);
|
||
case 3: return func.call(thisArg, args[0], args[1], args[2]);
|
||
}
|
||
return func.apply(thisArg, args);
|
||
}
|
||
|
||
/**
|
||
* The base implementation of `_.times` without support for iteratee shorthands
|
||
* or max array length checks.
|
||
*
|
||
* @private
|
||
* @param {number} n The number of times to invoke `iteratee`.
|
||
* @param {Function} iteratee The function invoked per iteration.
|
||
* @returns {Array} Returns the array of results.
|
||
*/
|
||
function baseTimes(n, iteratee) {
|
||
var index = -1,
|
||
result = Array(n);
|
||
|
||
while (++index < n) {
|
||
result[index] = iteratee(index);
|
||
}
|
||
return result;
|
||
}
|
||
|
||
/** Used for built-in method references. */
|
||
var objectProto = Object.prototype;
|
||
|
||
/** Used to check objects for own properties. */
|
||
var hasOwnProperty = objectProto.hasOwnProperty;
|
||
|
||
/**
|
||
* Used to resolve the
|
||
* [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
|
||
* of values.
|
||
*/
|
||
var objectToString = objectProto.toString;
|
||
|
||
/** Built-in value references. */
|
||
var propertyIsEnumerable = objectProto.propertyIsEnumerable;
|
||
|
||
/* Built-in method references for those with the same name as other `lodash` methods. */
|
||
var nativeMax = Math.max;
|
||
|
||
/**
|
||
* Creates an array of the enumerable property names of the array-like `value`.
|
||
*
|
||
* @private
|
||
* @param {*} value The value to query.
|
||
* @param {boolean} inherited Specify returning inherited property names.
|
||
* @returns {Array} Returns the array of property names.
|
||
*/
|
||
function arrayLikeKeys(value, inherited) {
|
||
// Safari 8.1 makes `arguments.callee` enumerable in strict mode.
|
||
// Safari 9 makes `arguments.length` enumerable in strict mode.
|
||
var result = (isArray(value) || isArguments(value))
|
||
? baseTimes(value.length, String)
|
||
: [];
|
||
|
||
var length = result.length,
|
||
skipIndexes = !!length;
|
||
|
||
for (var key in value) {
|
||
if ((inherited || hasOwnProperty.call(value, key)) &&
|
||
!(skipIndexes && (key == 'length' || isIndex(key, length)))) {
|
||
result.push(key);
|
||
}
|
||
}
|
||
return result;
|
||
}
|
||
|
||
/**
|
||
* Used by `_.defaults` to customize its `_.assignIn` use.
|
||
*
|
||
* @private
|
||
* @param {*} objValue The destination value.
|
||
* @param {*} srcValue The source value.
|
||
* @param {string} key The key of the property to assign.
|
||
* @param {Object} object The parent object of `objValue`.
|
||
* @returns {*} Returns the value to assign.
|
||
*/
|
||
function assignInDefaults(objValue, srcValue, key, object) {
|
||
if (objValue === undefined ||
|
||
(eq(objValue, objectProto[key]) && !hasOwnProperty.call(object, key))) {
|
||
return srcValue;
|
||
}
|
||
return objValue;
|
||
}
|
||
|
||
/**
|
||
* Assigns `value` to `key` of `object` if the existing value is not equivalent
|
||
* using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
|
||
* for equality comparisons.
|
||
*
|
||
* @private
|
||
* @param {Object} object The object to modify.
|
||
* @param {string} key The key of the property to assign.
|
||
* @param {*} value The value to assign.
|
||
*/
|
||
function assignValue(object, key, value) {
|
||
var objValue = object[key];
|
||
if (!(hasOwnProperty.call(object, key) && eq(objValue, value)) ||
|
||
(value === undefined && !(key in object))) {
|
||
object[key] = value;
|
||
}
|
||
}
|
||
|
||
/**
|
||
* The base implementation of `_.keysIn` which doesn't treat sparse arrays as dense.
|
||
*
|
||
* @private
|
||
* @param {Object} object The object to query.
|
||
* @returns {Array} Returns the array of property names.
|
||
*/
|
||
function baseKeysIn(object) {
|
||
if (!isObject(object)) {
|
||
return nativeKeysIn(object);
|
||
}
|
||
var isProto = isPrototype(object),
|
||
result = [];
|
||
|
||
for (var key in object) {
|
||
if (!(key == 'constructor' && (isProto || !hasOwnProperty.call(object, key)))) {
|
||
result.push(key);
|
||
}
|
||
}
|
||
return result;
|
||
}
|
||
|
||
/**
|
||
* The base implementation of `_.rest` which doesn't validate or coerce arguments.
|
||
*
|
||
* @private
|
||
* @param {Function} func The function to apply a rest parameter to.
|
||
* @param {number} [start=func.length-1] The start position of the rest parameter.
|
||
* @returns {Function} Returns the new function.
|
||
*/
|
||
function baseRest(func, start) {
|
||
start = nativeMax(start === undefined ? (func.length - 1) : start, 0);
|
||
return function() {
|
||
var args = arguments,
|
||
index = -1,
|
||
length = nativeMax(args.length - start, 0),
|
||
array = Array(length);
|
||
|
||
while (++index < length) {
|
||
array[index] = args[start + index];
|
||
}
|
||
index = -1;
|
||
var otherArgs = Array(start + 1);
|
||
while (++index < start) {
|
||
otherArgs[index] = args[index];
|
||
}
|
||
otherArgs[start] = array;
|
||
return apply(func, this, otherArgs);
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Copies properties of `source` to `object`.
|
||
*
|
||
* @private
|
||
* @param {Object} source The object to copy properties from.
|
||
* @param {Array} props The property identifiers to copy.
|
||
* @param {Object} [object={}] The object to copy properties to.
|
||
* @param {Function} [customizer] The function to customize copied values.
|
||
* @returns {Object} Returns `object`.
|
||
*/
|
||
function copyObject(source, props, object, customizer) {
|
||
object || (object = {});
|
||
|
||
var index = -1,
|
||
length = props.length;
|
||
|
||
while (++index < length) {
|
||
var key = props[index];
|
||
|
||
var newValue = customizer
|
||
? customizer(object[key], source[key], key, object, source)
|
||
: undefined;
|
||
|
||
assignValue(object, key, newValue === undefined ? source[key] : newValue);
|
||
}
|
||
return object;
|
||
}
|
||
|
||
/**
|
||
* Creates a function like `_.assign`.
|
||
*
|
||
* @private
|
||
* @param {Function} assigner The function to assign values.
|
||
* @returns {Function} Returns the new assigner function.
|
||
*/
|
||
function createAssigner(assigner) {
|
||
return baseRest(function(object, sources) {
|
||
var index = -1,
|
||
length = sources.length,
|
||
customizer = length > 1 ? sources[length - 1] : undefined,
|
||
guard = length > 2 ? sources[2] : undefined;
|
||
|
||
customizer = (assigner.length > 3 && typeof customizer == 'function')
|
||
? (length--, customizer)
|
||
: undefined;
|
||
|
||
if (guard && isIterateeCall(sources[0], sources[1], guard)) {
|
||
customizer = length < 3 ? undefined : customizer;
|
||
length = 1;
|
||
}
|
||
object = Object(object);
|
||
while (++index < length) {
|
||
var source = sources[index];
|
||
if (source) {
|
||
assigner(object, source, index, customizer);
|
||
}
|
||
}
|
||
return object;
|
||
});
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is a valid array-like index.
|
||
*
|
||
* @private
|
||
* @param {*} value The value to check.
|
||
* @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index.
|
||
* @returns {boolean} Returns `true` if `value` is a valid index, else `false`.
|
||
*/
|
||
function isIndex(value, length) {
|
||
length = length == null ? MAX_SAFE_INTEGER : length;
|
||
return !!length &&
|
||
(typeof value == 'number' || reIsUint.test(value)) &&
|
||
(value > -1 && value % 1 == 0 && value < length);
|
||
}
|
||
|
||
/**
|
||
* Checks if the given arguments are from an iteratee call.
|
||
*
|
||
* @private
|
||
* @param {*} value The potential iteratee value argument.
|
||
* @param {*} index The potential iteratee index or key argument.
|
||
* @param {*} object The potential iteratee object argument.
|
||
* @returns {boolean} Returns `true` if the arguments are from an iteratee call,
|
||
* else `false`.
|
||
*/
|
||
function isIterateeCall(value, index, object) {
|
||
if (!isObject(object)) {
|
||
return false;
|
||
}
|
||
var type = typeof index;
|
||
if (type == 'number'
|
||
? (isArrayLike(object) && isIndex(index, object.length))
|
||
: (type == 'string' && index in object)
|
||
) {
|
||
return eq(object[index], value);
|
||
}
|
||
return false;
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is likely a prototype object.
|
||
*
|
||
* @private
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is a prototype, else `false`.
|
||
*/
|
||
function isPrototype(value) {
|
||
var Ctor = value && value.constructor,
|
||
proto = (typeof Ctor == 'function' && Ctor.prototype) || objectProto;
|
||
|
||
return value === proto;
|
||
}
|
||
|
||
/**
|
||
* This function is like
|
||
* [`Object.keys`](http://ecma-international.org/ecma-262/7.0/#sec-object.keys)
|
||
* except that it includes inherited enumerable properties.
|
||
*
|
||
* @private
|
||
* @param {Object} object The object to query.
|
||
* @returns {Array} Returns the array of property names.
|
||
*/
|
||
function nativeKeysIn(object) {
|
||
var result = [];
|
||
if (object != null) {
|
||
for (var key in Object(object)) {
|
||
result.push(key);
|
||
}
|
||
}
|
||
return result;
|
||
}
|
||
|
||
/**
|
||
* Performs a
|
||
* [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
|
||
* comparison between two values to determine if they are equivalent.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to compare.
|
||
* @param {*} other The other value to compare.
|
||
* @returns {boolean} Returns `true` if the values are equivalent, else `false`.
|
||
* @example
|
||
*
|
||
* var object = { 'a': 1 };
|
||
* var other = { 'a': 1 };
|
||
*
|
||
* _.eq(object, object);
|
||
* // => true
|
||
*
|
||
* _.eq(object, other);
|
||
* // => false
|
||
*
|
||
* _.eq('a', 'a');
|
||
* // => true
|
||
*
|
||
* _.eq('a', Object('a'));
|
||
* // => false
|
||
*
|
||
* _.eq(NaN, NaN);
|
||
* // => true
|
||
*/
|
||
function eq(value, other) {
|
||
return value === other || (value !== value && other !== other);
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is likely an `arguments` object.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is an `arguments` object,
|
||
* else `false`.
|
||
* @example
|
||
*
|
||
* _.isArguments(function() { return arguments; }());
|
||
* // => true
|
||
*
|
||
* _.isArguments([1, 2, 3]);
|
||
* // => false
|
||
*/
|
||
function isArguments(value) {
|
||
// Safari 8.1 makes `arguments.callee` enumerable in strict mode.
|
||
return isArrayLikeObject(value) && hasOwnProperty.call(value, 'callee') &&
|
||
(!propertyIsEnumerable.call(value, 'callee') || objectToString.call(value) == argsTag);
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is classified as an `Array` object.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is an array, else `false`.
|
||
* @example
|
||
*
|
||
* _.isArray([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isArray(document.body.children);
|
||
* // => false
|
||
*
|
||
* _.isArray('abc');
|
||
* // => false
|
||
*
|
||
* _.isArray(_.noop);
|
||
* // => false
|
||
*/
|
||
var isArray = Array.isArray;
|
||
|
||
/**
|
||
* Checks if `value` is array-like. A value is considered array-like if it's
|
||
* not a function and has a `value.length` that's an integer greater than or
|
||
* equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is array-like, else `false`.
|
||
* @example
|
||
*
|
||
* _.isArrayLike([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isArrayLike(document.body.children);
|
||
* // => true
|
||
*
|
||
* _.isArrayLike('abc');
|
||
* // => true
|
||
*
|
||
* _.isArrayLike(_.noop);
|
||
* // => false
|
||
*/
|
||
function isArrayLike(value) {
|
||
return value != null && isLength(value.length) && !isFunction(value);
|
||
}
|
||
|
||
/**
|
||
* This method is like `_.isArrayLike` except that it also checks if `value`
|
||
* is an object.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is an array-like object,
|
||
* else `false`.
|
||
* @example
|
||
*
|
||
* _.isArrayLikeObject([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isArrayLikeObject(document.body.children);
|
||
* // => true
|
||
*
|
||
* _.isArrayLikeObject('abc');
|
||
* // => false
|
||
*
|
||
* _.isArrayLikeObject(_.noop);
|
||
* // => false
|
||
*/
|
||
function isArrayLikeObject(value) {
|
||
return isObjectLike(value) && isArrayLike(value);
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is classified as a `Function` object.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is a function, else `false`.
|
||
* @example
|
||
*
|
||
* _.isFunction(_);
|
||
* // => true
|
||
*
|
||
* _.isFunction(/abc/);
|
||
* // => false
|
||
*/
|
||
function isFunction(value) {
|
||
// The use of `Object#toString` avoids issues with the `typeof` operator
|
||
// in Safari 8-9 which returns 'object' for typed array and other constructors.
|
||
var tag = isObject(value) ? objectToString.call(value) : '';
|
||
return tag == funcTag || tag == genTag;
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is a valid array-like length.
|
||
*
|
||
* **Note:** This method is loosely based on
|
||
* [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength).
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is a valid length, else `false`.
|
||
* @example
|
||
*
|
||
* _.isLength(3);
|
||
* // => true
|
||
*
|
||
* _.isLength(Number.MIN_VALUE);
|
||
* // => false
|
||
*
|
||
* _.isLength(Infinity);
|
||
* // => false
|
||
*
|
||
* _.isLength('3');
|
||
* // => false
|
||
*/
|
||
function isLength(value) {
|
||
return typeof value == 'number' &&
|
||
value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER;
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is the
|
||
* [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types)
|
||
* of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is an object, else `false`.
|
||
* @example
|
||
*
|
||
* _.isObject({});
|
||
* // => true
|
||
*
|
||
* _.isObject([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isObject(_.noop);
|
||
* // => true
|
||
*
|
||
* _.isObject(null);
|
||
* // => false
|
||
*/
|
||
function isObject(value) {
|
||
var type = typeof value;
|
||
return !!value && (type == 'object' || type == 'function');
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is object-like. A value is object-like if it's not `null`
|
||
* and has a `typeof` result of "object".
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is object-like, else `false`.
|
||
* @example
|
||
*
|
||
* _.isObjectLike({});
|
||
* // => true
|
||
*
|
||
* _.isObjectLike([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isObjectLike(_.noop);
|
||
* // => false
|
||
*
|
||
* _.isObjectLike(null);
|
||
* // => false
|
||
*/
|
||
function isObjectLike(value) {
|
||
return !!value && typeof value == 'object';
|
||
}
|
||
|
||
/**
|
||
* This method is like `_.assignIn` except that it accepts `customizer`
|
||
* which is invoked to produce the assigned values. If `customizer` returns
|
||
* `undefined`, assignment is handled by the method instead. The `customizer`
|
||
* is invoked with five arguments: (objValue, srcValue, key, object, source).
|
||
*
|
||
* **Note:** This method mutates `object`.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @alias extendWith
|
||
* @category Object
|
||
* @param {Object} object The destination object.
|
||
* @param {...Object} sources The source objects.
|
||
* @param {Function} [customizer] The function to customize assigned values.
|
||
* @returns {Object} Returns `object`.
|
||
* @see _.assignWith
|
||
* @example
|
||
*
|
||
* function customizer(objValue, srcValue) {
|
||
* return _.isUndefined(objValue) ? srcValue : objValue;
|
||
* }
|
||
*
|
||
* var defaults = _.partialRight(_.assignInWith, customizer);
|
||
*
|
||
* defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 });
|
||
* // => { 'a': 1, 'b': 2 }
|
||
*/
|
||
var assignInWith = createAssigner(function(object, source, srcIndex, customizer) {
|
||
copyObject(source, keysIn(source), object, customizer);
|
||
});
|
||
|
||
/**
|
||
* Assigns own and inherited enumerable string keyed properties of source
|
||
* objects to the destination object for all destination properties that
|
||
* resolve to `undefined`. Source objects are applied from left to right.
|
||
* Once a property is set, additional values of the same property are ignored.
|
||
*
|
||
* **Note:** This method mutates `object`.
|
||
*
|
||
* @static
|
||
* @since 0.1.0
|
||
* @memberOf _
|
||
* @category Object
|
||
* @param {Object} object The destination object.
|
||
* @param {...Object} [sources] The source objects.
|
||
* @returns {Object} Returns `object`.
|
||
* @see _.defaultsDeep
|
||
* @example
|
||
*
|
||
* _.defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 });
|
||
* // => { 'a': 1, 'b': 2 }
|
||
*/
|
||
var defaults = baseRest(function(args) {
|
||
args.push(undefined, assignInDefaults);
|
||
return apply(assignInWith, undefined, args);
|
||
});
|
||
|
||
/**
|
||
* Creates an array of the own and inherited enumerable property names of `object`.
|
||
*
|
||
* **Note:** Non-object values are coerced to objects.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 3.0.0
|
||
* @category Object
|
||
* @param {Object} object The object to query.
|
||
* @returns {Array} Returns the array of property names.
|
||
* @example
|
||
*
|
||
* function Foo() {
|
||
* this.a = 1;
|
||
* this.b = 2;
|
||
* }
|
||
*
|
||
* Foo.prototype.c = 3;
|
||
*
|
||
* _.keysIn(new Foo);
|
||
* // => ['a', 'b', 'c'] (iteration order is not guaranteed)
|
||
*/
|
||
function keysIn(object) {
|
||
return isArrayLike(object) ? arrayLikeKeys(object, true) : baseKeysIn(object);
|
||
}
|
||
|
||
module.exports = defaults;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 9765:
|
||
/***/ ((module) => {
|
||
|
||
/**
|
||
* lodash (Custom Build) <https://lodash.com/>
|
||
* Build: `lodash modularize exports="npm" -o ./`
|
||
* Copyright jQuery Foundation and other contributors <https://jquery.org/>
|
||
* Released under MIT license <https://lodash.com/license>
|
||
* Based on Underscore.js 1.8.3 <http://underscorejs.org/LICENSE>
|
||
* Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
|
||
*/
|
||
|
||
/** Used as the size to enable large array optimizations. */
|
||
var LARGE_ARRAY_SIZE = 200;
|
||
|
||
/** Used to stand-in for `undefined` hash values. */
|
||
var HASH_UNDEFINED = '__lodash_hash_undefined__';
|
||
|
||
/** Used as references for various `Number` constants. */
|
||
var MAX_SAFE_INTEGER = 9007199254740991;
|
||
|
||
/** `Object#toString` result references. */
|
||
var argsTag = '[object Arguments]',
|
||
funcTag = '[object Function]',
|
||
genTag = '[object GeneratorFunction]';
|
||
|
||
/**
|
||
* Used to match `RegExp`
|
||
* [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns).
|
||
*/
|
||
var reRegExpChar = /[\\^$.*+?()[\]{}|]/g;
|
||
|
||
/** Used to detect host constructors (Safari). */
|
||
var reIsHostCtor = /^\[object .+?Constructor\]$/;
|
||
|
||
/** Detect free variable `global` from Node.js. */
|
||
var freeGlobal = typeof global == 'object' && global && global.Object === Object && global;
|
||
|
||
/** Detect free variable `self`. */
|
||
var freeSelf = typeof self == 'object' && self && self.Object === Object && self;
|
||
|
||
/** Used as a reference to the global object. */
|
||
var root = freeGlobal || freeSelf || Function('return this')();
|
||
|
||
/**
|
||
* A faster alternative to `Function#apply`, this function invokes `func`
|
||
* with the `this` binding of `thisArg` and the arguments of `args`.
|
||
*
|
||
* @private
|
||
* @param {Function} func The function to invoke.
|
||
* @param {*} thisArg The `this` binding of `func`.
|
||
* @param {Array} args The arguments to invoke `func` with.
|
||
* @returns {*} Returns the result of `func`.
|
||
*/
|
||
function apply(func, thisArg, args) {
|
||
switch (args.length) {
|
||
case 0: return func.call(thisArg);
|
||
case 1: return func.call(thisArg, args[0]);
|
||
case 2: return func.call(thisArg, args[0], args[1]);
|
||
case 3: return func.call(thisArg, args[0], args[1], args[2]);
|
||
}
|
||
return func.apply(thisArg, args);
|
||
}
|
||
|
||
/**
|
||
* A specialized version of `_.includes` for arrays without support for
|
||
* specifying an index to search from.
|
||
*
|
||
* @private
|
||
* @param {Array} [array] The array to inspect.
|
||
* @param {*} target The value to search for.
|
||
* @returns {boolean} Returns `true` if `target` is found, else `false`.
|
||
*/
|
||
function arrayIncludes(array, value) {
|
||
var length = array ? array.length : 0;
|
||
return !!length && baseIndexOf(array, value, 0) > -1;
|
||
}
|
||
|
||
/**
|
||
* This function is like `arrayIncludes` except that it accepts a comparator.
|
||
*
|
||
* @private
|
||
* @param {Array} [array] The array to inspect.
|
||
* @param {*} target The value to search for.
|
||
* @param {Function} comparator The comparator invoked per element.
|
||
* @returns {boolean} Returns `true` if `target` is found, else `false`.
|
||
*/
|
||
function arrayIncludesWith(array, value, comparator) {
|
||
var index = -1,
|
||
length = array ? array.length : 0;
|
||
|
||
while (++index < length) {
|
||
if (comparator(value, array[index])) {
|
||
return true;
|
||
}
|
||
}
|
||
return false;
|
||
}
|
||
|
||
/**
|
||
* A specialized version of `_.map` for arrays without support for iteratee
|
||
* shorthands.
|
||
*
|
||
* @private
|
||
* @param {Array} [array] The array to iterate over.
|
||
* @param {Function} iteratee The function invoked per iteration.
|
||
* @returns {Array} Returns the new mapped array.
|
||
*/
|
||
function arrayMap(array, iteratee) {
|
||
var index = -1,
|
||
length = array ? array.length : 0,
|
||
result = Array(length);
|
||
|
||
while (++index < length) {
|
||
result[index] = iteratee(array[index], index, array);
|
||
}
|
||
return result;
|
||
}
|
||
|
||
/**
|
||
* Appends the elements of `values` to `array`.
|
||
*
|
||
* @private
|
||
* @param {Array} array The array to modify.
|
||
* @param {Array} values The values to append.
|
||
* @returns {Array} Returns `array`.
|
||
*/
|
||
function arrayPush(array, values) {
|
||
var index = -1,
|
||
length = values.length,
|
||
offset = array.length;
|
||
|
||
while (++index < length) {
|
||
array[offset + index] = values[index];
|
||
}
|
||
return array;
|
||
}
|
||
|
||
/**
|
||
* The base implementation of `_.findIndex` and `_.findLastIndex` without
|
||
* support for iteratee shorthands.
|
||
*
|
||
* @private
|
||
* @param {Array} array The array to inspect.
|
||
* @param {Function} predicate The function invoked per iteration.
|
||
* @param {number} fromIndex The index to search from.
|
||
* @param {boolean} [fromRight] Specify iterating from right to left.
|
||
* @returns {number} Returns the index of the matched value, else `-1`.
|
||
*/
|
||
function baseFindIndex(array, predicate, fromIndex, fromRight) {
|
||
var length = array.length,
|
||
index = fromIndex + (fromRight ? 1 : -1);
|
||
|
||
while ((fromRight ? index-- : ++index < length)) {
|
||
if (predicate(array[index], index, array)) {
|
||
return index;
|
||
}
|
||
}
|
||
return -1;
|
||
}
|
||
|
||
/**
|
||
* The base implementation of `_.indexOf` without `fromIndex` bounds checks.
|
||
*
|
||
* @private
|
||
* @param {Array} array The array to inspect.
|
||
* @param {*} value The value to search for.
|
||
* @param {number} fromIndex The index to search from.
|
||
* @returns {number} Returns the index of the matched value, else `-1`.
|
||
*/
|
||
function baseIndexOf(array, value, fromIndex) {
|
||
if (value !== value) {
|
||
return baseFindIndex(array, baseIsNaN, fromIndex);
|
||
}
|
||
var index = fromIndex - 1,
|
||
length = array.length;
|
||
|
||
while (++index < length) {
|
||
if (array[index] === value) {
|
||
return index;
|
||
}
|
||
}
|
||
return -1;
|
||
}
|
||
|
||
/**
|
||
* The base implementation of `_.isNaN` without support for number objects.
|
||
*
|
||
* @private
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is `NaN`, else `false`.
|
||
*/
|
||
function baseIsNaN(value) {
|
||
return value !== value;
|
||
}
|
||
|
||
/**
|
||
* The base implementation of `_.unary` without support for storing metadata.
|
||
*
|
||
* @private
|
||
* @param {Function} func The function to cap arguments for.
|
||
* @returns {Function} Returns the new capped function.
|
||
*/
|
||
function baseUnary(func) {
|
||
return function(value) {
|
||
return func(value);
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Checks if a cache value for `key` exists.
|
||
*
|
||
* @private
|
||
* @param {Object} cache The cache to query.
|
||
* @param {string} key The key of the entry to check.
|
||
* @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
|
||
*/
|
||
function cacheHas(cache, key) {
|
||
return cache.has(key);
|
||
}
|
||
|
||
/**
|
||
* Gets the value at `key` of `object`.
|
||
*
|
||
* @private
|
||
* @param {Object} [object] The object to query.
|
||
* @param {string} key The key of the property to get.
|
||
* @returns {*} Returns the property value.
|
||
*/
|
||
function getValue(object, key) {
|
||
return object == null ? undefined : object[key];
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is a host object in IE < 9.
|
||
*
|
||
* @private
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is a host object, else `false`.
|
||
*/
|
||
function isHostObject(value) {
|
||
// Many host objects are `Object` objects that can coerce to strings
|
||
// despite having improperly defined `toString` methods.
|
||
var result = false;
|
||
if (value != null && typeof value.toString != 'function') {
|
||
try {
|
||
result = !!(value + '');
|
||
} catch (e) {}
|
||
}
|
||
return result;
|
||
}
|
||
|
||
/** Used for built-in method references. */
|
||
var arrayProto = Array.prototype,
|
||
funcProto = Function.prototype,
|
||
objectProto = Object.prototype;
|
||
|
||
/** Used to detect overreaching core-js shims. */
|
||
var coreJsData = root['__core-js_shared__'];
|
||
|
||
/** Used to detect methods masquerading as native. */
|
||
var maskSrcKey = (function() {
|
||
var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || '');
|
||
return uid ? ('Symbol(src)_1.' + uid) : '';
|
||
}());
|
||
|
||
/** Used to resolve the decompiled source of functions. */
|
||
var funcToString = funcProto.toString;
|
||
|
||
/** Used to check objects for own properties. */
|
||
var hasOwnProperty = objectProto.hasOwnProperty;
|
||
|
||
/**
|
||
* Used to resolve the
|
||
* [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
|
||
* of values.
|
||
*/
|
||
var objectToString = objectProto.toString;
|
||
|
||
/** Used to detect if a method is native. */
|
||
var reIsNative = RegExp('^' +
|
||
funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&')
|
||
.replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$'
|
||
);
|
||
|
||
/** Built-in value references. */
|
||
var Symbol = root.Symbol,
|
||
propertyIsEnumerable = objectProto.propertyIsEnumerable,
|
||
splice = arrayProto.splice,
|
||
spreadableSymbol = Symbol ? Symbol.isConcatSpreadable : undefined;
|
||
|
||
/* Built-in method references for those with the same name as other `lodash` methods. */
|
||
var nativeMax = Math.max;
|
||
|
||
/* Built-in method references that are verified to be native. */
|
||
var Map = getNative(root, 'Map'),
|
||
nativeCreate = getNative(Object, 'create');
|
||
|
||
/**
|
||
* Creates a hash object.
|
||
*
|
||
* @private
|
||
* @constructor
|
||
* @param {Array} [entries] The key-value pairs to cache.
|
||
*/
|
||
function Hash(entries) {
|
||
var index = -1,
|
||
length = entries ? entries.length : 0;
|
||
|
||
this.clear();
|
||
while (++index < length) {
|
||
var entry = entries[index];
|
||
this.set(entry[0], entry[1]);
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Removes all key-value entries from the hash.
|
||
*
|
||
* @private
|
||
* @name clear
|
||
* @memberOf Hash
|
||
*/
|
||
function hashClear() {
|
||
this.__data__ = nativeCreate ? nativeCreate(null) : {};
|
||
}
|
||
|
||
/**
|
||
* Removes `key` and its value from the hash.
|
||
*
|
||
* @private
|
||
* @name delete
|
||
* @memberOf Hash
|
||
* @param {Object} hash The hash to modify.
|
||
* @param {string} key The key of the value to remove.
|
||
* @returns {boolean} Returns `true` if the entry was removed, else `false`.
|
||
*/
|
||
function hashDelete(key) {
|
||
return this.has(key) && delete this.__data__[key];
|
||
}
|
||
|
||
/**
|
||
* Gets the hash value for `key`.
|
||
*
|
||
* @private
|
||
* @name get
|
||
* @memberOf Hash
|
||
* @param {string} key The key of the value to get.
|
||
* @returns {*} Returns the entry value.
|
||
*/
|
||
function hashGet(key) {
|
||
var data = this.__data__;
|
||
if (nativeCreate) {
|
||
var result = data[key];
|
||
return result === HASH_UNDEFINED ? undefined : result;
|
||
}
|
||
return hasOwnProperty.call(data, key) ? data[key] : undefined;
|
||
}
|
||
|
||
/**
|
||
* Checks if a hash value for `key` exists.
|
||
*
|
||
* @private
|
||
* @name has
|
||
* @memberOf Hash
|
||
* @param {string} key The key of the entry to check.
|
||
* @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
|
||
*/
|
||
function hashHas(key) {
|
||
var data = this.__data__;
|
||
return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key);
|
||
}
|
||
|
||
/**
|
||
* Sets the hash `key` to `value`.
|
||
*
|
||
* @private
|
||
* @name set
|
||
* @memberOf Hash
|
||
* @param {string} key The key of the value to set.
|
||
* @param {*} value The value to set.
|
||
* @returns {Object} Returns the hash instance.
|
||
*/
|
||
function hashSet(key, value) {
|
||
var data = this.__data__;
|
||
data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value;
|
||
return this;
|
||
}
|
||
|
||
// Add methods to `Hash`.
|
||
Hash.prototype.clear = hashClear;
|
||
Hash.prototype['delete'] = hashDelete;
|
||
Hash.prototype.get = hashGet;
|
||
Hash.prototype.has = hashHas;
|
||
Hash.prototype.set = hashSet;
|
||
|
||
/**
|
||
* Creates an list cache object.
|
||
*
|
||
* @private
|
||
* @constructor
|
||
* @param {Array} [entries] The key-value pairs to cache.
|
||
*/
|
||
function ListCache(entries) {
|
||
var index = -1,
|
||
length = entries ? entries.length : 0;
|
||
|
||
this.clear();
|
||
while (++index < length) {
|
||
var entry = entries[index];
|
||
this.set(entry[0], entry[1]);
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Removes all key-value entries from the list cache.
|
||
*
|
||
* @private
|
||
* @name clear
|
||
* @memberOf ListCache
|
||
*/
|
||
function listCacheClear() {
|
||
this.__data__ = [];
|
||
}
|
||
|
||
/**
|
||
* Removes `key` and its value from the list cache.
|
||
*
|
||
* @private
|
||
* @name delete
|
||
* @memberOf ListCache
|
||
* @param {string} key The key of the value to remove.
|
||
* @returns {boolean} Returns `true` if the entry was removed, else `false`.
|
||
*/
|
||
function listCacheDelete(key) {
|
||
var data = this.__data__,
|
||
index = assocIndexOf(data, key);
|
||
|
||
if (index < 0) {
|
||
return false;
|
||
}
|
||
var lastIndex = data.length - 1;
|
||
if (index == lastIndex) {
|
||
data.pop();
|
||
} else {
|
||
splice.call(data, index, 1);
|
||
}
|
||
return true;
|
||
}
|
||
|
||
/**
|
||
* Gets the list cache value for `key`.
|
||
*
|
||
* @private
|
||
* @name get
|
||
* @memberOf ListCache
|
||
* @param {string} key The key of the value to get.
|
||
* @returns {*} Returns the entry value.
|
||
*/
|
||
function listCacheGet(key) {
|
||
var data = this.__data__,
|
||
index = assocIndexOf(data, key);
|
||
|
||
return index < 0 ? undefined : data[index][1];
|
||
}
|
||
|
||
/**
|
||
* Checks if a list cache value for `key` exists.
|
||
*
|
||
* @private
|
||
* @name has
|
||
* @memberOf ListCache
|
||
* @param {string} key The key of the entry to check.
|
||
* @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
|
||
*/
|
||
function listCacheHas(key) {
|
||
return assocIndexOf(this.__data__, key) > -1;
|
||
}
|
||
|
||
/**
|
||
* Sets the list cache `key` to `value`.
|
||
*
|
||
* @private
|
||
* @name set
|
||
* @memberOf ListCache
|
||
* @param {string} key The key of the value to set.
|
||
* @param {*} value The value to set.
|
||
* @returns {Object} Returns the list cache instance.
|
||
*/
|
||
function listCacheSet(key, value) {
|
||
var data = this.__data__,
|
||
index = assocIndexOf(data, key);
|
||
|
||
if (index < 0) {
|
||
data.push([key, value]);
|
||
} else {
|
||
data[index][1] = value;
|
||
}
|
||
return this;
|
||
}
|
||
|
||
// Add methods to `ListCache`.
|
||
ListCache.prototype.clear = listCacheClear;
|
||
ListCache.prototype['delete'] = listCacheDelete;
|
||
ListCache.prototype.get = listCacheGet;
|
||
ListCache.prototype.has = listCacheHas;
|
||
ListCache.prototype.set = listCacheSet;
|
||
|
||
/**
|
||
* Creates a map cache object to store key-value pairs.
|
||
*
|
||
* @private
|
||
* @constructor
|
||
* @param {Array} [entries] The key-value pairs to cache.
|
||
*/
|
||
function MapCache(entries) {
|
||
var index = -1,
|
||
length = entries ? entries.length : 0;
|
||
|
||
this.clear();
|
||
while (++index < length) {
|
||
var entry = entries[index];
|
||
this.set(entry[0], entry[1]);
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Removes all key-value entries from the map.
|
||
*
|
||
* @private
|
||
* @name clear
|
||
* @memberOf MapCache
|
||
*/
|
||
function mapCacheClear() {
|
||
this.__data__ = {
|
||
'hash': new Hash,
|
||
'map': new (Map || ListCache),
|
||
'string': new Hash
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Removes `key` and its value from the map.
|
||
*
|
||
* @private
|
||
* @name delete
|
||
* @memberOf MapCache
|
||
* @param {string} key The key of the value to remove.
|
||
* @returns {boolean} Returns `true` if the entry was removed, else `false`.
|
||
*/
|
||
function mapCacheDelete(key) {
|
||
return getMapData(this, key)['delete'](key);
|
||
}
|
||
|
||
/**
|
||
* Gets the map value for `key`.
|
||
*
|
||
* @private
|
||
* @name get
|
||
* @memberOf MapCache
|
||
* @param {string} key The key of the value to get.
|
||
* @returns {*} Returns the entry value.
|
||
*/
|
||
function mapCacheGet(key) {
|
||
return getMapData(this, key).get(key);
|
||
}
|
||
|
||
/**
|
||
* Checks if a map value for `key` exists.
|
||
*
|
||
* @private
|
||
* @name has
|
||
* @memberOf MapCache
|
||
* @param {string} key The key of the entry to check.
|
||
* @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
|
||
*/
|
||
function mapCacheHas(key) {
|
||
return getMapData(this, key).has(key);
|
||
}
|
||
|
||
/**
|
||
* Sets the map `key` to `value`.
|
||
*
|
||
* @private
|
||
* @name set
|
||
* @memberOf MapCache
|
||
* @param {string} key The key of the value to set.
|
||
* @param {*} value The value to set.
|
||
* @returns {Object} Returns the map cache instance.
|
||
*/
|
||
function mapCacheSet(key, value) {
|
||
getMapData(this, key).set(key, value);
|
||
return this;
|
||
}
|
||
|
||
// Add methods to `MapCache`.
|
||
MapCache.prototype.clear = mapCacheClear;
|
||
MapCache.prototype['delete'] = mapCacheDelete;
|
||
MapCache.prototype.get = mapCacheGet;
|
||
MapCache.prototype.has = mapCacheHas;
|
||
MapCache.prototype.set = mapCacheSet;
|
||
|
||
/**
|
||
*
|
||
* Creates an array cache object to store unique values.
|
||
*
|
||
* @private
|
||
* @constructor
|
||
* @param {Array} [values] The values to cache.
|
||
*/
|
||
function SetCache(values) {
|
||
var index = -1,
|
||
length = values ? values.length : 0;
|
||
|
||
this.__data__ = new MapCache;
|
||
while (++index < length) {
|
||
this.add(values[index]);
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Adds `value` to the array cache.
|
||
*
|
||
* @private
|
||
* @name add
|
||
* @memberOf SetCache
|
||
* @alias push
|
||
* @param {*} value The value to cache.
|
||
* @returns {Object} Returns the cache instance.
|
||
*/
|
||
function setCacheAdd(value) {
|
||
this.__data__.set(value, HASH_UNDEFINED);
|
||
return this;
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is in the array cache.
|
||
*
|
||
* @private
|
||
* @name has
|
||
* @memberOf SetCache
|
||
* @param {*} value The value to search for.
|
||
* @returns {number} Returns `true` if `value` is found, else `false`.
|
||
*/
|
||
function setCacheHas(value) {
|
||
return this.__data__.has(value);
|
||
}
|
||
|
||
// Add methods to `SetCache`.
|
||
SetCache.prototype.add = SetCache.prototype.push = setCacheAdd;
|
||
SetCache.prototype.has = setCacheHas;
|
||
|
||
/**
|
||
* Gets the index at which the `key` is found in `array` of key-value pairs.
|
||
*
|
||
* @private
|
||
* @param {Array} array The array to inspect.
|
||
* @param {*} key The key to search for.
|
||
* @returns {number} Returns the index of the matched value, else `-1`.
|
||
*/
|
||
function assocIndexOf(array, key) {
|
||
var length = array.length;
|
||
while (length--) {
|
||
if (eq(array[length][0], key)) {
|
||
return length;
|
||
}
|
||
}
|
||
return -1;
|
||
}
|
||
|
||
/**
|
||
* The base implementation of methods like `_.difference` without support
|
||
* for excluding multiple arrays or iteratee shorthands.
|
||
*
|
||
* @private
|
||
* @param {Array} array The array to inspect.
|
||
* @param {Array} values The values to exclude.
|
||
* @param {Function} [iteratee] The iteratee invoked per element.
|
||
* @param {Function} [comparator] The comparator invoked per element.
|
||
* @returns {Array} Returns the new array of filtered values.
|
||
*/
|
||
function baseDifference(array, values, iteratee, comparator) {
|
||
var index = -1,
|
||
includes = arrayIncludes,
|
||
isCommon = true,
|
||
length = array.length,
|
||
result = [],
|
||
valuesLength = values.length;
|
||
|
||
if (!length) {
|
||
return result;
|
||
}
|
||
if (iteratee) {
|
||
values = arrayMap(values, baseUnary(iteratee));
|
||
}
|
||
if (comparator) {
|
||
includes = arrayIncludesWith;
|
||
isCommon = false;
|
||
}
|
||
else if (values.length >= LARGE_ARRAY_SIZE) {
|
||
includes = cacheHas;
|
||
isCommon = false;
|
||
values = new SetCache(values);
|
||
}
|
||
outer:
|
||
while (++index < length) {
|
||
var value = array[index],
|
||
computed = iteratee ? iteratee(value) : value;
|
||
|
||
value = (comparator || value !== 0) ? value : 0;
|
||
if (isCommon && computed === computed) {
|
||
var valuesIndex = valuesLength;
|
||
while (valuesIndex--) {
|
||
if (values[valuesIndex] === computed) {
|
||
continue outer;
|
||
}
|
||
}
|
||
result.push(value);
|
||
}
|
||
else if (!includes(values, computed, comparator)) {
|
||
result.push(value);
|
||
}
|
||
}
|
||
return result;
|
||
}

/**
 * The base implementation of `_.flatten` with support for restricting flattening.
 *
 * @private
 * @param {Array} array The array to flatten.
 * @param {number} depth The maximum recursion depth.
 * @param {Function} [predicate=isFlattenable] The function invoked per iteration.
 * @param {boolean} [isStrict] Restrict to values that pass `predicate` checks.
 * @param {Array} [result=[]] The initial result value.
 * @returns {Array} Returns the new flattened array.
 */
function baseFlatten(array, depth, predicate, isStrict, result) {
  var index = -1,
      length = array.length;

  predicate || (predicate = isFlattenable);
  result || (result = []);

  while (++index < length) {
    var value = array[index];
    if (depth > 0 && predicate(value)) {
      if (depth > 1) {
        // Recursively flatten arrays (susceptible to call stack limits).
        baseFlatten(value, depth - 1, predicate, isStrict, result);
      } else {
        arrayPush(result, value);
      }
    } else if (!isStrict) {
      result[result.length] = value;
    }
  }
  return result;
}
|
||
|
||
/**
|
||
* The base implementation of `_.isNative` without bad shim checks.
|
||
*
|
||
* @private
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is a native function,
|
||
* else `false`.
|
||
*/
|
||
function baseIsNative(value) {
|
||
if (!isObject(value) || isMasked(value)) {
|
||
return false;
|
||
}
|
||
var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor;
|
||
return pattern.test(toSource(value));
|
||
}
|
||
|
||
/**
|
||
* The base implementation of `_.rest` which doesn't validate or coerce arguments.
|
||
*
|
||
* @private
|
||
* @param {Function} func The function to apply a rest parameter to.
|
||
* @param {number} [start=func.length-1] The start position of the rest parameter.
|
||
* @returns {Function} Returns the new function.
|
||
*/
|
||
function baseRest(func, start) {
|
||
start = nativeMax(start === undefined ? (func.length - 1) : start, 0);
|
||
return function() {
|
||
var args = arguments,
|
||
index = -1,
|
||
length = nativeMax(args.length - start, 0),
|
||
array = Array(length);
|
||
|
||
while (++index < length) {
|
||
array[index] = args[start + index];
|
||
}
|
||
index = -1;
|
||
var otherArgs = Array(start + 1);
|
||
while (++index < start) {
|
||
otherArgs[index] = args[index];
|
||
}
|
||
otherArgs[start] = array;
|
||
return apply(func, this, otherArgs);
|
||
};
|
||
}
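
/*
 * Illustrative behaviour of baseRest (a sketch): with the default
 * `start = func.length - 1`, trailing arguments are collected into a single
 * array before `func` is applied. For a two-parameter `func(first, rest)`:
 *
 *   var rested = baseRest(function(first, rest) { return [first, rest]; });
 *   rested(1, 2, 3, 4);
 *   // => [1, [2, 3, 4]]
 */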
|
||
|
||
/**
|
||
* Gets the data for `map`.
|
||
*
|
||
* @private
|
||
* @param {Object} map The map to query.
|
||
* @param {string} key The reference key.
|
||
* @returns {*} Returns the map data.
|
||
*/
|
||
function getMapData(map, key) {
|
||
var data = map.__data__;
|
||
return isKeyable(key)
|
||
? data[typeof key == 'string' ? 'string' : 'hash']
|
||
: data.map;
|
||
}
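
/*
 * Illustrative note on getMapData (a sketch): primitive keys (string, number,
 * symbol, boolean) other than the string '__proto__', plus null, are routed
 * to the Hash buckets -- 'string' for string keys, 'hash' for the rest --
 * while everything else (objects, functions, undefined) falls back to the
 * backing Map or ListCache.
 */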
|
||
|
||
/**
|
||
* Gets the native function at `key` of `object`.
|
||
*
|
||
* @private
|
||
* @param {Object} object The object to query.
|
||
* @param {string} key The key of the method to get.
|
||
* @returns {*} Returns the function if it's native, else `undefined`.
|
||
*/
|
||
function getNative(object, key) {
|
||
var value = getValue(object, key);
|
||
return baseIsNative(value) ? value : undefined;
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is a flattenable `arguments` object or array.
|
||
*
|
||
* @private
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is flattenable, else `false`.
|
||
*/
|
||
function isFlattenable(value) {
|
||
return isArray(value) || isArguments(value) ||
|
||
!!(spreadableSymbol && value && value[spreadableSymbol]);
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is suitable for use as unique object key.
|
||
*
|
||
* @private
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is suitable, else `false`.
|
||
*/
|
||
function isKeyable(value) {
|
||
var type = typeof value;
|
||
return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean')
|
||
? (value !== '__proto__')
|
||
: (value === null);
|
||
}
|
||
|
||
/**
|
||
* Checks if `func` has its source masked.
|
||
*
|
||
* @private
|
||
* @param {Function} func The function to check.
|
||
* @returns {boolean} Returns `true` if `func` is masked, else `false`.
|
||
*/
|
||
function isMasked(func) {
|
||
return !!maskSrcKey && (maskSrcKey in func);
|
||
}
|
||
|
||
/**
|
||
* Converts `func` to its source code.
|
||
*
|
||
* @private
|
||
* @param {Function} func The function to process.
|
||
* @returns {string} Returns the source code.
|
||
*/
|
||
function toSource(func) {
|
||
if (func != null) {
|
||
try {
|
||
return funcToString.call(func);
|
||
} catch (e) {}
|
||
try {
|
||
return (func + '');
|
||
} catch (e) {}
|
||
}
|
||
return '';
|
||
}
|
||
|
||
/**
|
||
* Creates an array of `array` values not included in the other given arrays
|
||
* using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
|
||
* for equality comparisons. The order of result values is determined by the
|
||
* order they occur in the first array.
|
||
*
|
||
* **Note:** Unlike `_.pullAll`, this method returns a new array.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Array
|
||
* @param {Array} array The array to inspect.
|
||
* @param {...Array} [values] The values to exclude.
|
||
* @returns {Array} Returns the new array of filtered values.
|
||
* @see _.without, _.xor
|
||
* @example
|
||
*
|
||
* _.difference([2, 1], [2, 3]);
|
||
* // => [1]
|
||
*/
|
||
var difference = baseRest(function(array, values) {
|
||
return isArrayLikeObject(array)
|
||
? baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true))
|
||
: [];
|
||
});
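
/*
 * Illustrative usage of `difference` (a sketch): every exclusion array after
 * the first argument is flattened one level before the comparison, so
 *
 *   difference([3, 2, 1], [4, 2], [1]);
 *   // => [3]
 */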
|
||
|
||
/**
|
||
* Performs a
|
||
* [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
|
||
* comparison between two values to determine if they are equivalent.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to compare.
|
||
* @param {*} other The other value to compare.
|
||
* @returns {boolean} Returns `true` if the values are equivalent, else `false`.
|
||
* @example
|
||
*
|
||
* var object = { 'a': 1 };
|
||
* var other = { 'a': 1 };
|
||
*
|
||
* _.eq(object, object);
|
||
* // => true
|
||
*
|
||
* _.eq(object, other);
|
||
* // => false
|
||
*
|
||
* _.eq('a', 'a');
|
||
* // => true
|
||
*
|
||
* _.eq('a', Object('a'));
|
||
* // => false
|
||
*
|
||
* _.eq(NaN, NaN);
|
||
* // => true
|
||
*/
|
||
function eq(value, other) {
|
||
return value === other || (value !== value && other !== other);
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is likely an `arguments` object.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is an `arguments` object,
|
||
* else `false`.
|
||
* @example
|
||
*
|
||
* _.isArguments(function() { return arguments; }());
|
||
* // => true
|
||
*
|
||
* _.isArguments([1, 2, 3]);
|
||
* // => false
|
||
*/
|
||
function isArguments(value) {
|
||
// Safari 8.1 makes `arguments.callee` enumerable in strict mode.
|
||
return isArrayLikeObject(value) && hasOwnProperty.call(value, 'callee') &&
|
||
(!propertyIsEnumerable.call(value, 'callee') || objectToString.call(value) == argsTag);
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is classified as an `Array` object.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is an array, else `false`.
|
||
* @example
|
||
*
|
||
* _.isArray([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isArray(document.body.children);
|
||
* // => false
|
||
*
|
||
* _.isArray('abc');
|
||
* // => false
|
||
*
|
||
* _.isArray(_.noop);
|
||
* // => false
|
||
*/
|
||
var isArray = Array.isArray;
|
||
|
||
/**
|
||
* Checks if `value` is array-like. A value is considered array-like if it's
|
||
* not a function and has a `value.length` that's an integer greater than or
|
||
* equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is array-like, else `false`.
|
||
* @example
|
||
*
|
||
* _.isArrayLike([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isArrayLike(document.body.children);
|
||
* // => true
|
||
*
|
||
* _.isArrayLike('abc');
|
||
* // => true
|
||
*
|
||
* _.isArrayLike(_.noop);
|
||
* // => false
|
||
*/
|
||
function isArrayLike(value) {
|
||
return value != null && isLength(value.length) && !isFunction(value);
|
||
}
|
||
|
||
/**
|
||
* This method is like `_.isArrayLike` except that it also checks if `value`
|
||
* is an object.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is an array-like object,
|
||
* else `false`.
|
||
* @example
|
||
*
|
||
* _.isArrayLikeObject([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isArrayLikeObject(document.body.children);
|
||
* // => true
|
||
*
|
||
* _.isArrayLikeObject('abc');
|
||
* // => false
|
||
*
|
||
* _.isArrayLikeObject(_.noop);
|
||
* // => false
|
||
*/
|
||
function isArrayLikeObject(value) {
|
||
return isObjectLike(value) && isArrayLike(value);
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is classified as a `Function` object.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is a function, else `false`.
|
||
* @example
|
||
*
|
||
* _.isFunction(_);
|
||
* // => true
|
||
*
|
||
* _.isFunction(/abc/);
|
||
* // => false
|
||
*/
|
||
function isFunction(value) {
|
||
// The use of `Object#toString` avoids issues with the `typeof` operator
|
||
// in Safari 8-9 which returns 'object' for typed array and other constructors.
|
||
var tag = isObject(value) ? objectToString.call(value) : '';
|
||
return tag == funcTag || tag == genTag;
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is a valid array-like length.
|
||
*
|
||
* **Note:** This method is loosely based on
|
||
* [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength).
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is a valid length, else `false`.
|
||
* @example
|
||
*
|
||
* _.isLength(3);
|
||
* // => true
|
||
*
|
||
* _.isLength(Number.MIN_VALUE);
|
||
* // => false
|
||
*
|
||
* _.isLength(Infinity);
|
||
* // => false
|
||
*
|
||
* _.isLength('3');
|
||
* // => false
|
||
*/
|
||
function isLength(value) {
|
||
return typeof value == 'number' &&
|
||
value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER;
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is the
|
||
* [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types)
|
||
* of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is an object, else `false`.
|
||
* @example
|
||
*
|
||
* _.isObject({});
|
||
* // => true
|
||
*
|
||
* _.isObject([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isObject(_.noop);
|
||
* // => true
|
||
*
|
||
* _.isObject(null);
|
||
* // => false
|
||
*/
|
||
function isObject(value) {
|
||
var type = typeof value;
|
||
return !!value && (type == 'object' || type == 'function');
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is object-like. A value is object-like if it's not `null`
|
||
* and has a `typeof` result of "object".
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is object-like, else `false`.
|
||
* @example
|
||
*
|
||
* _.isObjectLike({});
|
||
* // => true
|
||
*
|
||
* _.isObjectLike([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isObjectLike(_.noop);
|
||
* // => false
|
||
*
|
||
* _.isObjectLike(null);
|
||
* // => false
|
||
*/
|
||
function isObjectLike(value) {
|
||
return !!value && typeof value == 'object';
|
||
}
|
||
|
||
module.exports = difference;


/***/ }),

/***/ 9058:
/***/ ((module) => {

/**
|
||
* lodash (Custom Build) <https://lodash.com/>
|
||
* Build: `lodash modularize exports="npm" -o ./`
|
||
* Copyright jQuery Foundation and other contributors <https://jquery.org/>
|
||
* Released under MIT license <https://lodash.com/license>
|
||
* Based on Underscore.js 1.8.3 <http://underscorejs.org/LICENSE>
|
||
* Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
|
||
*/
|
||
|
||
/** Used as references for various `Number` constants. */
|
||
var MAX_SAFE_INTEGER = 9007199254740991;
|
||
|
||
/** `Object#toString` result references. */
|
||
var argsTag = '[object Arguments]',
|
||
funcTag = '[object Function]',
|
||
genTag = '[object GeneratorFunction]';
|
||
|
||
/** Detect free variable `global` from Node.js. */
|
||
var freeGlobal = typeof global == 'object' && global && global.Object === Object && global;
|
||
|
||
/** Detect free variable `self`. */
|
||
var freeSelf = typeof self == 'object' && self && self.Object === Object && self;
|
||
|
||
/** Used as a reference to the global object. */
|
||
var root = freeGlobal || freeSelf || Function('return this')();
|
||
|
||
/**
|
||
* Appends the elements of `values` to `array`.
|
||
*
|
||
* @private
|
||
* @param {Array} array The array to modify.
|
||
* @param {Array} values The values to append.
|
||
* @returns {Array} Returns `array`.
|
||
*/
|
||
function arrayPush(array, values) {
|
||
var index = -1,
|
||
length = values.length,
|
||
offset = array.length;
|
||
|
||
while (++index < length) {
|
||
array[offset + index] = values[index];
|
||
}
|
||
return array;
|
||
}
|
||
|
||
/** Used for built-in method references. */
|
||
var objectProto = Object.prototype;
|
||
|
||
/** Used to check objects for own properties. */
|
||
var hasOwnProperty = objectProto.hasOwnProperty;
|
||
|
||
/**
|
||
* Used to resolve the
|
||
* [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
|
||
* of values.
|
||
*/
|
||
var objectToString = objectProto.toString;
|
||
|
||
/** Built-in value references. */
|
||
var Symbol = root.Symbol,
|
||
propertyIsEnumerable = objectProto.propertyIsEnumerable,
|
||
spreadableSymbol = Symbol ? Symbol.isConcatSpreadable : undefined;
|
||
|
||
/**
|
||
* The base implementation of `_.flatten` with support for restricting flattening.
|
||
*
|
||
* @private
|
||
* @param {Array} array The array to flatten.
|
||
* @param {number} depth The maximum recursion depth.
|
||
 * @param {Function} [predicate=isFlattenable] The function invoked per iteration.
|
||
* @param {boolean} [isStrict] Restrict to values that pass `predicate` checks.
|
||
* @param {Array} [result=[]] The initial result value.
|
||
* @returns {Array} Returns the new flattened array.
|
||
*/
|
||
function baseFlatten(array, depth, predicate, isStrict, result) {
|
||
var index = -1,
|
||
length = array.length;
|
||
|
||
predicate || (predicate = isFlattenable);
|
||
result || (result = []);
|
||
|
||
while (++index < length) {
|
||
var value = array[index];
|
||
if (depth > 0 && predicate(value)) {
|
||
if (depth > 1) {
|
||
// Recursively flatten arrays (susceptible to call stack limits).
|
||
baseFlatten(value, depth - 1, predicate, isStrict, result);
|
||
} else {
|
||
arrayPush(result, value);
|
||
}
|
||
} else if (!isStrict) {
|
||
result[result.length] = value;
|
||
}
|
||
}
|
||
return result;
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is a flattenable `arguments` object or array.
|
||
*
|
||
* @private
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is flattenable, else `false`.
|
||
*/
|
||
function isFlattenable(value) {
|
||
return isArray(value) || isArguments(value) ||
|
||
!!(spreadableSymbol && value && value[spreadableSymbol]);
|
||
}
|
||
|
||
/**
|
||
* Flattens `array` a single level deep.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Array
|
||
* @param {Array} array The array to flatten.
|
||
* @returns {Array} Returns the new flattened array.
|
||
* @example
|
||
*
|
||
* _.flatten([1, [2, [3, [4]], 5]]);
|
||
* // => [1, 2, [3, [4]], 5]
|
||
*/
|
||
function flatten(array) {
|
||
var length = array ? array.length : 0;
|
||
return length ? baseFlatten(array, 1) : [];
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is likely an `arguments` object.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is an `arguments` object,
|
||
* else `false`.
|
||
* @example
|
||
*
|
||
* _.isArguments(function() { return arguments; }());
|
||
* // => true
|
||
*
|
||
* _.isArguments([1, 2, 3]);
|
||
* // => false
|
||
*/
|
||
function isArguments(value) {
|
||
// Safari 8.1 makes `arguments.callee` enumerable in strict mode.
|
||
return isArrayLikeObject(value) && hasOwnProperty.call(value, 'callee') &&
|
||
(!propertyIsEnumerable.call(value, 'callee') || objectToString.call(value) == argsTag);
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is classified as an `Array` object.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is an array, else `false`.
|
||
* @example
|
||
*
|
||
* _.isArray([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isArray(document.body.children);
|
||
* // => false
|
||
*
|
||
* _.isArray('abc');
|
||
* // => false
|
||
*
|
||
* _.isArray(_.noop);
|
||
* // => false
|
||
*/
|
||
var isArray = Array.isArray;
|
||
|
||
/**
|
||
* Checks if `value` is array-like. A value is considered array-like if it's
|
||
* not a function and has a `value.length` that's an integer greater than or
|
||
* equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is array-like, else `false`.
|
||
* @example
|
||
*
|
||
* _.isArrayLike([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isArrayLike(document.body.children);
|
||
* // => true
|
||
*
|
||
* _.isArrayLike('abc');
|
||
* // => true
|
||
*
|
||
* _.isArrayLike(_.noop);
|
||
* // => false
|
||
*/
|
||
function isArrayLike(value) {
|
||
return value != null && isLength(value.length) && !isFunction(value);
|
||
}
|
||
|
||
/**
|
||
* This method is like `_.isArrayLike` except that it also checks if `value`
|
||
* is an object.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is an array-like object,
|
||
* else `false`.
|
||
* @example
|
||
*
|
||
* _.isArrayLikeObject([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isArrayLikeObject(document.body.children);
|
||
* // => true
|
||
*
|
||
* _.isArrayLikeObject('abc');
|
||
* // => false
|
||
*
|
||
* _.isArrayLikeObject(_.noop);
|
||
* // => false
|
||
*/
|
||
function isArrayLikeObject(value) {
|
||
return isObjectLike(value) && isArrayLike(value);
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is classified as a `Function` object.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is a function, else `false`.
|
||
* @example
|
||
*
|
||
* _.isFunction(_);
|
||
* // => true
|
||
*
|
||
* _.isFunction(/abc/);
|
||
* // => false
|
||
*/
|
||
function isFunction(value) {
|
||
// The use of `Object#toString` avoids issues with the `typeof` operator
|
||
// in Safari 8-9 which returns 'object' for typed array and other constructors.
|
||
var tag = isObject(value) ? objectToString.call(value) : '';
|
||
return tag == funcTag || tag == genTag;
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is a valid array-like length.
|
||
*
|
||
* **Note:** This method is loosely based on
|
||
* [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength).
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is a valid length, else `false`.
|
||
* @example
|
||
*
|
||
* _.isLength(3);
|
||
* // => true
|
||
*
|
||
* _.isLength(Number.MIN_VALUE);
|
||
* // => false
|
||
*
|
||
* _.isLength(Infinity);
|
||
* // => false
|
||
*
|
||
* _.isLength('3');
|
||
* // => false
|
||
*/
|
||
function isLength(value) {
|
||
return typeof value == 'number' &&
|
||
value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER;
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is the
|
||
* [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types)
|
||
* of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is an object, else `false`.
|
||
* @example
|
||
*
|
||
* _.isObject({});
|
||
* // => true
|
||
*
|
||
* _.isObject([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isObject(_.noop);
|
||
* // => true
|
||
*
|
||
* _.isObject(null);
|
||
* // => false
|
||
*/
|
||
function isObject(value) {
|
||
var type = typeof value;
|
||
return !!value && (type == 'object' || type == 'function');
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is object-like. A value is object-like if it's not `null`
|
||
* and has a `typeof` result of "object".
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is object-like, else `false`.
|
||
* @example
|
||
*
|
||
* _.isObjectLike({});
|
||
* // => true
|
||
*
|
||
* _.isObjectLike([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isObjectLike(_.noop);
|
||
* // => false
|
||
*
|
||
* _.isObjectLike(null);
|
||
* // => false
|
||
*/
|
||
function isObjectLike(value) {
|
||
return !!value && typeof value == 'object';
|
||
}
|
||
|
||
module.exports = flatten;


/***/ }),

/***/ 7617:
/***/ ((module) => {

/**
|
||
* lodash (Custom Build) <https://lodash.com/>
|
||
* Build: `lodash modularize exports="npm" -o ./`
|
||
* Copyright jQuery Foundation and other contributors <https://jquery.org/>
|
||
* Released under MIT license <https://lodash.com/license>
|
||
* Based on Underscore.js 1.8.3 <http://underscorejs.org/LICENSE>
|
||
* Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
|
||
*/
|
||
|
||
/** `Object#toString` result references. */
|
||
var objectTag = '[object Object]';
|
||
|
||
/**
|
||
* Checks if `value` is a host object in IE < 9.
|
||
*
|
||
* @private
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is a host object, else `false`.
|
||
*/
|
||
function isHostObject(value) {
|
||
// Many host objects are `Object` objects that can coerce to strings
|
||
// despite having improperly defined `toString` methods.
|
||
var result = false;
|
||
if (value != null && typeof value.toString != 'function') {
|
||
try {
|
||
result = !!(value + '');
|
||
} catch (e) {}
|
||
}
|
||
return result;
|
||
}
|
||
|
||
/**
|
||
* Creates a unary function that invokes `func` with its argument transformed.
|
||
*
|
||
* @private
|
||
* @param {Function} func The function to wrap.
|
||
* @param {Function} transform The argument transform.
|
||
* @returns {Function} Returns the new function.
|
||
*/
|
||
function overArg(func, transform) {
|
||
return function(arg) {
|
||
return func(transform(arg));
|
||
};
|
||
}
|
||
|
||
/** Used for built-in method references. */
|
||
var funcProto = Function.prototype,
|
||
objectProto = Object.prototype;
|
||
|
||
/** Used to resolve the decompiled source of functions. */
|
||
var funcToString = funcProto.toString;
|
||
|
||
/** Used to check objects for own properties. */
|
||
var hasOwnProperty = objectProto.hasOwnProperty;
|
||
|
||
/** Used to infer the `Object` constructor. */
|
||
var objectCtorString = funcToString.call(Object);
|
||
|
||
/**
|
||
* Used to resolve the
|
||
* [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
|
||
* of values.
|
||
*/
|
||
var objectToString = objectProto.toString;
|
||
|
||
/** Built-in value references. */
|
||
var getPrototype = overArg(Object.getPrototypeOf, Object);
|
||
|
||
/**
|
||
* Checks if `value` is object-like. A value is object-like if it's not `null`
|
||
* and has a `typeof` result of "object".
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is object-like, else `false`.
|
||
* @example
|
||
*
|
||
* _.isObjectLike({});
|
||
* // => true
|
||
*
|
||
* _.isObjectLike([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isObjectLike(_.noop);
|
||
* // => false
|
||
*
|
||
* _.isObjectLike(null);
|
||
* // => false
|
||
*/
|
||
function isObjectLike(value) {
|
||
return !!value && typeof value == 'object';
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is a plain object, that is, an object created by the
|
||
* `Object` constructor or one with a `[[Prototype]]` of `null`.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.8.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is a plain object, else `false`.
|
||
* @example
|
||
*
|
||
* function Foo() {
|
||
* this.a = 1;
|
||
* }
|
||
*
|
||
* _.isPlainObject(new Foo);
|
||
* // => false
|
||
*
|
||
* _.isPlainObject([1, 2, 3]);
|
||
* // => false
|
||
*
|
||
* _.isPlainObject({ 'x': 0, 'y': 0 });
|
||
* // => true
|
||
*
|
||
* _.isPlainObject(Object.create(null));
|
||
* // => true
|
||
*/
|
||
function isPlainObject(value) {
|
||
if (!isObjectLike(value) ||
|
||
objectToString.call(value) != objectTag || isHostObject(value)) {
|
||
return false;
|
||
}
|
||
var proto = getPrototype(value);
|
||
if (proto === null) {
|
||
return true;
|
||
}
|
||
var Ctor = hasOwnProperty.call(proto, 'constructor') && proto.constructor;
|
||
return (typeof Ctor == 'function' &&
|
||
Ctor instanceof Ctor && funcToString.call(Ctor) == objectCtorString);
|
||
}
|
||
|
||
module.exports = isPlainObject;


/***/ }),

/***/ 6969:
/***/ ((module) => {

/**
|
||
* lodash (Custom Build) <https://lodash.com/>
|
||
* Build: `lodash modularize exports="npm" -o ./`
|
||
* Copyright jQuery Foundation and other contributors <https://jquery.org/>
|
||
* Released under MIT license <https://lodash.com/license>
|
||
* Based on Underscore.js 1.8.3 <http://underscorejs.org/LICENSE>
|
||
* Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
|
||
*/
|
||
|
||
/** Used as the size to enable large array optimizations. */
|
||
var LARGE_ARRAY_SIZE = 200;
|
||
|
||
/** Used to stand-in for `undefined` hash values. */
|
||
var HASH_UNDEFINED = '__lodash_hash_undefined__';
|
||
|
||
/** Used as references for various `Number` constants. */
|
||
var INFINITY = 1 / 0,
|
||
MAX_SAFE_INTEGER = 9007199254740991;
|
||
|
||
/** `Object#toString` result references. */
|
||
var argsTag = '[object Arguments]',
|
||
funcTag = '[object Function]',
|
||
genTag = '[object GeneratorFunction]';
|
||
|
||
/**
|
||
* Used to match `RegExp`
|
||
* [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns).
|
||
*/
|
||
var reRegExpChar = /[\\^$.*+?()[\]{}|]/g;
|
||
|
||
/** Used to detect host constructors (Safari). */
|
||
var reIsHostCtor = /^\[object .+?Constructor\]$/;
|
||
|
||
/** Detect free variable `global` from Node.js. */
|
||
var freeGlobal = typeof global == 'object' && global && global.Object === Object && global;
|
||
|
||
/** Detect free variable `self`. */
|
||
var freeSelf = typeof self == 'object' && self && self.Object === Object && self;
|
||
|
||
/** Used as a reference to the global object. */
|
||
var root = freeGlobal || freeSelf || Function('return this')();
|
||
|
||
/**
|
||
* A faster alternative to `Function#apply`, this function invokes `func`
|
||
* with the `this` binding of `thisArg` and the arguments of `args`.
|
||
*
|
||
* @private
|
||
* @param {Function} func The function to invoke.
|
||
* @param {*} thisArg The `this` binding of `func`.
|
||
* @param {Array} args The arguments to invoke `func` with.
|
||
* @returns {*} Returns the result of `func`.
|
||
*/
|
||
function apply(func, thisArg, args) {
|
||
switch (args.length) {
|
||
case 0: return func.call(thisArg);
|
||
case 1: return func.call(thisArg, args[0]);
|
||
case 2: return func.call(thisArg, args[0], args[1]);
|
||
case 3: return func.call(thisArg, args[0], args[1], args[2]);
|
||
}
|
||
return func.apply(thisArg, args);
|
||
}
|
||
|
||
/**
|
||
* A specialized version of `_.includes` for arrays without support for
|
||
* specifying an index to search from.
|
||
*
|
||
* @private
|
||
* @param {Array} [array] The array to inspect.
|
||
* @param {*} target The value to search for.
|
||
* @returns {boolean} Returns `true` if `target` is found, else `false`.
|
||
*/
|
||
function arrayIncludes(array, value) {
|
||
var length = array ? array.length : 0;
|
||
return !!length && baseIndexOf(array, value, 0) > -1;
|
||
}
|
||
|
||
/**
|
||
* This function is like `arrayIncludes` except that it accepts a comparator.
|
||
*
|
||
* @private
|
||
* @param {Array} [array] The array to inspect.
|
||
* @param {*} target The value to search for.
|
||
* @param {Function} comparator The comparator invoked per element.
|
||
* @returns {boolean} Returns `true` if `target` is found, else `false`.
|
||
*/
|
||
function arrayIncludesWith(array, value, comparator) {
|
||
var index = -1,
|
||
length = array ? array.length : 0;
|
||
|
||
while (++index < length) {
|
||
if (comparator(value, array[index])) {
|
||
return true;
|
||
}
|
||
}
|
||
return false;
|
||
}
|
||
|
||
/**
|
||
* Appends the elements of `values` to `array`.
|
||
*
|
||
* @private
|
||
* @param {Array} array The array to modify.
|
||
* @param {Array} values The values to append.
|
||
* @returns {Array} Returns `array`.
|
||
*/
|
||
function arrayPush(array, values) {
|
||
var index = -1,
|
||
length = values.length,
|
||
offset = array.length;
|
||
|
||
while (++index < length) {
|
||
array[offset + index] = values[index];
|
||
}
|
||
return array;
|
||
}
|
||
|
||
/**
|
||
* The base implementation of `_.findIndex` and `_.findLastIndex` without
|
||
* support for iteratee shorthands.
|
||
*
|
||
* @private
|
||
* @param {Array} array The array to inspect.
|
||
* @param {Function} predicate The function invoked per iteration.
|
||
* @param {number} fromIndex The index to search from.
|
||
* @param {boolean} [fromRight] Specify iterating from right to left.
|
||
* @returns {number} Returns the index of the matched value, else `-1`.
|
||
*/
|
||
function baseFindIndex(array, predicate, fromIndex, fromRight) {
|
||
var length = array.length,
|
||
index = fromIndex + (fromRight ? 1 : -1);
|
||
|
||
while ((fromRight ? index-- : ++index < length)) {
|
||
if (predicate(array[index], index, array)) {
|
||
return index;
|
||
}
|
||
}
|
||
return -1;
|
||
}
|
||
|
||
/**
|
||
* The base implementation of `_.indexOf` without `fromIndex` bounds checks.
|
||
*
|
||
* @private
|
||
* @param {Array} array The array to inspect.
|
||
* @param {*} value The value to search for.
|
||
* @param {number} fromIndex The index to search from.
|
||
* @returns {number} Returns the index of the matched value, else `-1`.
|
||
*/
|
||
function baseIndexOf(array, value, fromIndex) {
|
||
if (value !== value) {
|
||
return baseFindIndex(array, baseIsNaN, fromIndex);
|
||
}
|
||
var index = fromIndex - 1,
|
||
length = array.length;
|
||
|
||
while (++index < length) {
|
||
if (array[index] === value) {
|
||
return index;
|
||
}
|
||
}
|
||
return -1;
|
||
}
|
||
|
||
/**
|
||
* The base implementation of `_.isNaN` without support for number objects.
|
||
*
|
||
* @private
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is `NaN`, else `false`.
|
||
*/
|
||
function baseIsNaN(value) {
|
||
return value !== value;
|
||
}
|
||
|
||
/**
|
||
* Checks if a cache value for `key` exists.
|
||
*
|
||
* @private
|
||
* @param {Object} cache The cache to query.
|
||
* @param {string} key The key of the entry to check.
|
||
* @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
|
||
*/
|
||
function cacheHas(cache, key) {
|
||
return cache.has(key);
|
||
}
|
||
|
||
/**
|
||
* Gets the value at `key` of `object`.
|
||
*
|
||
* @private
|
||
* @param {Object} [object] The object to query.
|
||
* @param {string} key The key of the property to get.
|
||
* @returns {*} Returns the property value.
|
||
*/
|
||
function getValue(object, key) {
|
||
return object == null ? undefined : object[key];
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is a host object in IE < 9.
|
||
*
|
||
* @private
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is a host object, else `false`.
|
||
*/
|
||
function isHostObject(value) {
|
||
// Many host objects are `Object` objects that can coerce to strings
|
||
// despite having improperly defined `toString` methods.
|
||
var result = false;
|
||
if (value != null && typeof value.toString != 'function') {
|
||
try {
|
||
result = !!(value + '');
|
||
} catch (e) {}
|
||
}
|
||
return result;
|
||
}
|
||
|
||
/**
|
||
* Converts `set` to an array of its values.
|
||
*
|
||
* @private
|
||
* @param {Object} set The set to convert.
|
||
* @returns {Array} Returns the values.
|
||
*/
|
||
function setToArray(set) {
|
||
var index = -1,
|
||
result = Array(set.size);
|
||
|
||
set.forEach(function(value) {
|
||
result[++index] = value;
|
||
});
|
||
return result;
|
||
}
|
||
|
||
/** Used for built-in method references. */
|
||
var arrayProto = Array.prototype,
|
||
funcProto = Function.prototype,
|
||
objectProto = Object.prototype;
|
||
|
||
/** Used to detect overreaching core-js shims. */
|
||
var coreJsData = root['__core-js_shared__'];
|
||
|
||
/** Used to detect methods masquerading as native. */
|
||
var maskSrcKey = (function() {
|
||
var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || '');
|
||
return uid ? ('Symbol(src)_1.' + uid) : '';
|
||
}());
|
||
|
||
/** Used to resolve the decompiled source of functions. */
|
||
var funcToString = funcProto.toString;
|
||
|
||
/** Used to check objects for own properties. */
|
||
var hasOwnProperty = objectProto.hasOwnProperty;
|
||
|
||
/**
|
||
* Used to resolve the
|
||
* [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
|
||
* of values.
|
||
*/
|
||
var objectToString = objectProto.toString;
|
||
|
||
/** Used to detect if a method is native. */
|
||
var reIsNative = RegExp('^' +
|
||
funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&')
|
||
.replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$'
|
||
);
|
||
|
||
/** Built-in value references. */
|
||
var Symbol = root.Symbol,
|
||
propertyIsEnumerable = objectProto.propertyIsEnumerable,
|
||
splice = arrayProto.splice,
|
||
spreadableSymbol = Symbol ? Symbol.isConcatSpreadable : undefined;
|
||
|
||
/* Built-in method references for those with the same name as other `lodash` methods. */
|
||
var nativeMax = Math.max;
|
||
|
||
/* Built-in method references that are verified to be native. */
|
||
var Map = getNative(root, 'Map'),
|
||
Set = getNative(root, 'Set'),
|
||
nativeCreate = getNative(Object, 'create');
|
||
|
||
/**
|
||
* Creates a hash object.
|
||
*
|
||
* @private
|
||
* @constructor
|
||
* @param {Array} [entries] The key-value pairs to cache.
|
||
*/
|
||
function Hash(entries) {
|
||
var index = -1,
|
||
length = entries ? entries.length : 0;
|
||
|
||
this.clear();
|
||
while (++index < length) {
|
||
var entry = entries[index];
|
||
this.set(entry[0], entry[1]);
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Removes all key-value entries from the hash.
|
||
*
|
||
* @private
|
||
* @name clear
|
||
* @memberOf Hash
|
||
*/
|
||
function hashClear() {
|
||
this.__data__ = nativeCreate ? nativeCreate(null) : {};
|
||
}
|
||
|
||
/**
|
||
* Removes `key` and its value from the hash.
|
||
*
|
||
* @private
|
||
* @name delete
|
||
* @memberOf Hash
|
||
* @param {Object} hash The hash to modify.
|
||
* @param {string} key The key of the value to remove.
|
||
* @returns {boolean} Returns `true` if the entry was removed, else `false`.
|
||
*/
|
||
function hashDelete(key) {
|
||
return this.has(key) && delete this.__data__[key];
|
||
}
|
||
|
||
/**
|
||
* Gets the hash value for `key`.
|
||
*
|
||
* @private
|
||
* @name get
|
||
* @memberOf Hash
|
||
* @param {string} key The key of the value to get.
|
||
* @returns {*} Returns the entry value.
|
||
*/
|
||
function hashGet(key) {
|
||
var data = this.__data__;
|
||
if (nativeCreate) {
|
||
var result = data[key];
|
||
return result === HASH_UNDEFINED ? undefined : result;
|
||
}
|
||
return hasOwnProperty.call(data, key) ? data[key] : undefined;
|
||
}
|
||
|
||
/**
|
||
* Checks if a hash value for `key` exists.
|
||
*
|
||
* @private
|
||
* @name has
|
||
* @memberOf Hash
|
||
* @param {string} key The key of the entry to check.
|
||
* @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
|
||
*/
|
||
function hashHas(key) {
|
||
var data = this.__data__;
|
||
return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key);
|
||
}
|
||
|
||
/**
|
||
* Sets the hash `key` to `value`.
|
||
*
|
||
* @private
|
||
* @name set
|
||
* @memberOf Hash
|
||
* @param {string} key The key of the value to set.
|
||
* @param {*} value The value to set.
|
||
* @returns {Object} Returns the hash instance.
|
||
*/
|
||
function hashSet(key, value) {
|
||
var data = this.__data__;
|
||
data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value;
|
||
return this;
|
||
}
|
||
|
||
// Add methods to `Hash`.
|
||
Hash.prototype.clear = hashClear;
|
||
Hash.prototype['delete'] = hashDelete;
|
||
Hash.prototype.get = hashGet;
|
||
Hash.prototype.has = hashHas;
|
||
Hash.prototype.set = hashSet;
|
||
|
||
/**
|
||
 * Creates a list cache object.
|
||
*
|
||
* @private
|
||
* @constructor
|
||
* @param {Array} [entries] The key-value pairs to cache.
|
||
*/
|
||
function ListCache(entries) {
|
||
var index = -1,
|
||
length = entries ? entries.length : 0;
|
||
|
||
this.clear();
|
||
while (++index < length) {
|
||
var entry = entries[index];
|
||
this.set(entry[0], entry[1]);
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Removes all key-value entries from the list cache.
|
||
*
|
||
* @private
|
||
* @name clear
|
||
* @memberOf ListCache
|
||
*/
|
||
function listCacheClear() {
|
||
this.__data__ = [];
|
||
}
|
||
|
||
/**
|
||
* Removes `key` and its value from the list cache.
|
||
*
|
||
* @private
|
||
* @name delete
|
||
* @memberOf ListCache
|
||
* @param {string} key The key of the value to remove.
|
||
* @returns {boolean} Returns `true` if the entry was removed, else `false`.
|
||
*/
|
||
function listCacheDelete(key) {
|
||
var data = this.__data__,
|
||
index = assocIndexOf(data, key);
|
||
|
||
if (index < 0) {
|
||
return false;
|
||
}
|
||
var lastIndex = data.length - 1;
|
||
if (index == lastIndex) {
|
||
data.pop();
|
||
} else {
|
||
splice.call(data, index, 1);
|
||
}
|
||
return true;
|
||
}
|
||
|
||
/**
|
||
* Gets the list cache value for `key`.
|
||
*
|
||
* @private
|
||
* @name get
|
||
* @memberOf ListCache
|
||
* @param {string} key The key of the value to get.
|
||
* @returns {*} Returns the entry value.
|
||
*/
|
||
function listCacheGet(key) {
|
||
var data = this.__data__,
|
||
index = assocIndexOf(data, key);
|
||
|
||
return index < 0 ? undefined : data[index][1];
|
||
}
|
||
|
||
/**
|
||
* Checks if a list cache value for `key` exists.
|
||
*
|
||
* @private
|
||
* @name has
|
||
* @memberOf ListCache
|
||
* @param {string} key The key of the entry to check.
|
||
* @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
|
||
*/
|
||
function listCacheHas(key) {
|
||
return assocIndexOf(this.__data__, key) > -1;
|
||
}
|
||
|
||
/**
|
||
* Sets the list cache `key` to `value`.
|
||
*
|
||
* @private
|
||
* @name set
|
||
* @memberOf ListCache
|
||
* @param {string} key The key of the value to set.
|
||
* @param {*} value The value to set.
|
||
* @returns {Object} Returns the list cache instance.
|
||
*/
|
||
function listCacheSet(key, value) {
|
||
var data = this.__data__,
|
||
index = assocIndexOf(data, key);
|
||
|
||
if (index < 0) {
|
||
data.push([key, value]);
|
||
} else {
|
||
data[index][1] = value;
|
||
}
|
||
return this;
|
||
}
|
||
|
||
// Add methods to `ListCache`.
|
||
ListCache.prototype.clear = listCacheClear;
|
||
ListCache.prototype['delete'] = listCacheDelete;
|
||
ListCache.prototype.get = listCacheGet;
|
||
ListCache.prototype.has = listCacheHas;
|
||
ListCache.prototype.set = listCacheSet;
|
||
|
||
/**
|
||
* Creates a map cache object to store key-value pairs.
|
||
*
|
||
* @private
|
||
* @constructor
|
||
* @param {Array} [entries] The key-value pairs to cache.
|
||
*/
|
||
function MapCache(entries) {
|
||
var index = -1,
|
||
length = entries ? entries.length : 0;
|
||
|
||
this.clear();
|
||
while (++index < length) {
|
||
var entry = entries[index];
|
||
this.set(entry[0], entry[1]);
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Removes all key-value entries from the map.
|
||
*
|
||
* @private
|
||
* @name clear
|
||
* @memberOf MapCache
|
||
*/
|
||
function mapCacheClear() {
|
||
this.__data__ = {
|
||
'hash': new Hash,
|
||
'map': new (Map || ListCache),
|
||
'string': new Hash
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Removes `key` and its value from the map.
|
||
*
|
||
* @private
|
||
* @name delete
|
||
* @memberOf MapCache
|
||
* @param {string} key The key of the value to remove.
|
||
* @returns {boolean} Returns `true` if the entry was removed, else `false`.
|
||
*/
|
||
function mapCacheDelete(key) {
|
||
return getMapData(this, key)['delete'](key);
|
||
}
|
||
|
||
/**
|
||
* Gets the map value for `key`.
|
||
*
|
||
* @private
|
||
* @name get
|
||
* @memberOf MapCache
|
||
* @param {string} key The key of the value to get.
|
||
* @returns {*} Returns the entry value.
|
||
*/
|
||
function mapCacheGet(key) {
|
||
return getMapData(this, key).get(key);
|
||
}
|
||
|
||
/**
|
||
* Checks if a map value for `key` exists.
|
||
*
|
||
* @private
|
||
* @name has
|
||
* @memberOf MapCache
|
||
* @param {string} key The key of the entry to check.
|
||
* @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
|
||
*/
|
||
function mapCacheHas(key) {
|
||
return getMapData(this, key).has(key);
|
||
}
|
||
|
||
/**
|
||
* Sets the map `key` to `value`.
|
||
*
|
||
* @private
|
||
* @name set
|
||
* @memberOf MapCache
|
||
* @param {string} key The key of the value to set.
|
||
* @param {*} value The value to set.
|
||
* @returns {Object} Returns the map cache instance.
|
||
*/
|
||
function mapCacheSet(key, value) {
|
||
getMapData(this, key).set(key, value);
|
||
return this;
|
||
}
|
||
|
||
// Add methods to `MapCache`.
|
||
MapCache.prototype.clear = mapCacheClear;
|
||
MapCache.prototype['delete'] = mapCacheDelete;
|
||
MapCache.prototype.get = mapCacheGet;
|
||
MapCache.prototype.has = mapCacheHas;
|
||
MapCache.prototype.set = mapCacheSet;
|
||
|
||
/**
|
||
*
|
||
* Creates an array cache object to store unique values.
|
||
*
|
||
* @private
|
||
* @constructor
|
||
* @param {Array} [values] The values to cache.
|
||
*/
|
||
function SetCache(values) {
|
||
var index = -1,
|
||
length = values ? values.length : 0;
|
||
|
||
this.__data__ = new MapCache;
|
||
while (++index < length) {
|
||
this.add(values[index]);
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Adds `value` to the array cache.
|
||
*
|
||
* @private
|
||
* @name add
|
||
* @memberOf SetCache
|
||
* @alias push
|
||
* @param {*} value The value to cache.
|
||
* @returns {Object} Returns the cache instance.
|
||
*/
|
||
function setCacheAdd(value) {
|
||
this.__data__.set(value, HASH_UNDEFINED);
|
||
return this;
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is in the array cache.
|
||
*
|
||
* @private
|
||
* @name has
|
||
* @memberOf SetCache
|
||
* @param {*} value The value to search for.
|
||
 * @returns {boolean} Returns `true` if `value` is found, else `false`.
|
||
*/
|
||
function setCacheHas(value) {
|
||
return this.__data__.has(value);
|
||
}
|
||
|
||
// Add methods to `SetCache`.
|
||
SetCache.prototype.add = SetCache.prototype.push = setCacheAdd;
|
||
SetCache.prototype.has = setCacheHas;
|
||
|
||
/**
|
||
* Gets the index at which the `key` is found in `array` of key-value pairs.
|
||
*
|
||
* @private
|
||
* @param {Array} array The array to inspect.
|
||
* @param {*} key The key to search for.
|
||
* @returns {number} Returns the index of the matched value, else `-1`.
|
||
*/
|
||
function assocIndexOf(array, key) {
|
||
var length = array.length;
|
||
while (length--) {
|
||
if (eq(array[length][0], key)) {
|
||
return length;
|
||
}
|
||
}
|
||
return -1;
|
||
}
|
||
|
||
/**
|
||
* The base implementation of `_.flatten` with support for restricting flattening.
|
||
*
|
||
* @private
|
||
* @param {Array} array The array to flatten.
|
||
* @param {number} depth The maximum recursion depth.
|
||
 * @param {Function} [predicate=isFlattenable] The function invoked per iteration.
|
||
* @param {boolean} [isStrict] Restrict to values that pass `predicate` checks.
|
||
* @param {Array} [result=[]] The initial result value.
|
||
* @returns {Array} Returns the new flattened array.
|
||
*/
|
||
function baseFlatten(array, depth, predicate, isStrict, result) {
|
||
var index = -1,
|
||
length = array.length;
|
||
|
||
predicate || (predicate = isFlattenable);
|
||
result || (result = []);
|
||
|
||
while (++index < length) {
|
||
var value = array[index];
|
||
if (depth > 0 && predicate(value)) {
|
||
if (depth > 1) {
|
||
// Recursively flatten arrays (susceptible to call stack limits).
|
||
baseFlatten(value, depth - 1, predicate, isStrict, result);
|
||
} else {
|
||
arrayPush(result, value);
|
||
}
|
||
} else if (!isStrict) {
|
||
result[result.length] = value;
|
||
}
|
||
}
|
||
return result;
|
||
}
|
||
|
||
/**
|
||
* The base implementation of `_.isNative` without bad shim checks.
|
||
*
|
||
* @private
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is a native function,
|
||
* else `false`.
|
||
*/
|
||
function baseIsNative(value) {
|
||
if (!isObject(value) || isMasked(value)) {
|
||
return false;
|
||
}
|
||
var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor;
|
||
return pattern.test(toSource(value));
|
||
}
|
||
|
||
/**
|
||
* The base implementation of `_.rest` which doesn't validate or coerce arguments.
|
||
*
|
||
* @private
|
||
* @param {Function} func The function to apply a rest parameter to.
|
||
* @param {number} [start=func.length-1] The start position of the rest parameter.
|
||
* @returns {Function} Returns the new function.
|
||
*/
|
||
function baseRest(func, start) {
|
||
start = nativeMax(start === undefined ? (func.length - 1) : start, 0);
|
||
return function() {
|
||
var args = arguments,
|
||
index = -1,
|
||
length = nativeMax(args.length - start, 0),
|
||
array = Array(length);
|
||
|
||
while (++index < length) {
|
||
array[index] = args[start + index];
|
||
}
|
||
index = -1;
|
||
var otherArgs = Array(start + 1);
|
||
while (++index < start) {
|
||
otherArgs[index] = args[index];
|
||
}
|
||
otherArgs[start] = array;
|
||
return apply(func, this, otherArgs);
|
||
};
|
||
}
|
||
|
||
/**
|
||
* The base implementation of `_.uniqBy` without support for iteratee shorthands.
|
||
*
|
||
* @private
|
||
* @param {Array} array The array to inspect.
|
||
* @param {Function} [iteratee] The iteratee invoked per element.
|
||
* @param {Function} [comparator] The comparator invoked per element.
|
||
* @returns {Array} Returns the new duplicate free array.
|
||
*/
|
||
function baseUniq(array, iteratee, comparator) {
|
||
var index = -1,
|
||
includes = arrayIncludes,
|
||
length = array.length,
|
||
isCommon = true,
|
||
result = [],
|
||
seen = result;
|
||
|
||
if (comparator) {
|
||
isCommon = false;
|
||
includes = arrayIncludesWith;
|
||
}
|
||
else if (length >= LARGE_ARRAY_SIZE) {
|
||
var set = iteratee ? null : createSet(array);
|
||
if (set) {
|
||
return setToArray(set);
|
||
}
|
||
isCommon = false;
|
||
includes = cacheHas;
|
||
seen = new SetCache;
|
||
}
|
||
else {
|
||
seen = iteratee ? [] : result;
|
||
}
|
||
outer:
|
||
while (++index < length) {
|
||
var value = array[index],
|
||
computed = iteratee ? iteratee(value) : value;
|
||
|
||
value = (comparator || value !== 0) ? value : 0;
|
||
if (isCommon && computed === computed) {
|
||
var seenIndex = seen.length;
|
||
while (seenIndex--) {
|
||
if (seen[seenIndex] === computed) {
|
||
continue outer;
|
||
}
|
||
}
|
||
if (iteratee) {
|
||
seen.push(computed);
|
||
}
|
||
result.push(value);
|
||
}
|
||
else if (!includes(seen, computed, comparator)) {
|
||
if (seen !== result) {
|
||
seen.push(computed);
|
||
}
|
||
result.push(value);
|
||
}
|
||
}
|
||
return result;
|
||
}
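
/*
 * Illustrative behaviour of baseUniq (a sketch, assuming no iteratee and no
 * comparator): inputs shorter than LARGE_ARRAY_SIZE are deduplicated against
 * the growing `result` array itself, while larger inputs go through createSet
 * (native Set insertion order) or a SetCache when a suitable Set is unavailable.
 *
 *   baseUniq([2, 1, 2]);
 *   // => [2, 1]
 */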
|
||
|
||
/**
|
||
* Creates a set object of `values`.
|
||
*
|
||
* @private
|
||
* @param {Array} values The values to add to the set.
|
||
* @returns {Object} Returns the new set.
|
||
*/
|
||
var createSet = !(Set && (1 / setToArray(new Set([,-0]))[1]) == INFINITY) ? noop : function(values) {
|
||
return new Set(values);
|
||
};
|
||
|
||
/**
|
||
* Gets the data for `map`.
|
||
*
|
||
* @private
|
||
* @param {Object} map The map to query.
|
||
* @param {string} key The reference key.
|
||
* @returns {*} Returns the map data.
|
||
*/
|
||
function getMapData(map, key) {
|
||
var data = map.__data__;
|
||
return isKeyable(key)
|
||
? data[typeof key == 'string' ? 'string' : 'hash']
|
||
: data.map;
|
||
}
|
||
|
||
/**
|
||
* Gets the native function at `key` of `object`.
|
||
*
|
||
* @private
|
||
* @param {Object} object The object to query.
|
||
* @param {string} key The key of the method to get.
|
||
* @returns {*} Returns the function if it's native, else `undefined`.
|
||
*/
|
||
function getNative(object, key) {
|
||
var value = getValue(object, key);
|
||
return baseIsNative(value) ? value : undefined;
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is a flattenable `arguments` object or array.
|
||
*
|
||
* @private
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is flattenable, else `false`.
|
||
*/
|
||
function isFlattenable(value) {
|
||
return isArray(value) || isArguments(value) ||
|
||
!!(spreadableSymbol && value && value[spreadableSymbol]);
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is suitable for use as unique object key.
|
||
*
|
||
* @private
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is suitable, else `false`.
|
||
*/
|
||
function isKeyable(value) {
|
||
var type = typeof value;
|
||
return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean')
|
||
? (value !== '__proto__')
|
||
: (value === null);
|
||
}
|
||
|
||
/**
|
||
* Checks if `func` has its source masked.
|
||
*
|
||
* @private
|
||
* @param {Function} func The function to check.
|
||
* @returns {boolean} Returns `true` if `func` is masked, else `false`.
|
||
*/
|
||
function isMasked(func) {
|
||
return !!maskSrcKey && (maskSrcKey in func);
|
||
}
|
||
|
||
/**
|
||
* Converts `func` to its source code.
|
||
*
|
||
* @private
|
||
* @param {Function} func The function to process.
|
||
* @returns {string} Returns the source code.
|
||
*/
|
||
function toSource(func) {
|
||
if (func != null) {
|
||
try {
|
||
return funcToString.call(func);
|
||
} catch (e) {}
|
||
try {
|
||
return (func + '');
|
||
} catch (e) {}
|
||
}
|
||
return '';
|
||
}
|
||
|
||
/**
|
||
* Creates an array of unique values, in order, from all given arrays using
|
||
* [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
|
||
* for equality comparisons.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Array
|
||
* @param {...Array} [arrays] The arrays to inspect.
|
||
* @returns {Array} Returns the new array of combined values.
|
||
* @example
|
||
*
|
||
* _.union([2], [1, 2]);
|
||
* // => [2, 1]
|
||
*/
|
||
var union = baseRest(function(arrays) {
|
||
return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true));
|
||
});
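
/*
 * Illustrative usage of `union` (a sketch): array-like arguments are flattened
 * one level (non array-like arguments are dropped) before baseUniq removes
 * duplicates, so
 *
 *   union([2], [1, 2], 3, [[4]]);
 *   // => [2, 1, [4]]
 */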
|
||
|
||
/**
|
||
* Performs a
|
||
* [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
|
||
* comparison between two values to determine if they are equivalent.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to compare.
|
||
* @param {*} other The other value to compare.
|
||
* @returns {boolean} Returns `true` if the values are equivalent, else `false`.
|
||
* @example
|
||
*
|
||
* var object = { 'a': 1 };
|
||
* var other = { 'a': 1 };
|
||
*
|
||
* _.eq(object, object);
|
||
* // => true
|
||
*
|
||
* _.eq(object, other);
|
||
* // => false
|
||
*
|
||
* _.eq('a', 'a');
|
||
* // => true
|
||
*
|
||
* _.eq('a', Object('a'));
|
||
* // => false
|
||
*
|
||
* _.eq(NaN, NaN);
|
||
* // => true
|
||
*/
|
||
function eq(value, other) {
|
||
return value === other || (value !== value && other !== other);
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is likely an `arguments` object.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is an `arguments` object,
|
||
* else `false`.
|
||
* @example
|
||
*
|
||
* _.isArguments(function() { return arguments; }());
|
||
* // => true
|
||
*
|
||
* _.isArguments([1, 2, 3]);
|
||
* // => false
|
||
*/
|
||
function isArguments(value) {
|
||
// Safari 8.1 makes `arguments.callee` enumerable in strict mode.
|
||
return isArrayLikeObject(value) && hasOwnProperty.call(value, 'callee') &&
|
||
(!propertyIsEnumerable.call(value, 'callee') || objectToString.call(value) == argsTag);
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is classified as an `Array` object.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is an array, else `false`.
|
||
* @example
|
||
*
|
||
* _.isArray([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isArray(document.body.children);
|
||
* // => false
|
||
*
|
||
* _.isArray('abc');
|
||
* // => false
|
||
*
|
||
* _.isArray(_.noop);
|
||
* // => false
|
||
*/
|
||
var isArray = Array.isArray;
|
||
|
||
/**
|
||
* Checks if `value` is array-like. A value is considered array-like if it's
|
||
* not a function and has a `value.length` that's an integer greater than or
|
||
* equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is array-like, else `false`.
|
||
* @example
|
||
*
|
||
* _.isArrayLike([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isArrayLike(document.body.children);
|
||
* // => true
|
||
*
|
||
* _.isArrayLike('abc');
|
||
* // => true
|
||
*
|
||
* _.isArrayLike(_.noop);
|
||
* // => false
|
||
*/
|
||
function isArrayLike(value) {
|
||
return value != null && isLength(value.length) && !isFunction(value);
|
||
}
|
||
|
||
/**
|
||
* This method is like `_.isArrayLike` except that it also checks if `value`
|
||
* is an object.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is an array-like object,
|
||
* else `false`.
|
||
* @example
|
||
*
|
||
* _.isArrayLikeObject([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isArrayLikeObject(document.body.children);
|
||
* // => true
|
||
*
|
||
* _.isArrayLikeObject('abc');
|
||
* // => false
|
||
*
|
||
* _.isArrayLikeObject(_.noop);
|
||
* // => false
|
||
*/
|
||
function isArrayLikeObject(value) {
|
||
return isObjectLike(value) && isArrayLike(value);
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is classified as a `Function` object.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is a function, else `false`.
|
||
* @example
|
||
*
|
||
* _.isFunction(_);
|
||
* // => true
|
||
*
|
||
* _.isFunction(/abc/);
|
||
* // => false
|
||
*/
|
||
function isFunction(value) {
|
||
// The use of `Object#toString` avoids issues with the `typeof` operator
|
||
// in Safari 8-9 which returns 'object' for typed array and other constructors.
|
||
var tag = isObject(value) ? objectToString.call(value) : '';
|
||
return tag == funcTag || tag == genTag;
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is a valid array-like length.
|
||
*
|
||
* **Note:** This method is loosely based on
|
||
* [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength).
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is a valid length, else `false`.
|
||
* @example
|
||
*
|
||
* _.isLength(3);
|
||
* // => true
|
||
*
|
||
* _.isLength(Number.MIN_VALUE);
|
||
* // => false
|
||
*
|
||
* _.isLength(Infinity);
|
||
* // => false
|
||
*
|
||
* _.isLength('3');
|
||
* // => false
|
||
*/
|
||
function isLength(value) {
|
||
return typeof value == 'number' &&
|
||
value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER;
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is the
|
||
* [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types)
|
||
* of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is an object, else `false`.
|
||
* @example
|
||
*
|
||
* _.isObject({});
|
||
* // => true
|
||
*
|
||
* _.isObject([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isObject(_.noop);
|
||
* // => true
|
||
*
|
||
* _.isObject(null);
|
||
* // => false
|
||
*/
|
||
function isObject(value) {
|
||
var type = typeof value;
|
||
return !!value && (type == 'object' || type == 'function');
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is object-like. A value is object-like if it's not `null`
|
||
* and has a `typeof` result of "object".
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is object-like, else `false`.
|
||
* @example
|
||
*
|
||
* _.isObjectLike({});
|
||
* // => true
|
||
*
|
||
* _.isObjectLike([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isObjectLike(_.noop);
|
||
* // => false
|
||
*
|
||
* _.isObjectLike(null);
|
||
* // => false
|
||
*/
|
||
function isObjectLike(value) {
|
||
return !!value && typeof value == 'object';
|
||
}
|
||
|
||
/**
|
||
* This method returns `undefined`.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 2.3.0
|
||
* @category Util
|
||
* @example
|
||
*
|
||
* _.times(2, _.noop);
|
||
* // => [undefined, undefined]
|
||
*/
|
||
function noop() {
|
||
// No operation performed.
|
||
}
|
||
|
||
module.exports = union;
|
||
|
||
|
||
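// Usage sketch (added note, not part of the upstream lodash.union source): the
// exported `union` dedupes with the SameValueZero comparison implemented by
// `eq` above, so `NaN` values collapse to a single entry:
//
//   union([2], [1, 2]);       // => [2, 1]
//   union([NaN], [NaN, 1]);   // => [NaN, 1]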
/***/ }),
|
||
|
||
/***/ 5423:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

(function(){
  var crypt = __nccwpck_require__(5438),
      utf8 = __nccwpck_require__(2295).utf8,
      isBuffer = __nccwpck_require__(3125),
      bin = __nccwpck_require__(2295).bin,

  // The core
  md5 = function (message, options) {
    // Convert to byte array
    if (message.constructor == String)
      if (options && options.encoding === 'binary')
        message = bin.stringToBytes(message);
      else
        message = utf8.stringToBytes(message);
    else if (isBuffer(message))
      message = Array.prototype.slice.call(message, 0);
    else if (!Array.isArray(message) && message.constructor !== Uint8Array)
      message = message.toString();
    // else, assume byte array already

    var m = crypt.bytesToWords(message),
        l = message.length * 8,
        a = 1732584193,
        b = -271733879,
        c = -1732584194,
        d = 271733878;

    // Swap endian
    for (var i = 0; i < m.length; i++) {
      m[i] = ((m[i] << 8) | (m[i] >>> 24)) & 0x00FF00FF |
             ((m[i] << 24) | (m[i] >>> 8)) & 0xFF00FF00;
    }

    // Padding
    m[l >>> 5] |= 0x80 << (l % 32);
    m[(((l + 64) >>> 9) << 4) + 14] = l;

    // Method shortcuts
    var FF = md5._ff,
        GG = md5._gg,
        HH = md5._hh,
        II = md5._ii;

    for (var i = 0; i < m.length; i += 16) {

      var aa = a,
          bb = b,
          cc = c,
          dd = d;

      a = FF(a, b, c, d, m[i+ 0], 7, -680876936);
      d = FF(d, a, b, c, m[i+ 1], 12, -389564586);
      c = FF(c, d, a, b, m[i+ 2], 17, 606105819);
      b = FF(b, c, d, a, m[i+ 3], 22, -1044525330);
      a = FF(a, b, c, d, m[i+ 4], 7, -176418897);
      d = FF(d, a, b, c, m[i+ 5], 12, 1200080426);
      c = FF(c, d, a, b, m[i+ 6], 17, -1473231341);
      b = FF(b, c, d, a, m[i+ 7], 22, -45705983);
      a = FF(a, b, c, d, m[i+ 8], 7, 1770035416);
      d = FF(d, a, b, c, m[i+ 9], 12, -1958414417);
      c = FF(c, d, a, b, m[i+10], 17, -42063);
      b = FF(b, c, d, a, m[i+11], 22, -1990404162);
      a = FF(a, b, c, d, m[i+12], 7, 1804603682);
      d = FF(d, a, b, c, m[i+13], 12, -40341101);
      c = FF(c, d, a, b, m[i+14], 17, -1502002290);
      b = FF(b, c, d, a, m[i+15], 22, 1236535329);

      a = GG(a, b, c, d, m[i+ 1], 5, -165796510);
      d = GG(d, a, b, c, m[i+ 6], 9, -1069501632);
      c = GG(c, d, a, b, m[i+11], 14, 643717713);
      b = GG(b, c, d, a, m[i+ 0], 20, -373897302);
      a = GG(a, b, c, d, m[i+ 5], 5, -701558691);
      d = GG(d, a, b, c, m[i+10], 9, 38016083);
      c = GG(c, d, a, b, m[i+15], 14, -660478335);
      b = GG(b, c, d, a, m[i+ 4], 20, -405537848);
      a = GG(a, b, c, d, m[i+ 9], 5, 568446438);
      d = GG(d, a, b, c, m[i+14], 9, -1019803690);
      c = GG(c, d, a, b, m[i+ 3], 14, -187363961);
      b = GG(b, c, d, a, m[i+ 8], 20, 1163531501);
      a = GG(a, b, c, d, m[i+13], 5, -1444681467);
      d = GG(d, a, b, c, m[i+ 2], 9, -51403784);
      c = GG(c, d, a, b, m[i+ 7], 14, 1735328473);
      b = GG(b, c, d, a, m[i+12], 20, -1926607734);

      a = HH(a, b, c, d, m[i+ 5], 4, -378558);
      d = HH(d, a, b, c, m[i+ 8], 11, -2022574463);
      c = HH(c, d, a, b, m[i+11], 16, 1839030562);
      b = HH(b, c, d, a, m[i+14], 23, -35309556);
      a = HH(a, b, c, d, m[i+ 1], 4, -1530992060);
      d = HH(d, a, b, c, m[i+ 4], 11, 1272893353);
      c = HH(c, d, a, b, m[i+ 7], 16, -155497632);
      b = HH(b, c, d, a, m[i+10], 23, -1094730640);
      a = HH(a, b, c, d, m[i+13], 4, 681279174);
      d = HH(d, a, b, c, m[i+ 0], 11, -358537222);
      c = HH(c, d, a, b, m[i+ 3], 16, -722521979);
      b = HH(b, c, d, a, m[i+ 6], 23, 76029189);
      a = HH(a, b, c, d, m[i+ 9], 4, -640364487);
      d = HH(d, a, b, c, m[i+12], 11, -421815835);
      c = HH(c, d, a, b, m[i+15], 16, 530742520);
      b = HH(b, c, d, a, m[i+ 2], 23, -995338651);

      a = II(a, b, c, d, m[i+ 0], 6, -198630844);
      d = II(d, a, b, c, m[i+ 7], 10, 1126891415);
      c = II(c, d, a, b, m[i+14], 15, -1416354905);
      b = II(b, c, d, a, m[i+ 5], 21, -57434055);
      a = II(a, b, c, d, m[i+12], 6, 1700485571);
      d = II(d, a, b, c, m[i+ 3], 10, -1894986606);
      c = II(c, d, a, b, m[i+10], 15, -1051523);
      b = II(b, c, d, a, m[i+ 1], 21, -2054922799);
      a = II(a, b, c, d, m[i+ 8], 6, 1873313359);
      d = II(d, a, b, c, m[i+15], 10, -30611744);
      c = II(c, d, a, b, m[i+ 6], 15, -1560198380);
      b = II(b, c, d, a, m[i+13], 21, 1309151649);
      a = II(a, b, c, d, m[i+ 4], 6, -145523070);
      d = II(d, a, b, c, m[i+11], 10, -1120210379);
      c = II(c, d, a, b, m[i+ 2], 15, 718787259);
      b = II(b, c, d, a, m[i+ 9], 21, -343485551);

      a = (a + aa) >>> 0;
      b = (b + bb) >>> 0;
      c = (c + cc) >>> 0;
      d = (d + dd) >>> 0;
    }

    return crypt.endian([a, b, c, d]);
  };

  // Auxiliary functions
  md5._ff = function (a, b, c, d, x, s, t) {
    var n = a + (b & c | ~b & d) + (x >>> 0) + t;
    return ((n << s) | (n >>> (32 - s))) + b;
  };
  md5._gg = function (a, b, c, d, x, s, t) {
    var n = a + (b & d | c & ~d) + (x >>> 0) + t;
    return ((n << s) | (n >>> (32 - s))) + b;
  };
  md5._hh = function (a, b, c, d, x, s, t) {
    var n = a + (b ^ c ^ d) + (x >>> 0) + t;
    return ((n << s) | (n >>> (32 - s))) + b;
  };
  md5._ii = function (a, b, c, d, x, s, t) {
    var n = a + (c ^ (b | ~d)) + (x >>> 0) + t;
    return ((n << s) | (n >>> (32 - s))) + b;
  };

  // Package private blocksize
  md5._blocksize = 16;
  md5._digestsize = 16;

  module.exports = function (message, options) {
    if (message === undefined || message === null)
      throw new Error('Illegal argument ' + message);

    var digestbytes = crypt.wordsToBytes(md5(message, options));
    return options && options.asBytes ? digestbytes :
        options && options.asString ? bin.bytesToString(digestbytes) :
        crypt.bytesToHex(digestbytes);
  };

})();
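// Usage sketch (added note, not part of the upstream md5 package source): the
// exported function accepts a string, Buffer, or byte array and returns a hex
// digest by default; `asBytes`/`asString` change the output format and
// `encoding: 'binary'` skips the UTF-8 conversion above.
//
//   var md5 = __nccwpck_require__(5423);
//   md5('message');                     // => 32-character hex digest
//   md5('message', { asBytes: true });  // => array of 16 byte values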
/***/ }),
|
||
|
||
/***/ 7066:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

module.exports = minimatch
minimatch.Minimatch = Minimatch

var path = { sep: '/' }
try {
  path = __nccwpck_require__(5622)
} catch (er) {}

var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
var expand = __nccwpck_require__(6363)

var plTypes = {
  '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
  '?': { open: '(?:', close: ')?' },
  '+': { open: '(?:', close: ')+' },
  '*': { open: '(?:', close: ')*' },
  '@': { open: '(?:', close: ')' }
}

// any single thing other than /
// don't need to escape / when using new RegExp()
var qmark = '[^/]'

// * => any number of characters
var star = qmark + '*?'

// ** when dots are allowed. Anything goes, except .. and .
// not (^ or / followed by one or two dots followed by $ or /),
// followed by anything, any number of times.
var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'

// not a ^ or / followed by a dot,
// followed by anything, any number of times.
var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'

// characters that need to be escaped in RegExp.
var reSpecials = charSet('().*{}+?[]^$\\!')

// "abc" -> { a:true, b:true, c:true }
function charSet (s) {
  return s.split('').reduce(function (set, c) {
    set[c] = true
    return set
  }, {})
}

// normalizes slashes.
var slashSplit = /\/+/

minimatch.filter = filter
function filter (pattern, options) {
  options = options || {}
  return function (p, i, list) {
    return minimatch(p, pattern, options)
  }
}

function ext (a, b) {
  a = a || {}
  b = b || {}
  var t = {}
  Object.keys(b).forEach(function (k) {
    t[k] = b[k]
  })
  Object.keys(a).forEach(function (k) {
    t[k] = a[k]
  })
  return t
}

minimatch.defaults = function (def) {
  if (!def || !Object.keys(def).length) return minimatch

  var orig = minimatch

  var m = function minimatch (p, pattern, options) {
    return orig.minimatch(p, pattern, ext(def, options))
  }

  m.Minimatch = function Minimatch (pattern, options) {
    return new orig.Minimatch(pattern, ext(def, options))
  }

  return m
}

Minimatch.defaults = function (def) {
  if (!def || !Object.keys(def).length) return Minimatch
  return minimatch.defaults(def).Minimatch
}

function minimatch (p, pattern, options) {
  if (typeof pattern !== 'string') {
    throw new TypeError('glob pattern string required')
  }

  if (!options) options = {}

  // shortcut: comments match nothing.
  if (!options.nocomment && pattern.charAt(0) === '#') {
    return false
  }

  // "" only matches ""
  if (pattern.trim() === '') return p === ''

  return new Minimatch(pattern, options).match(p)
}

function Minimatch (pattern, options) {
  if (!(this instanceof Minimatch)) {
    return new Minimatch(pattern, options)
  }

  if (typeof pattern !== 'string') {
    throw new TypeError('glob pattern string required')
  }

  if (!options) options = {}
  pattern = pattern.trim()

  // windows support: need to use /, not \
  if (path.sep !== '/') {
    pattern = pattern.split(path.sep).join('/')
  }

  this.options = options
  this.set = []
  this.pattern = pattern
  this.regexp = null
  this.negate = false
  this.comment = false
  this.empty = false

  // make the set of regexps etc.
  this.make()
}

Minimatch.prototype.debug = function () {}

Minimatch.prototype.make = make
function make () {
  // don't do it more than once.
  if (this._made) return

  var pattern = this.pattern
  var options = this.options

  // empty patterns and comments match nothing.
  if (!options.nocomment && pattern.charAt(0) === '#') {
    this.comment = true
    return
  }
  if (!pattern) {
    this.empty = true
    return
  }

  // step 1: figure out negation, etc.
  this.parseNegate()

  // step 2: expand braces
  var set = this.globSet = this.braceExpand()

  if (options.debug) this.debug = console.error

  this.debug(this.pattern, set)

  // step 3: now we have a set, so turn each one into a series of path-portion
  // matching patterns.
  // These will be regexps, except in the case of "**", which is
  // set to the GLOBSTAR object for globstar behavior,
  // and will not contain any / characters
  set = this.globParts = set.map(function (s) {
    return s.split(slashSplit)
  })

  this.debug(this.pattern, set)

  // glob --> regexps
  set = set.map(function (s, si, set) {
    return s.map(this.parse, this)
  }, this)

  this.debug(this.pattern, set)

  // filter out everything that didn't compile properly.
  set = set.filter(function (s) {
    return s.indexOf(false) === -1
  })

  this.debug(this.pattern, set)

  this.set = set
}

Minimatch.prototype.parseNegate = parseNegate
function parseNegate () {
  var pattern = this.pattern
  var negate = false
  var options = this.options
  var negateOffset = 0

  if (options.nonegate) return

  for (var i = 0, l = pattern.length
    ; i < l && pattern.charAt(i) === '!'
    ; i++) {
    negate = !negate
    negateOffset++
  }

  if (negateOffset) this.pattern = pattern.substr(negateOffset)
  this.negate = negate
}
// Brace expansion:
// a{b,c}d -> abd acd
// a{b,}c -> abc ac
// a{0..3}d -> a0d a1d a2d a3d
// a{b,c{d,e}f}g -> abg acdfg acefg
// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
//
// Invalid sets are not expanded.
// a{2..}b -> a{2..}b
// a{b}c -> a{b}c
minimatch.braceExpand = function (pattern, options) {
  return braceExpand(pattern, options)
}

Minimatch.prototype.braceExpand = braceExpand

function braceExpand (pattern, options) {
  if (!options) {
    if (this instanceof Minimatch) {
      options = this.options
    } else {
      options = {}
    }
  }

  pattern = typeof pattern === 'undefined'
    ? this.pattern : pattern

  if (typeof pattern === 'undefined') {
    throw new TypeError('undefined pattern')
  }

  if (options.nobrace ||
    !pattern.match(/\{.*\}/)) {
    // shortcut. no need to expand.
    return [pattern]
  }

  return expand(pattern)
}
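// Usage sketch (added note, not part of the upstream minimatch source):
// braceExpand only performs the expansion step documented above; glob magic
// such as `*` is handled later by parse(). Following the examples in the
// comment block:
//
//   minimatch.braceExpand('a{b,c}d')   // => ['abd', 'acd']
//   minimatch.braceExpand('a{2..}b')   // => ['a{2..}b']   (invalid set, unchanged)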
// parse a component of the expanded set.
|
||
// At this point, no pattern may contain "/" in it
|
||
// so we're going to return a 2d array, where each entry is the full
|
||
// pattern, split on '/', and then turned into a regular expression.
|
||
// A regexp is made at the end which joins each array with an
|
||
// escaped /, and another full one which joins each regexp with |.
|
||
//
|
||
// Following the lead of Bash 4.1, note that "**" only has special meaning
|
||
// when it is the *only* thing in a path portion. Otherwise, any series
|
||
// of * is equivalent to a single *. Globstar behavior is enabled by
|
||
// default, and can be disabled by setting options.noglobstar.
|
||
Minimatch.prototype.parse = parse
|
||
var SUBPARSE = {}
|
||
function parse (pattern, isSub) {
|
||
if (pattern.length > 1024 * 64) {
|
||
throw new TypeError('pattern is too long')
|
||
}
|
||
|
||
var options = this.options
|
||
|
||
// shortcuts
|
||
if (!options.noglobstar && pattern === '**') return GLOBSTAR
|
||
if (pattern === '') return ''
|
||
|
||
var re = ''
|
||
var hasMagic = !!options.nocase
|
||
var escaping = false
|
||
// ? => one single character
|
||
var patternListStack = []
|
||
var negativeLists = []
|
||
var stateChar
|
||
var inClass = false
|
||
var reClassStart = -1
|
||
var classStart = -1
|
||
// . and .. never match anything that doesn't start with .,
|
||
// even when options.dot is set.
|
||
var patternStart = pattern.charAt(0) === '.' ? '' // anything
|
||
// not (start or / followed by . or .. followed by / or end)
|
||
: options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
|
||
: '(?!\\.)'
|
||
var self = this
|
||
|
||
function clearStateChar () {
|
||
if (stateChar) {
|
||
// we had some state-tracking character
|
||
// that wasn't consumed by this pass.
|
||
switch (stateChar) {
|
||
case '*':
|
||
re += star
|
||
hasMagic = true
|
||
break
|
||
case '?':
|
||
re += qmark
|
||
hasMagic = true
|
||
break
|
||
default:
|
||
re += '\\' + stateChar
|
||
break
|
||
}
|
||
self.debug('clearStateChar %j %j', stateChar, re)
|
||
stateChar = false
|
||
}
|
||
}
|
||
|
||
for (var i = 0, len = pattern.length, c
|
||
; (i < len) && (c = pattern.charAt(i))
|
||
; i++) {
|
||
this.debug('%s\t%s %s %j', pattern, i, re, c)
|
||
|
||
// skip over any that are escaped.
|
||
if (escaping && reSpecials[c]) {
|
||
re += '\\' + c
|
||
escaping = false
|
||
continue
|
||
}
|
||
|
||
switch (c) {
|
||
case '/':
|
||
// completely not allowed, even escaped.
|
||
// Should already be path-split by now.
|
||
return false
|
||
|
||
case '\\':
|
||
clearStateChar()
|
||
escaping = true
|
||
continue
|
||
|
||
// the various stateChar values
|
||
// for the "extglob" stuff.
|
||
case '?':
|
||
case '*':
|
||
case '+':
|
||
case '@':
|
||
case '!':
|
||
this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c)
|
||
|
||
// all of those are literals inside a class, except that
|
||
// the glob [!a] means [^a] in regexp
|
||
if (inClass) {
|
||
this.debug(' in class')
|
||
if (c === '!' && i === classStart + 1) c = '^'
|
||
re += c
|
||
continue
|
||
}
|
||
|
||
// if we already have a stateChar, then it means
|
||
// that there was something like ** or +? in there.
|
||
// Handle the stateChar, then proceed with this one.
|
||
self.debug('call clearStateChar %j', stateChar)
|
||
clearStateChar()
|
||
stateChar = c
|
||
// if extglob is disabled, then +(asdf|foo) isn't a thing.
|
||
// just clear the statechar *now*, rather than even diving into
|
||
// the patternList stuff.
|
||
if (options.noext) clearStateChar()
|
||
continue
|
||
|
||
case '(':
|
||
if (inClass) {
|
||
re += '('
|
||
continue
|
||
}
|
||
|
||
if (!stateChar) {
|
||
re += '\\('
|
||
continue
|
||
}
|
||
|
||
patternListStack.push({
|
||
type: stateChar,
|
||
start: i - 1,
|
||
reStart: re.length,
|
||
open: plTypes[stateChar].open,
|
||
close: plTypes[stateChar].close
|
||
})
|
||
// negation is (?:(?!js)[^/]*)
|
||
re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
|
||
this.debug('plType %j %j', stateChar, re)
|
||
stateChar = false
|
||
continue
|
||
|
||
case ')':
|
||
if (inClass || !patternListStack.length) {
|
||
re += '\\)'
|
||
continue
|
||
}
|
||
|
||
clearStateChar()
|
||
hasMagic = true
|
||
var pl = patternListStack.pop()
|
||
// negation is (?:(?!js)[^/]*)
|
||
// The others are (?:<pattern>)<type>
|
||
re += pl.close
|
||
if (pl.type === '!') {
|
||
negativeLists.push(pl)
|
||
}
|
||
pl.reEnd = re.length
|
||
continue
|
||
|
||
case '|':
|
||
if (inClass || !patternListStack.length || escaping) {
|
||
re += '\\|'
|
||
escaping = false
|
||
continue
|
||
}
|
||
|
||
clearStateChar()
|
||
re += '|'
|
||
continue
|
||
|
||
// these are mostly the same in regexp and glob
|
||
case '[':
|
||
// swallow any state-tracking char before the [
|
||
clearStateChar()
|
||
|
||
if (inClass) {
|
||
re += '\\' + c
|
||
continue
|
||
}
|
||
|
||
inClass = true
|
||
classStart = i
|
||
reClassStart = re.length
|
||
re += c
|
||
continue
|
||
|
||
case ']':
|
||
// a right bracket shall lose its special
|
||
// meaning and represent itself in
|
||
// a bracket expression if it occurs
|
||
// first in the list. -- POSIX.2 2.8.3.2
|
||
if (i === classStart + 1 || !inClass) {
|
||
re += '\\' + c
|
||
escaping = false
|
||
continue
|
||
}
|
||
|
||
// handle the case where we left a class open.
|
||
// "[z-a]" is valid, equivalent to "\[z-a\]"
|
||
if (inClass) {
|
||
// split where the last [ was, make sure we don't have
|
||
// an invalid re. if so, re-walk the contents of the
|
||
// would-be class to re-translate any characters that
|
||
// were passed through as-is
|
||
// TODO: It would probably be faster to determine this
|
||
// without a try/catch and a new RegExp, but it's tricky
|
||
// to do safely. For now, this is safe and works.
|
||
var cs = pattern.substring(classStart + 1, i)
|
||
try {
|
||
RegExp('[' + cs + ']')
|
||
} catch (er) {
|
||
// not a valid class!
|
||
var sp = this.parse(cs, SUBPARSE)
|
||
re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
|
||
hasMagic = hasMagic || sp[1]
|
||
inClass = false
|
||
continue
|
||
}
|
||
}
|
||
|
||
// finish up the class.
|
||
hasMagic = true
|
||
inClass = false
|
||
re += c
|
||
continue
|
||
|
||
default:
|
||
// swallow any state char that wasn't consumed
|
||
clearStateChar()
|
||
|
||
if (escaping) {
|
||
// no need
|
||
escaping = false
|
||
} else if (reSpecials[c]
|
||
&& !(c === '^' && inClass)) {
|
||
re += '\\'
|
||
}
|
||
|
||
re += c
|
||
|
||
} // switch
|
||
} // for
|
||
|
||
// handle the case where we left a class open.
|
||
// "[abc" is valid, equivalent to "\[abc"
|
||
if (inClass) {
|
||
// split where the last [ was, and escape it
|
||
// this is a huge pita. We now have to re-walk
|
||
// the contents of the would-be class to re-translate
|
||
// any characters that were passed through as-is
|
||
cs = pattern.substr(classStart + 1)
|
||
sp = this.parse(cs, SUBPARSE)
|
||
re = re.substr(0, reClassStart) + '\\[' + sp[0]
|
||
hasMagic = hasMagic || sp[1]
|
||
}
|
||
|
||
// handle the case where we had a +( thing at the *end*
|
||
// of the pattern.
|
||
// each pattern list stack adds 3 chars, and we need to go through
|
||
// and escape any | chars that were passed through as-is for the regexp.
|
||
// Go through and escape them, taking care not to double-escape any
|
||
// | chars that were already escaped.
|
||
for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
|
||
var tail = re.slice(pl.reStart + pl.open.length)
|
||
this.debug('setting tail', re, pl)
|
||
// maybe some even number of \, then maybe 1 \, followed by a |
|
||
tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) {
|
||
if (!$2) {
|
||
// the | isn't already escaped, so escape it.
|
||
$2 = '\\'
|
||
}
|
||
|
||
// need to escape all those slashes *again*, without escaping the
|
||
// one that we need for escaping the | character. As it works out,
|
||
// escaping an even number of slashes can be done by simply repeating
|
||
// it exactly after itself. That's why this trick works.
|
||
//
|
||
// I am sorry that you have to see this.
|
||
return $1 + $1 + $2 + '|'
|
||
})
|
||
|
||
this.debug('tail=%j\n %s', tail, tail, pl, re)
|
||
var t = pl.type === '*' ? star
|
||
: pl.type === '?' ? qmark
|
||
: '\\' + pl.type
|
||
|
||
hasMagic = true
|
||
re = re.slice(0, pl.reStart) + t + '\\(' + tail
|
||
}
|
||
|
||
// handle trailing things that only matter at the very end.
|
||
clearStateChar()
|
||
if (escaping) {
|
||
// trailing \\
|
||
re += '\\\\'
|
||
}
|
||
|
||
// only need to apply the nodot start if the re starts with
|
||
// something that could conceivably capture a dot
|
||
var addPatternStart = false
|
||
switch (re.charAt(0)) {
|
||
case '.':
|
||
case '[':
|
||
case '(': addPatternStart = true
|
||
}
|
||
|
||
// Hack to work around lack of negative lookbehind in JS
|
||
// A pattern like: *.!(x).!(y|z) needs to ensure that a name
|
||
// like 'a.xyz.yz' doesn't match. So, the first negative
|
||
// lookahead, has to look ALL the way ahead, to the end of
|
||
// the pattern.
|
||
for (var n = negativeLists.length - 1; n > -1; n--) {
|
||
var nl = negativeLists[n]
|
||
|
||
var nlBefore = re.slice(0, nl.reStart)
|
||
var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)
|
||
var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)
|
||
var nlAfter = re.slice(nl.reEnd)
|
||
|
||
nlLast += nlAfter
|
||
|
||
// Handle nested stuff like *(*.js|!(*.json)), where open parens
|
||
// mean that we should *not* include the ) in the bit that is considered
|
||
// "after" the negated section.
|
||
var openParensBefore = nlBefore.split('(').length - 1
|
||
var cleanAfter = nlAfter
|
||
for (i = 0; i < openParensBefore; i++) {
|
||
cleanAfter = cleanAfter.replace(/\)[+*?]?/, '')
|
||
}
|
||
nlAfter = cleanAfter
|
||
|
||
var dollar = ''
|
||
if (nlAfter === '' && isSub !== SUBPARSE) {
|
||
dollar = '$'
|
||
}
|
||
var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
|
||
re = newRe
|
||
}
|
||
|
||
// if the re is not "" at this point, then we need to make sure
|
||
// it doesn't match against an empty path part.
|
||
// Otherwise a/* will match a/, which it should not.
|
||
if (re !== '' && hasMagic) {
|
||
re = '(?=.)' + re
|
||
}
|
||
|
||
if (addPatternStart) {
|
||
re = patternStart + re
|
||
}
|
||
|
||
// parsing just a piece of a larger pattern.
|
||
if (isSub === SUBPARSE) {
|
||
return [re, hasMagic]
|
||
}
|
||
|
||
// skip the regexp for non-magical patterns
|
||
// unescape anything in it, though, so that it'll be
|
||
// an exact match against a file etc.
|
||
if (!hasMagic) {
|
||
return globUnescape(pattern)
|
||
}
|
||
|
||
var flags = options.nocase ? 'i' : ''
|
||
try {
|
||
var regExp = new RegExp('^' + re + '$', flags)
|
||
} catch (er) {
|
||
// If it was an invalid regular expression, then it can't match
|
||
// anything. This trick looks for a character after the end of
|
||
// the string, which is of course impossible, except in multi-line
|
||
// mode, but it's not a /m regex.
|
||
return new RegExp('$.')
|
||
}
|
||
|
||
regExp._glob = pattern
|
||
regExp._src = re
|
||
|
||
return regExp
|
||
}
|
||
|
||
minimatch.makeRe = function (pattern, options) {
|
||
return new Minimatch(pattern, options || {}).makeRe()
|
||
}
|
||
|
||
Minimatch.prototype.makeRe = makeRe
|
||
function makeRe () {
|
||
if (this.regexp || this.regexp === false) return this.regexp
|
||
|
||
// at this point, this.set is a 2d array of partial
|
||
// pattern strings, or "**".
|
||
//
|
||
// It's better to use .match(). This function shouldn't
|
||
// be used, really, but it's pretty convenient sometimes,
|
||
// when you just want to work with a regex.
|
||
var set = this.set
|
||
|
||
if (!set.length) {
|
||
this.regexp = false
|
||
return this.regexp
|
||
}
|
||
var options = this.options
|
||
|
||
var twoStar = options.noglobstar ? star
|
||
: options.dot ? twoStarDot
|
||
: twoStarNoDot
|
||
var flags = options.nocase ? 'i' : ''
|
||
|
||
var re = set.map(function (pattern) {
|
||
return pattern.map(function (p) {
|
||
return (p === GLOBSTAR) ? twoStar
|
||
: (typeof p === 'string') ? regExpEscape(p)
|
||
: p._src
|
||
}).join('\\\/')
|
||
}).join('|')
|
||
|
||
// must match entire pattern
|
||
// ending in a * or ** will make it less strict.
|
||
re = '^(?:' + re + ')$'
|
||
|
||
// can match anything, as long as it's not this.
|
||
if (this.negate) re = '^(?!' + re + ').*$'
|
||
|
||
try {
|
||
this.regexp = new RegExp(re, flags)
|
||
} catch (ex) {
|
||
this.regexp = false
|
||
}
|
||
return this.regexp
|
||
}
|
||
|
||
minimatch.match = function (list, pattern, options) {
|
||
options = options || {}
|
||
var mm = new Minimatch(pattern, options)
|
||
list = list.filter(function (f) {
|
||
return mm.match(f)
|
||
})
|
||
if (mm.options.nonull && !list.length) {
|
||
list.push(pattern)
|
||
}
|
||
return list
|
||
}
|
||
|
||
Minimatch.prototype.match = match
|
||
function match (f, partial) {
|
||
this.debug('match', f, this.pattern)
|
||
// short-circuit in the case of busted things.
|
||
// comments, etc.
|
||
if (this.comment) return false
|
||
if (this.empty) return f === ''
|
||
|
||
if (f === '/' && partial) return true
|
||
|
||
var options = this.options
|
||
|
||
// windows: need to use /, not \
|
||
if (path.sep !== '/') {
|
||
f = f.split(path.sep).join('/')
|
||
}
|
||
|
||
// treat the test path as a set of pathparts.
|
||
f = f.split(slashSplit)
|
||
this.debug(this.pattern, 'split', f)
|
||
|
||
// just ONE of the pattern sets in this.set needs to match
|
||
// in order for it to be valid. If negating, then just one
|
||
// match means that we have failed.
|
||
// Either way, return on the first hit.
|
||
|
||
var set = this.set
|
||
this.debug(this.pattern, 'set', set)
|
||
|
||
// Find the basename of the path by looking for the last non-empty segment
|
||
var filename
|
||
var i
|
||
for (i = f.length - 1; i >= 0; i--) {
|
||
filename = f[i]
|
||
if (filename) break
|
||
}
|
||
|
||
for (i = 0; i < set.length; i++) {
|
||
var pattern = set[i]
|
||
var file = f
|
||
if (options.matchBase && pattern.length === 1) {
|
||
file = [filename]
|
||
}
|
||
var hit = this.matchOne(file, pattern, partial)
|
||
if (hit) {
|
||
if (options.flipNegate) return true
|
||
return !this.negate
|
||
}
|
||
}
|
||
|
||
// didn't get any hits. this is success if it's a negative
|
||
// pattern, failure otherwise.
|
||
if (options.flipNegate) return false
|
||
return this.negate
|
||
}
|
||
|
||
// set partial to true to test if, for example,
|
||
// "/a/b" matches the start of "/*/b/*/d"
|
||
// Partial means, if you run out of file before you run
|
||
// out of pattern, then that's fine, as long as all
|
||
// the parts match.
|
||
Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
||
var options = this.options
|
||
|
||
this.debug('matchOne',
|
||
{ 'this': this, file: file, pattern: pattern })
|
||
|
||
this.debug('matchOne', file.length, pattern.length)
|
||
|
||
for (var fi = 0,
|
||
pi = 0,
|
||
fl = file.length,
|
||
pl = pattern.length
|
||
; (fi < fl) && (pi < pl)
|
||
; fi++, pi++) {
|
||
this.debug('matchOne loop')
|
||
var p = pattern[pi]
|
||
var f = file[fi]
|
||
|
||
this.debug(pattern, p, f)
|
||
|
||
// should be impossible.
|
||
// some invalid regexp stuff in the set.
|
||
if (p === false) return false
|
||
|
||
if (p === GLOBSTAR) {
|
||
this.debug('GLOBSTAR', [pattern, p, f])
|
||
|
||
// "**"
|
||
// a/**/b/**/c would match the following:
|
||
// a/b/x/y/z/c
|
||
// a/x/y/z/b/c
|
||
// a/b/x/b/x/c
|
||
// a/b/c
|
||
// To do this, take the rest of the pattern after
|
||
// the **, and see if it would match the file remainder.
|
||
// If so, return success.
|
||
// If not, the ** "swallows" a segment, and try again.
|
||
// This is recursively awful.
|
||
//
|
||
// a/**/b/**/c matching a/b/x/y/z/c
|
||
// - a matches a
|
||
// - doublestar
|
||
// - matchOne(b/x/y/z/c, b/**/c)
|
||
// - b matches b
|
||
// - doublestar
|
||
// - matchOne(x/y/z/c, c) -> no
|
||
// - matchOne(y/z/c, c) -> no
|
||
// - matchOne(z/c, c) -> no
|
||
// - matchOne(c, c) yes, hit
|
||
var fr = fi
|
||
var pr = pi + 1
|
||
if (pr === pl) {
|
||
this.debug('** at the end')
|
||
// a ** at the end will just swallow the rest.
|
||
// We have found a match.
|
||
// however, it will not swallow /.x, unless
|
||
// options.dot is set.
|
||
// . and .. are *never* matched by **, for explosively
|
||
// exponential reasons.
|
||
for (; fi < fl; fi++) {
|
||
if (file[fi] === '.' || file[fi] === '..' ||
|
||
(!options.dot && file[fi].charAt(0) === '.')) return false
|
||
}
|
||
return true
|
||
}
|
||
|
||
// ok, let's see if we can swallow whatever we can.
|
||
while (fr < fl) {
|
||
var swallowee = file[fr]
|
||
|
||
this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)
|
||
|
||
// XXX remove this slice. Just pass the start index.
|
||
if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
|
||
this.debug('globstar found match!', fr, fl, swallowee)
|
||
// found a match.
|
||
return true
|
||
} else {
|
||
// can't swallow "." or ".." ever.
|
||
// can only swallow ".foo" when explicitly asked.
|
||
if (swallowee === '.' || swallowee === '..' ||
|
||
(!options.dot && swallowee.charAt(0) === '.')) {
|
||
this.debug('dot detected!', file, fr, pattern, pr)
|
||
break
|
||
}
|
||
|
||
// ** swallows a segment, and continue.
|
||
this.debug('globstar swallow a segment, and continue')
|
||
fr++
|
||
}
|
||
}
|
||
|
||
// no match was found.
|
||
// However, in partial mode, we can't say this is necessarily over.
|
||
// If there's more *pattern* left, then
|
||
if (partial) {
|
||
// ran out of file
|
||
this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
|
||
if (fr === fl) return true
|
||
}
|
||
return false
|
||
}
|
||
|
||
// something other than **
|
||
// non-magic patterns just have to match exactly
|
||
// patterns with magic have been turned into regexps.
|
||
var hit
|
||
if (typeof p === 'string') {
|
||
if (options.nocase) {
|
||
hit = f.toLowerCase() === p.toLowerCase()
|
||
} else {
|
||
hit = f === p
|
||
}
|
||
this.debug('string match', p, f, hit)
|
||
} else {
|
||
hit = f.match(p)
|
||
this.debug('pattern match', p, f, hit)
|
||
}
|
||
|
||
if (!hit) return false
|
||
}
|
||
|
||
  // Note: ending in / means that we'll get a final ""
  // at the end of the pattern. This can only match a
  // corresponding "" at the end of the file.
  // If the file ends in /, then it can only match
  // a pattern that ends in /, unless the pattern just
  // doesn't have any more for it. But, a/b/ should *not*
  // match "a/b/*", even though "" matches against the
  // [^/]*? pattern, except in partial mode, where it might
  // simply not be reached yet.
  // However, a/b/ should still satisfy a/*
|
||
// now either we fell off the end of the pattern, or we're done.
|
||
if (fi === fl && pi === pl) {
|
||
// ran out of pattern and filename at the same time.
|
||
// an exact hit!
|
||
return true
|
||
} else if (fi === fl) {
|
||
// ran out of file, but still had pattern left.
|
||
// this is ok if we're doing the match as part of
|
||
// a glob fs traversal.
|
||
return partial
|
||
} else if (pi === pl) {
|
||
// ran out of pattern, still have file left.
|
||
// this is only acceptable if we're on the very last
|
||
// empty segment of a file with a trailing slash.
|
||
// a/* should match a/b/
|
||
var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')
|
||
return emptyFileEnd
|
||
}
|
||
|
||
// should be unreachable.
|
||
throw new Error('wtf?')
|
||
}
|
||
|
||
// replace stuff like \* with *
function globUnescape (s) {
  return s.replace(/\\(.)/g, '$1')
}

function regExpEscape (s) {
  return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
}
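// Usage sketch (added note, not part of the upstream minimatch source):
// typical entry points of this module, with default options.
//
//   minimatch('bar.foo', '*.foo')                // => true
//   minimatch('.hidden', '*')                    // => false (dotfiles need options.dot)
//   minimatch.match(['a.js', 'b.css'], '*.js')   // => ['a.js']
//   new Minimatch('!*.js').match('a.css')        // => true  (negated pattern)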
/***/ }),
|
||
|
||
/***/ 7049:
|
||
/***/ ((module) => {
|
||
|
||
"use strict";
|
||
/**
|
||
* @license nested-property https://github.com/cosmosio/nested-property
|
||
*
|
||
* The MIT License (MIT)
|
||
*
|
||
* Copyright (c) 2014-2020 Olivier Scherrer <pode.fr@gmail.com>
|
||
*/
|
||
|
||
|
||
function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
|
||
|
||
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
|
||
|
||
function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
|
||
|
||
function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
|
||
|
||
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
|
||
|
||
function _wrapNativeSuper(Class) { var _cache = typeof Map === "function" ? new Map() : undefined; _wrapNativeSuper = function _wrapNativeSuper(Class) { if (Class === null || !_isNativeFunction(Class)) return Class; if (typeof Class !== "function") { throw new TypeError("Super expression must either be null or a function"); } if (typeof _cache !== "undefined") { if (_cache.has(Class)) return _cache.get(Class); _cache.set(Class, Wrapper); } function Wrapper() { return _construct(Class, arguments, _getPrototypeOf(this).constructor); } Wrapper.prototype = Object.create(Class.prototype, { constructor: { value: Wrapper, enumerable: false, writable: true, configurable: true } }); return _setPrototypeOf(Wrapper, Class); }; return _wrapNativeSuper(Class); }
|
||
|
||
function _construct(Parent, args, Class) { if (_isNativeReflectConstruct()) { _construct = Reflect.construct; } else { _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) _setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); }
|
||
|
||
function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }
|
||
|
||
function _isNativeFunction(fn) { return Function.toString.call(fn).indexOf("[native code]") !== -1; }
|
||
|
||
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
|
||
|
||
function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
|
||
|
||
var ARRAY_WILDCARD = "+";
|
||
var PATH_DELIMITER = ".";
|
||
|
||
var ObjectPrototypeMutationError = /*#__PURE__*/function (_Error) {
|
||
_inherits(ObjectPrototypeMutationError, _Error);
|
||
|
||
function ObjectPrototypeMutationError(params) {
|
||
var _this;
|
||
|
||
_classCallCheck(this, ObjectPrototypeMutationError);
|
||
|
||
_this = _possibleConstructorReturn(this, _getPrototypeOf(ObjectPrototypeMutationError).call(this, params));
|
||
_this.name = "ObjectPrototypeMutationError";
|
||
return _this;
|
||
}
|
||
|
||
return ObjectPrototypeMutationError;
|
||
}(_wrapNativeSuper(Error));
|
||
|
||
module.exports = {
  set: setNestedProperty,
  get: getNestedProperty,
  has: hasNestedProperty,
  hasOwn: function hasOwn(object, property, options) {
    return this.has(object, property, options || {
      own: true
    });
  },
  isIn: isInNestedProperty,
  ObjectPrototypeMutationError: ObjectPrototypeMutationError
};
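// Usage sketch (added note, not part of the upstream nested-property source):
// the exported helpers read and write dot-delimited paths, with "+" acting as
// an array wildcard (see the JSDoc blocks below).
//
//   get({ a: { b: 5 } }, 'a.b');    // => 5
//   set({}, 'a.0.b', 1);            // creates { a: [ { b: 1 } ] } and returns 1
//   has({ a: { b: 5 } }, 'a.b');    // => true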
/**
 * Get the property of an object nested in one or more objects or array
 * Given an object such as a.b.c.d = 5, getNestedProperty(a, "b.c.d") will return 5.
 * It also works through arrays. Given a nested array such as a[0].b = 5, getNestedProperty(a, "0.b") will return 5.
 * For accessing nested properties through all items in an array, you may use the array wildcard "+".
 * For instance, getNestedProperty([{a:1}, {a:2}, {a:3}], "+.a") will return [1, 2, 3]
 * @param {Object} object the object to get the property from
 * @param {String} property the path to the property as a string
 * @returns the object or the property value if found
 */
|
||
function getNestedProperty(object, property) {
|
||
if (_typeof(object) != "object" || object === null) {
|
||
return object;
|
||
}
|
||
|
||
if (typeof property == "undefined") {
|
||
return object;
|
||
}
|
||
|
||
if (typeof property == "number") {
|
||
return object[property];
|
||
}
|
||
|
||
try {
|
||
return traverse(object, property, function _getNestedProperty(currentObject, currentProperty) {
|
||
return currentObject[currentProperty];
|
||
});
|
||
} catch (err) {
|
||
return object;
|
||
}
|
||
}
|
||
/**
|
||
* Tell if a nested object has a given property (or array a given index)
|
||
* given an object such as a.b.c.d = 5, hasNestedProperty(a, "b.c.d") will return true.
|
||
* It also returns true if the property is in the prototype chain.
|
||
* @param {Object} object the object to get the property from
|
||
* @param {String} property the path to the property as a string
|
||
* @param {Object} options:
|
||
* - own: set to reject properties from the prototype
|
||
* @returns true if has (property in object), false otherwise
|
||
*/
|
||
|
||
|
||
function hasNestedProperty(object, property) {
|
||
var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
|
||
|
||
if (_typeof(object) != "object" || object === null) {
|
||
return false;
|
||
}
|
||
|
||
if (typeof property == "undefined") {
|
||
return false;
|
||
}
|
||
|
||
if (typeof property == "number") {
|
||
return property in object;
|
||
}
|
||
|
||
try {
|
||
var has = false;
|
||
traverse(object, property, function _hasNestedProperty(currentObject, currentProperty, segments, index) {
|
||
if (isLastSegment(segments, index)) {
|
||
if (options.own) {
|
||
has = currentObject.hasOwnProperty(currentProperty);
|
||
} else {
|
||
has = currentProperty in currentObject;
|
||
}
|
||
} else {
|
||
return currentObject && currentObject[currentProperty];
|
||
}
|
||
});
|
||
return has;
|
||
} catch (err) {
|
||
return false;
|
||
}
|
||
}
|
||
/**
|
||
* Set the property of an object nested in one or more objects
|
||
* If the property doesn't exist, it gets created.
|
||
* @param {Object} object
|
||
* @param {String} property
|
||
* @param value the value to set
|
||
* @returns object if no assignment was made or the value if the assignment was made
|
||
*/
|
||
|
||
|
||
function setNestedProperty(object, property, value) {
|
||
if (_typeof(object) != "object" || object === null) {
|
||
return object;
|
||
}
|
||
|
||
if (typeof property == "undefined") {
|
||
return object;
|
||
}
|
||
|
||
if (typeof property == "number") {
|
||
object[property] = value;
|
||
return object[property];
|
||
}
|
||
|
||
try {
|
||
return traverse(object, property, function _setNestedProperty(currentObject, currentProperty, segments, index) {
|
||
if (currentObject === Reflect.getPrototypeOf({})) {
|
||
throw new ObjectPrototypeMutationError("Attempting to mutate Object.prototype");
|
||
}
|
||
|
||
if (!currentObject[currentProperty]) {
|
||
var nextPropIsNumber = Number.isInteger(Number(segments[index + 1]));
|
||
var nextPropIsArrayWildcard = segments[index + 1] === ARRAY_WILDCARD;
|
||
|
||
if (nextPropIsNumber || nextPropIsArrayWildcard) {
|
||
currentObject[currentProperty] = [];
|
||
} else {
|
||
currentObject[currentProperty] = {};
|
||
}
|
||
}
|
||
|
||
if (isLastSegment(segments, index)) {
|
||
currentObject[currentProperty] = value;
|
||
}
|
||
|
||
return currentObject[currentProperty];
|
||
});
|
||
} catch (err) {
|
||
if (err instanceof ObjectPrototypeMutationError) {
|
||
// rethrow
|
||
throw err;
|
||
} else {
|
||
return object;
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Tell if an object is on the path to a nested property
|
||
* If the object is on the path, and the path exists, it returns true, and false otherwise.
|
||
* @param {Object} object to get the nested property from
|
||
* @param {String} property name of the nested property
|
||
* @param {Object} objectInPath the object to check
|
||
* @param {Object} options:
|
||
* - validPath: return false if the path is invalid, even if the object is in the path
|
||
* @returns {boolean} true if the object is on the path
|
||
*/
|
||
|
||
|
||
function isInNestedProperty(object, property, objectInPath) {
|
||
var options = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};
|
||
|
||
if (_typeof(object) != "object" || object === null) {
|
||
return false;
|
||
}
|
||
|
||
if (typeof property == "undefined") {
|
||
return false;
|
||
}
|
||
|
||
try {
|
||
var isIn = false,
|
||
pathExists = false;
|
||
traverse(object, property, function _isInNestedProperty(currentObject, currentProperty, segments, index) {
|
||
isIn = isIn || currentObject === objectInPath || !!currentObject && currentObject[currentProperty] === objectInPath;
|
||
pathExists = isLastSegment(segments, index) && _typeof(currentObject) === "object" && currentProperty in currentObject;
|
||
return currentObject && currentObject[currentProperty];
|
||
});
|
||
|
||
if (options.validPath) {
|
||
return isIn && pathExists;
|
||
} else {
|
||
return isIn;
|
||
}
|
||
} catch (err) {
|
||
return false;
|
||
}
|
||
}
|
||
|
||
function traverse(object, path) {
|
||
var callback = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : function () {};
|
||
var segments = path.split(PATH_DELIMITER);
|
||
var length = segments.length;
|
||
|
||
var _loop = function _loop(idx) {
|
||
var currentSegment = segments[idx];
|
||
|
||
if (!object) {
|
||
return {
|
||
v: void 0
|
||
};
|
||
}
|
||
|
||
if (currentSegment === ARRAY_WILDCARD) {
|
||
if (Array.isArray(object)) {
|
||
return {
|
||
v: object.map(function (value, index) {
|
||
var remainingSegments = segments.slice(idx + 1);
|
||
|
||
if (remainingSegments.length > 0) {
|
||
return traverse(value, remainingSegments.join(PATH_DELIMITER), callback);
|
||
} else {
|
||
return callback(object, index, segments, idx);
|
||
}
|
||
})
|
||
};
|
||
} else {
|
||
var pathToHere = segments.slice(0, idx).join(PATH_DELIMITER);
|
||
throw new Error("Object at wildcard (".concat(pathToHere, ") is not an array"));
|
||
}
|
||
} else {
|
||
object = callback(object, currentSegment, segments, idx);
|
||
}
|
||
};
|
||
|
||
for (var idx = 0; idx < length; idx++) {
|
||
var _ret = _loop(idx);
|
||
|
||
if (_typeof(_ret) === "object") return _ret.v;
|
||
}
|
||
|
||
return object;
|
||
}
|
||
|
||
function isLastSegment(segments, index) {
|
||
return segments.length === index + 1;
|
||
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 5992:
|
||
/***/ ((module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
|
||
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
|
||
|
||
var Stream = _interopDefault(__nccwpck_require__(2413));
|
||
var http = _interopDefault(__nccwpck_require__(8605));
|
||
var Url = _interopDefault(__nccwpck_require__(8835));
|
||
var https = _interopDefault(__nccwpck_require__(7211));
|
||
var zlib = _interopDefault(__nccwpck_require__(8761));
|
||
|
||
// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
|
||
|
||
// fix for "Readable" isn't a named export issue
|
||
const Readable = Stream.Readable;
|
||
|
||
const BUFFER = Symbol('buffer');
|
||
const TYPE = Symbol('type');
|
||
|
||
class Blob {
|
||
constructor() {
|
||
this[TYPE] = '';
|
||
|
||
const blobParts = arguments[0];
|
||
const options = arguments[1];
|
||
|
||
const buffers = [];
|
||
let size = 0;
|
||
|
||
if (blobParts) {
|
||
const a = blobParts;
|
||
const length = Number(a.length);
|
||
for (let i = 0; i < length; i++) {
|
||
const element = a[i];
|
||
let buffer;
|
||
if (element instanceof Buffer) {
|
||
buffer = element;
|
||
} else if (ArrayBuffer.isView(element)) {
|
||
buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
|
||
} else if (element instanceof ArrayBuffer) {
|
||
buffer = Buffer.from(element);
|
||
} else if (element instanceof Blob) {
|
||
buffer = element[BUFFER];
|
||
} else {
|
||
buffer = Buffer.from(typeof element === 'string' ? element : String(element));
|
||
}
|
||
size += buffer.length;
|
||
buffers.push(buffer);
|
||
}
|
||
}
|
||
|
||
this[BUFFER] = Buffer.concat(buffers);
|
||
|
||
let type = options && options.type !== undefined && String(options.type).toLowerCase();
|
||
if (type && !/[^\u0020-\u007E]/.test(type)) {
|
||
this[TYPE] = type;
|
||
}
|
||
}
|
||
get size() {
|
||
return this[BUFFER].length;
|
||
}
|
||
get type() {
|
||
return this[TYPE];
|
||
}
|
||
text() {
|
||
return Promise.resolve(this[BUFFER].toString());
|
||
}
|
||
arrayBuffer() {
|
||
const buf = this[BUFFER];
|
||
const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
|
||
return Promise.resolve(ab);
|
||
}
|
||
stream() {
|
||
const readable = new Readable();
|
||
readable._read = function () {};
|
||
readable.push(this[BUFFER]);
|
||
readable.push(null);
|
||
return readable;
|
||
}
|
||
toString() {
|
||
return '[object Blob]';
|
||
}
|
||
slice() {
|
||
const size = this.size;
|
||
|
||
const start = arguments[0];
|
||
const end = arguments[1];
|
||
let relativeStart, relativeEnd;
|
||
if (start === undefined) {
|
||
relativeStart = 0;
|
||
} else if (start < 0) {
|
||
relativeStart = Math.max(size + start, 0);
|
||
} else {
|
||
relativeStart = Math.min(start, size);
|
||
}
|
||
if (end === undefined) {
|
||
relativeEnd = size;
|
||
} else if (end < 0) {
|
||
relativeEnd = Math.max(size + end, 0);
|
||
} else {
|
||
relativeEnd = Math.min(end, size);
|
||
}
|
||
const span = Math.max(relativeEnd - relativeStart, 0);
|
||
|
||
const buffer = this[BUFFER];
|
||
const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
|
||
const blob = new Blob([], { type: arguments[2] });
|
||
blob[BUFFER] = slicedBuffer;
|
||
return blob;
|
||
}
|
||
}
|
||
|
||
Object.defineProperties(Blob.prototype, {
|
||
size: { enumerable: true },
|
||
type: { enumerable: true },
|
||
slice: { enumerable: true }
|
||
});
|
||
|
||
Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
|
||
value: 'Blob',
|
||
writable: false,
|
||
enumerable: false,
|
||
configurable: true
|
||
});
|
||
|
||
/**
|
||
* fetch-error.js
|
||
*
|
||
* FetchError interface for operational errors
|
||
*/
|
||
|
||
/**
|
||
* Create FetchError instance
|
||
*
|
||
* @param String message Error message for human
|
||
* @param String type Error type for machine
|
||
* @param String systemError For Node.js system error
|
||
* @return FetchError
|
||
*/
|
||
function FetchError(message, type, systemError) {
|
||
Error.call(this, message);
|
||
|
||
this.message = message;
|
||
this.type = type;
|
||
|
||
// when err.type is `system`, err.code contains system error code
|
||
if (systemError) {
|
||
this.code = this.errno = systemError.code;
|
||
}
|
||
|
||
// hide custom error implementation details from end-users
|
||
Error.captureStackTrace(this, this.constructor);
|
||
}
|
||
|
||
FetchError.prototype = Object.create(Error.prototype);
|
||
FetchError.prototype.constructor = FetchError;
|
||
FetchError.prototype.name = 'FetchError';
|
||
|
||
let convert;
|
||
try {
|
||
convert = __nccwpck_require__(974).convert;
|
||
} catch (e) {}
|
||
|
||
const INTERNALS = Symbol('Body internals');
|
||
|
||
// fix an issue where "PassThrough" isn't a named export for node <10
|
||
const PassThrough = Stream.PassThrough;
|
||
|
||
/**
|
||
* Body mixin
|
||
*
|
||
* Ref: https://fetch.spec.whatwg.org/#body
|
||
*
|
||
* @param Stream body Readable stream
|
||
* @param Object opts Response options
|
||
* @return Void
|
||
*/
|
||
function Body(body) {
|
||
var _this = this;
|
||
|
||
var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
|
||
_ref$size = _ref.size;
|
||
|
||
let size = _ref$size === undefined ? 0 : _ref$size;
|
||
var _ref$timeout = _ref.timeout;
|
||
let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;
|
||
|
||
if (body == null) {
|
||
// body is undefined or null
|
||
body = null;
|
||
} else if (isURLSearchParams(body)) {
|
||
// body is a URLSearchParams
|
||
body = Buffer.from(body.toString());
|
||
} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
|
||
// body is ArrayBuffer
|
||
body = Buffer.from(body);
|
||
} else if (ArrayBuffer.isView(body)) {
|
||
// body is ArrayBufferView
|
||
body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
|
||
} else if (body instanceof Stream) ; else {
|
||
// none of the above
|
||
// coerce to string then buffer
|
||
body = Buffer.from(String(body));
|
||
}
|
||
this[INTERNALS] = {
|
||
body,
|
||
disturbed: false,
|
||
error: null
|
||
};
|
||
this.size = size;
|
||
this.timeout = timeout;
|
||
|
||
if (body instanceof Stream) {
|
||
body.on('error', function (err) {
|
||
const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
|
||
_this[INTERNALS].error = error;
|
||
});
|
||
}
|
||
}
|
||
|
||
Body.prototype = {
|
||
get body() {
|
||
return this[INTERNALS].body;
|
||
},
|
||
|
||
get bodyUsed() {
|
||
return this[INTERNALS].disturbed;
|
||
},
|
||
|
||
/**
|
||
* Decode response as ArrayBuffer
|
||
*
|
||
* @return Promise
|
||
*/
|
||
arrayBuffer() {
|
||
return consumeBody.call(this).then(function (buf) {
|
||
return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
|
||
});
|
||
},
|
||
|
||
/**
|
||
* Return raw response as Blob
|
||
*
|
||
* @return Promise
|
||
*/
|
||
blob() {
|
||
let ct = this.headers && this.headers.get('content-type') || '';
|
||
return consumeBody.call(this).then(function (buf) {
|
||
return Object.assign(
|
||
// Prevent copying
|
||
new Blob([], {
|
||
type: ct.toLowerCase()
|
||
}), {
|
||
[BUFFER]: buf
|
||
});
|
||
});
|
||
},
|
||
|
||
/**
|
||
* Decode response as json
|
||
*
|
||
* @return Promise
|
||
*/
|
||
json() {
|
||
var _this2 = this;
|
||
|
||
return consumeBody.call(this).then(function (buffer) {
|
||
try {
|
||
return JSON.parse(buffer.toString());
|
||
} catch (err) {
|
||
return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
|
||
}
|
||
});
|
||
},
|
||
|
||
/**
|
||
* Decode response as text
|
||
*
|
||
* @return Promise
|
||
*/
|
||
text() {
|
||
return consumeBody.call(this).then(function (buffer) {
|
||
return buffer.toString();
|
||
});
|
||
},
|
||
|
||
/**
|
||
* Decode response as buffer (non-spec api)
|
||
*
|
||
* @return Promise
|
||
*/
|
||
buffer() {
|
||
return consumeBody.call(this);
|
||
},
|
||
|
||
/**
|
||
* Decode response as text, while automatically detecting the encoding and
|
||
* trying to decode to UTF-8 (non-spec api)
|
||
*
|
||
* @return Promise
|
||
*/
|
||
textConverted() {
|
||
var _this3 = this;
|
||
|
||
return consumeBody.call(this).then(function (buffer) {
|
||
return convertBody(buffer, _this3.headers);
|
||
});
|
||
}
|
||
};
|
||
|
||
// In browsers, all properties are enumerable.
|
||
Object.defineProperties(Body.prototype, {
|
||
body: { enumerable: true },
|
||
bodyUsed: { enumerable: true },
|
||
arrayBuffer: { enumerable: true },
|
||
blob: { enumerable: true },
|
||
json: { enumerable: true },
|
||
text: { enumerable: true }
|
||
});
|
||
|
||
Body.mixIn = function (proto) {
|
||
for (const name of Object.getOwnPropertyNames(Body.prototype)) {
|
||
// istanbul ignore else: future proof
|
||
if (!(name in proto)) {
|
||
const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
|
||
Object.defineProperty(proto, name, desc);
|
||
}
|
||
}
|
||
};
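// --- Illustrative sketch (kept as a comment; not executed by this bundle) ---
// Body.mixIn above copies the consumption helpers (json, text, buffer, ...)
// onto another prototype. A minimal, hypothetical example of the pattern:
//
//   function MyMessage(body) {
//     Body.call(this, body);         // initialise [INTERNALS], size, timeout
//   }
//   Body.mixIn(MyMessage.prototype); // instances now have .text(), .json(), .buffer(), ...
//
// This is the same pattern the Request and Response classes further down use to reuse Body.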
|
||
|
||
/**
|
||
* Consume and convert an entire Body to a Buffer.
|
||
*
|
||
* Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
|
||
*
|
||
* @return Promise
|
||
*/
|
||
function consumeBody() {
|
||
var _this4 = this;
|
||
|
||
if (this[INTERNALS].disturbed) {
|
||
return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
|
||
}
|
||
|
||
this[INTERNALS].disturbed = true;
|
||
|
||
if (this[INTERNALS].error) {
|
||
return Body.Promise.reject(this[INTERNALS].error);
|
||
}
|
||
|
||
let body = this.body;
|
||
|
||
// body is null
|
||
if (body === null) {
|
||
return Body.Promise.resolve(Buffer.alloc(0));
|
||
}
|
||
|
||
// body is blob
|
||
if (isBlob(body)) {
|
||
body = body.stream();
|
||
}
|
||
|
||
// body is buffer
|
||
if (Buffer.isBuffer(body)) {
|
||
return Body.Promise.resolve(body);
|
||
}
|
||
|
||
// istanbul ignore if: should never happen
|
||
if (!(body instanceof Stream)) {
|
||
return Body.Promise.resolve(Buffer.alloc(0));
|
||
}
|
||
|
||
// body is stream
|
||
// get ready to actually consume the body
|
||
let accum = [];
|
||
let accumBytes = 0;
|
||
let abort = false;
|
||
|
||
return new Body.Promise(function (resolve, reject) {
|
||
let resTimeout;
|
||
|
||
// allow timeout on slow response body
|
||
if (_this4.timeout) {
|
||
resTimeout = setTimeout(function () {
|
||
abort = true;
|
||
reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
|
||
}, _this4.timeout);
|
||
}
|
||
|
||
// handle stream errors
|
||
body.on('error', function (err) {
|
||
if (err.name === 'AbortError') {
|
||
// if the request was aborted, reject with this Error
|
||
abort = true;
|
||
reject(err);
|
||
} else {
|
||
// other errors, such as incorrect content-encoding
|
||
reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
|
||
}
|
||
});
|
||
|
||
body.on('data', function (chunk) {
|
||
if (abort || chunk === null) {
|
||
return;
|
||
}
|
||
|
||
if (_this4.size && accumBytes + chunk.length > _this4.size) {
|
||
abort = true;
|
||
reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
|
||
return;
|
||
}
|
||
|
||
accumBytes += chunk.length;
|
||
accum.push(chunk);
|
||
});
|
||
|
||
body.on('end', function () {
|
||
if (abort) {
|
||
return;
|
||
}
|
||
|
||
clearTimeout(resTimeout);
|
||
|
||
try {
|
||
resolve(Buffer.concat(accum, accumBytes));
|
||
} catch (err) {
|
||
// handle streams that have accumulated too much data (issue #414)
|
||
reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
|
||
}
|
||
});
|
||
});
|
||
}
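// --- Illustrative sketch (kept as a comment; not executed by this bundle) ---
// consumeBody enforces single consumption and the optional size limit. Assuming a
// Response built directly (values here are examples only), the observable behaviour is:
//
//   const res = new Response('{"ok":true}', { headers: { 'Content-Type': 'application/json' } });
//   res.bodyUsed;        // false
//   await res.json();    // { ok: true }
//   res.bodyUsed;        // true
//   await res.text();    // rejects with TypeError: "body used already for: ..."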
/**
 * Detect buffer encoding and convert to target encoding
 * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
 *
 * @param Buffer buffer Incoming buffer
 * @param Object headers Response headers
 * @return String
 */
function convertBody(buffer, headers) {
    if (typeof convert !== 'function') {
        throw new Error('The package `encoding` must be installed to use the textConverted() function');
    }

    const ct = headers.get('content-type');
    let charset = 'utf-8';
    let res, str;

    // header
    if (ct) {
        res = /charset=([^;]*)/i.exec(ct);
    }

    // no charset in content type, peek at response body for at most 1024 bytes
    str = buffer.slice(0, 1024).toString();

    // html5
    if (!res && str) {
        res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
    }

    // html4
    if (!res && str) {
        res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str);
        if (!res) {
            res = /<meta[\s]+?content=(['"])(.+?)\1[\s]+?http-equiv=(['"])content-type\3/i.exec(str);
            if (res) {
                res.pop(); // drop last quote
            }
        }

        if (res) {
            res = /charset=(.*)/i.exec(res.pop());
        }
    }

    // xml
    if (!res && str) {
        res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str);
    }

    // found charset
    if (res) {
        charset = res.pop();

        // prevent decode issues when sites use incorrect encoding
        // ref: https://hsivonen.fi/encoding-menu/
        if (charset === 'gb2312' || charset === 'gbk') {
            charset = 'gb18030';
        }
    }

    // turn raw buffers into a single utf-8 buffer
    return convert(buffer, 'UTF-8', charset).toString();
}
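// --- Illustrative sketch (kept as a comment; not executed by this bundle) ---
// convertBody is only reachable through textConverted() and requires the optional
// `encoding` package (exposed here as `convert`). A hedged usage example with a
// hypothetical URL:
//
//   const res = await fetch('https://example.com/legacy-page');
//   const text = await res.textConverted();  // decodes gb2312/latin1/... bodies to UTF-8;
//                                            // throws if `encoding` is not installed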
/**
 * Detect a URLSearchParams object
 * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
 *
 * @param Object obj Object to detect by type or brand
 * @return Boolean
 */
function isURLSearchParams(obj) {
    // Duck-typing as a necessary condition.
    if (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') {
        return false;
    }

    // Brand-checking and more duck-typing as optional condition.
    return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function';
}

/**
 * Check if `obj` is a W3C `Blob` object (which `File` inherits from)
 * @param {*} obj
 * @return {boolean}
 */
function isBlob(obj) {
    return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]);
}
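// --- Illustrative sketch (kept as a comment; not executed by this bundle) ---
// Both helpers above rely on duck-typing rather than instanceof, so objects from
// other realms or polyfills are still recognised. For example:
//
//   isURLSearchParams(new URLSearchParams('a=1'));   // true
//   isURLSearchParams({ append() {}, get() {} });    // false (missing delete/getAll/has/set)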
/**
 * Clone body given Res/Req instance
 *
 * @param Mixed instance Response or Request instance
 * @return Mixed
 */
function clone(instance) {
    let p1, p2;
    let body = instance.body;

    // don't allow cloning a used body
    if (instance.bodyUsed) {
        throw new Error('cannot clone body after it is used');
    }

    // check that body is a stream and not form-data object
    // note: we can't clone the form-data object without having it as a dependency
    if (body instanceof Stream && typeof body.getBoundary !== 'function') {
        // tee instance body
        p1 = new PassThrough();
        p2 = new PassThrough();
        body.pipe(p1);
        body.pipe(p2);
        // set instance body to teed body and return the other teed body
        instance[INTERNALS].body = p1;
        body = p2;
    }

    return body;
}
/**
 * Performs the operation "extract a `Content-Type` value from |object|" as
 * specified in the specification:
 * https://fetch.spec.whatwg.org/#concept-bodyinit-extract
 *
 * This function assumes that instance.body is present.
 *
 * @param Mixed body Any options.body input
 */
function extractContentType(body) {
    if (body === null) {
        // body is null
        return null;
    } else if (typeof body === 'string') {
        // body is string
        return 'text/plain;charset=UTF-8';
    } else if (isURLSearchParams(body)) {
        // body is a URLSearchParams
        return 'application/x-www-form-urlencoded;charset=UTF-8';
    } else if (isBlob(body)) {
        // body is blob
        return body.type || null;
    } else if (Buffer.isBuffer(body)) {
        // body is buffer
        return null;
    } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
        // body is ArrayBuffer
        return null;
    } else if (ArrayBuffer.isView(body)) {
        // body is ArrayBufferView
        return null;
    } else if (typeof body.getBoundary === 'function') {
        // detect form data input from form-data module
        return `multipart/form-data;boundary=${body.getBoundary()}`;
    } else if (body instanceof Stream) {
        // body is stream
        // can't really do much about this
        return null;
    } else {
        // Body constructor defaults other things to string
        return 'text/plain;charset=UTF-8';
    }
}
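// --- Illustrative sketch (kept as a comment; not executed by this bundle) ---
// Examples of the mapping implemented above:
//
//   extractContentType('hello');                      // 'text/plain;charset=UTF-8'
//   extractContentType(new URLSearchParams('a=1'));   // 'application/x-www-form-urlencoded;charset=UTF-8'
//   extractContentType(Buffer.from('raw'));           // null (caller must set the header explicitly)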
/**
 * The Fetch Standard treats this as if "total bytes" is a property on the body.
 * For us, we have to explicitly get it with a function.
 *
 * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes
 *
 * @param Body instance Instance of Body
 * @return Number? Number of bytes, or null if not possible
 */
function getTotalBytes(instance) {
    const body = instance.body;

    if (body === null) {
        // body is null
        return 0;
    } else if (isBlob(body)) {
        return body.size;
    } else if (Buffer.isBuffer(body)) {
        // body is buffer
        return body.length;
    } else if (body && typeof body.getLengthSync === 'function') {
        // detect form data input from form-data module
        if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x
        body.hasKnownLength && body.hasKnownLength()) {
            // 2.x
            return body.getLengthSync();
        }
        return null;
    } else {
        // body is stream
        return null;
    }
}
/**
 * Write a Body to a Node.js WritableStream (e.g. http.Request) object.
 *
 * @param Body instance Instance of Body
 * @return Void
 */
function writeToStream(dest, instance) {
    const body = instance.body;

    if (body === null) {
        // body is null
        dest.end();
    } else if (isBlob(body)) {
        body.stream().pipe(dest);
    } else if (Buffer.isBuffer(body)) {
        // body is buffer
        dest.write(body);
        dest.end();
    } else {
        // body is stream
        body.pipe(dest);
    }
}

// expose Promise
Body.Promise = global.Promise;
/**
 * headers.js
 *
 * Headers class offers convenient helpers
 */

const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;

function validateName(name) {
    name = `${name}`;
    if (invalidTokenRegex.test(name) || name === '') {
        throw new TypeError(`${name} is not a legal HTTP header name`);
    }
}

function validateValue(value) {
    value = `${value}`;
    if (invalidHeaderCharRegex.test(value)) {
        throw new TypeError(`${value} is not a legal HTTP header value`);
    }
}

/**
 * Find the key in the map object given a header name.
 *
 * Returns undefined if not found.
 *
 * @param String name Header name
 * @return String|Undefined
 */
function find(map, name) {
    name = name.toLowerCase();
    for (const key in map) {
        if (key.toLowerCase() === name) {
            return key;
        }
    }
    return undefined;
}
const MAP = Symbol('map');
|
||
class Headers {
|
||
/**
|
||
* Headers class
|
||
*
|
||
* @param Object headers Response headers
|
||
* @return Void
|
||
*/
|
||
constructor() {
|
||
let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;
|
||
|
||
this[MAP] = Object.create(null);
|
||
|
||
if (init instanceof Headers) {
|
||
const rawHeaders = init.raw();
|
||
const headerNames = Object.keys(rawHeaders);
|
||
|
||
for (const headerName of headerNames) {
|
||
for (const value of rawHeaders[headerName]) {
|
||
this.append(headerName, value);
|
||
}
|
||
}
|
||
|
||
return;
|
||
}
|
||
|
||
// We don't worry about converting prop to ByteString here as append()
|
||
// will handle it.
|
||
if (init == null) ; else if (typeof init === 'object') {
|
||
const method = init[Symbol.iterator];
|
||
if (method != null) {
|
||
if (typeof method !== 'function') {
|
||
throw new TypeError('Header pairs must be iterable');
|
||
}
|
||
|
||
// sequence<sequence<ByteString>>
|
||
// Note: per spec we have to first exhaust the lists then process them
|
||
const pairs = [];
|
||
for (const pair of init) {
|
||
if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
|
||
throw new TypeError('Each header pair must be iterable');
|
||
}
|
||
pairs.push(Array.from(pair));
|
||
}
|
||
|
||
for (const pair of pairs) {
|
||
if (pair.length !== 2) {
|
||
throw new TypeError('Each header pair must be a name/value tuple');
|
||
}
|
||
this.append(pair[0], pair[1]);
|
||
}
|
||
} else {
|
||
// record<ByteString, ByteString>
|
||
for (const key of Object.keys(init)) {
|
||
const value = init[key];
|
||
this.append(key, value);
|
||
}
|
||
}
|
||
} else {
|
||
throw new TypeError('Provided initializer must be an object');
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Return combined header value given name
|
||
*
|
||
* @param String name Header name
|
||
* @return Mixed
|
||
*/
|
||
get(name) {
|
||
name = `${name}`;
|
||
validateName(name);
|
||
const key = find(this[MAP], name);
|
||
if (key === undefined) {
|
||
return null;
|
||
}
|
||
|
||
return this[MAP][key].join(', ');
|
||
}
|
||
|
||
/**
|
||
* Iterate over all headers
|
||
*
|
||
* @param Function callback Executed for each item with parameters (value, name, thisArg)
|
||
* @param Boolean thisArg `this` context for callback function
|
||
* @return Void
|
||
*/
|
||
forEach(callback) {
|
||
let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
|
||
|
||
let pairs = getHeaders(this);
|
||
let i = 0;
|
||
while (i < pairs.length) {
|
||
var _pairs$i = pairs[i];
|
||
const name = _pairs$i[0],
|
||
value = _pairs$i[1];
|
||
|
||
callback.call(thisArg, value, name, this);
|
||
pairs = getHeaders(this);
|
||
i++;
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Overwrite header values given name
|
||
*
|
||
* @param String name Header name
|
||
* @param String value Header value
|
||
* @return Void
|
||
*/
|
||
set(name, value) {
|
||
name = `${name}`;
|
||
value = `${value}`;
|
||
validateName(name);
|
||
validateValue(value);
|
||
const key = find(this[MAP], name);
|
||
this[MAP][key !== undefined ? key : name] = [value];
|
||
}
|
||
|
||
/**
|
||
* Append a value onto existing header
|
||
*
|
||
* @param String name Header name
|
||
* @param String value Header value
|
||
* @return Void
|
||
*/
|
||
append(name, value) {
|
||
name = `${name}`;
|
||
value = `${value}`;
|
||
validateName(name);
|
||
validateValue(value);
|
||
const key = find(this[MAP], name);
|
||
if (key !== undefined) {
|
||
this[MAP][key].push(value);
|
||
} else {
|
||
this[MAP][name] = [value];
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Check for header name existence
|
||
*
|
||
* @param String name Header name
|
||
* @return Boolean
|
||
*/
|
||
has(name) {
|
||
name = `${name}`;
|
||
validateName(name);
|
||
return find(this[MAP], name) !== undefined;
|
||
}
|
||
|
||
/**
|
||
* Delete all header values given name
|
||
*
|
||
* @param String name Header name
|
||
* @return Void
|
||
*/
|
||
delete(name) {
|
||
name = `${name}`;
|
||
validateName(name);
|
||
const key = find(this[MAP], name);
|
||
if (key !== undefined) {
|
||
delete this[MAP][key];
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Return raw headers (non-spec api)
|
||
*
|
||
* @return Object
|
||
*/
|
||
raw() {
|
||
return this[MAP];
|
||
}
|
||
|
||
/**
|
||
* Get an iterator on keys.
|
||
*
|
||
* @return Iterator
|
||
*/
|
||
keys() {
|
||
return createHeadersIterator(this, 'key');
|
||
}
|
||
|
||
/**
|
||
* Get an iterator on values.
|
||
*
|
||
* @return Iterator
|
||
*/
|
||
values() {
|
||
return createHeadersIterator(this, 'value');
|
||
}
|
||
|
||
/**
|
||
* Get an iterator on entries.
|
||
*
|
||
* This is the default iterator of the Headers object.
|
||
*
|
||
* @return Iterator
|
||
*/
|
||
[Symbol.iterator]() {
|
||
return createHeadersIterator(this, 'key+value');
|
||
}
|
||
}
|
||
Headers.prototype.entries = Headers.prototype[Symbol.iterator];
|
||
|
||
Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
|
||
value: 'Headers',
|
||
writable: false,
|
||
enumerable: false,
|
||
configurable: true
|
||
});
|
||
|
||
Object.defineProperties(Headers.prototype, {
|
||
get: { enumerable: true },
|
||
forEach: { enumerable: true },
|
||
set: { enumerable: true },
|
||
append: { enumerable: true },
|
||
has: { enumerable: true },
|
||
delete: { enumerable: true },
|
||
keys: { enumerable: true },
|
||
values: { enumerable: true },
|
||
entries: { enumerable: true }
|
||
});
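// --- Illustrative sketch (kept as a comment; not executed by this bundle) ---
// The Headers class above is case-insensitive and multi-valued; the header names
// and values here are examples only:
//
//   const h = new Headers({ 'Content-Type': 'text/plain' });
//   h.append('Accept', 'text/html');
//   h.append('Accept', 'application/json');
//   h.get('accept');     // 'text/html, application/json' (joined, case-insensitive lookup)
//   [...h.keys()];       // ['accept', 'content-type'] (lowercased and sorted)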
|
||
|
||
function getHeaders(headers) {
|
||
let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';
|
||
|
||
const keys = Object.keys(headers[MAP]).sort();
|
||
return keys.map(kind === 'key' ? function (k) {
|
||
return k.toLowerCase();
|
||
} : kind === 'value' ? function (k) {
|
||
return headers[MAP][k].join(', ');
|
||
} : function (k) {
|
||
return [k.toLowerCase(), headers[MAP][k].join(', ')];
|
||
});
|
||
}
|
||
|
||
const INTERNAL = Symbol('internal');
|
||
|
||
function createHeadersIterator(target, kind) {
|
||
const iterator = Object.create(HeadersIteratorPrototype);
|
||
iterator[INTERNAL] = {
|
||
target,
|
||
kind,
|
||
index: 0
|
||
};
|
||
return iterator;
|
||
}
|
||
|
||
const HeadersIteratorPrototype = Object.setPrototypeOf({
|
||
next() {
|
||
// istanbul ignore if
|
||
if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
|
||
throw new TypeError('Value of `this` is not a HeadersIterator');
|
||
}
|
||
|
||
var _INTERNAL = this[INTERNAL];
|
||
const target = _INTERNAL.target,
|
||
kind = _INTERNAL.kind,
|
||
index = _INTERNAL.index;
|
||
|
||
const values = getHeaders(target, kind);
|
||
const len = values.length;
|
||
if (index >= len) {
|
||
return {
|
||
value: undefined,
|
||
done: true
|
||
};
|
||
}
|
||
|
||
this[INTERNAL].index = index + 1;
|
||
|
||
return {
|
||
value: values[index],
|
||
done: false
|
||
};
|
||
}
|
||
}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));
|
||
|
||
Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
|
||
value: 'HeadersIterator',
|
||
writable: false,
|
||
enumerable: false,
|
||
configurable: true
|
||
});
|
||
|
||
/**
|
||
* Export the Headers object in a form that Node.js can consume.
|
||
*
|
||
* @param Headers headers
|
||
* @return Object
|
||
*/
|
||
function exportNodeCompatibleHeaders(headers) {
|
||
const obj = Object.assign({ __proto__: null }, headers[MAP]);
|
||
|
||
// http.request() only supports string as Host header. This hack makes
|
||
// specifying custom Host header possible.
|
||
const hostHeaderKey = find(headers[MAP], 'Host');
|
||
if (hostHeaderKey !== undefined) {
|
||
obj[hostHeaderKey] = obj[hostHeaderKey][0];
|
||
}
|
||
|
||
return obj;
|
||
}
|
||
|
||
/**
|
||
* Create a Headers object from an object of headers, ignoring those that do
|
||
* not conform to HTTP grammar productions.
|
||
*
|
||
* @param Object obj Object of headers
|
||
* @return Headers
|
||
*/
|
||
function createHeadersLenient(obj) {
|
||
const headers = new Headers();
|
||
for (const name of Object.keys(obj)) {
|
||
if (invalidTokenRegex.test(name)) {
|
||
continue;
|
||
}
|
||
if (Array.isArray(obj[name])) {
|
||
for (const val of obj[name]) {
|
||
if (invalidHeaderCharRegex.test(val)) {
|
||
continue;
|
||
}
|
||
if (headers[MAP][name] === undefined) {
|
||
headers[MAP][name] = [val];
|
||
} else {
|
||
headers[MAP][name].push(val);
|
||
}
|
||
}
|
||
} else if (!invalidHeaderCharRegex.test(obj[name])) {
|
||
headers[MAP][name] = [obj[name]];
|
||
}
|
||
}
|
||
return headers;
|
||
}
|
||
|
||
const INTERNALS$1 = Symbol('Response internals');
|
||
|
||
// fix an issue where "STATUS_CODES" aren't a named export for node <10
|
||
const STATUS_CODES = http.STATUS_CODES;
|
||
|
||
/**
|
||
* Response class
|
||
*
|
||
* @param Stream body Readable stream
|
||
* @param Object opts Response options
|
||
* @return Void
|
||
*/
|
||
class Response {
|
||
constructor() {
|
||
let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
|
||
let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
|
||
|
||
Body.call(this, body, opts);
|
||
|
||
const status = opts.status || 200;
|
||
const headers = new Headers(opts.headers);
|
||
|
||
if (body != null && !headers.has('Content-Type')) {
|
||
const contentType = extractContentType(body);
|
||
if (contentType) {
|
||
headers.append('Content-Type', contentType);
|
||
}
|
||
}
|
||
|
||
this[INTERNALS$1] = {
|
||
url: opts.url,
|
||
status,
|
||
statusText: opts.statusText || STATUS_CODES[status],
|
||
headers,
|
||
counter: opts.counter
|
||
};
|
||
}
|
||
|
||
get url() {
|
||
return this[INTERNALS$1].url || '';
|
||
}
|
||
|
||
get status() {
|
||
return this[INTERNALS$1].status;
|
||
}
|
||
|
||
/**
|
||
* Convenience property representing if the request ended normally
|
||
*/
|
||
get ok() {
|
||
return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
|
||
}
|
||
|
||
get redirected() {
|
||
return this[INTERNALS$1].counter > 0;
|
||
}
|
||
|
||
get statusText() {
|
||
return this[INTERNALS$1].statusText;
|
||
}
|
||
|
||
get headers() {
|
||
return this[INTERNALS$1].headers;
|
||
}
|
||
|
||
/**
|
||
* Clone this response
|
||
*
|
||
* @return Response
|
||
*/
|
||
clone() {
|
||
return new Response(clone(this), {
|
||
url: this.url,
|
||
status: this.status,
|
||
statusText: this.statusText,
|
||
headers: this.headers,
|
||
ok: this.ok,
|
||
redirected: this.redirected
|
||
});
|
||
}
|
||
}
|
||
|
||
Body.mixIn(Response.prototype);
|
||
|
||
Object.defineProperties(Response.prototype, {
|
||
url: { enumerable: true },
|
||
status: { enumerable: true },
|
||
ok: { enumerable: true },
|
||
redirected: { enumerable: true },
|
||
statusText: { enumerable: true },
|
||
headers: { enumerable: true },
|
||
clone: { enumerable: true }
|
||
});
|
||
|
||
Object.defineProperty(Response.prototype, Symbol.toStringTag, {
|
||
value: 'Response',
|
||
writable: false,
|
||
enumerable: false,
|
||
configurable: true
|
||
});
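// --- Illustrative sketch (kept as a comment; not executed by this bundle) ---
// Constructing a Response directly mirrors the WHATWG API; the values below are examples only:
//
//   const r = new Response('not found', { status: 404 });
//   r.ok;                              // false (only 200-299 count as ok)
//   r.statusText;                      // 'Not Found' (taken from http.STATUS_CODES)
//   r.headers.get('Content-Type');     // 'text/plain;charset=UTF-8' (set via extractContentType)
//   await r.text();                    // 'not found'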
|
||
|
||
const INTERNALS$2 = Symbol('Request internals');
|
||
|
||
// fix an issue where "format", "parse" aren't a named export for node <10
|
||
const parse_url = Url.parse;
|
||
const format_url = Url.format;
|
||
|
||
const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
|
||
|
||
/**
|
||
* Check if a value is an instance of Request.
|
||
*
|
||
* @param Mixed input
|
||
* @return Boolean
|
||
*/
|
||
function isRequest(input) {
|
||
return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
|
||
}
|
||
|
||
function isAbortSignal(signal) {
|
||
const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);
|
||
return !!(proto && proto.constructor.name === 'AbortSignal');
|
||
}
|
||
|
||
/**
|
||
* Request class
|
||
*
|
||
* @param Mixed input Url or Request instance
|
||
* @param Object init Custom options
|
||
* @return Void
|
||
*/
|
||
class Request {
|
||
constructor(input) {
|
||
let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
|
||
|
||
let parsedURL;
|
||
|
||
// normalize input
|
||
if (!isRequest(input)) {
|
||
if (input && input.href) {
|
||
// in order to support Node.js' Url objects; though WHATWG's URL objects
|
||
// will fall into this branch also (since their `toString()` will return
|
||
// `href` property anyway)
|
||
parsedURL = parse_url(input.href);
|
||
} else {
|
||
// coerce input to a string before attempting to parse
|
||
parsedURL = parse_url(`${input}`);
|
||
}
|
||
input = {};
|
||
} else {
|
||
parsedURL = parse_url(input.url);
|
||
}
|
||
|
||
let method = init.method || input.method || 'GET';
|
||
method = method.toUpperCase();
|
||
|
||
if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
|
||
throw new TypeError('Request with GET/HEAD method cannot have body');
|
||
}
|
||
|
||
let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;
|
||
|
||
Body.call(this, inputBody, {
|
||
timeout: init.timeout || input.timeout || 0,
|
||
size: init.size || input.size || 0
|
||
});
|
||
|
||
const headers = new Headers(init.headers || input.headers || {});
|
||
|
||
if (inputBody != null && !headers.has('Content-Type')) {
|
||
const contentType = extractContentType(inputBody);
|
||
if (contentType) {
|
||
headers.append('Content-Type', contentType);
|
||
}
|
||
}
|
||
|
||
let signal = isRequest(input) ? input.signal : null;
|
||
if ('signal' in init) signal = init.signal;
|
||
|
||
if (signal != null && !isAbortSignal(signal)) {
|
||
throw new TypeError('Expected signal to be an instanceof AbortSignal');
|
||
}
|
||
|
||
this[INTERNALS$2] = {
|
||
method,
|
||
redirect: init.redirect || input.redirect || 'follow',
|
||
headers,
|
||
parsedURL,
|
||
signal
|
||
};
|
||
|
||
// node-fetch-only options
|
||
this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
|
||
this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
|
||
this.counter = init.counter || input.counter || 0;
|
||
this.agent = init.agent || input.agent;
|
||
}
|
||
|
||
get method() {
|
||
return this[INTERNALS$2].method;
|
||
}
|
||
|
||
get url() {
|
||
return format_url(this[INTERNALS$2].parsedURL);
|
||
}
|
||
|
||
get headers() {
|
||
return this[INTERNALS$2].headers;
|
||
}
|
||
|
||
get redirect() {
|
||
return this[INTERNALS$2].redirect;
|
||
}
|
||
|
||
get signal() {
|
||
return this[INTERNALS$2].signal;
|
||
}
|
||
|
||
/**
|
||
* Clone this request
|
||
*
|
||
* @return Request
|
||
*/
|
||
clone() {
|
||
return new Request(this);
|
||
}
|
||
}
|
||
|
||
Body.mixIn(Request.prototype);
|
||
|
||
Object.defineProperty(Request.prototype, Symbol.toStringTag, {
|
||
value: 'Request',
|
||
writable: false,
|
||
enumerable: false,
|
||
configurable: true
|
||
});
|
||
|
||
Object.defineProperties(Request.prototype, {
|
||
method: { enumerable: true },
|
||
url: { enumerable: true },
|
||
headers: { enumerable: true },
|
||
redirect: { enumerable: true },
|
||
clone: { enumerable: true },
|
||
signal: { enumerable: true }
|
||
});
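// --- Illustrative sketch (kept as a comment; not executed by this bundle) ---
// A Request normalises its input up front; the URL below is hypothetical:
//
//   const req = new Request('https://example.com/api', {
//     method: 'POST',
//     body: JSON.stringify({ hello: 'world' }),
//     headers: { 'Content-Type': 'application/json' }
//   });
//   req.method;     // 'POST' (always upper-cased)
//   req.url;        // 'https://example.com/api'
//   req.clone();    // allowed while the body has not been consumed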
|
||
|
||
/**
|
||
* Convert a Request to Node.js http request options.
|
||
*
|
||
* @param Request A Request instance
|
||
* @return Object The options object to be passed to http.request
|
||
*/
|
||
function getNodeRequestOptions(request) {
|
||
const parsedURL = request[INTERNALS$2].parsedURL;
|
||
const headers = new Headers(request[INTERNALS$2].headers);
|
||
|
||
// fetch step 1.3
|
||
if (!headers.has('Accept')) {
|
||
headers.set('Accept', '*/*');
|
||
}
|
||
|
||
// Basic fetch
|
||
if (!parsedURL.protocol || !parsedURL.hostname) {
|
||
throw new TypeError('Only absolute URLs are supported');
|
||
}
|
||
|
||
if (!/^https?:$/.test(parsedURL.protocol)) {
|
||
throw new TypeError('Only HTTP(S) protocols are supported');
|
||
}
|
||
|
||
if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
|
||
throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
|
||
}
|
||
|
||
// HTTP-network-or-cache fetch steps 2.4-2.7
|
||
let contentLengthValue = null;
|
||
if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
|
||
contentLengthValue = '0';
|
||
}
|
||
if (request.body != null) {
|
||
const totalBytes = getTotalBytes(request);
|
||
if (typeof totalBytes === 'number') {
|
||
contentLengthValue = String(totalBytes);
|
||
}
|
||
}
|
||
if (contentLengthValue) {
|
||
headers.set('Content-Length', contentLengthValue);
|
||
}
|
||
|
||
// HTTP-network-or-cache fetch step 2.11
|
||
if (!headers.has('User-Agent')) {
|
||
headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
|
||
}
|
||
|
||
// HTTP-network-or-cache fetch step 2.15
|
||
if (request.compress && !headers.has('Accept-Encoding')) {
|
||
headers.set('Accept-Encoding', 'gzip,deflate');
|
||
}
|
||
|
||
let agent = request.agent;
|
||
if (typeof agent === 'function') {
|
||
agent = agent(parsedURL);
|
||
}
|
||
|
||
if (!headers.has('Connection') && !agent) {
|
||
headers.set('Connection', 'close');
|
||
}
|
||
|
||
// HTTP-network fetch step 4.2
|
||
// chunked encoding is handled by Node.js
|
||
|
||
return Object.assign({}, parsedURL, {
|
||
method: request.method,
|
||
headers: exportNodeCompatibleHeaders(headers),
|
||
agent
|
||
});
|
||
}
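// --- Illustrative sketch (kept as a comment; not executed by this bundle) ---
// For a plain GET request the options produced above look roughly like this
// (URL is hypothetical; the fields from Url.parse are merged into the same object):
//
//   getNodeRequestOptions(new Request('https://example.com/'));
//   // => { protocol: 'https:', hostname: 'example.com', path: '/', method: 'GET',
//   //      headers: { Accept: ['*/*'], 'Accept-Encoding': ['gzip,deflate'],
//   //                 Connection: ['close'], 'User-Agent': ['node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'] },
//   //      agent: undefined, ... }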
|
||
|
||
/**
 * abort-error.js
 *
 * AbortError interface for cancelled requests
 */

/**
 * Create AbortError instance
 *
 * @param String message Error message for human
 * @return AbortError
 */
function AbortError(message) {
    Error.call(this, message);

    this.type = 'aborted';
    this.message = message;

    // hide custom error implementation details from end-users
    Error.captureStackTrace(this, this.constructor);
}

AbortError.prototype = Object.create(Error.prototype);
AbortError.prototype.constructor = AbortError;
AbortError.prototype.name = 'AbortError';
|
||
|
||
// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
|
||
const PassThrough$1 = Stream.PassThrough;
|
||
const resolve_url = Url.resolve;
|
||
|
||
/**
|
||
* Fetch function
|
||
*
|
||
* @param Mixed url Absolute url or Request instance
|
||
* @param Object opts Fetch options
|
||
* @return Promise
|
||
*/
|
||
function fetch(url, opts) {
|
||
|
||
// allow custom promise
|
||
if (!fetch.Promise) {
|
||
throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
|
||
}
|
||
|
||
Body.Promise = fetch.Promise;
|
||
|
||
// wrap http.request into fetch
|
||
return new fetch.Promise(function (resolve, reject) {
|
||
// build request object
|
||
const request = new Request(url, opts);
|
||
const options = getNodeRequestOptions(request);
|
||
|
||
const send = (options.protocol === 'https:' ? https : http).request;
|
||
const signal = request.signal;
|
||
|
||
let response = null;
|
||
|
||
const abort = function abort() {
|
||
let error = new AbortError('The user aborted a request.');
|
||
reject(error);
|
||
if (request.body && request.body instanceof Stream.Readable) {
|
||
request.body.destroy(error);
|
||
}
|
||
if (!response || !response.body) return;
|
||
response.body.emit('error', error);
|
||
};
|
||
|
||
if (signal && signal.aborted) {
|
||
abort();
|
||
return;
|
||
}
|
||
|
||
const abortAndFinalize = function abortAndFinalize() {
|
||
abort();
|
||
finalize();
|
||
};
|
||
|
||
// send request
|
||
const req = send(options);
|
||
let reqTimeout;
|
||
|
||
if (signal) {
|
||
signal.addEventListener('abort', abortAndFinalize);
|
||
}
|
||
|
||
function finalize() {
|
||
req.abort();
|
||
if (signal) signal.removeEventListener('abort', abortAndFinalize);
|
||
clearTimeout(reqTimeout);
|
||
}
|
||
|
||
if (request.timeout) {
|
||
req.once('socket', function (socket) {
|
||
reqTimeout = setTimeout(function () {
|
||
reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
|
||
finalize();
|
||
}, request.timeout);
|
||
});
|
||
}
|
||
|
||
req.on('error', function (err) {
|
||
reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
|
||
finalize();
|
||
});
|
||
|
||
req.on('response', function (res) {
|
||
clearTimeout(reqTimeout);
|
||
|
||
const headers = createHeadersLenient(res.headers);
|
||
|
||
// HTTP fetch step 5
|
||
if (fetch.isRedirect(res.statusCode)) {
|
||
// HTTP fetch step 5.2
|
||
const location = headers.get('Location');
|
||
|
||
// HTTP fetch step 5.3
|
||
const locationURL = location === null ? null : resolve_url(request.url, location);
|
||
|
||
// HTTP fetch step 5.5
|
||
switch (request.redirect) {
|
||
case 'error':
|
||
reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
|
||
finalize();
|
||
return;
|
||
case 'manual':
|
||
// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
|
||
if (locationURL !== null) {
|
||
// handle corrupted header
|
||
try {
|
||
headers.set('Location', locationURL);
|
||
} catch (err) {
|
||
// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
|
||
reject(err);
|
||
}
|
||
}
|
||
break;
|
||
case 'follow':
|
||
// HTTP-redirect fetch step 2
|
||
if (locationURL === null) {
|
||
break;
|
||
}
|
||
|
||
// HTTP-redirect fetch step 5
|
||
if (request.counter >= request.follow) {
|
||
reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
|
||
finalize();
|
||
return;
|
||
}
|
||
|
||
// HTTP-redirect fetch step 6 (counter increment)
|
||
// Create a new Request object.
|
||
const requestOpts = {
|
||
headers: new Headers(request.headers),
|
||
follow: request.follow,
|
||
counter: request.counter + 1,
|
||
agent: request.agent,
|
||
compress: request.compress,
|
||
method: request.method,
|
||
body: request.body,
|
||
signal: request.signal,
|
||
timeout: request.timeout,
|
||
size: request.size
|
||
};
|
||
|
||
// HTTP-redirect fetch step 9
|
||
if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
|
||
reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
|
||
finalize();
|
||
return;
|
||
}
|
||
|
||
// HTTP-redirect fetch step 11
|
||
if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
|
||
requestOpts.method = 'GET';
|
||
requestOpts.body = undefined;
|
||
requestOpts.headers.delete('content-length');
|
||
}
|
||
|
||
// HTTP-redirect fetch step 15
|
||
resolve(fetch(new Request(locationURL, requestOpts)));
|
||
finalize();
|
||
return;
|
||
}
|
||
}
|
||
|
||
// prepare response
|
||
res.once('end', function () {
|
||
if (signal) signal.removeEventListener('abort', abortAndFinalize);
|
||
});
|
||
let body = res.pipe(new PassThrough$1());
|
||
|
||
const response_options = {
|
||
url: request.url,
|
||
status: res.statusCode,
|
||
statusText: res.statusMessage,
|
||
headers: headers,
|
||
size: request.size,
|
||
timeout: request.timeout,
|
||
counter: request.counter
|
||
};
|
||
|
||
// HTTP-network fetch step 12.1.1.3
|
||
const codings = headers.get('Content-Encoding');
|
||
|
||
// HTTP-network fetch step 12.1.1.4: handle content codings
|
||
|
||
// in following scenarios we ignore compression support
|
||
// 1. compression support is disabled
|
||
// 2. HEAD request
|
||
// 3. no Content-Encoding header
|
||
// 4. no content response (204)
|
||
// 5. content not modified response (304)
|
||
if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
|
||
response = new Response(body, response_options);
|
||
resolve(response);
|
||
return;
|
||
}
|
||
|
||
// For Node v6+
|
||
// Be less strict when decoding compressed responses, since sometimes
|
||
// servers send slightly invalid responses that are still accepted
|
||
// by common browsers.
|
||
// Always using Z_SYNC_FLUSH is what cURL does.
|
||
const zlibOptions = {
|
||
flush: zlib.Z_SYNC_FLUSH,
|
||
finishFlush: zlib.Z_SYNC_FLUSH
|
||
};
|
||
|
||
// for gzip
|
||
if (codings == 'gzip' || codings == 'x-gzip') {
|
||
body = body.pipe(zlib.createGunzip(zlibOptions));
|
||
response = new Response(body, response_options);
|
||
resolve(response);
|
||
return;
|
||
}
|
||
|
||
// for deflate
|
||
if (codings == 'deflate' || codings == 'x-deflate') {
|
||
// handle the infamous raw deflate response from old servers
|
||
// a hack for old IIS and Apache servers
|
||
const raw = res.pipe(new PassThrough$1());
|
||
raw.once('data', function (chunk) {
|
||
// see http://stackoverflow.com/questions/37519828
|
||
if ((chunk[0] & 0x0F) === 0x08) {
|
||
body = body.pipe(zlib.createInflate());
|
||
} else {
|
||
body = body.pipe(zlib.createInflateRaw());
|
||
}
|
||
response = new Response(body, response_options);
|
||
resolve(response);
|
||
});
|
||
return;
|
||
}
|
||
|
||
// for br
|
||
if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
|
||
body = body.pipe(zlib.createBrotliDecompress());
|
||
response = new Response(body, response_options);
|
||
resolve(response);
|
||
return;
|
||
}
|
||
|
||
// otherwise, use response as-is
|
||
response = new Response(body, response_options);
|
||
resolve(response);
|
||
});
|
||
|
||
writeToStream(req, request);
|
||
});
|
||
}
|
||
/**
 * Redirect code matching
 *
 * @param Number code Status code
 * @return Boolean
 */
fetch.isRedirect = function (code) {
    return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
};

// expose Promise
fetch.Promise = global.Promise;

module.exports = exports = fetch;
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.default = exports;
exports.Headers = Headers;
exports.Request = Request;
exports.Response = Response;
exports.FetchError = FetchError;
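// --- Illustrative sketch (kept as a comment; not executed by this bundle) ---
// Typical use of the exported fetch function; the URL is hypothetical:
//
//   const res = await fetch('https://example.com/data.json');
//   if (!res.ok) throw new Error(`HTTP ${res.status}`);
//   const data = await res.json();
//
// Redirects are followed automatically (up to `follow`, default 20), and
// gzip/deflate (plus br where zlib supports it) response bodies are
// decompressed unless `compress: false` is passed.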
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 1980:
|
||
/***/ ((module) => {
|
||
|
||
/*!
 * normalize-path <https://github.com/jonschlinkert/normalize-path>
 *
 * Copyright (c) 2014-2018, Jon Schlinkert.
 * Released under the MIT License.
 */

module.exports = function(path, stripTrailing) {
    if (typeof path !== 'string') {
        throw new TypeError('expected path to be a string');
    }

    if (path === '\\' || path === '/') return '/';

    var len = path.length;
    if (len <= 1) return path;

    // ensure that win32 namespaces have two leading slashes, so that the path is
    // handled properly by the win32 version of path.parse() after being normalized
    // https://msdn.microsoft.com/library/windows/desktop/aa365247(v=vs.85).aspx#namespaces
    var prefix = '';
    if (len > 4 && path[3] === '\\') {
        var ch = path[2];
        if ((ch === '?' || ch === '.') && path.slice(0, 2) === '\\\\') {
            path = path.slice(2);
            prefix = '//';
        }
    }

    var segs = path.split(/[/\\]+/);
    if (stripTrailing !== false && segs[segs.length - 1] === '') {
        segs.pop();
    }
    return prefix + segs.join('/');
};
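// --- Illustrative sketch (kept as a comment; not executed by this bundle) ---
// Assuming the exported function above is required as `normalize`, it behaves like:
//
//   normalize('foo\\bar\\baz\\');          // 'foo/bar/baz'
//   normalize('foo\\bar\\baz\\', false);   // 'foo/bar/baz/' (trailing slash kept)
//   normalize('\\\\?\\C:\\temp');          // '//?/C:/temp' (win32 namespace prefix preserved)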
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 2940:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
var wrappy = __nccwpck_require__(682)
module.exports = wrappy(once)
module.exports.strict = wrappy(onceStrict)

once.proto = once(function () {
    Object.defineProperty(Function.prototype, 'once', {
        value: function () {
            return once(this)
        },
        configurable: true
    })

    Object.defineProperty(Function.prototype, 'onceStrict', {
        value: function () {
            return onceStrict(this)
        },
        configurable: true
    })
})

function once (fn) {
    var f = function () {
        if (f.called) return f.value
        f.called = true
        return f.value = fn.apply(this, arguments)
    }
    f.called = false
    return f
}

function onceStrict (fn) {
    var f = function () {
        if (f.called)
            throw new Error(f.onceError)
        f.called = true
        return f.value = fn.apply(this, arguments)
    }
    var name = fn.name || 'Function wrapped with `once`'
    f.onceError = name + " shouldn't be called more than once"
    f.called = false
    return f
}
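// --- Illustrative sketch (kept as a comment; not executed by this bundle) ---
// Assuming the export above is required as `once`, it guarantees a callback runs a
// single time and caches its return value:
//
//   const init = once(function () { return Math.random(); });
//   init() === init();   // true: the second call returns the cached value
//
// `once.strict` instead throws if the wrapped function is called twice.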
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 7342:
|
||
/***/ ((module) => {
|
||
|
||
"use strict";

function posix(path) {
    return path.charAt(0) === '/';
}

function win32(path) {
    // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56
    var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/;
    var result = splitDeviceRe.exec(path);
    var device = result[1] || '';
    var isUnc = Boolean(device && device.charAt(1) !== ':');

    // UNC paths are always absolute
    return Boolean(result[2] || isUnc);
}

module.exports = process.platform === 'win32' ? win32 : posix;
module.exports.posix = posix;
module.exports.win32 = win32;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 4502:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
||
|
||
var util = __nccwpck_require__(1669);
|
||
var isString = function (x) {
|
||
return typeof x === 'string';
|
||
};
|
||
|
||
|
||
// resolves . and .. elements in a path array with directory names there
|
||
// must be no slashes or device names (c:\) in the array
|
||
// (so also no leading and trailing slashes - it does not distinguish
|
||
// relative and absolute paths)
|
||
function normalizeArray(parts, allowAboveRoot) {
|
||
var res = [];
|
||
for (var i = 0; i < parts.length; i++) {
|
||
var p = parts[i];
|
||
|
||
// ignore empty parts
|
||
if (!p || p === '.')
|
||
continue;
|
||
|
||
if (p === '..') {
|
||
if (res.length && res[res.length - 1] !== '..') {
|
||
res.pop();
|
||
} else if (allowAboveRoot) {
|
||
res.push('..');
|
||
}
|
||
} else {
|
||
res.push(p);
|
||
}
|
||
}
|
||
|
||
return res;
|
||
}
|
||
|
||
// Split a filename into [root, dir, basename, ext], unix version
|
||
// 'root' is just a slash, or nothing.
|
||
var splitPathRe =
|
||
/^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/;
|
||
var posix = {};
|
||
|
||
|
||
function posixSplitPath(filename) {
|
||
return splitPathRe.exec(filename).slice(1);
|
||
}
|
||
|
||
|
||
// path.resolve([from ...], to)
|
||
// posix version
|
||
posix.resolve = function() {
|
||
var resolvedPath = '',
|
||
resolvedAbsolute = false;
|
||
|
||
for (var i = arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) {
|
||
var path = (i >= 0) ? arguments[i] : process.cwd();
|
||
|
||
// Skip empty and invalid entries
|
||
if (!isString(path)) {
|
||
throw new TypeError('Arguments to path.resolve must be strings');
|
||
} else if (!path) {
|
||
continue;
|
||
}
|
||
|
||
resolvedPath = path + '/' + resolvedPath;
|
||
resolvedAbsolute = path.charAt(0) === '/';
|
||
}
|
||
|
||
// At this point the path should be resolved to a full absolute path, but
|
||
// handle relative paths to be safe (might happen when process.cwd() fails)
|
||
|
||
// Normalize the path
|
||
resolvedPath = normalizeArray(resolvedPath.split('/'),
|
||
!resolvedAbsolute).join('/');
|
||
|
||
return ((resolvedAbsolute ? '/' : '') + resolvedPath) || '.';
|
||
};
|
||
|
||
// path.normalize(path)
|
||
// posix version
|
||
posix.normalize = function(path) {
|
||
var isAbsolute = posix.isAbsolute(path),
|
||
trailingSlash = path.substr(-1) === '/';
|
||
|
||
// Normalize the path
|
||
path = normalizeArray(path.split('/'), !isAbsolute).join('/');
|
||
|
||
if (!path && !isAbsolute) {
|
||
path = '.';
|
||
}
|
||
if (path && trailingSlash) {
|
||
path += '/';
|
||
}
|
||
|
||
return (isAbsolute ? '/' : '') + path;
|
||
};
|
||
|
||
// posix version
|
||
posix.isAbsolute = function(path) {
|
||
return path.charAt(0) === '/';
|
||
};
|
||
|
||
// posix version
|
||
posix.join = function() {
|
||
var path = '';
|
||
for (var i = 0; i < arguments.length; i++) {
|
||
var segment = arguments[i];
|
||
if (!isString(segment)) {
|
||
throw new TypeError('Arguments to path.join must be strings');
|
||
}
|
||
if (segment) {
|
||
if (!path) {
|
||
path += segment;
|
||
} else {
|
||
path += '/' + segment;
|
||
}
|
||
}
|
||
}
|
||
return posix.normalize(path);
|
||
};
|
||
|
||
|
||
// path.relative(from, to)
|
||
// posix version
|
||
posix.relative = function(from, to) {
|
||
from = posix.resolve(from).substr(1);
|
||
to = posix.resolve(to).substr(1);
|
||
|
||
function trim(arr) {
|
||
var start = 0;
|
||
for (; start < arr.length; start++) {
|
||
if (arr[start] !== '') break;
|
||
}
|
||
|
||
var end = arr.length - 1;
|
||
for (; end >= 0; end--) {
|
||
if (arr[end] !== '') break;
|
||
}
|
||
|
||
if (start > end) return [];
|
||
return arr.slice(start, end + 1);
|
||
}
|
||
|
||
var fromParts = trim(from.split('/'));
|
||
var toParts = trim(to.split('/'));
|
||
|
||
var length = Math.min(fromParts.length, toParts.length);
|
||
var samePartsLength = length;
|
||
for (var i = 0; i < length; i++) {
|
||
if (fromParts[i] !== toParts[i]) {
|
||
samePartsLength = i;
|
||
break;
|
||
}
|
||
}
|
||
|
||
var outputParts = [];
|
||
for (var i = samePartsLength; i < fromParts.length; i++) {
|
||
outputParts.push('..');
|
||
}
|
||
|
||
outputParts = outputParts.concat(toParts.slice(samePartsLength));
|
||
|
||
return outputParts.join('/');
|
||
};
|
||
|
||
|
||
posix._makeLong = function(path) {
|
||
return path;
|
||
};
|
||
|
||
|
||
posix.dirname = function(path) {
|
||
var result = posixSplitPath(path),
|
||
root = result[0],
|
||
dir = result[1];
|
||
|
||
if (!root && !dir) {
|
||
// No dirname whatsoever
|
||
return '.';
|
||
}
|
||
|
||
if (dir) {
|
||
// It has a dirname, strip trailing slash
|
||
dir = dir.substr(0, dir.length - 1);
|
||
}
|
||
|
||
return root + dir;
|
||
};
|
||
|
||
|
||
posix.basename = function(path, ext) {
|
||
var f = posixSplitPath(path)[2];
|
||
// TODO: make this comparison case-insensitive on windows?
|
||
if (ext && f.substr(-1 * ext.length) === ext) {
|
||
f = f.substr(0, f.length - ext.length);
|
||
}
|
||
return f;
|
||
};
|
||
|
||
|
||
posix.extname = function(path) {
|
||
return posixSplitPath(path)[3];
|
||
};
|
||
|
||
|
||
posix.format = function(pathObject) {
|
||
if (!util.isObject(pathObject)) {
|
||
throw new TypeError(
|
||
"Parameter 'pathObject' must be an object, not " + typeof pathObject
|
||
);
|
||
}
|
||
|
||
var root = pathObject.root || '';
|
||
|
||
if (!isString(root)) {
|
||
throw new TypeError(
|
||
"'pathObject.root' must be a string or undefined, not " +
|
||
typeof pathObject.root
|
||
);
|
||
}
|
||
|
||
var dir = pathObject.dir ? pathObject.dir + posix.sep : '';
|
||
var base = pathObject.base || '';
|
||
return dir + base;
|
||
};
|
||
|
||
|
||
posix.parse = function(pathString) {
|
||
if (!isString(pathString)) {
|
||
throw new TypeError(
|
||
"Parameter 'pathString' must be a string, not " + typeof pathString
|
||
);
|
||
}
|
||
var allParts = posixSplitPath(pathString);
|
||
if (!allParts || allParts.length !== 4) {
|
||
throw new TypeError("Invalid path '" + pathString + "'");
|
||
}
|
||
allParts[1] = allParts[1] || '';
|
||
allParts[2] = allParts[2] || '';
|
||
allParts[3] = allParts[3] || '';
|
||
|
||
return {
|
||
root: allParts[0],
|
||
dir: allParts[0] + allParts[1].slice(0, allParts[1].length - 1),
|
||
base: allParts[2],
|
||
ext: allParts[3],
|
||
name: allParts[2].slice(0, allParts[2].length - allParts[3].length)
|
||
};
|
||
};
|
||
|
||
|
||
posix.sep = '/';
|
||
posix.delimiter = ':';
|
||
|
||
module.exports = posix;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 9029:
|
||
/***/ ((module) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
if (typeof process === 'undefined' ||
|
||
!process.version ||
|
||
process.version.indexOf('v0.') === 0 ||
|
||
process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) {
|
||
module.exports = { nextTick: nextTick };
|
||
} else {
|
||
module.exports = process
|
||
}
|
||
|
||
function nextTick(fn, arg1, arg2, arg3) {
|
||
if (typeof fn !== 'function') {
|
||
throw new TypeError('"callback" argument must be a function');
|
||
}
|
||
var len = arguments.length;
|
||
var args, i;
|
||
switch (len) {
|
||
case 0:
|
||
case 1:
|
||
return process.nextTick(fn);
|
||
case 2:
|
||
return process.nextTick(function afterTickOne() {
|
||
fn.call(null, arg1);
|
||
});
|
||
case 3:
|
||
return process.nextTick(function afterTickTwo() {
|
||
fn.call(null, arg1, arg2);
|
||
});
|
||
case 4:
|
||
return process.nextTick(function afterTickThree() {
|
||
fn.call(null, arg1, arg2, arg3);
|
||
});
|
||
default:
|
||
args = new Array(len - 1);
|
||
i = 0;
|
||
while (i < args.length) {
|
||
args[i++] = arguments[i];
|
||
}
|
||
return process.nextTick(function afterTick() {
|
||
fn.apply(null, args);
|
||
});
|
||
}
|
||
}
|
||
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 938:
|
||
/***/ ((__unused_webpack_module, exports) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var has = Object.prototype.hasOwnProperty
|
||
, undef;
|
||
|
||
/**
|
||
* Decode a URI encoded string.
|
||
*
|
||
* @param {String} input The URI encoded string.
|
||
* @returns {String|Null} The decoded string.
|
||
* @api private
|
||
*/
|
||
function decode(input) {
|
||
try {
|
||
return decodeURIComponent(input.replace(/\+/g, ' '));
|
||
} catch (e) {
|
||
return null;
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Attempts to encode a given input.
|
||
*
|
||
* @param {String} input The string that needs to be encoded.
|
||
* @returns {String|Null} The encoded string.
|
||
* @api private
|
||
*/
|
||
function encode(input) {
|
||
try {
|
||
return encodeURIComponent(input);
|
||
} catch (e) {
|
||
return null;
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Simple query string parser.
|
||
*
|
||
* @param {String} query The query string that needs to be parsed.
|
||
* @returns {Object}
|
||
* @api public
|
||
*/
|
||
function querystring(query) {
|
||
var parser = /([^=?#&]+)=?([^&]*)/g
|
||
, result = {}
|
||
, part;
|
||
|
||
while (part = parser.exec(query)) {
|
||
var key = decode(part[1])
|
||
, value = decode(part[2]);
|
||
|
||
//
// Prevent overriding of existing properties. This ensures that built-in
// methods like `toString` or `__proto__` are not overridden by malicious
// querystrings.
//
// In the case of failed decoding, we want to omit the key/value pairs
// from the result.
//
|
||
if (key === null || value === null || key in result) continue;
|
||
result[key] = value;
|
||
}
|
||
|
||
return result;
|
||
}
|
||
|
||
/**
|
||
* Transform a query string to an object.
|
||
*
|
||
* @param {Object} obj Object that should be transformed.
|
||
* @param {String} prefix Optional prefix.
|
||
* @returns {String}
|
||
* @api public
|
||
*/
|
||
function querystringify(obj, prefix) {
|
||
prefix = prefix || '';
|
||
|
||
var pairs = []
|
||
, value
|
||
, key;
|
||
|
||
//
|
||
// Optionally prefix with a '?' if needed
|
||
//
|
||
if ('string' !== typeof prefix) prefix = '?';
|
||
|
||
for (key in obj) {
|
||
if (has.call(obj, key)) {
|
||
value = obj[key];
|
||
|
||
//
|
||
// Edge cases where we actually want to encode the value to an empty
|
||
// string instead of the stringified value.
|
||
//
|
||
if (!value && (value === null || value === undef || isNaN(value))) {
|
||
value = '';
|
||
}
|
||
|
||
key = encode(key);
|
||
value = encode(value);
|
||
|
||
//
|
||
// If we failed to encode the strings, we should bail out as we don't
|
||
// want to add invalid strings to the query.
|
||
//
|
||
if (key === null || value === null) continue;
|
||
pairs.push(key +'='+ value);
|
||
}
|
||
}
|
||
|
||
return pairs.length ? prefix + pairs.join('&') : '';
|
||
}
|
||
|
||
//
|
||
// Expose the module.
|
||
//
|
||
exports.stringify = querystringify;
|
||
exports.parse = querystring;
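// --- Illustrative sketch (kept as a comment; not executed by this bundle) ---
// The two exports above round-trip simple query strings:
//
//   parse('?foo=bar&baz=1');                   // { foo: 'bar', baz: '1' }
//   stringify({ foo: 'bar', baz: 1 }, true);   // '?foo=bar&baz=1'
//
// Keys that would shadow built-ins (e.g. __proto__, toString) are skipped by parse.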
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 3537:
|
||
/***/ ((module) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
const codes = {};
|
||
|
||
function createErrorType(code, message, Base) {
|
||
if (!Base) {
|
||
Base = Error
|
||
}
|
||
|
||
function getMessage (arg1, arg2, arg3) {
|
||
if (typeof message === 'string') {
|
||
return message
|
||
} else {
|
||
return message(arg1, arg2, arg3)
|
||
}
|
||
}
|
||
|
||
class NodeError extends Base {
|
||
constructor (arg1, arg2, arg3) {
|
||
super(getMessage(arg1, arg2, arg3));
|
||
}
|
||
}
|
||
|
||
NodeError.prototype.name = Base.name;
|
||
NodeError.prototype.code = code;
|
||
|
||
codes[code] = NodeError;
|
||
}
|
||
|
||
// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js
|
||
function oneOf(expected, thing) {
|
||
if (Array.isArray(expected)) {
|
||
const len = expected.length;
|
||
expected = expected.map((i) => String(i));
|
||
if (len > 2) {
|
||
return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` +
|
||
expected[len - 1];
|
||
} else if (len === 2) {
|
||
return `one of ${thing} ${expected[0]} or ${expected[1]}`;
|
||
} else {
|
||
return `of ${thing} ${expected[0]}`;
|
||
}
|
||
} else {
|
||
return `of ${thing} ${String(expected)}`;
|
||
}
|
||
}
|
||
|
||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith
|
||
function startsWith(str, search, pos) {
|
||
return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;
|
||
}
|
||
|
||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
|
||
function endsWith(str, search, this_len) {
|
||
if (this_len === undefined || this_len > str.length) {
|
||
this_len = str.length;
|
||
}
|
||
return str.substring(this_len - search.length, this_len) === search;
|
||
}
|
||
|
||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes
|
||
function includes(str, search, start) {
|
||
if (typeof start !== 'number') {
|
||
start = 0;
|
||
}
|
||
|
||
if (start + search.length > str.length) {
|
||
return false;
|
||
} else {
|
||
return str.indexOf(search, start) !== -1;
|
||
}
|
||
}
|
||
|
||
createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {
|
||
return 'The value "' + value + '" is invalid for option "' + name + '"'
|
||
}, TypeError);
|
||
createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {
|
||
// determiner: 'must be' or 'must not be'
|
||
let determiner;
|
||
if (typeof expected === 'string' && startsWith(expected, 'not ')) {
|
||
determiner = 'must not be';
|
||
expected = expected.replace(/^not /, '');
|
||
} else {
|
||
determiner = 'must be';
|
||
}
|
||
|
||
let msg;
|
||
if (endsWith(name, ' argument')) {
|
||
// For cases like 'first argument'
|
||
msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`;
|
||
} else {
|
||
const type = includes(name, '.') ? 'property' : 'argument';
|
||
msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`;
|
||
}
|
||
|
||
msg += `. Received type ${typeof actual}`;
|
||
return msg;
|
||
}, TypeError);
|
||
createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');
|
||
createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {
|
||
return 'The ' + name + ' method is not implemented'
|
||
});
|
||
createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');
|
||
createErrorType('ERR_STREAM_DESTROYED', function (name) {
|
||
return 'Cannot call ' + name + ' after a stream was destroyed';
|
||
});
|
||
createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');
|
||
createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');
|
||
createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');
|
||
createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);
|
||
createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
|
||
return 'Unknown encoding: ' + arg
|
||
}, TypeError);
|
||
createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');
|
||
|
||
module.exports.q = codes;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 3997:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
// a duplex stream is just a stream that is both readable and writable.
|
||
// Since JS doesn't have multiple prototypal inheritance, this class
|
||
// prototypally inherits from Readable, and then parasitically from
|
||
// Writable.
|
||
|
||
/*<replacement>*/
|
||
|
||
var objectKeys = Object.keys || function (obj) {
|
||
var keys = [];
|
||
|
||
for (var key in obj) {
|
||
keys.push(key);
|
||
}
|
||
|
||
return keys;
|
||
};
|
||
/*</replacement>*/
|
||
|
||
|
||
module.exports = Duplex;
|
||
|
||
var Readable = __nccwpck_require__(7612);
|
||
|
||
var Writable = __nccwpck_require__(1069);
|
||
|
||
__nccwpck_require__(3753)(Duplex, Readable);
|
||
|
||
{
|
||
// Allow the keys array to be GC'ed.
|
||
var keys = objectKeys(Writable.prototype);
|
||
|
||
for (var v = 0; v < keys.length; v++) {
|
||
var method = keys[v];
|
||
if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
|
||
}
|
||
}
|
||
|
||
function Duplex(options) {
|
||
if (!(this instanceof Duplex)) return new Duplex(options);
|
||
Readable.call(this, options);
|
||
Writable.call(this, options);
|
||
this.allowHalfOpen = true;
|
||
|
||
if (options) {
|
||
if (options.readable === false) this.readable = false;
|
||
if (options.writable === false) this.writable = false;
|
||
|
||
if (options.allowHalfOpen === false) {
|
||
this.allowHalfOpen = false;
|
||
this.once('end', onend);
|
||
}
|
||
}
|
||
}
|
||
|
||
Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
return this._writableState.highWaterMark;
|
||
}
|
||
});
|
||
Object.defineProperty(Duplex.prototype, 'writableBuffer', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
return this._writableState && this._writableState.getBuffer();
|
||
}
|
||
});
|
||
Object.defineProperty(Duplex.prototype, 'writableLength', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
return this._writableState.length;
|
||
}
|
||
}); // the no-half-open enforcer
|
||
|
||
function onend() {
|
||
// If the writable side ended, then we're ok.
|
||
if (this._writableState.ended) return; // no more data can be written.
|
||
// But allow more writes to happen in this tick.
|
||
|
||
process.nextTick(onEndNT, this);
|
||
}
|
||
|
||
function onEndNT(self) {
|
||
self.end();
|
||
}
|
||
|
||
Object.defineProperty(Duplex.prototype, 'destroyed', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
if (this._readableState === undefined || this._writableState === undefined) {
|
||
return false;
|
||
}
|
||
|
||
return this._readableState.destroyed && this._writableState.destroyed;
|
||
},
|
||
set: function set(value) {
|
||
// we ignore the value if the stream
|
||
// has not been initialized yet
|
||
if (this._readableState === undefined || this._writableState === undefined) {
|
||
return;
|
||
} // backward compatibility, the user is explicitly
|
||
// managing destroyed
|
||
|
||
|
||
this._readableState.destroyed = value;
|
||
this._writableState.destroyed = value;
|
||
}
|
||
});
|
||
|
||
/***/ }),
|
||
|
||
/***/ 8530:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
// a passthrough stream.
|
||
// basically just the most minimal sort of Transform stream.
|
||
// Every written chunk gets output as-is.
|
||
|
||
|
||
module.exports = PassThrough;
|
||
|
||
var Transform = __nccwpck_require__(5331);
|
||
|
||
__nccwpck_require__(3753)(PassThrough, Transform);
|
||
|
||
function PassThrough(options) {
|
||
if (!(this instanceof PassThrough)) return new PassThrough(options);
|
||
Transform.call(this, options);
|
||
}
|
||
|
||
PassThrough.prototype._transform = function (chunk, encoding, cb) {
|
||
cb(null, chunk);
|
||
};
|
||
|
||
/***/ }),
|
||
|
||
/***/ 7612:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
||
|
||
module.exports = Readable;
|
||
/*<replacement>*/
|
||
|
||
var Duplex;
|
||
/*</replacement>*/
|
||
|
||
Readable.ReadableState = ReadableState;
|
||
/*<replacement>*/
|
||
|
||
var EE = __nccwpck_require__(8614).EventEmitter;
|
||
|
||
var EElistenerCount = function EElistenerCount(emitter, type) {
|
||
return emitter.listeners(type).length;
|
||
};
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
|
||
|
||
var Stream = __nccwpck_require__(8265);
|
||
/*</replacement>*/
|
||
|
||
|
||
var Buffer = __nccwpck_require__(4293).Buffer;
|
||
|
||
var OurUint8Array = global.Uint8Array || function () {};
|
||
|
||
function _uint8ArrayToBuffer(chunk) {
|
||
return Buffer.from(chunk);
|
||
}
|
||
|
||
function _isUint8Array(obj) {
|
||
return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
|
||
}
|
||
/*<replacement>*/
|
||
|
||
|
||
var debugUtil = __nccwpck_require__(1669);
|
||
|
||
var debug;
|
||
|
||
if (debugUtil && debugUtil.debuglog) {
|
||
debug = debugUtil.debuglog('stream');
|
||
} else {
|
||
debug = function debug() {};
|
||
}
|
||
/*</replacement>*/
|
||
|
||
|
||
var BufferList = __nccwpck_require__(3401);
|
||
|
||
var destroyImpl = __nccwpck_require__(730);
|
||
|
||
var _require = __nccwpck_require__(5239),
|
||
getHighWaterMark = _require.getHighWaterMark;
|
||
|
||
var _require$codes = __nccwpck_require__(3537)/* .codes */ .q,
|
||
ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
|
||
ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF,
|
||
ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
|
||
ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance.
|
||
|
||
|
||
var StringDecoder;
|
||
var createReadableStreamAsyncIterator;
|
||
var from;
|
||
|
||
__nccwpck_require__(3753)(Readable, Stream);
|
||
|
||
var errorOrDestroy = destroyImpl.errorOrDestroy;
|
||
var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
|
||
|
||
function prependListener(emitter, event, fn) {
|
||
// Sadly this is not cacheable as some libraries bundle their own
|
||
// event emitter implementation with them.
|
||
if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any
|
||
// userland ones. NEVER DO THIS. This is here only because this code needs
|
||
// to continue to work with older versions of Node.js that do not include
|
||
// the prependListener() method. The goal is to eventually remove this hack.
|
||
|
||
if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];
|
||
}
|
||
|
||
function ReadableState(options, stream, isDuplex) {
|
||
Duplex = Duplex || __nccwpck_require__(3997);
|
||
options = options || {}; // Duplex streams are both readable and writable, but share
|
||
// the same options object.
|
||
// However, some cases require setting options to different
|
||
// values for the readable and the writable sides of the duplex stream.
|
||
// These options can be provided separately as readableXXX and writableXXX.
|
||
|
||
if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. Used to make read(n) ignore n and to
|
||
// make all the buffer merging and length checks go away
|
||
|
||
this.objectMode = !!options.objectMode;
|
||
if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer
|
||
// Note: 0 is a valid value, means "don't call _read preemptively ever"
|
||
|
||
this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the
|
||
// linked list can remove elements from the beginning faster than
|
||
// array.shift()
|
||
|
||
this.buffer = new BufferList();
|
||
this.length = 0;
|
||
this.pipes = null;
|
||
this.pipesCount = 0;
|
||
this.flowing = null;
|
||
this.ended = false;
|
||
this.endEmitted = false;
|
||
this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted
|
||
// immediately, or on a later tick. We set this to true at first, because
|
||
// any actions that shouldn't happen until "later" should generally also
|
||
// not happen before the first read call.
|
||
|
||
this.sync = true; // whenever we return null, then we set a flag to say
|
||
// that we're awaiting a 'readable' event emission.
|
||
|
||
this.needReadable = false;
|
||
this.emittedReadable = false;
|
||
this.readableListening = false;
|
||
this.resumeScheduled = false;
|
||
this.paused = true; // Should close be emitted on destroy. Defaults to true.
|
||
|
||
this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish')
|
||
|
||
this.autoDestroy = !!options.autoDestroy; // has it been destroyed
|
||
|
||
this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string
|
||
// encoding is 'binary' so we have to make this configurable.
|
||
// Everything else in the universe uses 'utf8', though.
|
||
|
||
this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s
|
||
|
||
this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled
|
||
|
||
this.readingMore = false;
|
||
this.decoder = null;
|
||
this.encoding = null;
|
||
|
||
if (options.encoding) {
|
||
if (!StringDecoder) StringDecoder = __nccwpck_require__(781)/* .StringDecoder */ .s;
|
||
this.decoder = new StringDecoder(options.encoding);
|
||
this.encoding = options.encoding;
|
||
}
|
||
}
|
||
|
||
function Readable(options) {
|
||
Duplex = Duplex || __nccwpck_require__(3997);
|
||
if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside
|
||
// the ReadableState constructor, at least with V8 6.5
|
||
|
||
var isDuplex = this instanceof Duplex;
|
||
this._readableState = new ReadableState(options, this, isDuplex); // legacy
|
||
|
||
this.readable = true;
|
||
|
||
if (options) {
|
||
if (typeof options.read === 'function') this._read = options.read;
|
||
if (typeof options.destroy === 'function') this._destroy = options.destroy;
|
||
}
|
||
|
||
Stream.call(this);
|
||
}
|
||
|
||
Object.defineProperty(Readable.prototype, 'destroyed', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
if (this._readableState === undefined) {
|
||
return false;
|
||
}
|
||
|
||
return this._readableState.destroyed;
|
||
},
|
||
set: function set(value) {
|
||
// we ignore the value if the stream
|
||
// has not been initialized yet
|
||
if (!this._readableState) {
|
||
return;
|
||
} // backward compatibility, the user is explicitly
|
||
// managing destroyed
|
||
|
||
|
||
this._readableState.destroyed = value;
|
||
}
|
||
});
|
||
Readable.prototype.destroy = destroyImpl.destroy;
|
||
Readable.prototype._undestroy = destroyImpl.undestroy;
|
||
|
||
Readable.prototype._destroy = function (err, cb) {
|
||
cb(err);
|
||
}; // Manually shove something into the read() buffer.
|
||
// This returns true if the highWaterMark has not been hit yet,
|
||
// similar to how Writable.write() returns true if you should
|
||
// write() some more.
|
||
|
||
|
||
Readable.prototype.push = function (chunk, encoding) {
|
||
var state = this._readableState;
|
||
var skipChunkCheck;
|
||
|
||
if (!state.objectMode) {
|
||
if (typeof chunk === 'string') {
|
||
encoding = encoding || state.defaultEncoding;
|
||
|
||
if (encoding !== state.encoding) {
|
||
chunk = Buffer.from(chunk, encoding);
|
||
encoding = '';
|
||
}
|
||
|
||
skipChunkCheck = true;
|
||
}
|
||
} else {
|
||
skipChunkCheck = true;
|
||
}
|
||
|
||
return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
|
||
}; // Unshift should *always* be something directly out of read()
|
||
|
||
|
||
Readable.prototype.unshift = function (chunk) {
|
||
return readableAddChunk(this, chunk, null, true, false);
|
||
};
|
||
|
||
function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
|
||
debug('readableAddChunk', chunk);
|
||
var state = stream._readableState;
|
||
|
||
if (chunk === null) {
|
||
state.reading = false;
|
||
onEofChunk(stream, state);
|
||
} else {
|
||
var er;
|
||
if (!skipChunkCheck) er = chunkInvalid(state, chunk);
|
||
|
||
if (er) {
|
||
errorOrDestroy(stream, er);
|
||
} else if (state.objectMode || chunk && chunk.length > 0) {
|
||
if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {
|
||
chunk = _uint8ArrayToBuffer(chunk);
|
||
}
|
||
|
||
if (addToFront) {
|
||
if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true);
|
||
} else if (state.ended) {
|
||
errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF());
|
||
} else if (state.destroyed) {
|
||
return false;
|
||
} else {
|
||
state.reading = false;
|
||
|
||
if (state.decoder && !encoding) {
|
||
chunk = state.decoder.write(chunk);
|
||
if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);
|
||
} else {
|
||
addChunk(stream, state, chunk, false);
|
||
}
|
||
}
|
||
} else if (!addToFront) {
|
||
state.reading = false;
|
||
maybeReadMore(stream, state);
|
||
}
|
||
} // We can push more data if we are below the highWaterMark.
|
||
// Also, if we have no data yet, we can stand some more bytes.
|
||
// This is to work around cases where hwm=0, such as the repl.
|
||
|
||
|
||
return !state.ended && (state.length < state.highWaterMark || state.length === 0);
|
||
}
|
||
|
||
function addChunk(stream, state, chunk, addToFront) {
|
||
if (state.flowing && state.length === 0 && !state.sync) {
|
||
state.awaitDrain = 0;
|
||
stream.emit('data', chunk);
|
||
} else {
|
||
// update the buffer info.
|
||
state.length += state.objectMode ? 1 : chunk.length;
|
||
if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
|
||
if (state.needReadable) emitReadable(stream);
|
||
}
|
||
|
||
maybeReadMore(stream, state);
|
||
}
|
||
|
||
function chunkInvalid(state, chunk) {
|
||
var er;
|
||
|
||
if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
|
||
er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk);
|
||
}
|
||
|
||
return er;
|
||
}
|
||
|
||
Readable.prototype.isPaused = function () {
|
||
return this._readableState.flowing === false;
|
||
}; // backwards compatibility.
|
||
|
||
|
||
Readable.prototype.setEncoding = function (enc) {
|
||
if (!StringDecoder) StringDecoder = __nccwpck_require__(781)/* .StringDecoder */ .s;
|
||
var decoder = new StringDecoder(enc);
|
||
this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8
|
||
|
||
this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers:
|
||
|
||
var p = this._readableState.buffer.head;
|
||
var content = '';
|
||
|
||
while (p !== null) {
|
||
content += decoder.write(p.data);
|
||
p = p.next;
|
||
}
|
||
|
||
this._readableState.buffer.clear();
|
||
|
||
if (content !== '') this._readableState.buffer.push(content);
|
||
this._readableState.length = content.length;
|
||
return this;
|
||
}; // Don't raise the hwm > 1GB
|
||
|
||
|
||
var MAX_HWM = 0x40000000;
|
||
|
||
function computeNewHighWaterMark(n) {
|
||
if (n >= MAX_HWM) {
|
||
// TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE.
|
||
n = MAX_HWM;
|
||
} else {
|
||
// Get the next highest power of 2 to prevent increasing hwm excessively in
|
||
// tiny amounts
|
||
n--;
|
||
n |= n >>> 1;
|
||
n |= n >>> 2;
|
||
n |= n >>> 4;
|
||
n |= n >>> 8;
|
||
n |= n >>> 16;
|
||
n++;
|
||
}
|
||
|
||
return n;
|
||
} // This function is designed to be inlinable, so please take care when making
|
||
// changes to the function body.
|
||
|
||
|
||
function howMuchToRead(n, state) {
|
||
if (n <= 0 || state.length === 0 && state.ended) return 0;
|
||
if (state.objectMode) return 1;
|
||
|
||
if (n !== n) {
|
||
// Only flow one buffer at a time
|
||
if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;
|
||
} // If we're asking for more than the current hwm, then raise the hwm.
|
||
|
||
|
||
if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
|
||
if (n <= state.length) return n; // Don't have enough
|
||
|
||
if (!state.ended) {
|
||
state.needReadable = true;
|
||
return 0;
|
||
}
|
||
|
||
return state.length;
|
||
} // you can override either this method, or the async _read(n) below.
|
||
|
||
|
||
Readable.prototype.read = function (n) {
|
||
debug('read', n);
|
||
n = parseInt(n, 10);
|
||
var state = this._readableState;
|
||
var nOrig = n;
|
||
if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we
|
||
// already have a bunch of data in the buffer, then just trigger
|
||
// the 'readable' event and move on.
|
||
|
||
if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) {
|
||
debug('read: emitReadable', state.length, state.ended);
|
||
if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
|
||
return null;
|
||
}
|
||
|
||
n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up.
|
||
|
||
if (n === 0 && state.ended) {
|
||
if (state.length === 0) endReadable(this);
|
||
return null;
|
||
} // All the actual chunk generation logic needs to be
|
||
// *below* the call to _read. The reason is that in certain
|
||
// synthetic stream cases, such as passthrough streams, _read
|
||
// may be a completely synchronous operation which may change
|
||
// the state of the read buffer, providing enough data when
|
||
// before there was *not* enough.
|
||
//
|
||
// So, the steps are:
|
||
// 1. Figure out what the state of things will be after we do
|
||
// a read from the buffer.
|
||
//
|
||
// 2. If that resulting state will trigger a _read, then call _read.
|
||
// Note that this may be asynchronous, or synchronous. Yes, it is
|
||
// deeply ugly to write APIs this way, but that still doesn't mean
|
||
// that the Readable class should behave improperly, as streams are
|
||
// designed to be sync/async agnostic.
|
||
// Take note if the _read call is sync or async (ie, if the read call
|
||
// has returned yet), so that we know whether or not it's safe to emit
|
||
// 'readable' etc.
|
||
//
|
||
// 3. Actually pull the requested chunks out of the buffer and return.
|
||
// if we need a readable event, then we need to do some reading.
|
||
|
||
|
||
var doRead = state.needReadable;
|
||
debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some
|
||
|
||
if (state.length === 0 || state.length - n < state.highWaterMark) {
|
||
doRead = true;
|
||
debug('length less than watermark', doRead);
|
||
} // however, if we've ended, then there's no point, and if we're already
|
||
// reading, then it's unnecessary.
|
||
|
||
|
||
if (state.ended || state.reading) {
|
||
doRead = false;
|
||
debug('reading or ended', doRead);
|
||
} else if (doRead) {
|
||
debug('do read');
|
||
state.reading = true;
|
||
state.sync = true; // if the length is currently zero, then we *need* a readable event.
|
||
|
||
if (state.length === 0) state.needReadable = true; // call internal read method
|
||
|
||
this._read(state.highWaterMark);
|
||
|
||
state.sync = false; // If _read pushed data synchronously, then `reading` will be false,
|
||
// and we need to re-evaluate how much data we can return to the user.
|
||
|
||
if (!state.reading) n = howMuchToRead(nOrig, state);
|
||
}
|
||
|
||
var ret;
|
||
if (n > 0) ret = fromList(n, state);else ret = null;
|
||
|
||
if (ret === null) {
|
||
state.needReadable = state.length <= state.highWaterMark;
|
||
n = 0;
|
||
} else {
|
||
state.length -= n;
|
||
state.awaitDrain = 0;
|
||
}
|
||
|
||
if (state.length === 0) {
|
||
// If we have nothing in the buffer, then we want to know
|
||
// as soon as we *do* get something into the buffer.
|
||
if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick.
|
||
|
||
if (nOrig !== n && state.ended) endReadable(this);
|
||
}
|
||
|
||
if (ret !== null) this.emit('data', ret);
|
||
return ret;
|
||
};
|
||
|
||
function onEofChunk(stream, state) {
|
||
debug('onEofChunk');
|
||
if (state.ended) return;
|
||
|
||
if (state.decoder) {
|
||
var chunk = state.decoder.end();
|
||
|
||
if (chunk && chunk.length) {
|
||
state.buffer.push(chunk);
|
||
state.length += state.objectMode ? 1 : chunk.length;
|
||
}
|
||
}
|
||
|
||
state.ended = true;
|
||
|
||
if (state.sync) {
|
||
// if we are sync, wait until next tick to emit the data.
|
||
// Otherwise we risk emitting data in the flow()
|
||
// the readable code triggers during a read() call
|
||
emitReadable(stream);
|
||
} else {
|
||
// emit 'readable' now to make sure it gets picked up.
|
||
state.needReadable = false;
|
||
|
||
if (!state.emittedReadable) {
|
||
state.emittedReadable = true;
|
||
emitReadable_(stream);
|
||
}
|
||
}
|
||
} // Don't emit readable right away in sync mode, because this can trigger
|
||
// another read() call => stack overflow. This way, it might trigger
|
||
// a nextTick recursion warning, but that's not so bad.
|
||
|
||
|
||
function emitReadable(stream) {
|
||
var state = stream._readableState;
|
||
debug('emitReadable', state.needReadable, state.emittedReadable);
|
||
state.needReadable = false;
|
||
|
||
if (!state.emittedReadable) {
|
||
debug('emitReadable', state.flowing);
|
||
state.emittedReadable = true;
|
||
process.nextTick(emitReadable_, stream);
|
||
}
|
||
}
|
||
|
||
function emitReadable_(stream) {
|
||
var state = stream._readableState;
|
||
debug('emitReadable_', state.destroyed, state.length, state.ended);
|
||
|
||
if (!state.destroyed && (state.length || state.ended)) {
|
||
stream.emit('readable');
|
||
state.emittedReadable = false;
|
||
} // The stream needs another readable event if
|
||
// 1. It is not flowing, as the flow mechanism will take
|
||
// care of it.
|
||
// 2. It is not ended.
|
||
// 3. It is below the highWaterMark, so we can schedule
|
||
// another readable later.
|
||
|
||
|
||
state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark;
|
||
flow(stream);
|
||
} // at this point, the user has presumably seen the 'readable' event,
|
||
// and called read() to consume some data. that may have triggered
|
||
// in turn another _read(n) call, in which case reading = true if
|
||
// it's in progress.
|
||
// However, if we're not ended, or reading, and the length < hwm,
|
||
// then go ahead and try to read some more preemptively.
|
||
|
||
|
||
function maybeReadMore(stream, state) {
|
||
if (!state.readingMore) {
|
||
state.readingMore = true;
|
||
process.nextTick(maybeReadMore_, stream, state);
|
||
}
|
||
}
|
||
|
||
function maybeReadMore_(stream, state) {
|
||
// Attempt to read more data if we should.
|
||
//
|
||
// The conditions for reading more data are (one of):
|
||
// - Not enough data buffered (state.length < state.highWaterMark). The loop
|
||
// is responsible for filling the buffer with enough data if such data
|
||
// is available. If highWaterMark is 0 and we are not in the flowing mode
|
||
// we should _not_ attempt to buffer any extra data. We'll get more data
|
||
// when the stream consumer calls read() instead.
|
||
// - No data in the buffer, and the stream is in flowing mode. In this mode
|
||
// the loop below is responsible for ensuring read() is called. Failing to
|
||
// call read here would abort the flow and there's no other mechanism for
|
||
// continuing the flow if the stream consumer has just subscribed to the
|
||
// 'data' event.
|
||
//
|
||
// In addition to the above conditions to keep reading data, the following
|
||
// conditions prevent the data from being read:
|
||
// - The stream has ended (state.ended).
|
||
// - There is already a pending 'read' operation (state.reading). This is a
|
||
// case where the the stream has called the implementation defined _read()
|
||
// method, but they are processing the call asynchronously and have _not_
|
||
// called push() with new data. In this case we skip performing more
|
||
// read()s. The execution ends in this method again after the _read() ends
|
||
// up calling push() with more data.
|
||
while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) {
|
||
var len = state.length;
|
||
debug('maybeReadMore read 0');
|
||
stream.read(0);
|
||
if (len === state.length) // didn't get any data, stop spinning.
|
||
break;
|
||
}
|
||
|
||
state.readingMore = false;
|
||
} // abstract method. to be overridden in specific implementation classes.
|
||
// call cb(er, data) where data is <= n in length.
|
||
// for virtual (non-string, non-buffer) streams, "length" is somewhat
|
||
// arbitrary, and perhaps not very meaningful.
|
||
|
||
|
||
Readable.prototype._read = function (n) {
|
||
errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()'));
|
||
};
|
||
|
||
Readable.prototype.pipe = function (dest, pipeOpts) {
|
||
var src = this;
|
||
var state = this._readableState;
|
||
|
||
switch (state.pipesCount) {
|
||
case 0:
|
||
state.pipes = dest;
|
||
break;
|
||
|
||
case 1:
|
||
state.pipes = [state.pipes, dest];
|
||
break;
|
||
|
||
default:
|
||
state.pipes.push(dest);
|
||
break;
|
||
}
|
||
|
||
state.pipesCount += 1;
|
||
debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
|
||
var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
|
||
var endFn = doEnd ? onend : unpipe;
|
||
if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn);
|
||
dest.on('unpipe', onunpipe);
|
||
|
||
function onunpipe(readable, unpipeInfo) {
|
||
debug('onunpipe');
|
||
|
||
if (readable === src) {
|
||
if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
|
||
unpipeInfo.hasUnpiped = true;
|
||
cleanup();
|
||
}
|
||
}
|
||
}
|
||
|
||
function onend() {
|
||
debug('onend');
|
||
dest.end();
|
||
} // when the dest drains, it reduces the awaitDrain counter
|
||
// on the source. This would be more elegant with a .once()
|
||
// handler in flow(), but adding and removing repeatedly is
|
||
// too slow.
|
||
|
||
|
||
var ondrain = pipeOnDrain(src);
|
||
dest.on('drain', ondrain);
|
||
var cleanedUp = false;
|
||
|
||
function cleanup() {
|
||
debug('cleanup'); // cleanup event handlers once the pipe is broken
|
||
|
||
dest.removeListener('close', onclose);
|
||
dest.removeListener('finish', onfinish);
|
||
dest.removeListener('drain', ondrain);
|
||
dest.removeListener('error', onerror);
|
||
dest.removeListener('unpipe', onunpipe);
|
||
src.removeListener('end', onend);
|
||
src.removeListener('end', unpipe);
|
||
src.removeListener('data', ondata);
|
||
cleanedUp = true; // if the reader is waiting for a drain event from this
|
||
// specific writer, then it would cause it to never start
|
||
// flowing again.
|
||
// So, if this is awaiting a drain, then we just call it now.
|
||
// If we don't know, then assume that we are waiting for one.
|
||
|
||
if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
|
||
}
|
||
|
||
src.on('data', ondata);
|
||
|
||
function ondata(chunk) {
|
||
debug('ondata');
|
||
var ret = dest.write(chunk);
|
||
debug('dest.write', ret);
|
||
|
||
if (ret === false) {
|
||
// If the user unpiped during `dest.write()`, it is possible
|
||
// to get stuck in a permanently paused state if that write
|
||
// also returned false.
|
||
// => Check whether `dest` is still a piping destination.
|
||
if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
|
||
debug('false write response, pause', state.awaitDrain);
|
||
state.awaitDrain++;
|
||
}
|
||
|
||
src.pause();
|
||
}
|
||
} // if the dest has an error, then stop piping into it.
|
||
// however, don't suppress the throwing behavior for this.
|
||
|
||
|
||
function onerror(er) {
|
||
debug('onerror', er);
|
||
unpipe();
|
||
dest.removeListener('error', onerror);
|
||
if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er);
|
||
} // Make sure our error handler is attached before userland ones.
|
||
|
||
|
||
prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once.
|
||
|
||
function onclose() {
|
||
dest.removeListener('finish', onfinish);
|
||
unpipe();
|
||
}
|
||
|
||
dest.once('close', onclose);
|
||
|
||
function onfinish() {
|
||
debug('onfinish');
|
||
dest.removeListener('close', onclose);
|
||
unpipe();
|
||
}
|
||
|
||
dest.once('finish', onfinish);
|
||
|
||
function unpipe() {
|
||
debug('unpipe');
|
||
src.unpipe(dest);
|
||
} // tell the dest that it's being piped to
|
||
|
||
|
||
dest.emit('pipe', src); // start the flow if it hasn't been started already.
|
||
|
||
if (!state.flowing) {
|
||
debug('pipe resume');
|
||
src.resume();
|
||
}
|
||
|
||
return dest;
|
||
};
|
||
|
||
function pipeOnDrain(src) {
|
||
return function pipeOnDrainFunctionResult() {
|
||
var state = src._readableState;
|
||
debug('pipeOnDrain', state.awaitDrain);
|
||
if (state.awaitDrain) state.awaitDrain--;
|
||
|
||
if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
|
||
state.flowing = true;
|
||
flow(src);
|
||
}
|
||
};
|
||
}
|
||
|
||
Readable.prototype.unpipe = function (dest) {
|
||
var state = this._readableState;
|
||
var unpipeInfo = {
|
||
hasUnpiped: false
|
||
}; // if we're not piping anywhere, then do nothing.
|
||
|
||
if (state.pipesCount === 0) return this; // just one destination. most common case.
|
||
|
||
if (state.pipesCount === 1) {
|
||
// passed in one, but it's not the right one.
|
||
if (dest && dest !== state.pipes) return this;
|
||
if (!dest) dest = state.pipes; // got a match.
|
||
|
||
state.pipes = null;
|
||
state.pipesCount = 0;
|
||
state.flowing = false;
|
||
if (dest) dest.emit('unpipe', this, unpipeInfo);
|
||
return this;
|
||
} // slow case. multiple pipe destinations.
|
||
|
||
|
||
if (!dest) {
|
||
// remove all.
|
||
var dests = state.pipes;
|
||
var len = state.pipesCount;
|
||
state.pipes = null;
|
||
state.pipesCount = 0;
|
||
state.flowing = false;
|
||
|
||
for (var i = 0; i < len; i++) {
|
||
dests[i].emit('unpipe', this, {
|
||
hasUnpiped: false
|
||
});
|
||
}
|
||
|
||
return this;
|
||
} // try to find the right one.
|
||
|
||
|
||
var index = indexOf(state.pipes, dest);
|
||
if (index === -1) return this;
|
||
state.pipes.splice(index, 1);
|
||
state.pipesCount -= 1;
|
||
if (state.pipesCount === 1) state.pipes = state.pipes[0];
|
||
dest.emit('unpipe', this, unpipeInfo);
|
||
return this;
|
||
}; // set up data events if they are asked for
|
||
// Ensure readable listeners eventually get something
|
||
|
||
|
||
Readable.prototype.on = function (ev, fn) {
|
||
var res = Stream.prototype.on.call(this, ev, fn);
|
||
var state = this._readableState;
|
||
|
||
if (ev === 'data') {
|
||
// update readableListening so that resume() may be a no-op
|
||
// a few lines down. This is needed to support once('readable').
|
||
state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused
|
||
|
||
if (state.flowing !== false) this.resume();
|
||
} else if (ev === 'readable') {
|
||
if (!state.endEmitted && !state.readableListening) {
|
||
state.readableListening = state.needReadable = true;
|
||
state.flowing = false;
|
||
state.emittedReadable = false;
|
||
debug('on readable', state.length, state.reading);
|
||
|
||
if (state.length) {
|
||
emitReadable(this);
|
||
} else if (!state.reading) {
|
||
process.nextTick(nReadingNextTick, this);
|
||
}
|
||
}
|
||
}
|
||
|
||
return res;
|
||
};
|
||
|
||
Readable.prototype.addListener = Readable.prototype.on;
|
||
|
||
Readable.prototype.removeListener = function (ev, fn) {
|
||
var res = Stream.prototype.removeListener.call(this, ev, fn);
|
||
|
||
if (ev === 'readable') {
|
||
// We need to check if there is someone still listening to
|
||
// readable and reset the state. However this needs to happen
|
||
// after readable has been emitted but before I/O (nextTick) to
|
||
// support once('readable', fn) cycles. This means that calling
|
||
// resume within the same tick will have no
|
||
// effect.
|
||
process.nextTick(updateReadableListening, this);
|
||
}
|
||
|
||
return res;
|
||
};
|
||
|
||
Readable.prototype.removeAllListeners = function (ev) {
|
||
var res = Stream.prototype.removeAllListeners.apply(this, arguments);
|
||
|
||
if (ev === 'readable' || ev === undefined) {
|
||
// We need to check if there is someone still listening to
|
||
// readable and reset the state. However this needs to happen
|
||
// after readable has been emitted but before I/O (nextTick) to
|
||
// support once('readable', fn) cycles. This means that calling
|
||
// resume within the same tick will have no
|
||
// effect.
|
||
process.nextTick(updateReadableListening, this);
|
||
}
|
||
|
||
return res;
|
||
};
|
||
|
||
function updateReadableListening(self) {
|
||
var state = self._readableState;
|
||
state.readableListening = self.listenerCount('readable') > 0;
|
||
|
||
if (state.resumeScheduled && !state.paused) {
|
||
// flowing needs to be set to true now, otherwise
|
||
// the upcoming resume will not flow.
|
||
state.flowing = true; // crude way to check if we should resume
|
||
} else if (self.listenerCount('data') > 0) {
|
||
self.resume();
|
||
}
|
||
}
|
||
|
||
function nReadingNextTick(self) {
|
||
debug('readable nexttick read 0');
|
||
self.read(0);
|
||
} // pause() and resume() are remnants of the legacy readable stream API
|
||
// If the user uses them, then switch into old mode.
|
||
|
||
|
||
Readable.prototype.resume = function () {
|
||
var state = this._readableState;
|
||
|
||
if (!state.flowing) {
|
||
debug('resume'); // we flow only if there is no one listening
|
||
// for readable, but we still have to call
|
||
// resume()
|
||
|
||
state.flowing = !state.readableListening;
|
||
resume(this, state);
|
||
}
|
||
|
||
state.paused = false;
|
||
return this;
|
||
};
|
||
|
||
function resume(stream, state) {
|
||
if (!state.resumeScheduled) {
|
||
state.resumeScheduled = true;
|
||
process.nextTick(resume_, stream, state);
|
||
}
|
||
}
|
||
|
||
function resume_(stream, state) {
|
||
debug('resume', state.reading);
|
||
|
||
if (!state.reading) {
|
||
stream.read(0);
|
||
}
|
||
|
||
state.resumeScheduled = false;
|
||
stream.emit('resume');
|
||
flow(stream);
|
||
if (state.flowing && !state.reading) stream.read(0);
|
||
}
|
||
|
||
Readable.prototype.pause = function () {
|
||
debug('call pause flowing=%j', this._readableState.flowing);
|
||
|
||
if (this._readableState.flowing !== false) {
|
||
debug('pause');
|
||
this._readableState.flowing = false;
|
||
this.emit('pause');
|
||
}
|
||
|
||
this._readableState.paused = true;
|
||
return this;
|
||
};
|
||
|
||
function flow(stream) {
|
||
var state = stream._readableState;
|
||
debug('flow', state.flowing);
|
||
|
||
while (state.flowing && stream.read() !== null) {
|
||
;
|
||
}
|
||
} // wrap an old-style stream as the async data source.
|
||
// This is *not* part of the readable stream interface.
|
||
// It is an ugly unfortunate mess of history.
|
||
|
||
|
||
Readable.prototype.wrap = function (stream) {
|
||
var _this = this;
|
||
|
||
var state = this._readableState;
|
||
var paused = false;
|
||
stream.on('end', function () {
|
||
debug('wrapped end');
|
||
|
||
if (state.decoder && !state.ended) {
|
||
var chunk = state.decoder.end();
|
||
if (chunk && chunk.length) _this.push(chunk);
|
||
}
|
||
|
||
_this.push(null);
|
||
});
|
||
stream.on('data', function (chunk) {
|
||
debug('wrapped data');
|
||
if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode
|
||
|
||
if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
|
||
|
||
var ret = _this.push(chunk);
|
||
|
||
if (!ret) {
|
||
paused = true;
|
||
stream.pause();
|
||
}
|
||
}); // proxy all the other methods.
|
||
// important when wrapping filters and duplexes.
|
||
|
||
for (var i in stream) {
|
||
if (this[i] === undefined && typeof stream[i] === 'function') {
|
||
this[i] = function methodWrap(method) {
|
||
return function methodWrapReturnFunction() {
|
||
return stream[method].apply(stream, arguments);
|
||
};
|
||
}(i);
|
||
}
|
||
} // proxy certain important events.
|
||
|
||
|
||
for (var n = 0; n < kProxyEvents.length; n++) {
|
||
stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
|
||
} // when we try to consume some more bytes, simply unpause the
|
||
// underlying stream.
|
||
|
||
|
||
this._read = function (n) {
|
||
debug('wrapped _read', n);
|
||
|
||
if (paused) {
|
||
paused = false;
|
||
stream.resume();
|
||
}
|
||
};
|
||
|
||
return this;
|
||
};
|
||
|
||
if (typeof Symbol === 'function') {
|
||
Readable.prototype[Symbol.asyncIterator] = function () {
|
||
if (createReadableStreamAsyncIterator === undefined) {
|
||
createReadableStreamAsyncIterator = __nccwpck_require__(5962);
|
||
}
|
||
|
||
return createReadableStreamAsyncIterator(this);
|
||
};
|
||
}
|
||
|
||
Object.defineProperty(Readable.prototype, 'readableHighWaterMark', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
return this._readableState.highWaterMark;
|
||
}
|
||
});
|
||
Object.defineProperty(Readable.prototype, 'readableBuffer', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
return this._readableState && this._readableState.buffer;
|
||
}
|
||
});
|
||
Object.defineProperty(Readable.prototype, 'readableFlowing', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
return this._readableState.flowing;
|
||
},
|
||
set: function set(state) {
|
||
if (this._readableState) {
|
||
this._readableState.flowing = state;
|
||
}
|
||
}
|
||
}); // exposed for testing purposes only.
|
||
|
||
Readable._fromList = fromList;
|
||
Object.defineProperty(Readable.prototype, 'readableLength', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
return this._readableState.length;
|
||
}
|
||
}); // Pluck off n bytes from an array of buffers.
|
||
// Length is the combined lengths of all the buffers in the list.
|
||
// This function is designed to be inlinable, so please take care when making
|
||
// changes to the function body.
|
||
|
||
function fromList(n, state) {
|
||
// nothing buffered
|
||
if (state.length === 0) return null;
|
||
var ret;
|
||
if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
|
||
// read it all, truncate the list
|
||
if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length);
|
||
state.buffer.clear();
|
||
} else {
|
||
// read part of list
|
||
ret = state.buffer.consume(n, state.decoder);
|
||
}
|
||
return ret;
|
||
}
|
||
|
||
function endReadable(stream) {
|
||
var state = stream._readableState;
|
||
debug('endReadable', state.endEmitted);
|
||
|
||
if (!state.endEmitted) {
|
||
state.ended = true;
|
||
process.nextTick(endReadableNT, state, stream);
|
||
}
|
||
}
|
||
|
||
function endReadableNT(state, stream) {
|
||
debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift.
|
||
|
||
if (!state.endEmitted && state.length === 0) {
|
||
state.endEmitted = true;
|
||
stream.readable = false;
|
||
stream.emit('end');
|
||
|
||
if (state.autoDestroy) {
|
||
// In case of duplex streams we need a way to detect
|
||
// if the writable side is ready for autoDestroy as well
|
||
var wState = stream._writableState;
|
||
|
||
if (!wState || wState.autoDestroy && wState.finished) {
|
||
stream.destroy();
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
if (typeof Symbol === 'function') {
|
||
Readable.from = function (iterable, opts) {
|
||
if (from === undefined) {
|
||
from = __nccwpck_require__(7417);
|
||
}
|
||
|
||
return from(Readable, iterable, opts);
|
||
};
|
||
}
|
||
|
||
function indexOf(xs, x) {
|
||
for (var i = 0, l = xs.length; i < l; i++) {
|
||
if (xs[i] === x) return i;
|
||
}
|
||
|
||
return -1;
|
||
}
|
||
|
||
/***/ }),
|
||
|
||
/***/ 5331:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
// a transform stream is a readable/writable stream where you do
|
||
// something with the data. Sometimes it's called a "filter",
|
||
// but that's not a great name for it, since that implies a thing where
|
||
// some bits pass through, and others are simply ignored. (That would
|
||
// be a valid example of a transform, of course.)
|
||
//
|
||
// While the output is causally related to the input, it's not a
|
||
// necessarily symmetric or synchronous transformation. For example,
|
||
// a zlib stream might take multiple plain-text writes(), and then
|
||
// emit a single compressed chunk some time in the future.
|
||
//
|
||
// Here's how this works:
|
||
//
|
||
// The Transform stream has all the aspects of the readable and writable
|
||
// stream classes. When you write(chunk), that calls _write(chunk,cb)
|
||
// internally, and returns false if there's a lot of pending writes
|
||
// buffered up. When you call read(), that calls _read(n) until
|
||
// there's enough pending readable data buffered up.
|
||
//
|
||
// In a transform stream, the written data is placed in a buffer. When
|
||
// _read(n) is called, it transforms the queued up data, calling the
|
||
// buffered _write cb's as it consumes chunks. If consuming a single
|
||
// written chunk would result in multiple output chunks, then the first
|
||
// outputted bit calls the readcb, and subsequent chunks just go into
|
||
// the read buffer, and will cause it to emit 'readable' if necessary.
|
||
//
|
||
// This way, back-pressure is actually determined by the reading side,
|
||
// since _read has to be called to start processing a new chunk. However,
|
||
// a pathological inflate type of transform can cause excessive buffering
|
||
// here. For example, imagine a stream where every byte of input is
|
||
// interpreted as an integer from 0-255, and then results in that many
|
||
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
|
||
// 1kb of data being output. In this case, you could write a very small
|
||
// amount of input, and end up with a very large amount of output. In
|
||
// such a pathological inflating mechanism, there'd be no way to tell
|
||
// the system to stop doing the transform. A single 4MB write could
|
||
// cause the system to run out of memory.
|
||
//
|
||
// However, even in such a pathological case, only a single written chunk
|
||
// would be consumed, and then the rest would wait (un-transformed) until
|
||
// the results of the previous transformed chunk were consumed.
|
||
|
||
|
||
module.exports = Transform;
|
||
|
||
var _require$codes = __nccwpck_require__(3537)/* .codes */ .q,
|
||
ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
|
||
ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
|
||
ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,
|
||
ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;
|
||
|
||
var Duplex = __nccwpck_require__(3997);
|
||
|
||
__nccwpck_require__(3753)(Transform, Duplex);
|
||
|
||
function afterTransform(er, data) {
|
||
var ts = this._transformState;
|
||
ts.transforming = false;
|
||
var cb = ts.writecb;
|
||
|
||
if (cb === null) {
|
||
return this.emit('error', new ERR_MULTIPLE_CALLBACK());
|
||
}
|
||
|
||
ts.writechunk = null;
|
||
ts.writecb = null;
|
||
if (data != null) // single equals check for both `null` and `undefined`
|
||
this.push(data);
|
||
cb(er);
|
||
var rs = this._readableState;
|
||
rs.reading = false;
|
||
|
||
if (rs.needReadable || rs.length < rs.highWaterMark) {
|
||
this._read(rs.highWaterMark);
|
||
}
|
||
}
|
||
|
||
function Transform(options) {
|
||
if (!(this instanceof Transform)) return new Transform(options);
|
||
Duplex.call(this, options);
|
||
this._transformState = {
|
||
afterTransform: afterTransform.bind(this),
|
||
needTransform: false,
|
||
transforming: false,
|
||
writecb: null,
|
||
writechunk: null,
|
||
writeencoding: null
|
||
}; // start out asking for a readable event once data is transformed.
|
||
|
||
this._readableState.needReadable = true; // we have implemented the _read method, and done the other things
|
||
// that Readable wants before the first _read call, so unset the
|
||
// sync guard flag.
|
||
|
||
this._readableState.sync = false;
|
||
|
||
if (options) {
|
||
if (typeof options.transform === 'function') this._transform = options.transform;
|
||
if (typeof options.flush === 'function') this._flush = options.flush;
|
||
} // When the writable side finishes, then flush out anything remaining.
|
||
|
||
|
||
this.on('prefinish', prefinish);
|
||
}
|
||
|
||
function prefinish() {
|
||
var _this = this;
|
||
|
||
if (typeof this._flush === 'function' && !this._readableState.destroyed) {
|
||
this._flush(function (er, data) {
|
||
done(_this, er, data);
|
||
});
|
||
} else {
|
||
done(this, null, null);
|
||
}
|
||
}
|
||
|
||
Transform.prototype.push = function (chunk, encoding) {
|
||
this._transformState.needTransform = false;
|
||
return Duplex.prototype.push.call(this, chunk, encoding);
|
||
}; // This is the part where you do stuff!
|
||
// override this function in implementation classes.
|
||
// 'chunk' is an input chunk.
|
||
//
|
||
// Call `push(newChunk)` to pass along transformed output
|
||
// to the readable side. You may call 'push' zero or more times.
|
||
//
|
||
// Call `cb(err)` when you are done with this chunk. If you pass
|
||
// an error, then that'll put the hurt on the whole operation. If you
|
||
// never call cb(), then you'll never get another chunk.
|
||
|
||
|
||
Transform.prototype._transform = function (chunk, encoding, cb) {
|
||
cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()'));
|
||
};
|
||
|
||
Transform.prototype._write = function (chunk, encoding, cb) {
|
||
var ts = this._transformState;
|
||
ts.writecb = cb;
|
||
ts.writechunk = chunk;
|
||
ts.writeencoding = encoding;
|
||
|
||
if (!ts.transforming) {
|
||
var rs = this._readableState;
|
||
if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
|
||
}
|
||
}; // Doesn't matter what the args are here.
|
||
// _transform does all the work.
|
||
// That we got here means that the readable side wants more data.
|
||
|
||
|
||
Transform.prototype._read = function (n) {
|
||
var ts = this._transformState;
|
||
|
||
if (ts.writechunk !== null && !ts.transforming) {
|
||
ts.transforming = true;
|
||
|
||
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
|
||
} else {
|
||
// mark that we need a transform, so that any data that comes in
|
||
// will get processed, now that we've asked for it.
|
||
ts.needTransform = true;
|
||
}
|
||
};
|
||
|
||
Transform.prototype._destroy = function (err, cb) {
|
||
Duplex.prototype._destroy.call(this, err, function (err2) {
|
||
cb(err2);
|
||
});
|
||
};
|
||
|
||
function done(stream, er, data) {
|
||
if (er) return stream.emit('error', er);
|
||
if (data != null) // single equals check for both `null` and `undefined`
|
||
stream.push(data); // TODO(BridgeAR): Write a test for these two error cases
|
||
// if there's nothing in the write buffer, then that means
|
||
// that nothing more will ever be provided
|
||
|
||
if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0();
|
||
if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();
|
||
return stream.push(null);
|
||
}
|
||
|
||
/***/ }),
|

/***/ 1069:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// A bit simpler than readable streams.
// Implement an async ._write(chunk, encoding, cb), and it'll handle all
// the drain event emission and buffering.


module.exports = Writable;
/* <replacement> */

function WriteReq(chunk, encoding, cb) {
  this.chunk = chunk;
  this.encoding = encoding;
  this.callback = cb;
  this.next = null;
} // It seems a linked list but it is not
// there will be only 2 of these for each stream


function CorkedRequest(state) {
  var _this = this;

  this.next = null;
  this.entry = null;

  this.finish = function () {
    onCorkedFinish(_this, state);
  };
}
/* </replacement> */

/*<replacement>*/

var Duplex;
/*</replacement>*/

Writable.WritableState = WritableState;
/*<replacement>*/

var internalUtil = {
  deprecate: __nccwpck_require__(2053)
};
/*</replacement>*/

/*<replacement>*/

var Stream = __nccwpck_require__(8265);
/*</replacement>*/

var Buffer = __nccwpck_require__(4293).Buffer;

var OurUint8Array = global.Uint8Array || function () {};

function _uint8ArrayToBuffer(chunk) {
  return Buffer.from(chunk);
}

function _isUint8Array(obj) {
  return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
}

var destroyImpl = __nccwpck_require__(730);

var _require = __nccwpck_require__(5239),
    getHighWaterMark = _require.getHighWaterMark;

var _require$codes = __nccwpck_require__(3537)/* .codes */ .q,
    ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
    ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
    ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
    ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,
    ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,
    ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,
    ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,
    ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;

var errorOrDestroy = destroyImpl.errorOrDestroy;

__nccwpck_require__(3753)(Writable, Stream);

function nop() {}

function WritableState(options, stream, isDuplex) {
  Duplex = Duplex || __nccwpck_require__(3997);
  options = options || {}; // Duplex streams are both readable and writable, but share
  // the same options object.
  // However, some cases require setting options to different
  // values for the readable and the writable sides of the duplex stream,
  // e.g. options.readableObjectMode vs. options.writableObjectMode, etc.

  if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream
  // contains buffers or objects.

  this.objectMode = !!options.objectMode;
  if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false
  // Note: 0 is a valid value, means that we always return false if
  // the entire buffer is not flushed immediately on write()

  this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called

  this.finalCalled = false; // drain event flag.

  this.needDrain = false; // at the start of calling end()

  this.ending = false; // when end() has been called, and returned

  this.ended = false; // when 'finish' is emitted

  this.finished = false; // has it been destroyed

  this.destroyed = false; // should we decode strings into buffers before passing to _write?
  // this is here so that some node-core streams can optimize string
  // handling at a lower level.

  var noDecode = options.decodeStrings === false;
  this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.

  this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement
  // of how much we're waiting to get pushed to some underlying
  // socket or file.

  this.length = 0; // a flag to see when we're in the middle of a write.

  this.writing = false; // when true all writes will be buffered until .uncork() call

  this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately,
  // or on a later tick. We set this to true at first, because any
  // actions that shouldn't happen until "later" should generally also
  // not happen before the first write call.

  this.sync = true; // a flag to know if we're processing previously buffered items, which
  // may call the _write() callback in the same tick, so that we don't
  // end up in an overlapped onwrite situation.

  this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb)

  this.onwrite = function (er) {
    onwrite(stream, er);
  }; // the callback that the user supplies to write(chunk,encoding,cb)

  this.writecb = null; // the amount that is being written when _write is called.

  this.writelen = 0;
  this.bufferedRequest = null;
  this.lastBufferedRequest = null; // number of pending user-supplied write callbacks
  // this must be 0 before 'finish' can be emitted

  this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs
  // This is relevant for synchronous Transform streams

  this.prefinished = false; // True if the error was already emitted and should not be thrown again

  this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true.

  this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end')

  this.autoDestroy = !!options.autoDestroy; // count buffered requests

  this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always
  // one allocated and free to use, and we maintain at most two

  this.corkedRequestsFree = new CorkedRequest(this);
}

WritableState.prototype.getBuffer = function getBuffer() {
  var current = this.bufferedRequest;
  var out = [];

  while (current) {
    out.push(current);
    current = current.next;
  }

  return out;
};

(function () {
  try {
    Object.defineProperty(WritableState.prototype, 'buffer', {
      get: internalUtil.deprecate(function writableStateBufferGetter() {
        return this.getBuffer();
      }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
    });
  } catch (_) {}
})(); // Test _writableState for inheritance to account for Duplex streams,
// whose prototype chain only points to Readable.


var realHasInstance;

if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
  realHasInstance = Function.prototype[Symbol.hasInstance];
  Object.defineProperty(Writable, Symbol.hasInstance, {
    value: function value(object) {
      if (realHasInstance.call(this, object)) return true;
      if (this !== Writable) return false;
      return object && object._writableState instanceof WritableState;
    }
  });
} else {
  realHasInstance = function realHasInstance(object) {
    return object instanceof this;
  };
}

function Writable(options) {
  Duplex = Duplex || __nccwpck_require__(3997); // Writable ctor is applied to Duplexes, too.
  // `realHasInstance` is necessary because using plain `instanceof`
  // would return false, as no `_writableState` property is attached.
  // Trying to use the custom `instanceof` for Writable here will also break the
  // Node.js LazyTransform implementation, which has a non-trivial getter for
  // `_writableState` that would lead to infinite recursion.
  // Checking for a Stream.Duplex instance is faster here instead of inside
  // the WritableState constructor, at least with V8 6.5

  var isDuplex = this instanceof Duplex;
  if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);
  this._writableState = new WritableState(options, this, isDuplex); // legacy.

  this.writable = true;

  if (options) {
    if (typeof options.write === 'function') this._write = options.write;
    if (typeof options.writev === 'function') this._writev = options.writev;
    if (typeof options.destroy === 'function') this._destroy = options.destroy;
    if (typeof options.final === 'function') this._final = options.final;
  }

  Stream.call(this);
} // Otherwise people can pipe Writable streams, which is just wrong.


Writable.prototype.pipe = function () {
  errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE());
};

function writeAfterEnd(stream, cb) {
  var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb

  errorOrDestroy(stream, er);
  process.nextTick(cb, er);
} // Checks that a user-supplied chunk is valid, especially for the particular
// mode the stream is in. Currently this means that `null` is never accepted
// and undefined/non-string values are only allowed in object mode.


function validChunk(stream, state, chunk, cb) {
  var er;

  if (chunk === null) {
    er = new ERR_STREAM_NULL_VALUES();
  } else if (typeof chunk !== 'string' && !state.objectMode) {
    er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);
  }

  if (er) {
    errorOrDestroy(stream, er);
    process.nextTick(cb, er);
    return false;
  }

  return true;
}

Writable.prototype.write = function (chunk, encoding, cb) {
  var state = this._writableState;
  var ret = false;

  var isBuf = !state.objectMode && _isUint8Array(chunk);

  if (isBuf && !Buffer.isBuffer(chunk)) {
    chunk = _uint8ArrayToBuffer(chunk);
  }

  if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }

  if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
  if (typeof cb !== 'function') cb = nop;
  if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
    state.pendingcb++;
    ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
  }
  return ret;
};
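/*
 * Illustrative sketch: the return value of write() above is the backpressure
 * signal. Once the buffered length reaches the high-water mark, write()
 * returns false and the caller should wait for 'drain' before writing more.
 * A typical consumer loop, assuming an arbitrary writable `out` and an array
 * of string chunks (both hypothetical):
 *
 *   function writeMany(out, chunks, done) {
 *     let i = 0;
 *     (function writeNext() {
 *       while (i < chunks.length) {
 *         // keep writing until the internal buffer reports it is full
 *         if (!out.write(chunks[i++])) {
 *           out.once('drain', writeNext); // resume once the buffer empties
 *           return;
 *         }
 *       }
 *       done();
 *     })();
 *   }
 */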

Writable.prototype.cork = function () {
  this._writableState.corked++;
};

Writable.prototype.uncork = function () {
  var state = this._writableState;

  if (state.corked) {
    state.corked--;
    if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
  }
};

Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
  // node::ParseEncoding() requires lower case.
  if (typeof encoding === 'string') encoding = encoding.toLowerCase();
  if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding);
  this._writableState.defaultEncoding = encoding;
  return this;
};

Object.defineProperty(Writable.prototype, 'writableBuffer', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._writableState && this._writableState.getBuffer();
  }
});

function decodeChunk(state, chunk, encoding) {
  if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
    chunk = Buffer.from(chunk, encoding);
  }

  return chunk;
}

Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._writableState.highWaterMark;
  }
}); // if we're already writing something, then just put this
// in the queue, and wait our turn. Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.


function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
  if (!isBuf) {
    var newChunk = decodeChunk(state, chunk, encoding);

    if (chunk !== newChunk) {
      isBuf = true;
      encoding = 'buffer';
      chunk = newChunk;
    }
  }

  var len = state.objectMode ? 1 : chunk.length;
  state.length += len;
  var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false.

  if (!ret) state.needDrain = true;

  if (state.writing || state.corked) {
    var last = state.lastBufferedRequest;
    state.lastBufferedRequest = {
      chunk: chunk,
      encoding: encoding,
      isBuf: isBuf,
      callback: cb,
      next: null
    };

    if (last) {
      last.next = state.lastBufferedRequest;
    } else {
      state.bufferedRequest = state.lastBufferedRequest;
    }

    state.bufferedRequestCount += 1;
  } else {
    doWrite(stream, state, false, len, chunk, encoding, cb);
  }

  return ret;
}

function doWrite(stream, state, writev, len, chunk, encoding, cb) {
  state.writelen = len;
  state.writecb = cb;
  state.writing = true;
  state.sync = true;
  if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
  state.sync = false;
}

function onwriteError(stream, state, sync, er, cb) {
  --state.pendingcb;

  if (sync) {
    // defer the callback if we are being called synchronously
    // to avoid piling up things on the stack
    process.nextTick(cb, er); // this can emit finish, and it will always happen
    // after error

    process.nextTick(finishMaybe, stream, state);
    stream._writableState.errorEmitted = true;
    errorOrDestroy(stream, er);
  } else {
    // the caller expect this to happen before if
    // it is async
    cb(er);
    stream._writableState.errorEmitted = true;
    errorOrDestroy(stream, er); // this can emit finish, but finish must
    // always follow error

    finishMaybe(stream, state);
  }
}

function onwriteStateUpdate(state) {
  state.writing = false;
  state.writecb = null;
  state.length -= state.writelen;
  state.writelen = 0;
}

function onwrite(stream, er) {
  var state = stream._writableState;
  var sync = state.sync;
  var cb = state.writecb;
  if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK();
  onwriteStateUpdate(state);
  if (er) onwriteError(stream, state, sync, er, cb);else {
    // Check if we're actually ready to finish, but don't emit yet
    var finished = needFinish(state) || stream.destroyed;

    if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
      clearBuffer(stream, state);
    }

    if (sync) {
      process.nextTick(afterWrite, stream, state, finished, cb);
    } else {
      afterWrite(stream, state, finished, cb);
    }
  }
}

function afterWrite(stream, state, finished, cb) {
  if (!finished) onwriteDrain(stream, state);
  state.pendingcb--;
  cb();
  finishMaybe(stream, state);
} // Must force callback to be called on nextTick, so that we don't
// emit 'drain' before the write() consumer gets the 'false' return
// value, and has a chance to attach a 'drain' listener.


function onwriteDrain(stream, state) {
  if (state.length === 0 && state.needDrain) {
    state.needDrain = false;
    stream.emit('drain');
  }
} // if there's something in the buffer waiting, then process it


function clearBuffer(stream, state) {
  state.bufferProcessing = true;
  var entry = state.bufferedRequest;

  if (stream._writev && entry && entry.next) {
    // Fast case, write everything using _writev()
    var l = state.bufferedRequestCount;
    var buffer = new Array(l);
    var holder = state.corkedRequestsFree;
    holder.entry = entry;
    var count = 0;
    var allBuffers = true;

    while (entry) {
      buffer[count] = entry;
      if (!entry.isBuf) allBuffers = false;
      entry = entry.next;
      count += 1;
    }

    buffer.allBuffers = allBuffers;
    doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time
    // as the hot path ends with doWrite

    state.pendingcb++;
    state.lastBufferedRequest = null;

    if (holder.next) {
      state.corkedRequestsFree = holder.next;
      holder.next = null;
    } else {
      state.corkedRequestsFree = new CorkedRequest(state);
    }

    state.bufferedRequestCount = 0;
  } else {
    // Slow case, write chunks one-by-one
    while (entry) {
      var chunk = entry.chunk;
      var encoding = entry.encoding;
      var cb = entry.callback;
      var len = state.objectMode ? 1 : chunk.length;
      doWrite(stream, state, false, len, chunk, encoding, cb);
      entry = entry.next;
      state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then
      // it means that we need to wait until it does.
      // also, that means that the chunk and cb are currently
      // being processed, so move the buffer counter past them.

      if (state.writing) {
        break;
      }
    }

    if (entry === null) state.lastBufferedRequest = null;
  }

  state.bufferedRequest = entry;
  state.bufferProcessing = false;
}

Writable.prototype._write = function (chunk, encoding, cb) {
  cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()'));
};

Writable.prototype._writev = null;

Writable.prototype.end = function (chunk, encoding, cb) {
  var state = this._writableState;

  if (typeof chunk === 'function') {
    cb = chunk;
    chunk = null;
    encoding = null;
  } else if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }

  if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks

  if (state.corked) {
    state.corked = 1;
    this.uncork();
  } // ignore unnecessary end() calls.


  if (!state.ending) endWritable(this, state, cb);
  return this;
};

Object.defineProperty(Writable.prototype, 'writableLength', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._writableState.length;
  }
});

function needFinish(state) {
  return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
}

function callFinal(stream, state) {
  stream._final(function (err) {
    state.pendingcb--;

    if (err) {
      errorOrDestroy(stream, err);
    }

    state.prefinished = true;
    stream.emit('prefinish');
    finishMaybe(stream, state);
  });
}

function prefinish(stream, state) {
  if (!state.prefinished && !state.finalCalled) {
    if (typeof stream._final === 'function' && !state.destroyed) {
      state.pendingcb++;
      state.finalCalled = true;
      process.nextTick(callFinal, stream, state);
    } else {
      state.prefinished = true;
      stream.emit('prefinish');
    }
  }
}

function finishMaybe(stream, state) {
  var need = needFinish(state);

  if (need) {
    prefinish(stream, state);

    if (state.pendingcb === 0) {
      state.finished = true;
      stream.emit('finish');

      if (state.autoDestroy) {
        // In case of duplex streams we need a way to detect
        // if the readable side is ready for autoDestroy as well
        var rState = stream._readableState;

        if (!rState || rState.autoDestroy && rState.endEmitted) {
          stream.destroy();
        }
      }
    }
  }

  return need;
}

function endWritable(stream, state, cb) {
  state.ending = true;
  finishMaybe(stream, state);

  if (cb) {
    if (state.finished) process.nextTick(cb);else stream.once('finish', cb);
  }

  state.ended = true;
  stream.writable = false;
}

function onCorkedFinish(corkReq, state, err) {
  var entry = corkReq.entry;
  corkReq.entry = null;

  while (entry) {
    var cb = entry.callback;
    state.pendingcb--;
    cb(err);
    entry = entry.next;
  } // reuse the free corkReq.


  state.corkedRequestsFree.next = corkReq;
}

Object.defineProperty(Writable.prototype, 'destroyed', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    if (this._writableState === undefined) {
      return false;
    }

    return this._writableState.destroyed;
  },
  set: function set(value) {
    // we ignore the value if the stream
    // has not been initialized yet
    if (!this._writableState) {
      return;
    } // backward compatibility, the user is explicitly
    // managing destroyed


    this._writableState.destroyed = value;
  }
});
Writable.prototype.destroy = destroyImpl.destroy;
Writable.prototype._undestroy = destroyImpl.undestroy;

Writable.prototype._destroy = function (err, cb) {
  cb(err);
};

/***/ }),
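/*
 * Illustrative sketch: as the header comment of the module above says, a
 * concrete sink only needs to supply _write(); buffering, 'drain' and
 * 'finish' handling live here. Written against Node's public `stream`
 * module; the `counter` sink below is a hypothetical example.
 *
 *   const { Writable } = require('stream');
 *
 *   let bytes = 0;
 *   const counter = new Writable({
 *     write(chunk, encoding, cb) {
 *       bytes += chunk.length; // consume the chunk, then acknowledge it
 *       cb();
 *     },
 *     final(cb) {
 *       console.log(`saw ${bytes} bytes`);
 *       cb();
 *     }
 *   });
 *
 *   counter.on('finish', () => console.log('all writes flushed'));
 *   counter.end('hello');
 */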

/***/ 5962:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";

var _Object$setPrototypeO;

function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }

var finished = __nccwpck_require__(9704);

var kLastResolve = Symbol('lastResolve');
var kLastReject = Symbol('lastReject');
var kError = Symbol('error');
var kEnded = Symbol('ended');
var kLastPromise = Symbol('lastPromise');
var kHandlePromise = Symbol('handlePromise');
var kStream = Symbol('stream');

function createIterResult(value, done) {
  return {
    value: value,
    done: done
  };
}

function readAndResolve(iter) {
  var resolve = iter[kLastResolve];

  if (resolve !== null) {
    var data = iter[kStream].read(); // we defer if data is null
    // we can be expecting either 'end' or
    // 'error'

    if (data !== null) {
      iter[kLastPromise] = null;
      iter[kLastResolve] = null;
      iter[kLastReject] = null;
      resolve(createIterResult(data, false));
    }
  }
}

function onReadable(iter) {
  // we wait for the next tick, because it might
  // emit an error with process.nextTick
  process.nextTick(readAndResolve, iter);
}

function wrapForNext(lastPromise, iter) {
  return function (resolve, reject) {
    lastPromise.then(function () {
      if (iter[kEnded]) {
        resolve(createIterResult(undefined, true));
        return;
      }

      iter[kHandlePromise](resolve, reject);
    }, reject);
  };
}

var AsyncIteratorPrototype = Object.getPrototypeOf(function () {});
var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = {
  get stream() {
    return this[kStream];
  },

  next: function next() {
    var _this = this;

    // if we have detected an error in the meanwhile
    // reject straight away
    var error = this[kError];

    if (error !== null) {
      return Promise.reject(error);
    }

    if (this[kEnded]) {
      return Promise.resolve(createIterResult(undefined, true));
    }

    if (this[kStream].destroyed) {
      // We need to defer via nextTick because if .destroy(err) is
      // called, the error will be emitted via nextTick, and
      // we cannot guarantee that there is no error lingering around
      // waiting to be emitted.
      return new Promise(function (resolve, reject) {
        process.nextTick(function () {
          if (_this[kError]) {
            reject(_this[kError]);
          } else {
            resolve(createIterResult(undefined, true));
          }
        });
      });
    } // if we have multiple next() calls
    // we will wait for the previous Promise to finish
    // this logic is optimized to support for await loops,
    // where next() is only called once at a time


    var lastPromise = this[kLastPromise];
    var promise;

    if (lastPromise) {
      promise = new Promise(wrapForNext(lastPromise, this));
    } else {
      // fast path needed to support multiple this.push()
      // without triggering the next() queue
      var data = this[kStream].read();

      if (data !== null) {
        return Promise.resolve(createIterResult(data, false));
      }

      promise = new Promise(this[kHandlePromise]);
    }

    this[kLastPromise] = promise;
    return promise;
  }
}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () {
  return this;
}), _defineProperty(_Object$setPrototypeO, "return", function _return() {
  var _this2 = this;

  // destroy(err, cb) is a private API
  // we can guarantee we have that here, because we control the
  // Readable class this is attached to
  return new Promise(function (resolve, reject) {
    _this2[kStream].destroy(null, function (err) {
      if (err) {
        reject(err);
        return;
      }

      resolve(createIterResult(undefined, true));
    });
  });
}), _Object$setPrototypeO), AsyncIteratorPrototype);

var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) {
  var _Object$create;

  var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, {
    value: stream,
    writable: true
  }), _defineProperty(_Object$create, kLastResolve, {
    value: null,
    writable: true
  }), _defineProperty(_Object$create, kLastReject, {
    value: null,
    writable: true
  }), _defineProperty(_Object$create, kError, {
    value: null,
    writable: true
  }), _defineProperty(_Object$create, kEnded, {
    value: stream._readableState.endEmitted,
    writable: true
  }), _defineProperty(_Object$create, kHandlePromise, {
    value: function value(resolve, reject) {
      var data = iterator[kStream].read();

      if (data) {
        iterator[kLastPromise] = null;
        iterator[kLastResolve] = null;
        iterator[kLastReject] = null;
        resolve(createIterResult(data, false));
      } else {
        iterator[kLastResolve] = resolve;
        iterator[kLastReject] = reject;
      }
    },
    writable: true
  }), _Object$create));
  iterator[kLastPromise] = null;
  finished(stream, function (err) {
    if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
      var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise
      // returned by next() and store the error

      if (reject !== null) {
        iterator[kLastPromise] = null;
        iterator[kLastResolve] = null;
        iterator[kLastReject] = null;
        reject(err);
      }

      iterator[kError] = err;
      return;
    }

    var resolve = iterator[kLastResolve];

    if (resolve !== null) {
      iterator[kLastPromise] = null;
      iterator[kLastResolve] = null;
      iterator[kLastReject] = null;
      resolve(createIterResult(undefined, true));
    }

    iterator[kEnded] = true;
  });
  stream.on('readable', onReadable.bind(null, iterator));
  return iterator;
};

module.exports = createReadableStreamAsyncIterator;

/***/ }),
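/*
 * Illustrative sketch: the factory above is what backs
 * Readable.prototype[Symbol.asyncIterator], letting callers consume a stream
 * with for await...of. Written against Node's public API; the file path is
 * hypothetical.
 *
 *   const fs = require('fs');
 *
 *   async function countBytes(path) {
 *     let total = 0;
 *     // each iteration awaits the promise produced by next() above
 *     for await (const chunk of fs.createReadStream(path)) {
 *       total += chunk.length;
 *     }
 *     return total;
 *   }
 */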

/***/ 3401:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";

function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }

function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }

function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }

function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }

function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }

function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }

var _require = __nccwpck_require__(4293),
    Buffer = _require.Buffer;

var _require2 = __nccwpck_require__(1669),
    inspect = _require2.inspect;

var custom = inspect && inspect.custom || 'inspect';

function copyBuffer(src, target, offset) {
  Buffer.prototype.copy.call(src, target, offset);
}

module.exports =
/*#__PURE__*/
function () {
  function BufferList() {
    _classCallCheck(this, BufferList);

    this.head = null;
    this.tail = null;
    this.length = 0;
  }

  _createClass(BufferList, [{
    key: "push",
    value: function push(v) {
      var entry = {
        data: v,
        next: null
      };
      if (this.length > 0) this.tail.next = entry;else this.head = entry;
      this.tail = entry;
      ++this.length;
    }
  }, {
    key: "unshift",
    value: function unshift(v) {
      var entry = {
        data: v,
        next: this.head
      };
      if (this.length === 0) this.tail = entry;
      this.head = entry;
      ++this.length;
    }
  }, {
    key: "shift",
    value: function shift() {
      if (this.length === 0) return;
      var ret = this.head.data;
      if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
      --this.length;
      return ret;
    }
  }, {
    key: "clear",
    value: function clear() {
      this.head = this.tail = null;
      this.length = 0;
    }
  }, {
    key: "join",
    value: function join(s) {
      if (this.length === 0) return '';
      var p = this.head;
      var ret = '' + p.data;

      while (p = p.next) {
        ret += s + p.data;
      }

      return ret;
    }
  }, {
    key: "concat",
    value: function concat(n) {
      if (this.length === 0) return Buffer.alloc(0);
      var ret = Buffer.allocUnsafe(n >>> 0);
      var p = this.head;
      var i = 0;

      while (p) {
        copyBuffer(p.data, ret, i);
        i += p.data.length;
        p = p.next;
      }

      return ret;
    } // Consumes a specified amount of bytes or characters from the buffered data.

  }, {
    key: "consume",
    value: function consume(n, hasStrings) {
      var ret;

      if (n < this.head.data.length) {
        // `slice` is the same for buffers and strings.
        ret = this.head.data.slice(0, n);
        this.head.data = this.head.data.slice(n);
      } else if (n === this.head.data.length) {
        // First chunk is a perfect match.
        ret = this.shift();
      } else {
        // Result spans more than one buffer.
        ret = hasStrings ? this._getString(n) : this._getBuffer(n);
      }

      return ret;
    }
  }, {
    key: "first",
    value: function first() {
      return this.head.data;
    } // Consumes a specified amount of characters from the buffered data.

  }, {
    key: "_getString",
    value: function _getString(n) {
      var p = this.head;
      var c = 1;
      var ret = p.data;
      n -= ret.length;

      while (p = p.next) {
        var str = p.data;
        var nb = n > str.length ? str.length : n;
        if (nb === str.length) ret += str;else ret += str.slice(0, n);
        n -= nb;

        if (n === 0) {
          if (nb === str.length) {
            ++c;
            if (p.next) this.head = p.next;else this.head = this.tail = null;
          } else {
            this.head = p;
            p.data = str.slice(nb);
          }

          break;
        }

        ++c;
      }

      this.length -= c;
      return ret;
    } // Consumes a specified amount of bytes from the buffered data.

  }, {
    key: "_getBuffer",
    value: function _getBuffer(n) {
      var ret = Buffer.allocUnsafe(n);
      var p = this.head;
      var c = 1;
      p.data.copy(ret);
      n -= p.data.length;

      while (p = p.next) {
        var buf = p.data;
        var nb = n > buf.length ? buf.length : n;
        buf.copy(ret, ret.length - n, 0, nb);
        n -= nb;

        if (n === 0) {
          if (nb === buf.length) {
            ++c;
            if (p.next) this.head = p.next;else this.head = this.tail = null;
          } else {
            this.head = p;
            p.data = buf.slice(nb);
          }

          break;
        }

        ++c;
      }

      this.length -= c;
      return ret;
    } // Make sure the linked list only shows the minimal necessary information.

  }, {
    key: custom,
    value: function value(_, options) {
      return inspect(this, _objectSpread({}, options, {
        // Only inspect one level.
        depth: 0,
        // It should not recurse.
        customInspect: false
      }));
    }
  }]);

  return BufferList;
}();

/***/ }),
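/*
 * Illustrative sketch: BufferList is the internal singly linked list that
 * Readable uses to buffer pending chunks; it is not a public API. Assuming
 * the class above were in scope as `BufferList`, the methods compose roughly
 * like this:
 *
 *   const list = new BufferList();
 *   list.push(Buffer.from('ab'));
 *   list.push(Buffer.from('cd'));
 *   const all = list.concat(4);        // <Buffer 61 62 63 64>, joins every chunk
 *   const part = list.consume(3, false); // first 3 bytes, leaving 'd' buffered
 */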

/***/ 730:
/***/ ((module) => {

"use strict";
// undocumented cb() API, needed for core, not for public API

function destroy(err, cb) {
  var _this = this;

  var readableDestroyed = this._readableState && this._readableState.destroyed;
  var writableDestroyed = this._writableState && this._writableState.destroyed;

  if (readableDestroyed || writableDestroyed) {
    if (cb) {
      cb(err);
    } else if (err) {
      if (!this._writableState) {
        process.nextTick(emitErrorNT, this, err);
      } else if (!this._writableState.errorEmitted) {
        this._writableState.errorEmitted = true;
        process.nextTick(emitErrorNT, this, err);
      }
    }

    return this;
  } // we set destroyed to true before firing error callbacks in order
  // to make it re-entrance safe in case destroy() is called within callbacks


  if (this._readableState) {
    this._readableState.destroyed = true;
  } // if this is a duplex stream mark the writable part as destroyed as well


  if (this._writableState) {
    this._writableState.destroyed = true;
  }

  this._destroy(err || null, function (err) {
    if (!cb && err) {
      if (!_this._writableState) {
        process.nextTick(emitErrorAndCloseNT, _this, err);
      } else if (!_this._writableState.errorEmitted) {
        _this._writableState.errorEmitted = true;
        process.nextTick(emitErrorAndCloseNT, _this, err);
      } else {
        process.nextTick(emitCloseNT, _this);
      }
    } else if (cb) {
      process.nextTick(emitCloseNT, _this);
      cb(err);
    } else {
      process.nextTick(emitCloseNT, _this);
    }
  });

  return this;
}

function emitErrorAndCloseNT(self, err) {
  emitErrorNT(self, err);
  emitCloseNT(self);
}

function emitCloseNT(self) {
  if (self._writableState && !self._writableState.emitClose) return;
  if (self._readableState && !self._readableState.emitClose) return;
  self.emit('close');
}

function undestroy() {
  if (this._readableState) {
    this._readableState.destroyed = false;
    this._readableState.reading = false;
    this._readableState.ended = false;
    this._readableState.endEmitted = false;
  }

  if (this._writableState) {
    this._writableState.destroyed = false;
    this._writableState.ended = false;
    this._writableState.ending = false;
    this._writableState.finalCalled = false;
    this._writableState.prefinished = false;
    this._writableState.finished = false;
    this._writableState.errorEmitted = false;
  }
}

function emitErrorNT(self, err) {
  self.emit('error', err);
}

function errorOrDestroy(stream, err) {
  // We have tests that rely on errors being emitted
  // in the same tick, so changing this is semver major.
  // For now when you opt-in to autoDestroy we allow
  // the error to be emitted nextTick. In a future
  // semver major update we should change the default to this.
  var rState = stream._readableState;
  var wState = stream._writableState;
  if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err);
}

module.exports = {
  destroy: destroy,
  undestroy: undestroy,
  errorOrDestroy: errorOrDestroy
};

/***/ }),
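/*
 * Illustrative sketch: destroy() above tears a stream down and defers the
 * 'error' and 'close' emissions to later ticks. From the consumer side that
 * looks like the following; the file path is hypothetical.
 *
 *   const fs = require('fs');
 *   const rs = fs.createReadStream('some-large-file');
 *   rs.on('error', (err) => console.error('failed:', err));
 *   rs.on('close', () => console.log('resources released'));
 *   rs.destroy(new Error('giving up')); // 'error' then 'close', on later ticks
 */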

/***/ 9704:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";
// Ported from https://github.com/mafintosh/end-of-stream with
// permission from the author, Mathias Buus (@mafintosh).


var ERR_STREAM_PREMATURE_CLOSE = __nccwpck_require__(3537)/* .codes.ERR_STREAM_PREMATURE_CLOSE */ .q.ERR_STREAM_PREMATURE_CLOSE;

function once(callback) {
  var called = false;
  return function () {
    if (called) return;
    called = true;

    for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
      args[_key] = arguments[_key];
    }

    callback.apply(this, args);
  };
}

function noop() {}

function isRequest(stream) {
  return stream.setHeader && typeof stream.abort === 'function';
}

function eos(stream, opts, callback) {
  if (typeof opts === 'function') return eos(stream, null, opts);
  if (!opts) opts = {};
  callback = once(callback || noop);
  var readable = opts.readable || opts.readable !== false && stream.readable;
  var writable = opts.writable || opts.writable !== false && stream.writable;

  var onlegacyfinish = function onlegacyfinish() {
    if (!stream.writable) onfinish();
  };

  var writableEnded = stream._writableState && stream._writableState.finished;

  var onfinish = function onfinish() {
    writable = false;
    writableEnded = true;
    if (!readable) callback.call(stream);
  };

  var readableEnded = stream._readableState && stream._readableState.endEmitted;

  var onend = function onend() {
    readable = false;
    readableEnded = true;
    if (!writable) callback.call(stream);
  };

  var onerror = function onerror(err) {
    callback.call(stream, err);
  };

  var onclose = function onclose() {
    var err;

    if (readable && !readableEnded) {
      if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
      return callback.call(stream, err);
    }

    if (writable && !writableEnded) {
      if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
      return callback.call(stream, err);
    }
  };

  var onrequest = function onrequest() {
    stream.req.on('finish', onfinish);
  };

  if (isRequest(stream)) {
    stream.on('complete', onfinish);
    stream.on('abort', onclose);
    if (stream.req) onrequest();else stream.on('request', onrequest);
  } else if (writable && !stream._writableState) {
    // legacy streams
    stream.on('end', onlegacyfinish);
    stream.on('close', onlegacyfinish);
  }

  stream.on('end', onend);
  stream.on('finish', onfinish);
  if (opts.error !== false) stream.on('error', onerror);
  stream.on('close', onclose);
  return function () {
    stream.removeListener('complete', onfinish);
    stream.removeListener('abort', onclose);
    stream.removeListener('request', onrequest);
    if (stream.req) stream.req.removeListener('finish', onfinish);
    stream.removeListener('end', onlegacyfinish);
    stream.removeListener('close', onlegacyfinish);
    stream.removeListener('finish', onfinish);
    stream.removeListener('end', onend);
    stream.removeListener('error', onerror);
    stream.removeListener('close', onclose);
  };
}

module.exports = eos;

/***/ }),
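/*
 * Illustrative sketch: eos() is what readable-stream exposes as `finished`.
 * It calls back once the stream has ended, finished, or closed prematurely,
 * and returns a cleanup function that removes the listeners it installed.
 * Written against Node's public API; the source stream is hypothetical.
 *
 *   const { finished } = require('stream');
 *   const fs = require('fs');
 *
 *   const rs = fs.createReadStream('some-file');
 *   const cleanup = finished(rs, (err) => {
 *     if (err) console.error('stream failed or closed early', err);
 *     else console.log('stream is done');
 *     cleanup(); // detach the listeners added above
 *   });
 */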

/***/ 7417:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";

function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }

function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }

function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }

function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }

function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }

var ERR_INVALID_ARG_TYPE = __nccwpck_require__(3537)/* .codes.ERR_INVALID_ARG_TYPE */ .q.ERR_INVALID_ARG_TYPE;

function from(Readable, iterable, opts) {
  var iterator;

  if (iterable && typeof iterable.next === 'function') {
    iterator = iterable;
  } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable);

  var readable = new Readable(_objectSpread({
    objectMode: true
  }, opts)); // Reading boolean to protect against _read
  // being called before last iteration completion.

  var reading = false;

  readable._read = function () {
    if (!reading) {
      reading = true;
      next();
    }
  };

  function next() {
    return _next2.apply(this, arguments);
  }

  function _next2() {
    _next2 = _asyncToGenerator(function* () {
      try {
        var _ref = yield iterator.next(),
            value = _ref.value,
            done = _ref.done;

        if (done) {
          readable.push(null);
        } else if (readable.push((yield value))) {
          next();
        } else {
          reading = false;
        }
      } catch (err) {
        readable.destroy(err);
      }
    });
    return _next2.apply(this, arguments);
  }

  return readable;
}

module.exports = from;

/***/ }),
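/*
 * Illustrative sketch: from() above backs Readable.from(), which wraps an
 * iterable or async iterable in an object-mode Readable and pauses iteration
 * whenever push() reports backpressure. Written against Node's public API;
 * the generator below is a hypothetical example.
 *
 *   const { Readable } = require('stream');
 *
 *   async function* generate() {
 *     yield 'hello';
 *     yield 'world';
 *   }
 *
 *   Readable.from(generate()).on('data', (item) => console.log(item));
 */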

/***/ 1912:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";
// Ported from https://github.com/mafintosh/pump with
// permission from the author, Mathias Buus (@mafintosh).


var eos;

function once(callback) {
  var called = false;
  return function () {
    if (called) return;
    called = true;
    callback.apply(void 0, arguments);
  };
}

var _require$codes = __nccwpck_require__(3537)/* .codes */ .q,
    ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS,
    ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED;

function noop(err) {
  // Rethrow the error if it exists to avoid swallowing it
  if (err) throw err;
}

function isRequest(stream) {
  return stream.setHeader && typeof stream.abort === 'function';
}

function destroyer(stream, reading, writing, callback) {
  callback = once(callback);
  var closed = false;
  stream.on('close', function () {
    closed = true;
  });
  if (eos === undefined) eos = __nccwpck_require__(9704);
  eos(stream, {
    readable: reading,
    writable: writing
  }, function (err) {
    if (err) return callback(err);
    closed = true;
    callback();
  });
  var destroyed = false;
  return function (err) {
    if (closed) return;
    if (destroyed) return;
    destroyed = true; // request.destroy just do .end - .abort is what we want

    if (isRequest(stream)) return stream.abort();
    if (typeof stream.destroy === 'function') return stream.destroy();
    callback(err || new ERR_STREAM_DESTROYED('pipe'));
  };
}

function call(fn) {
  fn();
}

function pipe(from, to) {
  return from.pipe(to);
}

function popCallback(streams) {
  if (!streams.length) return noop;
  if (typeof streams[streams.length - 1] !== 'function') return noop;
  return streams.pop();
}

function pipeline() {
  for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) {
    streams[_key] = arguments[_key];
  }

  var callback = popCallback(streams);
  if (Array.isArray(streams[0])) streams = streams[0];

  if (streams.length < 2) {
    throw new ERR_MISSING_ARGS('streams');
  }

  var error;
  var destroys = streams.map(function (stream, i) {
    var reading = i < streams.length - 1;
    var writing = i > 0;
    return destroyer(stream, reading, writing, function (err) {
      if (!error) error = err;
      if (err) destroys.forEach(call);
      if (reading) return;
      destroys.forEach(call);
      callback(error);
    });
  });
  return streams.reduce(pipe);
}

module.exports = pipeline;

/***/ }),
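/*
 * Illustrative sketch: pipeline() above pipes the given streams together,
 * destroys all of them if any one fails, and reports the first error through
 * the final callback. Written against Node's public API; the file names are
 * hypothetical.
 *
 *   const { pipeline } = require('stream');
 *   const fs = require('fs');
 *   const zlib = require('zlib');
 *
 *   pipeline(
 *     fs.createReadStream('input.txt'),
 *     zlib.createGzip(),
 *     fs.createWriteStream('input.txt.gz'),
 *     (err) => {
 *       if (err) console.error('pipeline failed', err);
 *       else console.log('pipeline succeeded');
 *     }
 *   );
 */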

/***/ 5239:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";

var ERR_INVALID_OPT_VALUE = __nccwpck_require__(3537)/* .codes.ERR_INVALID_OPT_VALUE */ .q.ERR_INVALID_OPT_VALUE;

function highWaterMarkFrom(options, isDuplex, duplexKey) {
  return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null;
}

function getHighWaterMark(state, options, duplexKey, isDuplex) {
  var hwm = highWaterMarkFrom(options, isDuplex, duplexKey);

  if (hwm != null) {
    if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) {
      var name = isDuplex ? duplexKey : 'highWaterMark';
      throw new ERR_INVALID_OPT_VALUE(name, hwm);
    }

    return Math.floor(hwm);
  } // Default value


  return state.objectMode ? 16 : 16 * 1024;
}

module.exports = {
  getHighWaterMark: getHighWaterMark
};

/***/ }),
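/*
 * Illustrative sketch: getHighWaterMark() above resolves the buffering
 * threshold from the constructor options, defaulting to 16 objects in object
 * mode and 16 * 1024 bytes otherwise. From the caller's side this is simply
 * the highWaterMark option:
 *
 *   const { Readable } = require('stream');
 *   const small = new Readable({ highWaterMark: 1024, read() {} }); // 1 KiB buffer
 *   const objects = new Readable({ objectMode: true, read() {} });  // defaults to 16 items
 */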

/***/ 8265:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

module.exports = __nccwpck_require__(2413);


/***/ }),

/***/ 2498:
/***/ ((module, exports, __nccwpck_require__) => {

var Stream = __nccwpck_require__(2413);
if (process.env.READABLE_STREAM === 'disable' && Stream) {
  module.exports = Stream.Readable;
  Object.assign(module.exports, Stream);
  module.exports.Stream = Stream;
} else {
  exports = module.exports = __nccwpck_require__(7612);
  exports.Stream = Stream || exports;
  exports.Readable = exports;
  exports.Writable = __nccwpck_require__(1069);
  exports.Duplex = __nccwpck_require__(3997);
  exports.Transform = __nccwpck_require__(5331);
  exports.PassThrough = __nccwpck_require__(8530);
  exports.finished = __nccwpck_require__(9704);
  exports.pipeline = __nccwpck_require__(1912);
}


/***/ }),
||
|
||
/***/ 951:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
module.exports = readdirGlob;
|
||
|
||
const fs = __nccwpck_require__(5747);
|
||
const { EventEmitter } = __nccwpck_require__(8614);
|
||
const { Minimatch } = __nccwpck_require__(7066);
|
||
const { resolve } = __nccwpck_require__(5622);
|
||
|
||
function readdir(dir, strict) {
|
||
return new Promise((resolve, reject) => {
|
||
fs.readdir(dir, {withFileTypes: true} ,(err, files) => {
|
||
if(err) {
|
||
switch (err.code) {
|
||
case 'ENOTDIR': // Not a directory
|
||
if(strict) {
|
||
reject(err);
|
||
} else {
|
||
resolve([]);
|
||
}
|
||
break;
|
||
case 'ENOTSUP': // Operation not supported
|
||
case 'ENOENT': // No such file or directory
|
||
case 'ENAMETOOLONG': // Filename too long
|
||
case 'UNKNOWN':
|
||
resolve([]);
|
||
break;
|
||
case 'ELOOP': // Too many levels of symbolic links
|
||
default:
|
||
reject(err);
|
||
break;
|
||
}
|
||
} else {
|
||
resolve(files);
|
||
}
|
||
});
|
||
});
|
||
}
|
||
function stat(file, followSyslinks) {
|
||
return new Promise((resolve, reject) => {
|
||
const statFunc = followSyslinks ? fs.stat : fs.lstat;
|
||
statFunc(file, (err, stats) => {
|
||
if(err) {
|
||
switch (err.code) {
|
||
case 'ENOENT':
|
||
if(followSyslinks) {
|
||
// Fallback to lstat to handle broken links as files
|
||
resolve(stat(file, false));
|
||
} else {
|
||
resolve(null);
|
||
}
|
||
break;
|
||
default:
|
||
resolve(null);
|
||
break;
|
||
}
|
||
} else {
|
||
resolve(stats);
|
||
}
|
||
});
|
||
});
|
||
}
|
||
|
||
async function* exploreWalkAsync(dir, path, followSyslinks, useStat, shouldSkip, strict) {
|
||
let files = await readdir(path + dir, strict);
|
||
for(const file of files) {
|
||
let name = file.name;
|
||
if(name === undefined) {
|
||
// undefined file.name means the `withFileTypes` options is not supported by node
|
||
// we have to call the stat function to know if file is directory or not.
|
||
name = file;
|
||
useStat = true;
|
||
}
|
||
const filename = dir + '/' + name;
|
||
const relative = filename.slice(1); // Remove the leading /
|
||
const absolute = path + '/' + relative;
|
||
let stats = null;
|
||
if(useStat || followSyslinks) {
|
||
stats = await stat(absolute, followSyslinks);
|
||
}
|
||
if(!stats && file.name !== undefined) {
|
||
stats = file;
|
||
}
|
||
if(stats === null) {
|
||
stats = { isDirectory: () => false };
|
||
}
|
||
|
||
if(stats.isDirectory()) {
|
||
if(!shouldSkip(relative)) {
|
||
yield {relative, absolute, stats};
|
||
yield* exploreWalkAsync(filename, path, followSyslinks, useStat, shouldSkip, false);
|
||
}
|
||
} else {
|
||
yield {relative, absolute, stats};
|
||
}
|
||
}
|
||
}
|
||
async function* explore(path, followSyslinks, useStat, shouldSkip) {
|
||
yield* exploreWalkAsync('', path, followSyslinks, useStat, shouldSkip, true);
|
||
}
|
||
|
||
|
||
function readOptions(options) {
|
||
return {
|
||
pattern: options.pattern,
|
||
dot: !!options.dot,
|
||
noglobstar: !!options.noglobstar,
|
||
matchBase: !!options.matchBase,
|
||
nocase: !!options.nocase,
|
||
ignore: options.ignore,
|
||
skip: options.skip,
|
||
|
||
follow: !!options.follow,
|
||
stat: !!options.stat,
|
||
nodir: !!options.nodir,
|
||
mark: !!options.mark,
|
||
silent: !!options.silent,
|
||
absolute: !!options.absolute
|
||
};
|
||
}
|
||
|
||
class ReaddirGlob extends EventEmitter {
|
||
constructor(cwd, options, cb) {
|
||
super();
|
||
if(typeof options === 'function') {
|
||
cb = options;
|
||
options = null;
|
||
}
|
||
|
||
this.options = readOptions(options || {});
|
||
|
||
this.matchers = [];
|
||
if(this.options.pattern) {
|
||
const matchers = Array.isArray(this.options.pattern) ? this.options.pattern : [this.options.pattern];
|
||
this.matchers = matchers.map( m =>
|
||
new Minimatch(m, {
|
||
dot: this.options.dot,
|
||
noglobstar:this.options.noglobstar,
|
||
matchBase:this.options.matchBase,
|
||
nocase:this.options.nocase
|
||
})
|
||
);
|
||
}
|
||
|
||
this.ignoreMatchers = [];
|
||
if(this.options.ignore) {
|
||
const ignorePatterns = Array.isArray(this.options.ignore) ? this.options.ignore : [this.options.ignore];
|
||
this.ignoreMatchers = ignorePatterns.map( ignore =>
|
||
new Minimatch(ignore, {dot: true})
|
||
);
|
||
}
|
||
|
||
this.skipMatchers = [];
|
||
if(this.options.skip) {
|
||
const skipPatterns = Array.isArray(this.options.skip) ? this.options.skip : [this.options.skip];
|
||
this.skipMatchers = skipPatterns.map( skip =>
|
||
new Minimatch(skip, {dot: true})
|
||
);
|
||
}
|
||
|
||
this.iterator = explore(resolve(cwd || '.'), this.options.follow, this.options.stat, this._shouldSkipDirectory.bind(this));
|
||
this.paused = false;
|
||
this.inactive = false;
|
||
this.aborted = false;
|
||
|
||
if(cb) {
|
||
this._matches = [];
|
||
this.on('match', match => this._matches.push(this.options.absolute ? match.absolute : match.relative));
|
||
this.on('error', err => cb(err));
|
||
this.on('end', () => cb(null, this._matches));
|
||
}
|
||
|
||
setTimeout( () => this._next(), 0);
|
||
}
|
||
|
||
_shouldSkipDirectory(relative) {
|
||
//console.log(relative, this.skipMatchers.some(m => m.match(relative)));
|
||
return this.skipMatchers.some(m => m.match(relative));
|
||
}
|
||
|
||
_fileMatches(relative, isDirectory) {
|
||
const file = relative + (isDirectory ? '/' : '');
|
||
return (this.matchers.length === 0 || this.matchers.some(m => m.match(file)))
|
||
&& !this.ignoreMatchers.some(m => m.match(file))
|
||
&& (!this.options.nodir || !isDirectory);
|
||
}
|
||
|
||
_next() {
|
||
if(!this.paused && !this.aborted) {
|
||
this.iterator.next()
|
||
.then((obj)=> {
|
||
if(!obj.done) {
|
||
const isDirectory = obj.value.stats.isDirectory();
|
||
if(this._fileMatches(obj.value.relative, isDirectory )) {
|
||
let relative = obj.value.relative;
|
||
let absolute = obj.value.absolute;
|
||
if(this.options.mark && isDirectory) {
|
||
relative += '/';
|
||
absolute += '/';
|
||
}
|
||
if(this.options.stat) {
|
||
this.emit('match', {relative, absolute, stat:obj.value.stats});
|
||
} else {
|
||
this.emit('match', {relative, absolute});
|
||
}
|
||
}
|
||
this._next(this.iterator);
|
||
} else {
|
||
this.emit('end');
|
||
}
|
||
})
|
||
.catch((err) => {
|
||
this.abort();
|
||
this.emit('error', err);
|
||
if(!err.code && !this.options.silent) {
|
||
console.error(err);
|
||
}
|
||
});
|
||
} else {
|
||
this.inactive = true;
|
||
}
|
||
}
|
||
|
||
abort() {
|
||
this.aborted = true;
|
||
}
|
||
|
||
pause() {
|
||
this.paused = true;
|
||
}
|
||
|
||
resume() {
|
||
this.paused = false;
|
||
if(this.inactive) {
|
||
this.inactive = false;
|
||
this._next();
|
||
}
|
||
}
|
||
}
function readdirGlob(pattern, options, cb) {
  return new ReaddirGlob(pattern, options, cb);
}
readdirGlob.ReaddirGlob = ReaddirGlob;
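// Usage sketch (illustrative comment only; not produced by the bundler). The
// factory above walks a directory tree and emits a 'match' event for every
// entry that satisfies the minimatch pattern(s), e.g.:
//
//   const walker = readdirGlob('.', { pattern: '**/*.js', skip: 'node_modules/**' });
//   walker.on('match', m => console.log(m.relative));
//   walker.on('end', () => console.log('done'));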

/***/ }),

/***/ 3129:
/***/ ((module) => {

"use strict";

/**
 * Check if we're required to add a port number.
 *
 * @see https://url.spec.whatwg.org/#default-port
 * @param {Number|String} port Port number we need to check
 * @param {String} protocol Protocol we need to check against.
 * @returns {Boolean} Is it a default port for the given protocol
 * @api private
 */
module.exports = function required(port, protocol) {
  protocol = protocol.split(':')[0];
  port = +port;

  if (!port) return false;

  switch (protocol) {
    case 'http':
    case 'ws':
    return port !== 80;

    case 'https':
    case 'wss':
    return port !== 443;

    case 'ftp':
    return port !== 21;

    case 'gopher':
    return port !== 70;

    case 'file':
    return false;
  }

  return port !== 0;
};
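// Illustrative sketch (comment only): required() reports whether a port must be
// kept in the host string, i.e. whether it differs from the protocol's default.
//
//   required('80', 'http:');   // false - default port, may be omitted
//   required('8080', 'http:'); // true  - must be kept
//   required('443', 'wss:');   // false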

/***/ }),

/***/ 1534:
|
||
/***/ ((module, exports, __nccwpck_require__) => {
|
||
|
||
/* eslint-disable node/no-deprecated-api */
|
||
var buffer = __nccwpck_require__(4293)
|
||
var Buffer = buffer.Buffer
|
||
|
||
// alternative to using Object.keys for old browsers
|
||
function copyProps (src, dst) {
|
||
for (var key in src) {
|
||
dst[key] = src[key]
|
||
}
|
||
}
|
||
if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
|
||
module.exports = buffer
|
||
} else {
|
||
// Copy properties from require('buffer')
|
||
copyProps(buffer, exports)
|
||
exports.Buffer = SafeBuffer
|
||
}
|
||
|
||
function SafeBuffer (arg, encodingOrOffset, length) {
|
||
return Buffer(arg, encodingOrOffset, length)
|
||
}
|
||
|
||
// Copy static methods from Buffer
|
||
copyProps(Buffer, SafeBuffer)
|
||
|
||
SafeBuffer.from = function (arg, encodingOrOffset, length) {
|
||
if (typeof arg === 'number') {
|
||
throw new TypeError('Argument must not be a number')
|
||
}
|
||
return Buffer(arg, encodingOrOffset, length)
|
||
}
|
||
|
||
SafeBuffer.alloc = function (size, fill, encoding) {
|
||
if (typeof size !== 'number') {
|
||
throw new TypeError('Argument must be a number')
|
||
}
|
||
var buf = Buffer(size)
|
||
if (fill !== undefined) {
|
||
if (typeof encoding === 'string') {
|
||
buf.fill(fill, encoding)
|
||
} else {
|
||
buf.fill(fill)
|
||
}
|
||
} else {
|
||
buf.fill(0)
|
||
}
|
||
return buf
|
||
}
|
||
|
||
SafeBuffer.allocUnsafe = function (size) {
|
||
if (typeof size !== 'number') {
|
||
throw new TypeError('Argument must be a number')
|
||
}
|
||
return Buffer(size)
|
||
}
|
||
|
||
SafeBuffer.allocUnsafeSlow = function (size) {
|
||
if (typeof size !== 'number') {
|
||
throw new TypeError('Argument must be a number')
|
||
}
|
||
return buffer.SlowBuffer(size)
|
||
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 781:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
||
|
||
|
||
/*<replacement>*/
|
||
|
||
var Buffer = __nccwpck_require__(1534).Buffer;
|
||
/*</replacement>*/
|
||
|
||
var isEncoding = Buffer.isEncoding || function (encoding) {
|
||
encoding = '' + encoding;
|
||
switch (encoding && encoding.toLowerCase()) {
|
||
case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':
|
||
return true;
|
||
default:
|
||
return false;
|
||
}
|
||
};
|
||
|
||
function _normalizeEncoding(enc) {
|
||
if (!enc) return 'utf8';
|
||
var retried;
|
||
while (true) {
|
||
switch (enc) {
|
||
case 'utf8':
|
||
case 'utf-8':
|
||
return 'utf8';
|
||
case 'ucs2':
|
||
case 'ucs-2':
|
||
case 'utf16le':
|
||
case 'utf-16le':
|
||
return 'utf16le';
|
||
case 'latin1':
|
||
case 'binary':
|
||
return 'latin1';
|
||
case 'base64':
|
||
case 'ascii':
|
||
case 'hex':
|
||
return enc;
|
||
default:
|
||
if (retried) return; // undefined
|
||
enc = ('' + enc).toLowerCase();
|
||
retried = true;
|
||
}
|
||
}
|
||
};
|
||
|
||
// Do not cache `Buffer.isEncoding` when checking encoding names as some
|
||
// modules monkey-patch it to support additional encodings
|
||
function normalizeEncoding(enc) {
|
||
var nenc = _normalizeEncoding(enc);
|
||
if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);
|
||
return nenc || enc;
|
||
}
|
||
|
||
// StringDecoder provides an interface for efficiently splitting a series of
|
||
// buffers into a series of JS strings without breaking apart multi-byte
|
||
// characters.
|
||
exports.s = StringDecoder;
|
||
function StringDecoder(encoding) {
|
||
this.encoding = normalizeEncoding(encoding);
|
||
var nb;
|
||
switch (this.encoding) {
|
||
case 'utf16le':
|
||
this.text = utf16Text;
|
||
this.end = utf16End;
|
||
nb = 4;
|
||
break;
|
||
case 'utf8':
|
||
this.fillLast = utf8FillLast;
|
||
nb = 4;
|
||
break;
|
||
case 'base64':
|
||
this.text = base64Text;
|
||
this.end = base64End;
|
||
nb = 3;
|
||
break;
|
||
default:
|
||
this.write = simpleWrite;
|
||
this.end = simpleEnd;
|
||
return;
|
||
}
|
||
this.lastNeed = 0;
|
||
this.lastTotal = 0;
|
||
this.lastChar = Buffer.allocUnsafe(nb);
|
||
}
|
||
|
||
StringDecoder.prototype.write = function (buf) {
|
||
if (buf.length === 0) return '';
|
||
var r;
|
||
var i;
|
||
if (this.lastNeed) {
|
||
r = this.fillLast(buf);
|
||
if (r === undefined) return '';
|
||
i = this.lastNeed;
|
||
this.lastNeed = 0;
|
||
} else {
|
||
i = 0;
|
||
}
|
||
if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);
|
||
return r || '';
|
||
};
|
||
|
||
StringDecoder.prototype.end = utf8End;
|
||
|
||
// Returns only complete characters in a Buffer
|
||
StringDecoder.prototype.text = utf8Text;
|
||
|
||
// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
|
||
StringDecoder.prototype.fillLast = function (buf) {
|
||
if (this.lastNeed <= buf.length) {
|
||
buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);
|
||
return this.lastChar.toString(this.encoding, 0, this.lastTotal);
|
||
}
|
||
buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);
|
||
this.lastNeed -= buf.length;
|
||
};
|
||
|
||
// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a
|
||
// continuation byte. If an invalid byte is detected, -2 is returned.
|
||
function utf8CheckByte(byte) {
|
||
if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;
|
||
return byte >> 6 === 0x02 ? -1 : -2;
|
||
}
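// Illustrative sketch (comment only): utf8CheckByte(0xE2) === 3 (lead byte of a
// three-byte sequence), utf8CheckByte(0x82) === -1 (continuation byte), and
// utf8CheckByte(0xFF) === -2 (invalid byte).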
|
||
|
||
// Checks at most 3 bytes at the end of a Buffer in order to detect an
|
||
// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)
|
||
// needed to complete the UTF-8 character (if applicable) are returned.
|
||
function utf8CheckIncomplete(self, buf, i) {
|
||
var j = buf.length - 1;
|
||
if (j < i) return 0;
|
||
var nb = utf8CheckByte(buf[j]);
|
||
if (nb >= 0) {
|
||
if (nb > 0) self.lastNeed = nb - 1;
|
||
return nb;
|
||
}
|
||
if (--j < i || nb === -2) return 0;
|
||
nb = utf8CheckByte(buf[j]);
|
||
if (nb >= 0) {
|
||
if (nb > 0) self.lastNeed = nb - 2;
|
||
return nb;
|
||
}
|
||
if (--j < i || nb === -2) return 0;
|
||
nb = utf8CheckByte(buf[j]);
|
||
if (nb >= 0) {
|
||
if (nb > 0) {
|
||
if (nb === 2) nb = 0;else self.lastNeed = nb - 3;
|
||
}
|
||
return nb;
|
||
}
|
||
return 0;
|
||
}
|
||
|
||
// Validates as many continuation bytes for a multi-byte UTF-8 character as
|
||
// needed or are available. If we see a non-continuation byte where we expect
|
||
// one, we "replace" the validated continuation bytes we've seen so far with
|
||
// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding
|
||
// behavior. The continuation byte check is included three times in the case
|
||
// where all of the continuation bytes for a character exist in the same buffer.
|
||
// It is also done this way as a slight performance increase instead of using a
|
||
// loop.
|
||
function utf8CheckExtraBytes(self, buf, p) {
|
||
if ((buf[0] & 0xC0) !== 0x80) {
|
||
self.lastNeed = 0;
|
||
return '\ufffd';
|
||
}
|
||
if (self.lastNeed > 1 && buf.length > 1) {
|
||
if ((buf[1] & 0xC0) !== 0x80) {
|
||
self.lastNeed = 1;
|
||
return '\ufffd';
|
||
}
|
||
if (self.lastNeed > 2 && buf.length > 2) {
|
||
if ((buf[2] & 0xC0) !== 0x80) {
|
||
self.lastNeed = 2;
|
||
return '\ufffd';
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
|
||
function utf8FillLast(buf) {
|
||
var p = this.lastTotal - this.lastNeed;
|
||
var r = utf8CheckExtraBytes(this, buf, p);
|
||
if (r !== undefined) return r;
|
||
if (this.lastNeed <= buf.length) {
|
||
buf.copy(this.lastChar, p, 0, this.lastNeed);
|
||
return this.lastChar.toString(this.encoding, 0, this.lastTotal);
|
||
}
|
||
buf.copy(this.lastChar, p, 0, buf.length);
|
||
this.lastNeed -= buf.length;
|
||
}
|
||
|
||
// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a
|
||
// partial character, the character's bytes are buffered until the required
|
||
// number of bytes are available.
|
||
function utf8Text(buf, i) {
|
||
var total = utf8CheckIncomplete(this, buf, i);
|
||
if (!this.lastNeed) return buf.toString('utf8', i);
|
||
this.lastTotal = total;
|
||
var end = buf.length - (total - this.lastNeed);
|
||
buf.copy(this.lastChar, 0, end);
|
||
return buf.toString('utf8', i, end);
|
||
}
|
||
|
||
// For UTF-8, a replacement character is added when ending on a partial
|
||
// character.
|
||
function utf8End(buf) {
|
||
var r = buf && buf.length ? this.write(buf) : '';
|
||
if (this.lastNeed) return r + '\ufffd';
|
||
return r;
|
||
}
|
||
|
||
// UTF-16LE typically needs two bytes per character, but even if we have an even
|
||
// number of bytes available, we need to check if we end on a leading/high
|
||
// surrogate. In that case, we need to wait for the next two bytes in order to
|
||
// decode the last character properly.
|
||
function utf16Text(buf, i) {
|
||
if ((buf.length - i) % 2 === 0) {
|
||
var r = buf.toString('utf16le', i);
|
||
if (r) {
|
||
var c = r.charCodeAt(r.length - 1);
|
||
if (c >= 0xD800 && c <= 0xDBFF) {
|
||
this.lastNeed = 2;
|
||
this.lastTotal = 4;
|
||
this.lastChar[0] = buf[buf.length - 2];
|
||
this.lastChar[1] = buf[buf.length - 1];
|
||
return r.slice(0, -1);
|
||
}
|
||
}
|
||
return r;
|
||
}
|
||
this.lastNeed = 1;
|
||
this.lastTotal = 2;
|
||
this.lastChar[0] = buf[buf.length - 1];
|
||
return buf.toString('utf16le', i, buf.length - 1);
|
||
}
|
||
|
||
// For UTF-16LE we do not explicitly append special replacement characters if we
|
||
// end on a partial character, we simply let v8 handle that.
|
||
function utf16End(buf) {
|
||
var r = buf && buf.length ? this.write(buf) : '';
|
||
if (this.lastNeed) {
|
||
var end = this.lastTotal - this.lastNeed;
|
||
return r + this.lastChar.toString('utf16le', 0, end);
|
||
}
|
||
return r;
|
||
}
|
||
|
||
function base64Text(buf, i) {
|
||
var n = (buf.length - i) % 3;
|
||
if (n === 0) return buf.toString('base64', i);
|
||
this.lastNeed = 3 - n;
|
||
this.lastTotal = 3;
|
||
if (n === 1) {
|
||
this.lastChar[0] = buf[buf.length - 1];
|
||
} else {
|
||
this.lastChar[0] = buf[buf.length - 2];
|
||
this.lastChar[1] = buf[buf.length - 1];
|
||
}
|
||
return buf.toString('base64', i, buf.length - n);
|
||
}
|
||
|
||
function base64End(buf) {
|
||
var r = buf && buf.length ? this.write(buf) : '';
|
||
if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);
|
||
return r;
|
||
}
|
||
|
||
// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)
|
||
function simpleWrite(buf) {
|
||
return buf.toString(this.encoding);
|
||
}
|
||
|
||
function simpleEnd(buf) {
|
||
return buf && buf.length ? this.write(buf) : '';
|
||
}
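// Illustrative sketch (comment only): the decoder buffers trailing bytes of a
// multi-byte character so chunk boundaries never split a code point.
//
//   const decoder = new StringDecoder('utf8');
//   const euro = Buffer.from([0xe2, 0x82, 0xac]);   // '€' is three bytes in UTF-8
//   decoder.write(euro.slice(0, 1));                // ''  (incomplete, buffered)
//   decoder.write(euro.slice(1));                   // '€' (completed on next chunk)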
|
||
|
||
/***/ }),
|
||
|
||
/***/ 7110:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
var util = __nccwpck_require__(1669)
|
||
var bl = __nccwpck_require__(1527)
|
||
var headers = __nccwpck_require__(1629)
|
||
|
||
var Writable = __nccwpck_require__(2498).Writable
|
||
var PassThrough = __nccwpck_require__(2498).PassThrough
|
||
|
||
var noop = function () {}
|
||
|
||
var overflow = function (size) {
|
||
size &= 511
|
||
return size && 512 - size
|
||
}
|
||
|
||
var emptyStream = function (self, offset) {
|
||
var s = new Source(self, offset)
|
||
s.end()
|
||
return s
|
||
}
|
||
|
||
var mixinPax = function (header, pax) {
|
||
if (pax.path) header.name = pax.path
|
||
if (pax.linkpath) header.linkname = pax.linkpath
|
||
if (pax.size) header.size = parseInt(pax.size, 10)
|
||
header.pax = pax
|
||
return header
|
||
}
|
||
|
||
var Source = function (self, offset) {
|
||
this._parent = self
|
||
this.offset = offset
|
||
PassThrough.call(this, { autoDestroy: false })
|
||
}
|
||
|
||
util.inherits(Source, PassThrough)
|
||
|
||
Source.prototype.destroy = function (err) {
|
||
this._parent.destroy(err)
|
||
}
|
||
|
||
var Extract = function (opts) {
|
||
if (!(this instanceof Extract)) return new Extract(opts)
|
||
Writable.call(this, opts)
|
||
|
||
opts = opts || {}
|
||
|
||
this._offset = 0
|
||
this._buffer = bl()
|
||
this._missing = 0
|
||
this._partial = false
|
||
this._onparse = noop
|
||
this._header = null
|
||
this._stream = null
|
||
this._overflow = null
|
||
this._cb = null
|
||
this._locked = false
|
||
this._destroyed = false
|
||
this._pax = null
|
||
this._paxGlobal = null
|
||
this._gnuLongPath = null
|
||
this._gnuLongLinkPath = null
|
||
|
||
var self = this
|
||
var b = self._buffer
|
||
|
||
var oncontinue = function () {
|
||
self._continue()
|
||
}
|
||
|
||
var onunlock = function (err) {
|
||
self._locked = false
|
||
if (err) return self.destroy(err)
|
||
if (!self._stream) oncontinue()
|
||
}
|
||
|
||
var onstreamend = function () {
|
||
self._stream = null
|
||
var drain = overflow(self._header.size)
|
||
if (drain) self._parse(drain, ondrain)
|
||
else self._parse(512, onheader)
|
||
if (!self._locked) oncontinue()
|
||
}
|
||
|
||
var ondrain = function () {
|
||
self._buffer.consume(overflow(self._header.size))
|
||
self._parse(512, onheader)
|
||
oncontinue()
|
||
}
|
||
|
||
var onpaxglobalheader = function () {
|
||
var size = self._header.size
|
||
self._paxGlobal = headers.decodePax(b.slice(0, size))
|
||
b.consume(size)
|
||
onstreamend()
|
||
}
|
||
|
||
var onpaxheader = function () {
|
||
var size = self._header.size
|
||
self._pax = headers.decodePax(b.slice(0, size))
|
||
if (self._paxGlobal) self._pax = Object.assign({}, self._paxGlobal, self._pax)
|
||
b.consume(size)
|
||
onstreamend()
|
||
}
|
||
|
||
var ongnulongpath = function () {
|
||
var size = self._header.size
|
||
this._gnuLongPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding)
|
||
b.consume(size)
|
||
onstreamend()
|
||
}
|
||
|
||
var ongnulonglinkpath = function () {
|
||
var size = self._header.size
|
||
this._gnuLongLinkPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding)
|
||
b.consume(size)
|
||
onstreamend()
|
||
}
|
||
|
||
var onheader = function () {
|
||
var offset = self._offset
|
||
var header
|
||
try {
|
||
header = self._header = headers.decode(b.slice(0, 512), opts.filenameEncoding, opts.allowUnknownFormat)
|
||
} catch (err) {
|
||
self.emit('error', err)
|
||
}
|
||
b.consume(512)
|
||
|
||
if (!header) {
|
||
self._parse(512, onheader)
|
||
oncontinue()
|
||
return
|
||
}
|
||
if (header.type === 'gnu-long-path') {
|
||
self._parse(header.size, ongnulongpath)
|
||
oncontinue()
|
||
return
|
||
}
|
||
if (header.type === 'gnu-long-link-path') {
|
||
self._parse(header.size, ongnulonglinkpath)
|
||
oncontinue()
|
||
return
|
||
}
|
||
if (header.type === 'pax-global-header') {
|
||
self._parse(header.size, onpaxglobalheader)
|
||
oncontinue()
|
||
return
|
||
}
|
||
if (header.type === 'pax-header') {
|
||
self._parse(header.size, onpaxheader)
|
||
oncontinue()
|
||
return
|
||
}
|
||
|
||
if (self._gnuLongPath) {
|
||
header.name = self._gnuLongPath
|
||
self._gnuLongPath = null
|
||
}
|
||
|
||
if (self._gnuLongLinkPath) {
|
||
header.linkname = self._gnuLongLinkPath
|
||
self._gnuLongLinkPath = null
|
||
}
|
||
|
||
if (self._pax) {
|
||
self._header = header = mixinPax(header, self._pax)
|
||
self._pax = null
|
||
}
|
||
|
||
self._locked = true
|
||
|
||
if (!header.size || header.type === 'directory') {
|
||
self._parse(512, onheader)
|
||
self.emit('entry', header, emptyStream(self, offset), onunlock)
|
||
return
|
||
}
|
||
|
||
self._stream = new Source(self, offset)
|
||
|
||
self.emit('entry', header, self._stream, onunlock)
|
||
self._parse(header.size, onstreamend)
|
||
oncontinue()
|
||
}
|
||
|
||
this._onheader = onheader
|
||
this._parse(512, onheader)
|
||
}
|
||
|
||
util.inherits(Extract, Writable)
|
||
|
||
Extract.prototype.destroy = function (err) {
|
||
if (this._destroyed) return
|
||
this._destroyed = true
|
||
|
||
if (err) this.emit('error', err)
|
||
this.emit('close')
|
||
if (this._stream) this._stream.emit('close')
|
||
}
|
||
|
||
Extract.prototype._parse = function (size, onparse) {
|
||
if (this._destroyed) return
|
||
this._offset += size
|
||
this._missing = size
|
||
if (onparse === this._onheader) this._partial = false
|
||
this._onparse = onparse
|
||
}
|
||
|
||
Extract.prototype._continue = function () {
|
||
if (this._destroyed) return
|
||
var cb = this._cb
|
||
this._cb = noop
|
||
if (this._overflow) this._write(this._overflow, undefined, cb)
|
||
else cb()
|
||
}
|
||
|
||
Extract.prototype._write = function (data, enc, cb) {
|
||
if (this._destroyed) return
|
||
|
||
var s = this._stream
|
||
var b = this._buffer
|
||
var missing = this._missing
|
||
if (data.length) this._partial = true
|
||
|
||
// we do not reach end-of-chunk now. just forward it
|
||
|
||
if (data.length < missing) {
|
||
this._missing -= data.length
|
||
this._overflow = null
|
||
if (s) return s.write(data, cb)
|
||
b.append(data)
|
||
return cb()
|
||
}
|
||
|
||
// end-of-chunk. the parser should call cb.
|
||
|
||
this._cb = cb
|
||
this._missing = 0
|
||
|
||
var overflow = null
|
||
if (data.length > missing) {
|
||
overflow = data.slice(missing)
|
||
data = data.slice(0, missing)
|
||
}
|
||
|
||
if (s) s.end(data)
|
||
else b.append(data)
|
||
|
||
this._overflow = overflow
|
||
this._onparse()
|
||
}
|
||
|
||
Extract.prototype._final = function (cb) {
|
||
if (this._partial) return this.destroy(new Error('Unexpected end of data'))
|
||
cb()
|
||
}
|
||
|
||
module.exports = Extract
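// Usage sketch (comment only; `fs` is assumed to be required elsewhere). The
// Extract writable emits one 'entry' per tar record; each entry stream must be
// drained before calling next().
//
//   const extract = Extract();
//   extract.on('entry', (header, stream, next) => {
//     console.log(header.name, header.size);
//     stream.on('end', next);
//     stream.resume(); // discard the body
//   });
//   fs.createReadStream('archive.tar').pipe(extract);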
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 1629:
|
||
/***/ ((__unused_webpack_module, exports) => {
|
||
|
||
var alloc = Buffer.alloc
|
||
|
||
var ZEROS = '0000000000000000000'
|
||
var SEVENS = '7777777777777777777'
|
||
var ZERO_OFFSET = '0'.charCodeAt(0)
|
||
var USTAR_MAGIC = Buffer.from('ustar\x00', 'binary')
|
||
var USTAR_VER = Buffer.from('00', 'binary')
|
||
var GNU_MAGIC = Buffer.from('ustar\x20', 'binary')
|
||
var GNU_VER = Buffer.from('\x20\x00', 'binary')
|
||
var MASK = parseInt('7777', 8)
|
||
var MAGIC_OFFSET = 257
|
||
var VERSION_OFFSET = 263
|
||
|
||
var clamp = function (index, len, defaultValue) {
|
||
if (typeof index !== 'number') return defaultValue
|
||
index = ~~index // Coerce to integer.
|
||
if (index >= len) return len
|
||
if (index >= 0) return index
|
||
index += len
|
||
if (index >= 0) return index
|
||
return 0
|
||
}
|
||
|
||
var toType = function (flag) {
|
||
switch (flag) {
|
||
case 0:
|
||
return 'file'
|
||
case 1:
|
||
return 'link'
|
||
case 2:
|
||
return 'symlink'
|
||
case 3:
|
||
return 'character-device'
|
||
case 4:
|
||
return 'block-device'
|
||
case 5:
|
||
return 'directory'
|
||
case 6:
|
||
return 'fifo'
|
||
case 7:
|
||
return 'contiguous-file'
|
||
case 72:
|
||
return 'pax-header'
|
||
case 55:
|
||
return 'pax-global-header'
|
||
case 27:
|
||
return 'gnu-long-link-path'
|
||
case 28:
|
||
case 30:
|
||
return 'gnu-long-path'
|
||
}
|
||
|
||
return null
|
||
}
|
||
|
||
var toTypeflag = function (flag) {
|
||
switch (flag) {
|
||
case 'file':
|
||
return 0
|
||
case 'link':
|
||
return 1
|
||
case 'symlink':
|
||
return 2
|
||
case 'character-device':
|
||
return 3
|
||
case 'block-device':
|
||
return 4
|
||
case 'directory':
|
||
return 5
|
||
case 'fifo':
|
||
return 6
|
||
case 'contiguous-file':
|
||
return 7
|
||
case 'pax-header':
|
||
return 72
|
||
}
|
||
|
||
return 0
|
||
}
|
||
|
||
var indexOf = function (block, num, offset, end) {
|
||
for (; offset < end; offset++) {
|
||
if (block[offset] === num) return offset
|
||
}
|
||
return end
|
||
}
|
||
|
||
var cksum = function (block) {
|
||
var sum = 8 * 32
|
||
for (var i = 0; i < 148; i++) sum += block[i]
|
||
for (var j = 156; j < 512; j++) sum += block[j]
|
||
return sum
|
||
}
|
||
|
||
var encodeOct = function (val, n) {
|
||
val = val.toString(8)
|
||
if (val.length > n) return SEVENS.slice(0, n) + ' '
|
||
else return ZEROS.slice(0, n - val.length) + val + ' '
|
||
}
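// Illustrative sketch (comment only): numeric header fields are stored as
// zero-padded, space-terminated octal, e.g. encodeOct(parseInt('755', 8), 6)
// returns '000755 '.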
|
||
|
||
/* Copied from the node-tar repo and modified to meet
|
||
* tar-stream coding standard.
|
||
*
|
||
* Source: https://github.com/npm/node-tar/blob/51b6627a1f357d2eb433e7378e5f05e83b7aa6cd/lib/header.js#L349
|
||
*/
|
||
function parse256 (buf) {
|
||
// first byte MUST be either 80 or FF
|
||
// 80 for positive, FF for 2's comp
|
||
var positive
|
||
if (buf[0] === 0x80) positive = true
|
||
else if (buf[0] === 0xFF) positive = false
|
||
else return null
|
||
|
||
// build up a base-256 tuple from the least sig to the highest
|
||
var tuple = []
|
||
for (var i = buf.length - 1; i > 0; i--) {
|
||
var byte = buf[i]
|
||
if (positive) tuple.push(byte)
|
||
else tuple.push(0xFF - byte)
|
||
}
|
||
|
||
var sum = 0
|
||
var l = tuple.length
|
||
for (i = 0; i < l; i++) {
|
||
sum += tuple[i] * Math.pow(256, i)
|
||
}
|
||
|
||
return positive ? sum : -1 * sum
|
||
}
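// Illustrative sketch (comment only): a 0x80 marker byte introduces a positive
// base-256 number, so parse256(Buffer.from([0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0]))
// returns 256.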
|
||
|
||
var decodeOct = function (val, offset, length) {
|
||
val = val.slice(offset, offset + length)
|
||
offset = 0
|
||
|
||
// If prefixed with 0x80 then parse as a base-256 integer
|
||
if (val[offset] & 0x80) {
|
||
return parse256(val)
|
||
} else {
|
||
// Older versions of tar can prefix with spaces
|
||
while (offset < val.length && val[offset] === 32) offset++
|
||
var end = clamp(indexOf(val, 32, offset, val.length), val.length, val.length)
|
||
while (offset < end && val[offset] === 0) offset++
|
||
if (end === offset) return 0
|
||
return parseInt(val.slice(offset, end).toString(), 8)
|
||
}
|
||
}
|
||
|
||
var decodeStr = function (val, offset, length, encoding) {
|
||
return val.slice(offset, indexOf(val, 0, offset, offset + length)).toString(encoding)
|
||
}
|
||
|
||
var addLength = function (str) {
|
||
var len = Buffer.byteLength(str)
|
||
var digits = Math.floor(Math.log(len) / Math.log(10)) + 1
|
||
if (len + digits >= Math.pow(10, digits)) digits++
|
||
|
||
return (len + digits) + str
|
||
}
|
||
|
||
exports.decodeLongPath = function (buf, encoding) {
|
||
return decodeStr(buf, 0, buf.length, encoding)
|
||
}
|
||
|
||
exports.encodePax = function (opts) { // TODO: encode more stuff in pax
|
||
var result = ''
|
||
if (opts.name) result += addLength(' path=' + opts.name + '\n')
|
||
if (opts.linkname) result += addLength(' linkpath=' + opts.linkname + '\n')
|
||
var pax = opts.pax
|
||
if (pax) {
|
||
for (var key in pax) {
|
||
result += addLength(' ' + key + '=' + pax[key] + '\n')
|
||
}
|
||
}
|
||
return Buffer.from(result)
|
||
}
|
||
|
||
exports.decodePax = function (buf) {
|
||
var result = {}
|
||
|
||
while (buf.length) {
|
||
var i = 0
|
||
while (i < buf.length && buf[i] !== 32) i++
|
||
var len = parseInt(buf.slice(0, i).toString(), 10)
|
||
if (!len) return result
|
||
|
||
var b = buf.slice(i + 1, len - 1).toString()
|
||
var keyIndex = b.indexOf('=')
|
||
if (keyIndex === -1) return result
|
||
result[b.slice(0, keyIndex)] = b.slice(keyIndex + 1)
|
||
|
||
buf = buf.slice(len)
|
||
}
|
||
|
||
return result
|
||
}
|
||
|
||
exports.encode = function (opts) {
|
||
var buf = alloc(512)
|
||
var name = opts.name
|
||
var prefix = ''
|
||
|
||
if (opts.typeflag === 5 && name[name.length - 1] !== '/') name += '/'
|
||
if (Buffer.byteLength(name) !== name.length) return null // utf-8
|
||
|
||
while (Buffer.byteLength(name) > 100) {
|
||
var i = name.indexOf('/')
|
||
if (i === -1) return null
|
||
prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i)
|
||
name = name.slice(i + 1)
|
||
}
|
||
|
||
if (Buffer.byteLength(name) > 100 || Buffer.byteLength(prefix) > 155) return null
|
||
if (opts.linkname && Buffer.byteLength(opts.linkname) > 100) return null
|
||
|
||
buf.write(name)
|
||
buf.write(encodeOct(opts.mode & MASK, 6), 100)
|
||
buf.write(encodeOct(opts.uid, 6), 108)
|
||
buf.write(encodeOct(opts.gid, 6), 116)
|
||
buf.write(encodeOct(opts.size, 11), 124)
|
||
buf.write(encodeOct((opts.mtime.getTime() / 1000) | 0, 11), 136)
|
||
|
||
buf[156] = ZERO_OFFSET + toTypeflag(opts.type)
|
||
|
||
if (opts.linkname) buf.write(opts.linkname, 157)
|
||
|
||
USTAR_MAGIC.copy(buf, MAGIC_OFFSET)
|
||
USTAR_VER.copy(buf, VERSION_OFFSET)
|
||
if (opts.uname) buf.write(opts.uname, 265)
|
||
if (opts.gname) buf.write(opts.gname, 297)
|
||
buf.write(encodeOct(opts.devmajor || 0, 6), 329)
|
||
buf.write(encodeOct(opts.devminor || 0, 6), 337)
|
||
|
||
if (prefix) buf.write(prefix, 345)
|
||
|
||
buf.write(encodeOct(cksum(buf), 6), 148)
|
||
|
||
return buf
|
||
}
|
||
|
||
exports.decode = function (buf, filenameEncoding, allowUnknownFormat) {
|
||
var typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET
|
||
|
||
var name = decodeStr(buf, 0, 100, filenameEncoding)
|
||
var mode = decodeOct(buf, 100, 8)
|
||
var uid = decodeOct(buf, 108, 8)
|
||
var gid = decodeOct(buf, 116, 8)
|
||
var size = decodeOct(buf, 124, 12)
|
||
var mtime = decodeOct(buf, 136, 12)
|
||
var type = toType(typeflag)
|
||
var linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100, filenameEncoding)
|
||
var uname = decodeStr(buf, 265, 32)
|
||
var gname = decodeStr(buf, 297, 32)
|
||
var devmajor = decodeOct(buf, 329, 8)
|
||
var devminor = decodeOct(buf, 337, 8)
|
||
|
||
var c = cksum(buf)
|
||
|
||
// checksum is still initial value if header was null.
|
||
if (c === 8 * 32) return null
|
||
|
||
// valid checksum
|
||
if (c !== decodeOct(buf, 148, 8)) throw new Error('Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?')
|
||
|
||
if (USTAR_MAGIC.compare(buf, MAGIC_OFFSET, MAGIC_OFFSET + 6) === 0) {
|
||
// ustar (posix) format.
|
||
// prepend prefix, if present.
|
||
if (buf[345]) name = decodeStr(buf, 345, 155, filenameEncoding) + '/' + name
|
||
} else if (GNU_MAGIC.compare(buf, MAGIC_OFFSET, MAGIC_OFFSET + 6) === 0 &&
|
||
GNU_VER.compare(buf, VERSION_OFFSET, VERSION_OFFSET + 2) === 0) {
|
||
// 'gnu'/'oldgnu' format. Similar to ustar, but has support for incremental and
|
||
// multi-volume tarballs.
|
||
} else {
|
||
if (!allowUnknownFormat) {
|
||
throw new Error('Invalid tar header: unknown format.')
|
||
}
|
||
}
|
||
|
||
// to support old tar versions that use trailing / to indicate dirs
|
||
if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5
|
||
|
||
return {
|
||
name,
|
||
mode,
|
||
uid,
|
||
gid,
|
||
size,
|
||
mtime: new Date(1000 * mtime),
|
||
type,
|
||
linkname,
|
||
uname,
|
||
gname,
|
||
devmajor,
|
||
devminor
|
||
}
|
||
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 4703:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
exports.extract = __nccwpck_require__(7110)
|
||
exports.pack = __nccwpck_require__(394)
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 394:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
var constants = __nccwpck_require__(5596)
|
||
var eos = __nccwpck_require__(3860)
|
||
var inherits = __nccwpck_require__(3753)
|
||
var alloc = Buffer.alloc
|
||
|
||
var Readable = __nccwpck_require__(2498).Readable
|
||
var Writable = __nccwpck_require__(2498).Writable
|
||
var StringDecoder = __nccwpck_require__(4304).StringDecoder
|
||
|
||
var headers = __nccwpck_require__(1629)
|
||
|
||
var DMODE = parseInt('755', 8)
|
||
var FMODE = parseInt('644', 8)
|
||
|
||
var END_OF_TAR = alloc(1024)
|
||
|
||
var noop = function () {}
|
||
|
||
var overflow = function (self, size) {
|
||
size &= 511
|
||
if (size) self.push(END_OF_TAR.slice(0, 512 - size))
|
||
}
|
||
|
||
function modeToType (mode) {
|
||
switch (mode & constants.S_IFMT) {
|
||
case constants.S_IFBLK: return 'block-device'
|
||
case constants.S_IFCHR: return 'character-device'
|
||
case constants.S_IFDIR: return 'directory'
|
||
case constants.S_IFIFO: return 'fifo'
|
||
case constants.S_IFLNK: return 'symlink'
|
||
}
|
||
|
||
return 'file'
|
||
}
|
||
|
||
var Sink = function (to) {
|
||
Writable.call(this)
|
||
this.written = 0
|
||
this._to = to
|
||
this._destroyed = false
|
||
}
|
||
|
||
inherits(Sink, Writable)
|
||
|
||
Sink.prototype._write = function (data, enc, cb) {
|
||
this.written += data.length
|
||
if (this._to.push(data)) return cb()
|
||
this._to._drain = cb
|
||
}
|
||
|
||
Sink.prototype.destroy = function () {
|
||
if (this._destroyed) return
|
||
this._destroyed = true
|
||
this.emit('close')
|
||
}
|
||
|
||
var LinkSink = function () {
|
||
Writable.call(this)
|
||
this.linkname = ''
|
||
this._decoder = new StringDecoder('utf-8')
|
||
this._destroyed = false
|
||
}
|
||
|
||
inherits(LinkSink, Writable)
|
||
|
||
LinkSink.prototype._write = function (data, enc, cb) {
|
||
this.linkname += this._decoder.write(data)
|
||
cb()
|
||
}
|
||
|
||
LinkSink.prototype.destroy = function () {
|
||
if (this._destroyed) return
|
||
this._destroyed = true
|
||
this.emit('close')
|
||
}
|
||
|
||
var Void = function () {
|
||
Writable.call(this)
|
||
this._destroyed = false
|
||
}
|
||
|
||
inherits(Void, Writable)
|
||
|
||
Void.prototype._write = function (data, enc, cb) {
|
||
cb(new Error('No body allowed for this entry'))
|
||
}
|
||
|
||
Void.prototype.destroy = function () {
|
||
if (this._destroyed) return
|
||
this._destroyed = true
|
||
this.emit('close')
|
||
}
|
||
|
||
var Pack = function (opts) {
|
||
if (!(this instanceof Pack)) return new Pack(opts)
|
||
Readable.call(this, opts)
|
||
|
||
this._drain = noop
|
||
this._finalized = false
|
||
this._finalizing = false
|
||
this._destroyed = false
|
||
this._stream = null
|
||
}
|
||
|
||
inherits(Pack, Readable)
|
||
|
||
Pack.prototype.entry = function (header, buffer, callback) {
|
||
if (this._stream) throw new Error('already piping an entry')
|
||
if (this._finalized || this._destroyed) return
|
||
|
||
if (typeof buffer === 'function') {
|
||
callback = buffer
|
||
buffer = null
|
||
}
|
||
|
||
if (!callback) callback = noop
|
||
|
||
var self = this
|
||
|
||
if (!header.size || header.type === 'symlink') header.size = 0
|
||
if (!header.type) header.type = modeToType(header.mode)
|
||
if (!header.mode) header.mode = header.type === 'directory' ? DMODE : FMODE
|
||
if (!header.uid) header.uid = 0
|
||
if (!header.gid) header.gid = 0
|
||
if (!header.mtime) header.mtime = new Date()
|
||
|
||
if (typeof buffer === 'string') buffer = Buffer.from(buffer)
|
||
if (Buffer.isBuffer(buffer)) {
|
||
header.size = buffer.length
|
||
this._encode(header)
|
||
var ok = this.push(buffer)
|
||
overflow(self, header.size)
|
||
if (ok) process.nextTick(callback)
|
||
else this._drain = callback
|
||
return new Void()
|
||
}
|
||
|
||
if (header.type === 'symlink' && !header.linkname) {
|
||
var linkSink = new LinkSink()
|
||
eos(linkSink, function (err) {
|
||
if (err) { // stream was closed
|
||
self.destroy()
|
||
return callback(err)
|
||
}
|
||
|
||
header.linkname = linkSink.linkname
|
||
self._encode(header)
|
||
callback()
|
||
})
|
||
|
||
return linkSink
|
||
}
|
||
|
||
this._encode(header)
|
||
|
||
if (header.type !== 'file' && header.type !== 'contiguous-file') {
|
||
process.nextTick(callback)
|
||
return new Void()
|
||
}
|
||
|
||
var sink = new Sink(this)
|
||
|
||
this._stream = sink
|
||
|
||
eos(sink, function (err) {
|
||
self._stream = null
|
||
|
||
if (err) { // stream was closed
|
||
self.destroy()
|
||
return callback(err)
|
||
}
|
||
|
||
if (sink.written !== header.size) { // corrupting tar
|
||
self.destroy()
|
||
return callback(new Error('size mismatch'))
|
||
}
|
||
|
||
overflow(self, header.size)
|
||
if (self._finalizing) self.finalize()
|
||
callback()
|
||
})
|
||
|
||
return sink
|
||
}
|
||
|
||
Pack.prototype.finalize = function () {
|
||
if (this._stream) {
|
||
this._finalizing = true
|
||
return
|
||
}
|
||
|
||
if (this._finalized) return
|
||
this._finalized = true
|
||
this.push(END_OF_TAR)
|
||
this.push(null)
|
||
}
|
||
|
||
Pack.prototype.destroy = function (err) {
|
||
if (this._destroyed) return
|
||
this._destroyed = true
|
||
|
||
if (err) this.emit('error', err)
|
||
this.emit('close')
|
||
if (this._stream && this._stream.destroy) this._stream.destroy()
|
||
}
|
||
|
||
Pack.prototype._encode = function (header) {
|
||
if (!header.pax) {
|
||
var buf = headers.encode(header)
|
||
if (buf) {
|
||
this.push(buf)
|
||
return
|
||
}
|
||
}
|
||
this._encodePax(header)
|
||
}
|
||
|
||
Pack.prototype._encodePax = function (header) {
|
||
var paxHeader = headers.encodePax({
|
||
name: header.name,
|
||
linkname: header.linkname,
|
||
pax: header.pax
|
||
})
|
||
|
||
var newHeader = {
|
||
name: 'PaxHeader',
|
||
mode: header.mode,
|
||
uid: header.uid,
|
||
gid: header.gid,
|
||
size: paxHeader.length,
|
||
mtime: header.mtime,
|
||
type: 'pax-header',
|
||
linkname: header.linkname && 'PaxHeader',
|
||
uname: header.uname,
|
||
gname: header.gname,
|
||
devmajor: header.devmajor,
|
||
devminor: header.devminor
|
||
}
|
||
|
||
this.push(headers.encode(newHeader))
|
||
this.push(paxHeader)
|
||
overflow(this, paxHeader.length)
|
||
|
||
newHeader.size = header.size
|
||
newHeader.type = header.type
|
||
this.push(headers.encode(newHeader))
|
||
}
|
||
|
||
Pack.prototype._read = function (n) {
|
||
var drain = this._drain
|
||
this._drain = noop
|
||
drain()
|
||
}
|
||
|
||
module.exports = Pack
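// Usage sketch (comment only; `fs` is assumed to be required elsewhere). The
// Pack readable produces a tar byte stream from entries added via entry();
// finalize() appends the trailing end-of-archive blocks.
//
//   const pack = Pack();
//   pack.entry({ name: 'hello.txt' }, 'hello world');
//   pack.finalize();
//   pack.pipe(fs.createWriteStream('out.tar'));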
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 1971:
|
||
/***/ (function(module) {
|
||
|
||
(function (name, context, definition) {
|
||
if ( true && module.exports) module.exports = definition();
|
||
else if (typeof define === 'function' && define.amd) define(definition);
|
||
else context[name] = definition();
|
||
})('urljoin', this, function () {
|
||
|
||
function normalize (strArray) {
|
||
var resultArray = [];
|
||
if (strArray.length === 0) { return ''; }
|
||
|
||
if (typeof strArray[0] !== 'string') {
|
||
throw new TypeError('Url must be a string. Received ' + strArray[0]);
|
||
}
|
||
|
||
// If the first part is a plain protocol, we combine it with the next part.
|
||
if (strArray[0].match(/^[^/:]+:\/*$/) && strArray.length > 1) {
|
||
var first = strArray.shift();
|
||
strArray[0] = first + strArray[0];
|
||
}
|
||
|
||
// There must be two or three slashes in the file protocol, two slashes in anything else.
|
||
if (strArray[0].match(/^file:\/\/\//)) {
|
||
strArray[0] = strArray[0].replace(/^([^/:]+):\/*/, '$1:///');
|
||
} else {
|
||
strArray[0] = strArray[0].replace(/^([^/:]+):\/*/, '$1://');
|
||
}
|
||
|
||
for (var i = 0; i < strArray.length; i++) {
|
||
var component = strArray[i];
|
||
|
||
if (typeof component !== 'string') {
|
||
throw new TypeError('Url must be a string. Received ' + component);
|
||
}
|
||
|
||
if (component === '') { continue; }
|
||
|
||
if (i > 0) {
|
||
// Removing the starting slashes for each component but the first.
|
||
component = component.replace(/^[\/]+/, '');
|
||
}
|
||
if (i < strArray.length - 1) {
|
||
// Removing the ending slashes for each component but the last.
|
||
component = component.replace(/[\/]+$/, '');
|
||
} else {
|
||
// For the last component we will combine multiple slashes to a single one.
|
||
component = component.replace(/[\/]+$/, '/');
|
||
}
|
||
|
||
resultArray.push(component);
|
||
|
||
}
|
||
|
||
var str = resultArray.join('/');
|
||
// Each input component is now separated by a single slash except the possible first plain protocol part.
|
||
|
||
// remove trailing slash before parameters or hash
|
||
str = str.replace(/\/(\?|&|#[^!])/g, '$1');
|
||
|
||
// replace ? in parameters with &
|
||
var parts = str.split('?');
|
||
str = parts.shift() + (parts.length > 0 ? '?': '') + parts.join('&');
|
||
|
||
return str;
|
||
}
|
||
|
||
return function () {
|
||
var input;
|
||
|
||
if (typeof arguments[0] === 'object') {
|
||
input = arguments[0];
|
||
} else {
|
||
input = [].slice.call(arguments);
|
||
}
|
||
|
||
return normalize(input);
|
||
};
|
||
|
||
});
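// Illustrative sketch (comment only): normalize() collapses duplicate slashes
// and merges query strings, e.g.
//
//   urljoin('https://example.com/', '/remote.php/', '//dav/files', '?a=1', '?b=2')
//   // -> 'https://example.com/remote.php/dav/files?a=1&b=2'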
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 1785:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
var required = __nccwpck_require__(3129)
|
||
, qs = __nccwpck_require__(938)
|
||
, slashes = /^[A-Za-z][A-Za-z0-9+-.]*:[\\/]+/
|
||
, protocolre = /^([a-z][a-z0-9.+-]*:)?([\\/]{1,})?([\S\s]*)/i
|
||
, whitespace = '[\\x09\\x0A\\x0B\\x0C\\x0D\\x20\\xA0\\u1680\\u180E\\u2000\\u2001\\u2002\\u2003\\u2004\\u2005\\u2006\\u2007\\u2008\\u2009\\u200A\\u202F\\u205F\\u3000\\u2028\\u2029\\uFEFF]'
|
||
, left = new RegExp('^'+ whitespace +'+');
|
||
|
||
/**
|
||
* Trim a given string.
|
||
*
|
||
* @param {String} str String to trim.
|
||
* @public
|
||
*/
|
||
function trimLeft(str) {
|
||
return (str ? str : '').toString().replace(left, '');
|
||
}
|
||
|
||
/**
|
||
* These are the parse rules for the URL parser, it informs the parser
|
||
* about:
|
||
*
|
||
* 0. The char it Needs to parse, if it's a string it should be done using
|
||
* indexOf, RegExp using exec and NaN means set as current value.
|
||
* 1. The property we should set when parsing this value.
|
||
* 2. Indication if it's backwards or forward parsing, when set as number it's
|
||
* the value of extra chars that should be split off.
|
||
* 3. Inherit from location if non existing in the parser.
|
||
* 4. `toLowerCase` the resulting value.
|
||
*/
|
||
var rules = [
|
||
['#', 'hash'], // Extract from the back.
|
||
['?', 'query'], // Extract from the back.
|
||
function sanitize(address) { // Sanitize what is left of the address
|
||
return address.replace('\\', '/');
|
||
},
|
||
['/', 'pathname'], // Extract from the back.
|
||
['@', 'auth', 1], // Extract from the front.
|
||
[NaN, 'host', undefined, 1, 1], // Set left over value.
|
||
[/:(\d+)$/, 'port', undefined, 1], // RegExp the back.
|
||
[NaN, 'hostname', undefined, 1, 1] // Set left over.
|
||
];
|
||
|
||
/**
|
||
* These properties should not be copied or inherited from. This is only needed
|
||
* for all non blob URL's as a blob URL does not include a hash, only the
|
||
* origin.
|
||
*
|
||
* @type {Object}
|
||
* @private
|
||
*/
|
||
var ignore = { hash: 1, query: 1 };
|
||
|
||
/**
|
||
* The location object differs when your code is loaded through a normal page,
|
||
* Worker or through a worker using a blob. And with the blobble begins the
|
||
* trouble as the location object will contain the URL of the blob, not the
|
||
* location of the page where our code is loaded in. The actual origin is
|
||
* encoded in the `pathname` so we can thankfully generate a good "default"
|
||
* location from it so we can generate proper relative URL's again.
|
||
*
|
||
* @param {Object|String} loc Optional default location object.
|
||
* @returns {Object} lolcation object.
|
||
* @public
|
||
*/
|
||
function lolcation(loc) {
|
||
var globalVar;
|
||
|
||
if (typeof window !== 'undefined') globalVar = window;
|
||
else if (typeof global !== 'undefined') globalVar = global;
|
||
else if (typeof self !== 'undefined') globalVar = self;
|
||
else globalVar = {};
|
||
|
||
var location = globalVar.location || {};
|
||
loc = loc || location;
|
||
|
||
var finaldestination = {}
|
||
, type = typeof loc
|
||
, key;
|
||
|
||
if ('blob:' === loc.protocol) {
|
||
finaldestination = new Url(unescape(loc.pathname), {});
|
||
} else if ('string' === type) {
|
||
finaldestination = new Url(loc, {});
|
||
for (key in ignore) delete finaldestination[key];
|
||
} else if ('object' === type) {
|
||
for (key in loc) {
|
||
if (key in ignore) continue;
|
||
finaldestination[key] = loc[key];
|
||
}
|
||
|
||
if (finaldestination.slashes === undefined) {
|
||
finaldestination.slashes = slashes.test(loc.href);
|
||
}
|
||
}
|
||
|
||
return finaldestination;
|
||
}
|
||
|
||
/**
|
||
* @typedef ProtocolExtract
|
||
* @type Object
|
||
* @property {String} protocol Protocol matched in the URL, in lowercase.
|
||
* @property {Boolean} slashes `true` if protocol is followed by "//", else `false`.
|
||
* @property {String} rest Rest of the URL that is not part of the protocol.
|
||
*/
|
||
|
||
/**
|
||
* Extract protocol information from a URL with/without double slash ("//").
|
||
*
|
||
* @param {String} address URL we want to extract from.
|
||
* @return {ProtocolExtract} Extracted information.
|
||
* @private
|
||
*/
|
||
function extractProtocol(address) {
|
||
address = trimLeft(address);
|
||
|
||
var match = protocolre.exec(address)
|
||
, protocol = match[1] ? match[1].toLowerCase() : ''
|
||
, slashes = !!(match[2] && match[2].length >= 2)
|
||
, rest = match[2] && match[2].length === 1 ? '/' + match[3] : match[3];
|
||
|
||
return {
|
||
protocol: protocol,
|
||
slashes: slashes,
|
||
rest: rest
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Resolve a relative URL pathname against a base URL pathname.
|
||
*
|
||
* @param {String} relative Pathname of the relative URL.
|
||
* @param {String} base Pathname of the base URL.
|
||
* @return {String} Resolved pathname.
|
||
* @private
|
||
*/
|
||
function resolve(relative, base) {
|
||
if (relative === '') return base;
|
||
|
||
var path = (base || '/').split('/').slice(0, -1).concat(relative.split('/'))
|
||
, i = path.length
|
||
, last = path[i - 1]
|
||
, unshift = false
|
||
, up = 0;
|
||
|
||
while (i--) {
|
||
if (path[i] === '.') {
|
||
path.splice(i, 1);
|
||
} else if (path[i] === '..') {
|
||
path.splice(i, 1);
|
||
up++;
|
||
} else if (up) {
|
||
if (i === 0) unshift = true;
|
||
path.splice(i, 1);
|
||
up--;
|
||
}
|
||
}
|
||
|
||
if (unshift) path.unshift('');
|
||
if (last === '.' || last === '..') path.push('');
|
||
|
||
return path.join('/');
|
||
}
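// Illustrative sketch (comment only): resolve() performs dot-segment resolution
// against the base pathname, e.g. resolve('../c/d', '/a/b/e') === '/a/c/d'.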
|
||
|
||
/**
|
||
* The actual URL instance. Instead of returning an object we've opted-in to
|
||
* create an actual constructor as it's much more memory efficient and
|
||
* faster and it pleases my OCD.
|
||
*
|
||
* It is worth noting that we should not use `URL` as class name to prevent
|
||
* clashes with the global URL instance that got introduced in browsers.
|
||
*
|
||
* @constructor
|
||
* @param {String} address URL we want to parse.
|
||
* @param {Object|String} [location] Location defaults for relative paths.
|
||
* @param {Boolean|Function} [parser] Parser for the query string.
|
||
* @private
|
||
*/
|
||
function Url(address, location, parser) {
|
||
address = trimLeft(address);
|
||
|
||
if (!(this instanceof Url)) {
|
||
return new Url(address, location, parser);
|
||
}
|
||
|
||
var relative, extracted, parse, instruction, index, key
|
||
, instructions = rules.slice()
|
||
, type = typeof location
|
||
, url = this
|
||
, i = 0;
|
||
|
||
//
|
||
// The following if statements allow this module to be compatible with two
// different APIs:
|
||
//
|
||
// 1. Node.js's `url.parse` api which accepts a URL, boolean as arguments
|
||
// where the boolean indicates that the query string should also be parsed.
|
||
//
|
||
// 2. The `URL` interface of the browser which accepts a URL, object as
|
||
// arguments. The supplied object will be used as default values / fall-back
|
||
// for relative paths.
|
||
//
|
||
if ('object' !== type && 'string' !== type) {
|
||
parser = location;
|
||
location = null;
|
||
}
|
||
|
||
if (parser && 'function' !== typeof parser) parser = qs.parse;
|
||
|
||
location = lolcation(location);
|
||
|
||
//
|
||
// Extract protocol information before running the instructions.
|
||
//
|
||
extracted = extractProtocol(address || '');
|
||
relative = !extracted.protocol && !extracted.slashes;
|
||
url.slashes = extracted.slashes || relative && location.slashes;
|
||
url.protocol = extracted.protocol || location.protocol || '';
|
||
address = extracted.rest;
|
||
|
||
//
|
||
// When the authority component is absent the URL starts with a path
|
||
// component.
|
||
//
|
||
if (!extracted.slashes) instructions[3] = [/(.*)/, 'pathname'];
|
||
|
||
for (; i < instructions.length; i++) {
|
||
instruction = instructions[i];
|
||
|
||
if (typeof instruction === 'function') {
|
||
address = instruction(address);
|
||
continue;
|
||
}
|
||
|
||
parse = instruction[0];
|
||
key = instruction[1];
|
||
|
||
if (parse !== parse) {
|
||
url[key] = address;
|
||
} else if ('string' === typeof parse) {
|
||
if (~(index = address.indexOf(parse))) {
|
||
if ('number' === typeof instruction[2]) {
|
||
url[key] = address.slice(0, index);
|
||
address = address.slice(index + instruction[2]);
|
||
} else {
|
||
url[key] = address.slice(index);
|
||
address = address.slice(0, index);
|
||
}
|
||
}
|
||
} else if ((index = parse.exec(address))) {
|
||
url[key] = index[1];
|
||
address = address.slice(0, index.index);
|
||
}
|
||
|
||
url[key] = url[key] || (
|
||
relative && instruction[3] ? location[key] || '' : ''
|
||
);
|
||
|
||
//
|
||
// Hostname, host and protocol should be lowercased so they can be used to
|
||
// create a proper `origin`.
|
||
//
|
||
if (instruction[4]) url[key] = url[key].toLowerCase();
|
||
}
|
||
|
||
//
|
||
// Also parse the supplied query string in to an object. If we're supplied
|
||
// with a custom parser as function use that instead of the default build-in
|
||
// parser.
|
||
//
|
||
if (parser) url.query = parser(url.query);
|
||
|
||
//
|
||
// If the URL is relative, resolve the pathname against the base URL.
|
||
//
|
||
if (
|
||
relative
|
||
&& location.slashes
|
||
&& url.pathname.charAt(0) !== '/'
|
||
&& (url.pathname !== '' || location.pathname !== '')
|
||
) {
|
||
url.pathname = resolve(url.pathname, location.pathname);
|
||
}
|
||
|
||
//
|
||
// Default to a / for pathname if none exists. This normalizes the URL
|
||
// to always have a /
|
||
//
|
||
if (url.pathname.charAt(0) !== '/' && url.hostname) {
|
||
url.pathname = '/' + url.pathname;
|
||
}
|
||
|
||
//
|
||
// We should not add port numbers if they are already the default port number
// for a given protocol. As the host also contains the port number, we're going
// to override it with the hostname, which contains no port number.
|
||
//
|
||
if (!required(url.port, url.protocol)) {
|
||
url.host = url.hostname;
|
||
url.port = '';
|
||
}
|
||
|
||
//
|
||
// Parse down the `auth` for the username and password.
|
||
//
|
||
url.username = url.password = '';
|
||
if (url.auth) {
|
||
instruction = url.auth.split(':');
|
||
url.username = instruction[0] || '';
|
||
url.password = instruction[1] || '';
|
||
}
|
||
|
||
url.origin = url.protocol && url.host && url.protocol !== 'file:'
|
||
? url.protocol +'//'+ url.host
|
||
: 'null';
|
||
|
||
//
|
||
// The href is just the compiled result.
|
||
//
|
||
url.href = url.toString();
|
||
}
|
||
|
||
/**
|
||
* This is a convenience method for changing properties in the URL instance to
* ensure that they all propagate correctly.
|
||
*
|
||
* @param {String} part Property we need to adjust.
|
||
* @param {Mixed} value The newly assigned value.
|
||
* @param {Boolean|Function} fn When setting the query, it will be the function
|
||
* used to parse the query.
|
||
* When setting the protocol, double slash will be
|
||
* removed from the final url if it is true.
|
||
* @returns {URL} URL instance for chaining.
|
||
* @public
|
||
*/
|
||
function set(part, value, fn) {
|
||
var url = this;
|
||
|
||
switch (part) {
|
||
case 'query':
|
||
if ('string' === typeof value && value.length) {
|
||
value = (fn || qs.parse)(value);
|
||
}
|
||
|
||
url[part] = value;
|
||
break;
|
||
|
||
case 'port':
|
||
url[part] = value;
|
||
|
||
if (!required(value, url.protocol)) {
|
||
url.host = url.hostname;
|
||
url[part] = '';
|
||
} else if (value) {
|
||
url.host = url.hostname +':'+ value;
|
||
}
|
||
|
||
break;
|
||
|
||
case 'hostname':
|
||
url[part] = value;
|
||
|
||
if (url.port) value += ':'+ url.port;
|
||
url.host = value;
|
||
break;
|
||
|
||
case 'host':
|
||
url[part] = value;
|
||
|
||
if (/:\d+$/.test(value)) {
|
||
value = value.split(':');
|
||
url.port = value.pop();
|
||
url.hostname = value.join(':');
|
||
} else {
|
||
url.hostname = value;
|
||
url.port = '';
|
||
}
|
||
|
||
break;
|
||
|
||
case 'protocol':
|
||
url.protocol = value.toLowerCase();
|
||
url.slashes = !fn;
|
||
break;
|
||
|
||
case 'pathname':
|
||
case 'hash':
|
||
if (value) {
|
||
var char = part === 'pathname' ? '/' : '#';
|
||
url[part] = value.charAt(0) !== char ? char + value : value;
|
||
} else {
|
||
url[part] = value;
|
||
}
|
||
break;
|
||
|
||
default:
|
||
url[part] = value;
|
||
}
|
||
|
||
for (var i = 0; i < rules.length; i++) {
|
||
var ins = rules[i];
|
||
|
||
if (ins[4]) url[ins[1]] = url[ins[1]].toLowerCase();
|
||
}
|
||
|
||
url.origin = url.protocol && url.host && url.protocol !== 'file:'
|
||
? url.protocol +'//'+ url.host
|
||
: 'null';
|
||
|
||
url.href = url.toString();
|
||
|
||
return url;
|
||
}
|
||
|
||
/**
|
||
* Transform the properties back in to a valid and full URL string.
|
||
*
|
||
* @param {Function} stringify Optional query stringify function.
|
||
* @returns {String} Compiled version of the URL.
|
||
* @public
|
||
*/
|
||
function toString(stringify) {
|
||
if (!stringify || 'function' !== typeof stringify) stringify = qs.stringify;
|
||
|
||
var query
|
||
, url = this
|
||
, protocol = url.protocol;
|
||
|
||
if (protocol && protocol.charAt(protocol.length - 1) !== ':') protocol += ':';
|
||
|
||
var result = protocol + (url.slashes ? '//' : '');
|
||
|
||
if (url.username) {
|
||
result += url.username;
|
||
if (url.password) result += ':'+ url.password;
|
||
result += '@';
|
||
}
|
||
|
||
result += url.host + url.pathname;
|
||
|
||
query = 'object' === typeof url.query ? stringify(url.query) : url.query;
|
||
if (query) result += '?' !== query.charAt(0) ? '?'+ query : query;
|
||
|
||
if (url.hash) result += url.hash;
|
||
|
||
return result;
|
||
}
|
||
|
||
Url.prototype = { set: set, toString: toString };
|
||
|
||
//
|
||
// Expose the URL parser and some additional properties that might be useful for
|
||
// others or testing.
|
||
//
|
||
Url.extractProtocol = extractProtocol;
|
||
Url.location = lolcation;
|
||
Url.trimLeft = trimLeft;
|
||
Url.qs = qs;
|
||
|
||
module.exports = Url;
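// Usage sketch (comment only): Url parses an address into its components and
// keeps derived fields (host, origin, href) in sync via set().
//
//   const parsed = new Url('https://user:pass@cloud.example.com:8443/remote.php/dav?x=1');
//   parsed.protocol;           // 'https:'
//   parsed.host;               // 'cloud.example.com:8443'
//   parsed.pathname;           // '/remote.php/dav'
//   parsed.set('port', '443'); // default port: host becomes 'cloud.example.com'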
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 2053:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

/**
 * For Node.js, simply re-export the core `util.deprecate` function.
 */

module.exports = __nccwpck_require__(1669).deprecate;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 4043:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
Object.defineProperty(exports, "__esModule", ({
|
||
value: true
|
||
}));
|
||
Object.defineProperty(exports, "v1", ({
|
||
enumerable: true,
|
||
get: function () {
|
||
return _v.default;
|
||
}
|
||
}));
|
||
Object.defineProperty(exports, "v3", ({
|
||
enumerable: true,
|
||
get: function () {
|
||
return _v2.default;
|
||
}
|
||
}));
|
||
Object.defineProperty(exports, "v4", ({
|
||
enumerable: true,
|
||
get: function () {
|
||
return _v3.default;
|
||
}
|
||
}));
|
||
Object.defineProperty(exports, "v5", ({
|
||
enumerable: true,
|
||
get: function () {
|
||
return _v4.default;
|
||
}
|
||
}));
|
||
Object.defineProperty(exports, "NIL", ({
|
||
enumerable: true,
|
||
get: function () {
|
||
return _nil.default;
|
||
}
|
||
}));
|
||
Object.defineProperty(exports, "version", ({
|
||
enumerable: true,
|
||
get: function () {
|
||
return _version.default;
|
||
}
|
||
}));
|
||
Object.defineProperty(exports, "validate", ({
|
||
enumerable: true,
|
||
get: function () {
|
||
return _validate.default;
|
||
}
|
||
}));
|
||
Object.defineProperty(exports, "stringify", ({
|
||
enumerable: true,
|
||
get: function () {
|
||
return _stringify.default;
|
||
}
|
||
}));
|
||
Object.defineProperty(exports, "parse", ({
|
||
enumerable: true,
|
||
get: function () {
|
||
return _parse.default;
|
||
}
|
||
}));
|
||
|
||
var _v = _interopRequireDefault(__nccwpck_require__(8574));
|
||
|
||
var _v2 = _interopRequireDefault(__nccwpck_require__(7691));
|
||
|
||
var _v3 = _interopRequireDefault(__nccwpck_require__(6421));
|
||
|
||
var _v4 = _interopRequireDefault(__nccwpck_require__(3382));
|
||
|
||
var _nil = _interopRequireDefault(__nccwpck_require__(1264));
|
||
|
||
var _version = _interopRequireDefault(__nccwpck_require__(7939));
|
||
|
||
var _validate = _interopRequireDefault(__nccwpck_require__(6103));
|
||
|
||
var _stringify = _interopRequireDefault(__nccwpck_require__(5135));
|
||
|
||
var _parse = _interopRequireDefault(__nccwpck_require__(4664));
|
||
|
||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||
|
||
/***/ }),
|
||
|
||
/***/ 4173:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
Object.defineProperty(exports, "__esModule", ({
|
||
value: true
|
||
}));
|
||
exports.default = void 0;
|
||
|
||
var _crypto = _interopRequireDefault(__nccwpck_require__(6417));
|
||
|
||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||
|
||
function md5(bytes) {
|
||
if (Array.isArray(bytes)) {
|
||
bytes = Buffer.from(bytes);
|
||
} else if (typeof bytes === 'string') {
|
||
bytes = Buffer.from(bytes, 'utf8');
|
||
}
|
||
|
||
return _crypto.default.createHash('md5').update(bytes).digest();
|
||
}
|
||
|
||
var _default = md5;
|
||
exports.default = _default;
|
||
|
||
/***/ }),
|
||
|
||
/***/ 1264:
|
||
/***/ ((__unused_webpack_module, exports) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
Object.defineProperty(exports, "__esModule", ({
|
||
value: true
|
||
}));
|
||
exports.default = void 0;
|
||
var _default = '00000000-0000-0000-0000-000000000000';
|
||
exports.default = _default;
|
||
|
||
/***/ }),
|
||
|
||
/***/ 4664:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
Object.defineProperty(exports, "__esModule", ({
|
||
value: true
|
||
}));
|
||
exports.default = void 0;
|
||
|
||
var _validate = _interopRequireDefault(__nccwpck_require__(6103));
|
||
|
||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||
|
||
function parse(uuid) {
|
||
if (!(0, _validate.default)(uuid)) {
|
||
throw TypeError('Invalid UUID');
|
||
}
|
||
|
||
let v;
|
||
const arr = new Uint8Array(16); // Parse ########-....-....-....-............
|
||
|
||
arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;
|
||
arr[1] = v >>> 16 & 0xff;
|
||
arr[2] = v >>> 8 & 0xff;
|
||
arr[3] = v & 0xff; // Parse ........-####-....-....-............
|
||
|
||
arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;
|
||
arr[5] = v & 0xff; // Parse ........-....-####-....-............
|
||
|
||
arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;
|
||
arr[7] = v & 0xff; // Parse ........-....-....-####-............
|
||
|
||
arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;
|
||
arr[9] = v & 0xff; // Parse ........-....-....-....-############
|
||
// (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
|
||
|
||
arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;
|
||
arr[11] = v / 0x100000000 & 0xff;
|
||
arr[12] = v >>> 24 & 0xff;
|
||
arr[13] = v >>> 16 & 0xff;
|
||
arr[14] = v >>> 8 & 0xff;
|
||
arr[15] = v & 0xff;
|
||
return arr;
|
||
}
|
||
|
||
var _default = parse;
|
||
exports.default = _default;
|
||
|
||
/***/ }),
|
||
|
||
/***/ 3176:
|
||
/***/ ((__unused_webpack_module, exports) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
Object.defineProperty(exports, "__esModule", ({
|
||
value: true
|
||
}));
|
||
exports.default = void 0;
|
||
var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;
|
||
exports.default = _default;
|
||
|
||
/***/ }),
|
||
|
||
/***/ 1694:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
Object.defineProperty(exports, "__esModule", ({
|
||
value: true
|
||
}));
|
||
exports.default = rng;
|
||
|
||
var _crypto = _interopRequireDefault(__nccwpck_require__(6417));
|
||
|
||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||
|
||
const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate
|
||
|
||
let poolPtr = rnds8Pool.length;
|
||
|
||
function rng() {
|
||
if (poolPtr > rnds8Pool.length - 16) {
|
||
_crypto.default.randomFillSync(rnds8Pool);
|
||
|
||
poolPtr = 0;
|
||
}
|
||
|
||
return rnds8Pool.slice(poolPtr, poolPtr += 16);
|
||
}
|
||
|
||
/***/ }),
|
||
|
||
/***/ 977:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
Object.defineProperty(exports, "__esModule", ({
|
||
value: true
|
||
}));
|
||
exports.default = void 0;
|
||
|
||
var _crypto = _interopRequireDefault(__nccwpck_require__(6417));
|
||
|
||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||
|
||
function sha1(bytes) {
|
||
if (Array.isArray(bytes)) {
|
||
bytes = Buffer.from(bytes);
|
||
} else if (typeof bytes === 'string') {
|
||
bytes = Buffer.from(bytes, 'utf8');
|
||
}
|
||
|
||
return _crypto.default.createHash('sha1').update(bytes).digest();
|
||
}
|
||
|
||
var _default = sha1;
|
||
exports.default = _default;
|
||
|
||
/***/ }),
|
||
|
||
/***/ 5135:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
Object.defineProperty(exports, "__esModule", ({
|
||
value: true
|
||
}));
|
||
exports.default = void 0;
|
||
|
||
var _validate = _interopRequireDefault(__nccwpck_require__(6103));
|
||
|
||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||
|
||
/**
|
||
* Convert array of 16 byte values to UUID string format of the form:
|
||
* XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
|
||
*/
|
||
const byteToHex = [];
|
||
|
||
for (let i = 0; i < 256; ++i) {
|
||
byteToHex.push((i + 0x100).toString(16).substr(1));
|
||
}
|
||
|
||
function stringify(arr, offset = 0) {
|
||
// Note: Be careful editing this code! It's been tuned for performance
|
||
// and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
|
||
const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one
|
||
// of the following:
|
||
// - One or more input array values don't map to a hex octet (leading to
|
||
// "undefined" in the uuid)
|
||
// - Invalid input values for the RFC `version` or `variant` fields
|
||
|
||
if (!(0, _validate.default)(uuid)) {
|
||
throw TypeError('Stringified UUID is invalid');
|
||
}
|
||
|
||
return uuid;
|
||
}
|
||
|
||
var _default = stringify;
|
||
exports.default = _default;
|
||
|
||
/***/ }),
|
||
|
||
/***/ 8574:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
Object.defineProperty(exports, "__esModule", ({
|
||
value: true
|
||
}));
|
||
exports.default = void 0;
|
||
|
||
var _rng = _interopRequireDefault(__nccwpck_require__(1694));
|
||
|
||
var _stringify = _interopRequireDefault(__nccwpck_require__(5135));
|
||
|
||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||
|
||
// **`v1()` - Generate time-based UUID**
|
||
//
|
||
// Inspired by https://github.com/LiosK/UUID.js
|
||
// and http://docs.python.org/library/uuid.html
|
||
let _nodeId;
|
||
|
||
let _clockseq; // Previous uuid creation time
|
||
|
||
|
||
let _lastMSecs = 0;
|
||
let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details
|
||
|
||
function v1(options, buf, offset) {
|
||
let i = buf && offset || 0;
|
||
const b = buf || new Array(16);
|
||
options = options || {};
|
||
let node = options.node || _nodeId;
|
||
let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not
|
||
// specified. We do this lazily to minimize issues related to insufficient
|
||
// system entropy. See #189
|
||
|
||
if (node == null || clockseq == null) {
|
||
const seedBytes = options.random || (options.rng || _rng.default)();
|
||
|
||
if (node == null) {
|
||
// Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)
|
||
node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
|
||
}
|
||
|
||
if (clockseq == null) {
|
||
// Per 4.2.2, randomize (14 bit) clockseq
|
||
clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
|
||
}
|
||
} // UUID timestamps are 100 nano-second units since the Gregorian epoch,
|
||
// (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
|
||
// time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
|
||
// (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
|
||
|
||
|
||
let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock
|
||
// cycle to simulate higher resolution clock
|
||
|
||
let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)
|
||
|
||
const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression
|
||
|
||
if (dt < 0 && options.clockseq === undefined) {
|
||
clockseq = clockseq + 1 & 0x3fff;
|
||
} // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
|
||
// time interval
|
||
|
||
|
||
if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
|
||
nsecs = 0;
|
||
} // Per 4.2.1.2 Throw error if too many uuids are requested
|
||
|
||
|
||
if (nsecs >= 10000) {
|
||
throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
|
||
}
|
||
|
||
_lastMSecs = msecs;
|
||
_lastNSecs = nsecs;
|
||
_clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
|
||
|
||
msecs += 12219292800000; // `time_low`
|
||
|
||
const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
|
||
b[i++] = tl >>> 24 & 0xff;
|
||
b[i++] = tl >>> 16 & 0xff;
|
||
b[i++] = tl >>> 8 & 0xff;
|
||
b[i++] = tl & 0xff; // `time_mid`
|
||
|
||
const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;
|
||
b[i++] = tmh >>> 8 & 0xff;
|
||
b[i++] = tmh & 0xff; // `time_high_and_version`
|
||
|
||
b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
|
||
|
||
b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
|
||
|
||
b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`
|
||
|
||
b[i++] = clockseq & 0xff; // `node`
|
||
|
||
for (let n = 0; n < 6; ++n) {
|
||
b[i + n] = node[n];
|
||
}
|
||
|
||
return buf || (0, _stringify.default)(b);
|
||
}
|
||
|
||
var _default = v1;
|
||
exports.default = _default;
|
||
|
||
/***/ }),
|
||
|
||
/***/ 7691:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
Object.defineProperty(exports, "__esModule", ({
|
||
value: true
|
||
}));
|
||
exports.default = void 0;
|
||
|
||
var _v = _interopRequireDefault(__nccwpck_require__(8556));
|
||
|
||
var _md = _interopRequireDefault(__nccwpck_require__(4173));
|
||
|
||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||
|
||
const v3 = (0, _v.default)('v3', 0x30, _md.default);
|
||
var _default = v3;
|
||
exports.default = _default;
|
||
|
||
/***/ }),
|
||
|
||
/***/ 8556:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
Object.defineProperty(exports, "__esModule", ({
|
||
value: true
|
||
}));
|
||
exports.default = _default;
|
||
exports.URL = exports.DNS = void 0;
|
||
|
||
var _stringify = _interopRequireDefault(__nccwpck_require__(5135));
|
||
|
||
var _parse = _interopRequireDefault(__nccwpck_require__(4664));
|
||
|
||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||
|
||
function stringToBytes(str) {
|
||
str = unescape(encodeURIComponent(str)); // UTF8 escape
|
||
|
||
const bytes = [];
|
||
|
||
for (let i = 0; i < str.length; ++i) {
|
||
bytes.push(str.charCodeAt(i));
|
||
}
|
||
|
||
return bytes;
|
||
}
|
||
|
||
const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
|
||
exports.DNS = DNS;
|
||
const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
|
||
exports.URL = URL;
|
||
|
||
function _default(name, version, hashfunc) {
|
||
function generateUUID(value, namespace, buf, offset) {
|
||
if (typeof value === 'string') {
|
||
value = stringToBytes(value);
|
||
}
|
||
|
||
if (typeof namespace === 'string') {
|
||
namespace = (0, _parse.default)(namespace);
|
||
}
|
||
|
||
if (namespace.length !== 16) {
|
||
throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
|
||
} // Compute hash of namespace and value, Per 4.3
|
||
// Future: Use spread syntax when supported on all platforms, e.g. `bytes =
|
||
// hashfunc([...namespace, ... value])`
|
||
|
||
|
||
let bytes = new Uint8Array(16 + value.length);
|
||
bytes.set(namespace);
|
||
bytes.set(value, namespace.length);
|
||
bytes = hashfunc(bytes);
|
||
bytes[6] = bytes[6] & 0x0f | version;
|
||
bytes[8] = bytes[8] & 0x3f | 0x80;
|
||
|
||
if (buf) {
|
||
offset = offset || 0;
|
||
|
||
for (let i = 0; i < 16; ++i) {
|
||
buf[offset + i] = bytes[i];
|
||
}
|
||
|
||
return buf;
|
||
}
|
||
|
||
return (0, _stringify.default)(bytes);
|
||
} // Function#name is not settable on some platforms (#270)
|
||
|
||
|
||
try {
|
||
generateUUID.name = name; // eslint-disable-next-line no-empty
|
||
} catch (err) {} // For CommonJS default export support
|
||
|
||
|
||
generateUUID.DNS = DNS;
|
||
generateUUID.URL = URL;
|
||
return generateUUID;
|
||
}
|
||
|
||
/***/ }),
|
||
|
||
/***/ 6421:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
Object.defineProperty(exports, "__esModule", ({
|
||
value: true
|
||
}));
|
||
exports.default = void 0;
|
||
|
||
var _rng = _interopRequireDefault(__nccwpck_require__(1694));
|
||
|
||
var _stringify = _interopRequireDefault(__nccwpck_require__(5135));
|
||
|
||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||
|
||
function v4(options, buf, offset) {
|
||
options = options || {};
|
||
|
||
const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
|
||
|
||
|
||
rnds[6] = rnds[6] & 0x0f | 0x40;
|
||
rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided
|
||
|
||
if (buf) {
|
||
offset = offset || 0;
|
||
|
||
for (let i = 0; i < 16; ++i) {
|
||
buf[offset + i] = rnds[i];
|
||
}
|
||
|
||
return buf;
|
||
}
|
||
|
||
return (0, _stringify.default)(rnds);
|
||
}
|
||
|
||
var _default = v4;
|
||
exports.default = _default;
|
||
|
||
/***/ }),
|
||
|
||
/***/ 3382:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
Object.defineProperty(exports, "__esModule", ({
|
||
value: true
|
||
}));
|
||
exports.default = void 0;
|
||
|
||
var _v = _interopRequireDefault(__nccwpck_require__(8556));
|
||
|
||
var _sha = _interopRequireDefault(__nccwpck_require__(977));
|
||
|
||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||
|
||
const v5 = (0, _v.default)('v5', 0x50, _sha.default);
|
||
var _default = v5;
|
||
exports.default = _default;
|
||
|
||
/***/ }),
|
||
|
||
/***/ 6103:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
Object.defineProperty(exports, "__esModule", ({
|
||
value: true
|
||
}));
|
||
exports.default = void 0;
|
||
|
||
var _regex = _interopRequireDefault(__nccwpck_require__(3176));
|
||
|
||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||
|
||
function validate(uuid) {
|
||
return typeof uuid === 'string' && _regex.default.test(uuid);
|
||
}
|
||
|
||
var _default = validate;
|
||
exports.default = _default;
|
||
|
||
/***/ }),
|
||
|
||
/***/ 7939:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
|
||
Object.defineProperty(exports, "__esModule", ({
|
||
value: true
|
||
}));
|
||
exports.default = void 0;
|
||
|
||
var _validate = _interopRequireDefault(__nccwpck_require__(6103));
|
||
|
||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||
|
||
function version(uuid) {
|
||
if (!(0, _validate.default)(uuid)) {
|
||
throw TypeError('Invalid UUID');
|
||
}
|
||
|
||
return parseInt(uuid.substr(14, 1), 16);
|
||
}
|
||
|
||
var _default = version;
|
||
exports.default = _default;
|
||
|
||
/***/ }),
|
||
|
||
/***/ 9931:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.generateBasicAuthHeader = void 0;
|
||
var encode_1 = __nccwpck_require__(8282);
|
||
function generateBasicAuthHeader(username, password) {
|
||
var encoded = encode_1.toBase64(username + ":" + password);
|
||
return "Basic " + encoded;
|
||
}
|
||
exports.generateBasicAuthHeader = generateBasicAuthHeader;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 4209:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.parseDigestAuth = exports.generateDigestAuthHeader = exports.createDigestContext = void 0;
|
||
var md5_1 = __importDefault(__nccwpck_require__(5423));
|
||
var crypto_1 = __nccwpck_require__(2224);
|
||
var NONCE_CHARS = "abcdef0123456789";
|
||
var NONCE_SIZE = 32;
|
||
function createDigestContext(username, password) {
|
||
return { username: username, password: password, nc: 0, algorithm: "md5", hasDigestAuth: false };
|
||
}
|
||
exports.createDigestContext = createDigestContext;
|
||
function generateDigestAuthHeader(options, digest) {
|
||
var url = options.url.replace("//", "");
|
||
var uri = url.indexOf("/") == -1 ? "/" : url.slice(url.indexOf("/"));
|
||
var method = options.method ? options.method.toUpperCase() : "GET";
|
||
var qop = /(^|,)\s*auth\s*($|,)/.test(digest.qop) ? "auth" : false;
|
||
var ncString = ("00000000" + digest.nc).slice(-8);
|
||
var ha1 = crypto_1.ha1Compute(digest.algorithm, digest.username, digest.realm, digest.password, digest.nonce, digest.cnonce);
|
||
var ha2 = md5_1.default(method + ":" + uri);
|
||
var digestResponse = qop
|
||
? md5_1.default(ha1 + ":" + digest.nonce + ":" + ncString + ":" + digest.cnonce + ":" + qop + ":" + ha2)
|
||
: md5_1.default(ha1 + ":" + digest.nonce + ":" + ha2);
|
||
var authValues = {
|
||
username: digest.username,
|
||
realm: digest.realm,
|
||
nonce: digest.nonce,
|
||
uri: uri,
|
||
qop: qop,
|
||
response: digestResponse,
|
||
nc: ncString,
|
||
cnonce: digest.cnonce,
|
||
algorithm: digest.algorithm,
|
||
opaque: digest.opaque
|
||
};
|
||
var authHeader = [];
|
||
for (var k in authValues) {
|
||
if (authValues[k]) {
|
||
if (k === "qop" || k === "nc" || k === "algorithm") {
|
||
authHeader.push(k + "=" + authValues[k]);
|
||
}
|
||
else {
|
||
authHeader.push(k + "=\"" + authValues[k] + "\"");
|
||
}
|
||
}
|
||
}
|
||
return "Digest " + authHeader.join(", ");
|
||
}
|
||
exports.generateDigestAuthHeader = generateDigestAuthHeader;
|
||
function makeNonce() {
|
||
var uid = "";
|
||
for (var i = 0; i < NONCE_SIZE; ++i) {
|
||
uid = "" + uid + NONCE_CHARS[Math.floor(Math.random() * NONCE_CHARS.length)];
|
||
}
|
||
return uid;
|
||
}
|
||
function parseDigestAuth(response, _digest) {
|
||
var authHeader = response.headers["www-authenticate"] || "";
|
||
if (authHeader.split(/\s/)[0].toLowerCase() !== "digest") {
|
||
return false;
|
||
}
|
||
var re = /([a-z0-9_-]+)=(?:"([^"]+)"|([a-z0-9_-]+))/gi;
|
||
for (;;) {
|
||
var match = re.exec(authHeader);
|
||
if (!match) {
|
||
break;
|
||
}
|
||
_digest[match[1]] = match[2] || match[3];
|
||
}
|
||
_digest.nc += 1;
|
||
_digest.cnonce = makeNonce();
|
||
return true;
|
||
}
|
||
exports.parseDigestAuth = parseDigestAuth;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 2266:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.setupAuth = void 0;
|
||
var layerr_1 = __nccwpck_require__(7546);
|
||
var digest_1 = __nccwpck_require__(4209);
|
||
var basic_1 = __nccwpck_require__(9931);
|
||
var oauth_1 = __nccwpck_require__(718);
|
||
var types_1 = __nccwpck_require__(5187);
|
||
function setupAuth(context, username, password, oauthToken) {
|
||
switch (context.authType) {
|
||
case types_1.AuthType.Digest:
|
||
context.digest = digest_1.createDigestContext(username, password);
|
||
break;
|
||
case types_1.AuthType.None:
|
||
// Do nothing
|
||
break;
|
||
case types_1.AuthType.Password:
|
||
context.headers.Authorization = basic_1.generateBasicAuthHeader(username, password);
|
||
break;
|
||
case types_1.AuthType.Token:
|
||
context.headers.Authorization = oauth_1.generateTokenAuthHeader(oauthToken);
|
||
break;
|
||
default:
|
||
throw new layerr_1.Layerr({
|
||
info: {
|
||
code: types_1.ErrorCode.InvalidAuthType
|
||
}
|
||
}, "Invalid auth type: " + context.authType);
|
||
}
|
||
}
|
||
exports.setupAuth = setupAuth;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 718:
|
||
/***/ ((__unused_webpack_module, exports) => {
|
||
|
||
"use strict";
|
||
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.generateTokenAuthHeader = void 0;
|
||
function generateTokenAuthHeader(token) {
|
||
return token.token_type + " " + token.access_token;
|
||
}
|
||
exports.generateTokenAuthHeader = generateTokenAuthHeader;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 7461:
|
||
/***/ ((__unused_webpack_module, exports) => {
|
||
|
||
"use strict";
|
||
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.isArrayBuffer = void 0;
|
||
var hasArrayBuffer = typeof ArrayBuffer === "function";
|
||
var objToString = Object.prototype.toString;
|
||
// Taken from: https://github.com/fengyuanchen/is-array-buffer/blob/master/src/index.js
|
||
function isArrayBuffer(value) {
|
||
return (hasArrayBuffer &&
|
||
(value instanceof ArrayBuffer || objToString.call(value) === "[object ArrayBuffer]"));
|
||
}
|
||
exports.isArrayBuffer = isArrayBuffer;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 6201:
|
||
/***/ ((__unused_webpack_module, exports) => {
|
||
|
||
"use strict";
|
||
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.isBuffer = void 0;
|
||
function isBuffer(value) {
|
||
return (value != null &&
|
||
value.constructor != null &&
|
||
typeof value.constructor.isBuffer === "function" &&
|
||
value.constructor.isBuffer(value));
|
||
}
|
||
exports.isBuffer = isBuffer;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 1638:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.getPatcher = void 0;
|
||
var hot_patcher_1 = __importDefault(__nccwpck_require__(3839));
|
||
var __patcher = null;
|
||
function getPatcher() {
|
||
if (!__patcher) {
|
||
__patcher = new hot_patcher_1.default();
|
||
}
|
||
return __patcher;
|
||
}
|
||
exports.getPatcher = getPatcher;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 7318:
|
||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||
|
||
"use strict";
|
||
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.createClient = void 0;
|
||
var url_1 = __nccwpck_require__(1805);
|
||
var index_1 = __nccwpck_require__(2266);
|
||
var copyFile_1 = __nccwpck_require__(3022);
|
||
var createDirectory_1 = __nccwpck_require__(6673);
|
||
var createStream_1 = __nccwpck_require__(87);
|
||
var customRequest_1 = __nccwpck_require__(5703);
|
||
var deleteFile_1 = __nccwpck_require__(6787);
|
||
var exists_1 = __nccwpck_require__(9019);
|
||
var directoryContents_1 = __nccwpck_require__(9809);
|
||
var getFileContents_1 = __nccwpck_require__(2577);
|
||
var getQuota_1 = __nccwpck_require__(5785);
|
||
var stat_1 = __nccwpck_require__(2974);
|
||
var moveFile_1 = __nccwpck_require__(8154);
|
||
var putFileContents_1 = __nccwpck_require__(4951);
|
||
var types_1 = __nccwpck_require__(5187);
|
||
function createClient(remoteURL, options) {
|
||
if (options === void 0) { options = {}; }
|
||
var _a = options.authType, authTypeRaw = _a === void 0 ? null : _a, _b = options.headers, headers = _b === void 0 ? {} : _b, httpAgent = options.httpAgent, httpsAgent = options.httpsAgent, maxBodyLength = options.maxBodyLength, maxContentLength = options.maxContentLength, password = options.password, token = options.token, username = options.username, withCredentials = options.withCredentials;
|
||
var authType = authTypeRaw;
|
||
if (!authType) {
|
||
authType = username || password ? types_1.AuthType.Password : types_1.AuthType.None;
|
||
}
|
||
var context = {
|
||
authType: authType,
|
||
headers: Object.assign({}, headers),
|
||
httpAgent: httpAgent,
|
||
httpsAgent: httpsAgent,
|
||
maxBodyLength: maxBodyLength,
|
||
maxContentLength: maxContentLength,
|
||
remotePath: url_1.extractURLPath(remoteURL),
|
||
remoteURL: remoteURL,
|
||
password: password,
|
||
token: token,
|
||
username: username,
|
||
withCredentials: withCredentials
|
||
};
|
||
index_1.setupAuth(context, username, password, token);
|
||
return {
|
||
copyFile: function (filename, destination, options) {
|
||
return copyFile_1.copyFile(context, filename, destination, options);
|
||
},
|
||
createDirectory: function (path, options) {
|
||
return createDirectory_1.createDirectory(context, path, options);
|
||
},
|
||
createReadStream: function (filename, options) {
|
||
return createStream_1.createReadStream(context, filename, options);
|
||
},
|
||
createWriteStream: function (filename, options, callback) { return createStream_1.createWriteStream(context, filename, options, callback); },
|
||
customRequest: function (path, requestOptions) {
|
||
return customRequest_1.customRequest(context, path, requestOptions);
|
||
},
|
||
deleteFile: function (filename, options) {
|
||
return deleteFile_1.deleteFile(context, filename, options);
|
||
},
|
||
exists: function (path, options) { return exists_1.exists(context, path, options); },
|
||
getDirectoryContents: function (path, options) {
|
||
return directoryContents_1.getDirectoryContents(context, path, options);
|
||
},
|
||
getFileContents: function (filename, options) {
|
||
return getFileContents_1.getFileContents(context, filename, options);
|
||
},
|
||
getFileDownloadLink: function (filename) { return getFileContents_1.getFileDownloadLink(context, filename); },
|
||
getFileUploadLink: function (filename) { return putFileContents_1.getFileUploadLink(context, filename); },
|
||
getHeaders: function () { return Object.assign({}, context.headers); },
|
||
getQuota: function (options) { return getQuota_1.getQuota(context, options); },
|
||
moveFile: function (filename, destinationFilename, options) {
|
||
return moveFile_1.moveFile(context, filename, destinationFilename, options);
|
||
},
|
||
putFileContents: function (filename, data, options) { return putFileContents_1.putFileContents(context, filename, data, options); },
|
||
setHeaders: function (headers) {
|
||
context.headers = Object.assign({}, headers);
|
||
},
|
||
stat: function (path, options) { return stat_1.getStat(context, path, options); }
|
||
};
|
||
}
|
||
exports.createClient = createClient;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 4758:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||
if (k2 === undefined) k2 = k;
|
||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||
}) : (function(o, m, k, k2) {
|
||
if (k2 === undefined) k2 = k;
|
||
o[k2] = m[k];
|
||
}));
|
||
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
||
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.parseXML = exports.parseStat = exports.getPatcher = exports.createClient = void 0;
|
||
var factory_1 = __nccwpck_require__(7318);
|
||
Object.defineProperty(exports, "createClient", ({ enumerable: true, get: function () { return factory_1.createClient; } }));
|
||
var patcher_1 = __nccwpck_require__(1638);
|
||
Object.defineProperty(exports, "getPatcher", ({ enumerable: true, get: function () { return patcher_1.getPatcher; } }));
|
||
__exportStar(__nccwpck_require__(5187), exports);
|
||
var dav_1 = __nccwpck_require__(736);
|
||
Object.defineProperty(exports, "parseStat", ({ enumerable: true, get: function () { return dav_1.parseStat; } }));
|
||
Object.defineProperty(exports, "parseXML", ({ enumerable: true, get: function () { return dav_1.parseXML; } }));
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 3022:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
return new (P || (P = Promise))(function (resolve, reject) {
|
||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
});
|
||
};
|
||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||
function step(op) {
|
||
if (f) throw new TypeError("Generator is already executing.");
|
||
while (_) try {
|
||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||
switch (op[0]) {
|
||
case 0: case 1: t = op; break;
|
||
case 4: _.label++; return { value: op[1], done: false };
|
||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||
default:
|
||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||
if (t[2]) _.ops.pop();
|
||
_.trys.pop(); continue;
|
||
}
|
||
op = body.call(thisArg, _);
|
||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||
}
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.copyFile = void 0;
|
||
var url_1 = __nccwpck_require__(1805);
|
||
var path_1 = __nccwpck_require__(2846);
|
||
var request_1 = __nccwpck_require__(104);
|
||
var response_1 = __nccwpck_require__(7052);
|
||
function copyFile(context, filename, destination, options) {
|
||
if (options === void 0) { options = {}; }
|
||
return __awaiter(this, void 0, void 0, function () {
|
||
var requestOptions, response;
|
||
return __generator(this, function (_a) {
|
||
switch (_a.label) {
|
||
case 0:
|
||
requestOptions = request_1.prepareRequestOptions({
|
||
url: url_1.joinURL(context.remoteURL, path_1.encodePath(filename)),
|
||
method: "COPY",
|
||
headers: {
|
||
Destination: url_1.joinURL(context.remoteURL, path_1.encodePath(destination))
|
||
}
|
||
}, context, options);
|
||
return [4 /*yield*/, request_1.request(requestOptions)];
|
||
case 1:
|
||
response = _a.sent();
|
||
response_1.handleResponseCode(context, response);
|
||
return [2 /*return*/];
|
||
}
|
||
});
|
||
});
|
||
}
|
||
exports.copyFile = copyFile;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 6673:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __assign = (this && this.__assign) || function () {
|
||
__assign = Object.assign || function(t) {
|
||
for (var s, i = 1, n = arguments.length; i < n; i++) {
|
||
s = arguments[i];
|
||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
|
||
t[p] = s[p];
|
||
}
|
||
return t;
|
||
};
|
||
return __assign.apply(this, arguments);
|
||
};
|
||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
return new (P || (P = Promise))(function (resolve, reject) {
|
||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
});
|
||
};
|
||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||
function step(op) {
|
||
if (f) throw new TypeError("Generator is already executing.");
|
||
while (_) try {
|
||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||
switch (op[0]) {
|
||
case 0: case 1: t = op; break;
|
||
case 4: _.label++; return { value: op[1], done: false };
|
||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||
default:
|
||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||
if (t[2]) _.ops.pop();
|
||
_.trys.pop(); continue;
|
||
}
|
||
op = body.call(thisArg, _);
|
||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||
}
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.createDirectory = void 0;
|
||
var url_1 = __nccwpck_require__(1805);
|
||
var path_1 = __nccwpck_require__(2846);
|
||
var request_1 = __nccwpck_require__(104);
|
||
var response_1 = __nccwpck_require__(7052);
|
||
var stat_1 = __nccwpck_require__(2974);
|
||
function createDirectory(context, dirPath, options) {
|
||
if (options === void 0) { options = {}; }
|
||
return __awaiter(this, void 0, void 0, function () {
|
||
var requestOptions, response;
|
||
return __generator(this, function (_a) {
|
||
switch (_a.label) {
|
||
case 0:
|
||
if (options.recursive === true)
|
||
return [2 /*return*/, createDirectoryRecursively(context, dirPath, options)];
|
||
requestOptions = request_1.prepareRequestOptions({
|
||
url: url_1.joinURL(context.remoteURL, ensureCollectionPath(path_1.encodePath(dirPath))),
|
||
method: "MKCOL"
|
||
}, context, options);
|
||
return [4 /*yield*/, request_1.request(requestOptions)];
|
||
case 1:
|
||
response = _a.sent();
|
||
response_1.handleResponseCode(context, response);
|
||
return [2 /*return*/];
|
||
}
|
||
});
|
||
});
|
||
}
|
||
exports.createDirectory = createDirectory;
|
||
/**
|
||
* Ensure the path is a proper "collection" path by ensuring it has a trailing "/".
|
||
* The proper format of collection according to the specification does contain the trailing slash.
|
||
* http://www.webdav.org/specs/rfc4918.html#rfc.section.5.2
|
||
* @param path Path of the collection
|
||
* @return string Path of the collection with appended trailing "/" in case the `path` does not have it.
|
||
*/
|
||
function ensureCollectionPath(path) {
|
||
if (!path.endsWith("/")) {
|
||
return path + "/";
|
||
}
|
||
return path;
|
||
}
|
||
function createDirectoryRecursively(context, dirPath, options) {
|
||
if (options === void 0) { options = {}; }
|
||
return __awaiter(this, void 0, void 0, function () {
|
||
var paths, creating, _i, paths_1, testPath, testStat, err_1, error;
|
||
return __generator(this, function (_a) {
|
||
switch (_a.label) {
|
||
case 0:
|
||
paths = path_1.getAllDirectories(path_1.normalisePath(dirPath));
|
||
paths.sort(function (a, b) {
|
||
if (a.length > b.length) {
|
||
return 1;
|
||
}
|
||
else if (b.length > a.length) {
|
||
return -1;
|
||
}
|
||
return 0;
|
||
});
|
||
creating = false;
|
||
_i = 0, paths_1 = paths;
|
||
_a.label = 1;
|
||
case 1:
|
||
if (!(_i < paths_1.length)) return [3 /*break*/, 10];
|
||
testPath = paths_1[_i];
|
||
if (!creating) return [3 /*break*/, 3];
|
||
return [4 /*yield*/, createDirectory(context, testPath, __assign(__assign({}, options), { recursive: false }))];
|
||
case 2:
|
||
_a.sent();
|
||
return [3 /*break*/, 9];
|
||
case 3:
|
||
_a.trys.push([3, 5, , 9]);
|
||
return [4 /*yield*/, stat_1.getStat(context, testPath)];
|
||
case 4:
|
||
testStat = (_a.sent());
|
||
if (testStat.type !== "directory") {
|
||
throw new Error("Path includes a file: " + dirPath);
|
||
}
|
||
return [3 /*break*/, 9];
|
||
case 5:
|
||
err_1 = _a.sent();
|
||
error = err_1;
|
||
if (!(error.status === 404)) return [3 /*break*/, 7];
|
||
creating = true;
|
||
return [4 /*yield*/, createDirectory(context, testPath, __assign(__assign({}, options), { recursive: false }))];
|
||
case 6:
|
||
_a.sent();
|
||
return [3 /*break*/, 8];
|
||
case 7: throw err_1;
|
||
case 8: return [3 /*break*/, 9];
|
||
case 9:
|
||
_i++;
|
||
return [3 /*break*/, 1];
|
||
case 10: return [2 /*return*/];
|
||
}
|
||
});
|
||
});
|
||
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 87:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
return new (P || (P = Promise))(function (resolve, reject) {
|
||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
});
|
||
};
|
||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||
function step(op) {
|
||
if (f) throw new TypeError("Generator is already executing.");
|
||
while (_) try {
|
||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||
switch (op[0]) {
|
||
case 0: case 1: t = op; break;
|
||
case 4: _.label++; return { value: op[1], done: false };
|
||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||
default:
|
||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||
if (t[2]) _.ops.pop();
|
||
_.trys.pop(); continue;
|
||
}
|
||
op = body.call(thisArg, _);
|
||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||
}
|
||
};
|
||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.createWriteStream = exports.createReadStream = void 0;
|
||
var stream_1 = __importDefault(__nccwpck_require__(2413));
|
||
var url_1 = __nccwpck_require__(1805);
|
||
var path_1 = __nccwpck_require__(2846);
|
||
var request_1 = __nccwpck_require__(104);
|
||
var response_1 = __nccwpck_require__(7052);
|
||
var NOOP = function () { };
|
||
function createReadStream(context, filePath, options) {
|
||
if (options === void 0) { options = {}; }
|
||
var PassThroughStream = stream_1.default.PassThrough;
|
||
var outStream = new PassThroughStream();
|
||
getFileStream(context, filePath, options)
|
||
.then(function (stream) {
|
||
stream.pipe(outStream);
|
||
})
|
||
.catch(function (err) {
|
||
outStream.emit("error", err);
|
||
});
|
||
return outStream;
|
||
}
|
||
exports.createReadStream = createReadStream;
|
||
function createWriteStream(context, filePath, options, callback) {
|
||
if (options === void 0) { options = {}; }
|
||
if (callback === void 0) { callback = NOOP; }
|
||
var PassThroughStream = stream_1.default.PassThrough;
|
||
var writeStream = new PassThroughStream();
|
||
var headers = {};
|
||
if (options.overwrite === false) {
|
||
headers["If-None-Match"] = "*";
|
||
}
|
||
var requestOptions = request_1.prepareRequestOptions({
|
||
url: url_1.joinURL(context.remoteURL, path_1.encodePath(filePath)),
|
||
method: "PUT",
|
||
headers: headers,
|
||
data: writeStream,
|
||
maxRedirects: 0
|
||
}, context, options);
|
||
request_1.request(requestOptions)
|
||
.then(function (response) { return response_1.handleResponseCode(context, response); })
|
||
.then(function (response) {
|
||
// Fire callback asynchronously to avoid errors
|
||
setTimeout(function () {
|
||
callback(response);
|
||
}, 0);
|
||
})
|
||
.catch(function (err) {
|
||
writeStream.emit("error", err);
|
||
});
|
||
return writeStream;
|
||
}
|
||
exports.createWriteStream = createWriteStream;
|
||
function getFileStream(context, filePath, options) {
|
||
if (options === void 0) { options = {}; }
|
||
return __awaiter(this, void 0, void 0, function () {
|
||
var headers, rangeHeader, requestOptions, response, responseError;
|
||
return __generator(this, function (_a) {
|
||
switch (_a.label) {
|
||
case 0:
|
||
headers = {};
|
||
if (typeof options.range === "object" && typeof options.range.start === "number") {
|
||
rangeHeader = "bytes=" + options.range.start + "-";
|
||
if (typeof options.range.end === "number") {
|
||
rangeHeader = "" + rangeHeader + options.range.end;
|
||
}
|
||
headers.Range = rangeHeader;
|
||
}
|
||
requestOptions = request_1.prepareRequestOptions({
|
||
url: url_1.joinURL(context.remoteURL, path_1.encodePath(filePath)),
|
||
method: "GET",
|
||
headers: headers,
|
||
responseType: "stream"
|
||
}, context, options);
|
||
return [4 /*yield*/, request_1.request(requestOptions)];
|
||
case 1:
|
||
response = _a.sent();
|
||
response_1.handleResponseCode(context, response);
|
||
if (headers.Range && response.status !== 206) {
|
||
responseError = new Error("Invalid response code for partial request: " + response.status);
|
||
responseError.status = response.status;
|
||
throw responseError;
|
||
}
|
||
if (options.callback) {
|
||
setTimeout(function () {
|
||
options.callback(response);
|
||
}, 0);
|
||
}
|
||
return [2 /*return*/, response.data];
|
||
}
|
||
});
|
||
});
|
||
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 5703:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
return new (P || (P = Promise))(function (resolve, reject) {
|
||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
});
|
||
};
|
||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||
function step(op) {
|
||
if (f) throw new TypeError("Generator is already executing.");
|
||
while (_) try {
|
||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||
switch (op[0]) {
|
||
case 0: case 1: t = op; break;
|
||
case 4: _.label++; return { value: op[1], done: false };
|
||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||
default:
|
||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||
if (t[2]) _.ops.pop();
|
||
_.trys.pop(); continue;
|
||
}
|
||
op = body.call(thisArg, _);
|
||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||
}
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.customRequest = void 0;
|
||
var url_1 = __nccwpck_require__(1805);
|
||
var path_1 = __nccwpck_require__(2846);
|
||
var request_1 = __nccwpck_require__(104);
|
||
var response_1 = __nccwpck_require__(7052);
|
||
function customRequest(context, remotePath, requestOptions) {
|
||
return __awaiter(this, void 0, void 0, function () {
|
||
var finalOptions, response;
|
||
return __generator(this, function (_a) {
|
||
switch (_a.label) {
|
||
case 0:
|
||
if (!requestOptions.url) {
|
||
requestOptions.url = url_1.joinURL(context.remoteURL, path_1.encodePath(remotePath));
|
||
}
|
||
finalOptions = request_1.prepareRequestOptions(requestOptions, context, {});
|
||
return [4 /*yield*/, request_1.request(finalOptions)];
|
||
case 1:
|
||
response = _a.sent();
|
||
response_1.handleResponseCode(context, response);
|
||
return [2 /*return*/, response];
|
||
}
|
||
});
|
||
});
|
||
}
|
||
exports.customRequest = customRequest;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 6787:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
return new (P || (P = Promise))(function (resolve, reject) {
|
||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
});
|
||
};
|
||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||
function step(op) {
|
||
if (f) throw new TypeError("Generator is already executing.");
|
||
while (_) try {
|
||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||
switch (op[0]) {
|
||
case 0: case 1: t = op; break;
|
||
case 4: _.label++; return { value: op[1], done: false };
|
||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||
default:
|
||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||
if (t[2]) _.ops.pop();
|
||
_.trys.pop(); continue;
|
||
}
|
||
op = body.call(thisArg, _);
|
||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||
}
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.deleteFile = void 0;
|
||
var url_1 = __nccwpck_require__(1805);
|
||
var path_1 = __nccwpck_require__(2846);
|
||
var request_1 = __nccwpck_require__(104);
|
||
var response_1 = __nccwpck_require__(7052);
|
||
function deleteFile(context, filename, options) {
|
||
if (options === void 0) { options = {}; }
|
||
return __awaiter(this, void 0, void 0, function () {
|
||
var requestOptions, response;
|
||
return __generator(this, function (_a) {
|
||
switch (_a.label) {
|
||
case 0:
|
||
requestOptions = request_1.prepareRequestOptions({
|
||
url: url_1.joinURL(context.remoteURL, path_1.encodePath(filename)),
|
||
method: "DELETE"
|
||
}, context, options);
|
||
return [4 /*yield*/, request_1.request(requestOptions)];
|
||
case 1:
|
||
response = _a.sent();
|
||
response_1.handleResponseCode(context, response);
|
||
return [2 /*return*/];
|
||
}
|
||
});
|
||
});
|
||
}
|
||
exports.deleteFile = deleteFile;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 9809:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
return new (P || (P = Promise))(function (resolve, reject) {
|
||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
});
|
||
};
|
||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||
function step(op) {
|
||
if (f) throw new TypeError("Generator is already executing.");
|
||
while (_) try {
|
||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||
switch (op[0]) {
|
||
case 0: case 1: t = op; break;
|
||
case 4: _.label++; return { value: op[1], done: false };
|
||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||
default:
|
||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||
if (t[2]) _.ops.pop();
|
||
_.trys.pop(); continue;
|
||
}
|
||
op = body.call(thisArg, _);
|
||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||
}
|
||
};
|
||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.getDirectoryContents = void 0;
|
||
var path_posix_1 = __importDefault(__nccwpck_require__(4502));
|
||
var url_1 = __nccwpck_require__(1805);
|
||
var path_1 = __nccwpck_require__(2846);
|
||
var dav_1 = __nccwpck_require__(736);
|
||
var request_1 = __nccwpck_require__(104);
|
||
var response_1 = __nccwpck_require__(7052);
|
||
function getDirectoryContents(context, remotePath, options) {
|
||
if (options === void 0) { options = {}; }
|
||
return __awaiter(this, void 0, void 0, function () {
|
||
var requestOptions, response, davResp, files;
|
||
return __generator(this, function (_a) {
|
||
switch (_a.label) {
|
||
case 0:
|
||
requestOptions = request_1.prepareRequestOptions({
|
||
url: url_1.joinURL(context.remoteURL, path_1.encodePath(remotePath), "/"),
|
||
method: "PROPFIND",
|
||
headers: {
|
||
Accept: "text/plain",
|
||
Depth: options.deep ? "infinity" : "1"
|
||
},
|
||
responseType: "text"
|
||
}, context, options);
|
||
return [4 /*yield*/, request_1.request(requestOptions)];
|
||
case 1:
|
||
response = _a.sent();
|
||
response_1.handleResponseCode(context, response);
|
||
return [4 /*yield*/, dav_1.parseXML(response.data)];
|
||
case 2:
|
||
davResp = _a.sent();
|
||
files = getDirectoryFiles(davResp, context.remotePath, remotePath, options.details);
|
||
if (options.glob) {
|
||
files = response_1.processGlobFilter(files, options.glob);
|
||
}
|
||
return [2 /*return*/, response_1.processResponsePayload(response, files, options.details)];
|
||
}
|
||
});
|
||
});
|
||
}
|
||
exports.getDirectoryContents = getDirectoryContents;
|
||
function getDirectoryFiles(result, serverBasePath, requestPath, isDetailed) {
|
||
if (isDetailed === void 0) { isDetailed = false; }
|
||
var serverBase = path_posix_1.default.join(serverBasePath, "/");
|
||
// Extract the response items (directory contents)
|
||
var responseItems = result.multistatus.response;
|
||
return (responseItems
|
||
// Map all items to a consistent output structure (results)
|
||
.map(function (item) {
|
||
// HREF is the file path (in full)
|
||
var href = url_1.normaliseHREF(item.href);
|
||
// Each item should contain a stat object
|
||
var props = item.propstat.prop;
|
||
// Process the true full filename (minus the base server path)
|
||
var filename = serverBase === "/"
|
||
? decodeURIComponent(path_1.normalisePath(href))
|
||
: decodeURIComponent(path_1.normalisePath(path_posix_1.default.relative(serverBase, href)));
|
||
return dav_1.prepareFileFromProps(props, filename, isDetailed);
|
||
})
|
||
// Filter out the item pointing to the current directory (not needed)
|
||
.filter(function (item) {
|
||
return item.basename &&
|
||
(item.type === "file" || item.filename !== requestPath.replace(/\/$/, ""));
|
||
}));
|
||
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 9019:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
return new (P || (P = Promise))(function (resolve, reject) {
|
||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
});
|
||
};
|
||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||
function step(op) {
|
||
if (f) throw new TypeError("Generator is already executing.");
|
||
while (_) try {
|
||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||
switch (op[0]) {
|
||
case 0: case 1: t = op; break;
|
||
case 4: _.label++; return { value: op[1], done: false };
|
||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||
default:
|
||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||
if (t[2]) _.ops.pop();
|
||
_.trys.pop(); continue;
|
||
}
|
||
op = body.call(thisArg, _);
|
||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||
}
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.exists = void 0;
|
||
var stat_1 = __nccwpck_require__(2974);
|
||
function exists(context, remotePath, options) {
|
||
if (options === void 0) { options = {}; }
|
||
return __awaiter(this, void 0, void 0, function () {
|
||
var err_1;
|
||
return __generator(this, function (_a) {
|
||
switch (_a.label) {
|
||
case 0:
|
||
_a.trys.push([0, 2, , 3]);
|
||
return [4 /*yield*/, stat_1.getStat(context, remotePath, options)];
|
||
case 1:
|
||
_a.sent();
|
||
return [2 /*return*/, true];
|
||
case 2:
|
||
err_1 = _a.sent();
|
||
if (err_1.status === 404) {
|
||
return [2 /*return*/, false];
|
||
}
|
||
throw err_1;
|
||
case 3: return [2 /*return*/];
|
||
}
|
||
});
|
||
});
|
||
}
|
||
exports.exists = exists;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 2577:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
return new (P || (P = Promise))(function (resolve, reject) {
|
||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
});
|
||
};
|
||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||
function step(op) {
|
||
if (f) throw new TypeError("Generator is already executing.");
|
||
while (_) try {
|
||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||
switch (op[0]) {
|
||
case 0: case 1: t = op; break;
|
||
case 4: _.label++; return { value: op[1], done: false };
|
||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||
default:
|
||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||
if (t[2]) _.ops.pop();
|
||
_.trys.pop(); continue;
|
||
}
|
||
op = body.call(thisArg, _);
|
||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||
}
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.getFileDownloadLink = exports.getFileContents = void 0;
|
||
var layerr_1 = __nccwpck_require__(7546);
|
||
var url_1 = __nccwpck_require__(1805);
|
||
var path_1 = __nccwpck_require__(2846);
|
||
var encode_1 = __nccwpck_require__(8282);
|
||
var request_1 = __nccwpck_require__(104);
|
||
var response_1 = __nccwpck_require__(7052);
|
||
var types_1 = __nccwpck_require__(5187);
|
||
function getFileContents(context, filePath, options) {
|
||
if (options === void 0) { options = {}; }
|
||
return __awaiter(this, void 0, void 0, function () {
|
||
var _a, format;
|
||
return __generator(this, function (_b) {
|
||
_a = options.format, format = _a === void 0 ? "binary" : _a;
|
||
if (format !== "binary" && format !== "text") {
|
||
throw new layerr_1.Layerr({
|
||
info: {
|
||
code: types_1.ErrorCode.InvalidOutputFormat
|
||
}
|
||
}, "Invalid output format: " + format);
|
||
}
|
||
return [2 /*return*/, format === "text"
|
||
? getFileContentsString(context, filePath, options)
|
||
: getFileContentsBuffer(context, filePath, options)];
|
||
});
|
||
});
|
||
}
|
||
exports.getFileContents = getFileContents;
|
||
function getFileContentsBuffer(context, filePath, options) {
|
||
if (options === void 0) { options = {}; }
|
||
return __awaiter(this, void 0, void 0, function () {
|
||
var requestOptions, response;
|
||
return __generator(this, function (_a) {
|
||
switch (_a.label) {
|
||
case 0:
|
||
requestOptions = request_1.prepareRequestOptions({
|
||
url: url_1.joinURL(context.remoteURL, path_1.encodePath(filePath)),
|
||
method: "GET",
|
||
responseType: "arraybuffer"
|
||
}, context, options);
|
||
return [4 /*yield*/, request_1.request(requestOptions)];
|
||
case 1:
|
||
response = _a.sent();
|
||
response_1.handleResponseCode(context, response);
|
||
return [2 /*return*/, response_1.processResponsePayload(response, response.data, options.details)];
|
||
}
|
||
});
|
||
});
|
||
}
|
||
function getFileContentsString(context, filePath, options) {
|
||
if (options === void 0) { options = {}; }
|
||
return __awaiter(this, void 0, void 0, function () {
|
||
var requestOptions, response;
|
||
return __generator(this, function (_a) {
|
||
switch (_a.label) {
|
||
case 0:
|
||
requestOptions = request_1.prepareRequestOptions({
|
||
url: url_1.joinURL(context.remoteURL, path_1.encodePath(filePath)),
|
||
method: "GET",
|
||
responseType: "text"
|
||
}, context, options);
|
||
return [4 /*yield*/, request_1.request(requestOptions)];
|
||
case 1:
|
||
response = _a.sent();
|
||
response_1.handleResponseCode(context, response);
|
||
return [2 /*return*/, response_1.processResponsePayload(response, response.data, options.details)];
|
||
}
|
||
});
|
||
});
|
||
}
|
||
function getFileDownloadLink(context, filePath) {
|
||
var url = url_1.joinURL(context.remoteURL, path_1.encodePath(filePath));
|
||
var protocol = /^https:/i.test(url) ? "https" : "http";
|
||
switch (context.authType) {
|
||
case types_1.AuthType.None:
|
||
// Do nothing
|
||
break;
|
||
case types_1.AuthType.Password: {
|
||
var authPart = context.headers.Authorization.replace(/^Basic /i, "").trim();
|
||
var authContents = encode_1.fromBase64(authPart);
|
||
url = url.replace(/^https?:\/\//, protocol + "://" + authContents + "@");
|
||
break;
|
||
}
|
||
default:
|
||
throw new layerr_1.Layerr({
|
||
info: {
|
||
code: types_1.ErrorCode.LinkUnsupportedAuthType
|
||
}
|
||
}, "Unsupported auth type for file link: " + context.authType);
|
||
}
|
||
return url;
|
||
}
|
||
exports.getFileDownloadLink = getFileDownloadLink;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 5785:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
return new (P || (P = Promise))(function (resolve, reject) {
|
||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
});
|
||
};
|
||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||
function step(op) {
|
||
if (f) throw new TypeError("Generator is already executing.");
|
||
while (_) try {
|
||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||
switch (op[0]) {
|
||
case 0: case 1: t = op; break;
|
||
case 4: _.label++; return { value: op[1], done: false };
|
||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||
default:
|
||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||
if (t[2]) _.ops.pop();
|
||
_.trys.pop(); continue;
|
||
}
|
||
op = body.call(thisArg, _);
|
||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||
}
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.getQuota = void 0;
|
||
var request_1 = __nccwpck_require__(104);
|
||
var response_1 = __nccwpck_require__(7052);
|
||
var dav_1 = __nccwpck_require__(736);
|
||
var url_1 = __nccwpck_require__(1805);
|
||
var quota_1 = __nccwpck_require__(1604);
|
||
function getQuota(context, options) {
|
||
if (options === void 0) { options = {}; }
|
||
return __awaiter(this, void 0, void 0, function () {
|
||
var requestOptions, response, result, quota;
|
||
return __generator(this, function (_a) {
|
||
switch (_a.label) {
|
||
case 0:
|
||
requestOptions = request_1.prepareRequestOptions({
|
||
url: url_1.joinURL(context.remoteURL, "/"),
|
||
method: "PROPFIND",
|
||
headers: {
|
||
Accept: "text/plain",
|
||
Depth: "0"
|
||
},
|
||
responseType: "text"
|
||
}, context, options);
|
||
return [4 /*yield*/, request_1.request(requestOptions)];
|
||
case 1:
|
||
response = _a.sent();
|
||
response_1.handleResponseCode(context, response);
|
||
return [4 /*yield*/, dav_1.parseXML(response.data)];
|
||
case 2:
|
||
result = _a.sent();
|
||
quota = quota_1.parseQuota(result);
|
||
return [2 /*return*/, response_1.processResponsePayload(response, quota, options.details)];
|
||
}
|
||
});
|
||
});
|
||
}
|
||
exports.getQuota = getQuota;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 8154:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
return new (P || (P = Promise))(function (resolve, reject) {
|
||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
});
|
||
};
|
||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||
function step(op) {
|
||
if (f) throw new TypeError("Generator is already executing.");
|
||
while (_) try {
|
||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||
switch (op[0]) {
|
||
case 0: case 1: t = op; break;
|
||
case 4: _.label++; return { value: op[1], done: false };
|
||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||
default:
|
||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||
if (t[2]) _.ops.pop();
|
||
_.trys.pop(); continue;
|
||
}
|
||
op = body.call(thisArg, _);
|
||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||
}
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.moveFile = void 0;
|
||
var url_1 = __nccwpck_require__(1805);
|
||
var path_1 = __nccwpck_require__(2846);
|
||
var request_1 = __nccwpck_require__(104);
|
||
var response_1 = __nccwpck_require__(7052);
|
||
function moveFile(context, filename, destination, options) {
|
||
if (options === void 0) { options = {}; }
|
||
return __awaiter(this, void 0, void 0, function () {
|
||
var requestOptions, response;
|
||
return __generator(this, function (_a) {
|
||
switch (_a.label) {
|
||
case 0:
|
||
requestOptions = request_1.prepareRequestOptions({
|
||
url: url_1.joinURL(context.remoteURL, path_1.encodePath(filename)),
|
||
method: "MOVE",
|
||
headers: {
|
||
Destination: url_1.joinURL(context.remoteURL, path_1.encodePath(destination))
|
||
}
|
||
}, context, options);
|
||
return [4 /*yield*/, request_1.request(requestOptions)];
|
||
case 1:
|
||
response = _a.sent();
|
||
response_1.handleResponseCode(context, response);
|
||
return [2 /*return*/];
|
||
}
|
||
});
|
||
});
|
||
}
|
||
exports.moveFile = moveFile;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 4951:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
return new (P || (P = Promise))(function (resolve, reject) {
|
||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
});
|
||
};
|
||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||
function step(op) {
|
||
if (f) throw new TypeError("Generator is already executing.");
|
||
while (_) try {
|
||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||
switch (op[0]) {
|
||
case 0: case 1: t = op; break;
|
||
case 4: _.label++; return { value: op[1], done: false };
|
||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||
default:
|
||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||
if (t[2]) _.ops.pop();
|
||
_.trys.pop(); continue;
|
||
}
|
||
op = body.call(thisArg, _);
|
||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||
}
|
||
};
|
||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.getFileUploadLink = exports.putFileContents = void 0;
|
||
var layerr_1 = __nccwpck_require__(7546);
|
||
var stream_1 = __importDefault(__nccwpck_require__(2413));
|
||
var encode_1 = __nccwpck_require__(8282);
|
||
var url_1 = __nccwpck_require__(1805);
|
||
var path_1 = __nccwpck_require__(2846);
|
||
var request_1 = __nccwpck_require__(104);
|
||
var response_1 = __nccwpck_require__(7052);
|
||
var size_1 = __nccwpck_require__(6767);
|
||
var types_1 = __nccwpck_require__(5187);
|
||
function putFileContents(context, filePath, data, options) {
|
||
if (options === void 0) { options = {}; }
|
||
return __awaiter(this, void 0, void 0, function () {
|
||
var _a, contentLength, _b, overwrite, headers, requestOptions, response, error;
|
||
return __generator(this, function (_c) {
|
||
switch (_c.label) {
|
||
case 0:
|
||
_a = options.contentLength, contentLength = _a === void 0 ? true : _a, _b = options.overwrite, overwrite = _b === void 0 ? true : _b;
|
||
headers = {
|
||
"Content-Type": "application/octet-stream"
|
||
};
|
||
if (typeof WEB === "undefined" && data instanceof stream_1.default.Readable) {
|
||
// Skip, no content-length
|
||
}
|
||
else if (contentLength === false) {
|
||
// Skip, disabled
|
||
}
|
||
else if (typeof contentLength === "number") {
|
||
headers["Content-Length"] = "" + contentLength;
|
||
}
|
||
else {
|
||
headers["Content-Length"] = "" + size_1.calculateDataLength(data);
|
||
}
|
||
if (!overwrite) {
|
||
headers["If-None-Match"] = "*";
|
||
}
|
||
requestOptions = request_1.prepareRequestOptions({
|
||
url: url_1.joinURL(context.remoteURL, path_1.encodePath(filePath)),
|
||
method: "PUT",
|
||
headers: headers,
|
||
data: data
|
||
}, context, options);
|
||
return [4 /*yield*/, request_1.request(requestOptions)];
|
||
case 1:
|
||
response = _c.sent();
|
||
try {
|
||
response_1.handleResponseCode(context, response);
|
||
}
|
||
catch (err) {
|
||
error = err;
|
||
if (error.status === 412 && !overwrite) {
|
||
return [2 /*return*/, false];
|
||
}
|
||
else {
|
||
throw error;
|
||
}
|
||
}
|
||
return [2 /*return*/, true];
|
||
}
|
||
});
|
||
});
|
||
}
|
||
exports.putFileContents = putFileContents;
|
||
function getFileUploadLink(context, filePath) {
|
||
var url = url_1.joinURL(context.remoteURL, path_1.encodePath(filePath)) + "?Content-Type=application/octet-stream";
|
||
var protocol = /^https:/i.test(url) ? "https" : "http";
|
||
switch (context.authType) {
|
||
case types_1.AuthType.None:
|
||
// Do nothing
|
||
break;
|
||
case types_1.AuthType.Password: {
|
||
var authPart = context.headers.Authorization.replace(/^Basic /i, "").trim();
|
||
var authContents = encode_1.fromBase64(authPart);
|
||
url = url.replace(/^https?:\/\//, protocol + "://" + authContents + "@");
|
||
break;
|
||
}
|
||
default:
|
||
throw new layerr_1.Layerr({
|
||
info: {
|
||
code: types_1.ErrorCode.LinkUnsupportedAuthType
|
||
}
|
||
}, "Unsupported auth type for file link: " + context.authType);
|
||
}
|
||
return url;
|
||
}
|
||
exports.getFileUploadLink = getFileUploadLink;
|
||
|
||
|
||
/***/ }),
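/*
 * Usage sketch for the putFileContents module above (hypothetical values; `context` is an
 * already-prepared WebDAV context for the Nextcloud remote). With `overwrite: false` the
 * request carries "If-None-Match: *", so an existing remote file makes the server answer
 * HTTP 412 and the call resolves to false instead of throwing:
 *
 *   const ok = await putFileContents(context, "/artifacts/build.zip", zipBuffer, { overwrite: false });
 *   if (!ok) { console.warn("remote file already exists - upload skipped"); }
 */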
|
||
|
||
/***/ 2974:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
return new (P || (P = Promise))(function (resolve, reject) {
|
||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
});
|
||
};
|
||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||
function step(op) {
|
||
if (f) throw new TypeError("Generator is already executing.");
|
||
while (_) try {
|
||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||
switch (op[0]) {
|
||
case 0: case 1: t = op; break;
|
||
case 4: _.label++; return { value: op[1], done: false };
|
||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||
default:
|
||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||
if (t[2]) _.ops.pop();
|
||
_.trys.pop(); continue;
|
||
}
|
||
op = body.call(thisArg, _);
|
||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||
}
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.getStat = void 0;
|
||
var dav_1 = __nccwpck_require__(736);
|
||
var url_1 = __nccwpck_require__(1805);
|
||
var path_1 = __nccwpck_require__(2846);
|
||
var request_1 = __nccwpck_require__(104);
|
||
var response_1 = __nccwpck_require__(7052);
|
||
function getStat(context, filename, options) {
|
||
if (options === void 0) { options = {}; }
|
||
return __awaiter(this, void 0, void 0, function () {
|
||
var _a, isDetailed, requestOptions, response, result, stat;
|
||
return __generator(this, function (_b) {
|
||
switch (_b.label) {
|
||
case 0:
|
||
_a = options.details, isDetailed = _a === void 0 ? false : _a;
|
||
requestOptions = request_1.prepareRequestOptions({
|
||
url: url_1.joinURL(context.remoteURL, path_1.encodePath(filename)),
|
||
method: "PROPFIND",
|
||
headers: {
|
||
Accept: "text/plain",
|
||
Depth: "0"
|
||
},
|
||
responseType: "text"
|
||
}, context, options);
|
||
return [4 /*yield*/, request_1.request(requestOptions)];
|
||
case 1:
|
||
response = _b.sent();
|
||
response_1.handleResponseCode(context, response);
|
||
return [4 /*yield*/, dav_1.parseXML(response.data)];
|
||
case 2:
|
||
result = _b.sent();
|
||
stat = dav_1.parseStat(result, filename, isDetailed);
|
||
return [2 /*return*/, response_1.processResponsePayload(response, stat, isDetailed)];
|
||
}
|
||
});
|
||
});
|
||
}
|
||
exports.getStat = getStat;
|
||
|
||
|
||
/***/ }),
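/*
 * Sketch of how the getStat module above is typically used (names and paths are illustrative):
 * a PROPFIND with "Depth: 0" returns properties for a single path only, which parseStat turns
 * into { filename, basename, size, type, lastmod, etag, mime? }:
 *
 *   const stat = await getStat(context, "/artifacts/build.zip");
 *   if (stat.type === "directory") { /* handle collection *\/ }
 */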
|
||
|
||
/***/ 104:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.request = exports.prepareRequestOptions = void 0;
var axios_1 = __importDefault(__nccwpck_require__(7961));
var patcher_1 = __nccwpck_require__(1638);
var digest_1 = __nccwpck_require__(4209);
var merge_1 = __nccwpck_require__(7357);
var headers_1 = __nccwpck_require__(5345);
function _request(requestOptions) {
    return patcher_1.getPatcher().patchInline("request", function (options) { return axios_1.default(options); }, requestOptions);
}
function prepareRequestOptions(requestOptions, context, userOptions) {
    var finalOptions = merge_1.cloneShallow(requestOptions);
    finalOptions.headers = headers_1.mergeHeaders(context.headers, finalOptions.headers || {}, userOptions.headers || {});
    if (typeof userOptions.data !== "undefined") {
        finalOptions.data = userOptions.data;
    }
    if (context.httpAgent) {
        finalOptions.httpAgent = context.httpAgent;
    }
    if (context.httpsAgent) {
        finalOptions.httpsAgent = context.httpsAgent;
    }
    if (context.digest) {
        finalOptions._digest = context.digest;
    }
    if (typeof context.withCredentials === "boolean") {
        finalOptions.withCredentials = context.withCredentials;
    }
    if (context.maxContentLength) {
        finalOptions.maxContentLength = context.maxContentLength;
    }
    if (context.maxBodyLength) {
        finalOptions.maxBodyLength = context.maxBodyLength;
    }
    if (userOptions.hasOwnProperty("onUploadProgress")) {
        finalOptions.onUploadProgress = userOptions["onUploadProgress"];
    }
    // Take full control of all response status codes
    finalOptions.validateStatus = function () { return true; };
    return finalOptions;
}
exports.prepareRequestOptions = prepareRequestOptions;
function request(requestOptions) {
    // Client not configured for digest authentication
    if (!requestOptions._digest) {
        return _request(requestOptions);
    }
    // Remove client's digest authentication object from request options
    var _digest = requestOptions._digest;
    delete requestOptions._digest;
    // If client is already using digest authentication, include the digest authorization header
    if (_digest.hasDigestAuth) {
        requestOptions = merge_1.merge(requestOptions, {
            headers: {
                Authorization: digest_1.generateDigestAuthHeader(requestOptions, _digest)
            }
        });
    }
    // Perform the request and handle digest authentication
    return _request(requestOptions).then(function (response) {
        if (response.status == 401) {
            _digest.hasDigestAuth = digest_1.parseDigestAuth(response, _digest);
            if (_digest.hasDigestAuth) {
                requestOptions = merge_1.merge(requestOptions, {
                    headers: {
                        Authorization: digest_1.generateDigestAuthHeader(requestOptions, _digest)
                    }
                });
                return _request(requestOptions).then(function (response2) {
                    if (response2.status == 401) {
                        _digest.hasDigestAuth = false;
                    }
                    else {
                        _digest.nc++;
                    }
                    return response2;
                });
            }
        }
        else {
            _digest.nc++;
        }
        return response;
    });
}
exports.request = request;


/***/ }),

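/*
 * The request module above wraps axios with optional HTTP digest support: when a `_digest`
 * state object is attached, a 401 response is parsed for a digest challenge, the request is
 * replayed once with a freshly generated Authorization header, and the nonce count (`nc`) is
 * incremented on success. A minimal sketch, assuming `context.digest` has been set up by the
 * surrounding client code:
 *
 *   const options = prepareRequestOptions({ url: someUrl, method: "GET" }, context, {});
 *   const response = await request(options); // transparently retries once on a digest 401
 */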
/***/ 7052:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.processResponsePayload = exports.processGlobFilter = exports.handleResponseCode = void 0;
var minimatch_1 = __importDefault(__nccwpck_require__(7066));
function handleResponseCode(context, response) {
    var status = response.status;
    if (status === 401 && context.digest)
        return response;
    if (status >= 400) {
        var err = new Error("Invalid response: " + status + " " + response.statusText);
        err.status = status;
        err.response = response;
        throw err;
    }
    return response;
}
exports.handleResponseCode = handleResponseCode;
function processGlobFilter(files, glob) {
    return files.filter(function (file) { return minimatch_1.default(file.filename, glob, { matchBase: true }); });
}
exports.processGlobFilter = processGlobFilter;
function processResponsePayload(response, data, isDetailed) {
    if (isDetailed === void 0) { isDetailed = false; }
    return isDetailed
        ? {
            data: data,
            headers: response.headers || {},
            status: response.status,
            statusText: response.statusText
        }
        : data;
}
exports.processResponsePayload = processResponsePayload;


/***/ }),

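/*
 * handleResponseCode above throws for any status >= 400 (except a 401 on a digest-enabled
 * context, which the request layer retries), and processResponsePayload only wraps the data
 * when detailed results were requested. Illustrative shape, values assumed:
 *
 *   processResponsePayload(response, files, true);
 *   // => { data: files, headers: {...}, status: 207, statusText: "Multi-Status" }
 */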
/***/ 2224:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.ha1Compute = void 0;
var md5_1 = __importDefault(__nccwpck_require__(5423));
function ha1Compute(algorithm, user, realm, pass, nonce, cnonce) {
    var ha1 = md5_1.default(user + ":" + realm + ":" + pass);
    if (algorithm && algorithm.toLowerCase() === "md5-sess") {
        return md5_1.default(ha1 + ":" + nonce + ":" + cnonce);
    }
    return ha1;
}
exports.ha1Compute = ha1Compute;


/***/ }),

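/*
 * ha1Compute above implements the HA1 part of HTTP digest authentication (RFC 2617/7616):
 * HA1 = MD5(user ":" realm ":" pass), or for "MD5-sess", MD5(HA1 ":" nonce ":" cnonce).
 * Rough example with made-up inputs:
 *
 *   ha1Compute("MD5", "bot", "nextcloud", "secret", "abc", "def");
 *   // => md5("bot:nextcloud:secret")
 */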
/***/ 736:
|
||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||
|
||
"use strict";
|
||
|
||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.translateDiskSpace = exports.parseStat = exports.prepareFileFromProps = exports.parseXML = void 0;
|
||
var path_posix_1 = __importDefault(__nccwpck_require__(4502));
|
||
var fast_xml_parser_1 = __importDefault(__nccwpck_require__(704));
|
||
var nested_property_1 = __importDefault(__nccwpck_require__(7049));
|
||
var encode_1 = __nccwpck_require__(8282);
|
||
var path_1 = __nccwpck_require__(2846);
|
||
var PropertyType;
|
||
(function (PropertyType) {
|
||
PropertyType["Array"] = "array";
|
||
PropertyType["Object"] = "object";
|
||
PropertyType["Original"] = "original";
|
||
})(PropertyType || (PropertyType = {}));
|
||
function getPropertyOfType(obj, prop, type) {
|
||
if (type === void 0) { type = PropertyType.Original; }
|
||
var val = nested_property_1.default.get(obj, prop);
|
||
if (type === "array" && Array.isArray(val) === false) {
|
||
return [val];
|
||
}
|
||
else if (type === "object" && Array.isArray(val)) {
|
||
return val[0];
|
||
}
|
||
return val;
|
||
}
|
||
function normaliseResponse(response) {
|
||
var output = Object.assign({}, response);
|
||
nested_property_1.default.set(output, "propstat", getPropertyOfType(output, "propstat", PropertyType.Object));
|
||
nested_property_1.default.set(output, "propstat.prop", getPropertyOfType(output, "propstat.prop", PropertyType.Object));
|
||
return output;
|
||
}
|
||
function normaliseResult(result) {
|
||
var multistatus = result.multistatus;
|
||
if (multistatus === "") {
|
||
return {
|
||
multistatus: {
|
||
response: []
|
||
}
|
||
};
|
||
}
|
||
if (!multistatus) {
|
||
throw new Error("Invalid response: No root multistatus found");
|
||
}
|
||
var output = {
|
||
multistatus: Array.isArray(multistatus) ? multistatus[0] : multistatus
|
||
};
|
||
nested_property_1.default.set(output, "multistatus.response", getPropertyOfType(output, "multistatus.response", PropertyType.Array));
|
||
nested_property_1.default.set(output, "multistatus.response", nested_property_1.default.get(output, "multistatus.response").map(function (response) { return normaliseResponse(response); }));
|
||
return output;
|
||
}
|
||
function parseXML(xml) {
|
||
return new Promise(function (resolve) {
|
||
var result = fast_xml_parser_1.default.parse(xml, {
|
||
arrayMode: false,
|
||
ignoreNameSpace: true
|
||
// // We don't use the processors here as decoding is done manually
|
||
// // later on - decoding early would break some path checks.
|
||
// attrValueProcessor: val => decodeHTMLEntities(decodeURIComponent(val)),
|
||
// tagValueProcessor: val => decodeHTMLEntities(decodeURIComponent(val))
|
||
});
|
||
resolve(normaliseResult(result));
|
||
});
|
||
}
|
||
exports.parseXML = parseXML;
|
||
function prepareFileFromProps(props, rawFilename, isDetailed) {
|
||
if (isDetailed === void 0) { isDetailed = false; }
|
||
// Last modified time, raw size, item type and mime
|
||
var _a = props.getlastmodified, lastMod = _a === void 0 ? null : _a, _b = props.getcontentlength, rawSize = _b === void 0 ? "0" : _b, _c = props.resourcetype, resourceType = _c === void 0 ? null : _c, _d = props.getcontenttype, mimeType = _d === void 0 ? null : _d, _e = props.getetag, etag = _e === void 0 ? null : _e;
|
||
var type = resourceType &&
|
||
typeof resourceType === "object" &&
|
||
typeof resourceType.collection !== "undefined"
|
||
? "directory"
|
||
: "file";
|
||
var filename = encode_1.decodeHTMLEntities(rawFilename);
|
||
var stat = {
|
||
filename: filename,
|
||
basename: path_posix_1.default.basename(filename),
|
||
lastmod: lastMod,
|
||
size: parseInt(rawSize, 10),
|
||
type: type,
|
||
etag: typeof etag === "string" ? etag.replace(/"/g, "") : null
|
||
};
|
||
if (type === "file") {
|
||
stat.mime = mimeType && typeof mimeType === "string" ? mimeType.split(";")[0] : "";
|
||
}
|
||
if (isDetailed) {
|
||
stat.props = props;
|
||
}
|
||
return stat;
|
||
}
|
||
exports.prepareFileFromProps = prepareFileFromProps;
|
||
function parseStat(result, filename, isDetailed) {
|
||
if (isDetailed === void 0) { isDetailed = false; }
|
||
var responseItem = null;
|
||
try {
|
||
responseItem = result.multistatus.response[0];
|
||
}
|
||
catch (e) {
|
||
/* ignore */
|
||
}
|
||
if (!responseItem) {
|
||
throw new Error("Failed getting item stat: bad response");
|
||
}
|
||
var _a = responseItem.propstat, props = _a.prop, statusLine = _a.status;
|
||
// As defined in https://tools.ietf.org/html/rfc2068#section-6.1
|
||
var _b = statusLine.split(" ", 3), _ = _b[0], statusCodeStr = _b[1], statusText = _b[2];
|
||
var statusCode = parseInt(statusCodeStr, 10);
|
||
if (statusCode >= 400) {
|
||
var err = new Error("Invalid response: " + statusCode + " " + statusText);
|
||
err.status = statusCode;
|
||
throw err;
|
||
}
|
||
var filePath = path_1.normalisePath(filename);
|
||
return prepareFileFromProps(props, filePath, isDetailed);
|
||
}
|
||
exports.parseStat = parseStat;
|
||
function translateDiskSpace(value) {
|
||
switch (value.toString()) {
|
||
case "-3":
|
||
return "unlimited";
|
||
case "-2":
|
||
/* falls-through */
|
||
case "-1":
|
||
// -1 is non-computed
|
||
return "unknown";
|
||
default:
|
||
return parseInt(value, 10);
|
||
}
|
||
}
|
||
exports.translateDiskSpace = translateDiskSpace;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 8282:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.toBase64 = exports.fromBase64 = exports.decodeHTMLEntities = void 0;
var base_64_1 = __nccwpck_require__(8091);
function decodeHTMLEntities(text) {
    if (typeof WEB === "undefined") {
        // Node
        var he = __nccwpck_require__(8190);
        return he.decode(text);
    }
    else {
        // Nasty browser way
        var txt = document.createElement("textarea");
        txt.innerHTML = text;
        return txt.value;
    }
}
exports.decodeHTMLEntities = decodeHTMLEntities;
function fromBase64(text) {
    return base_64_1.decode(text);
}
exports.fromBase64 = fromBase64;
function toBase64(text) {
    return base_64_1.encode(text);
}
exports.toBase64 = toBase64;


/***/ }),

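/*
 * The encode helpers above back Basic authentication elsewhere in this bundle:
 * toBase64(username + ":" + password) produces the credential part of the header.
 * Sketch with placeholder credentials:
 *
 *   const header = "Basic " + toBase64("nc-user:app-password");
 *   // => "Basic bmMtdXNlcjphcHAtcGFzc3dvcmQ="
 */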
/***/ 5345:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.mergeHeaders = void 0;
function mergeHeaders() {
    var headerPayloads = [];
    for (var _i = 0; _i < arguments.length; _i++) {
        headerPayloads[_i] = arguments[_i];
    }
    if (headerPayloads.length === 0)
        return {};
    var headerKeys = {};
    return headerPayloads.reduce(function (output, headers) {
        Object.keys(headers).forEach(function (header) {
            var lowerHeader = header.toLowerCase();
            if (headerKeys.hasOwnProperty(lowerHeader)) {
                output[headerKeys[lowerHeader]] = headers[header];
            }
            else {
                headerKeys[lowerHeader] = header;
                output[header] = headers[header];
            }
        });
        return output;
    }, {});
}
exports.mergeHeaders = mergeHeaders;


/***/ }),

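/*
 * mergeHeaders above folds header objects left to right, matching names case-insensitively
 * so a later value replaces an earlier one instead of duplicating it. Illustrative call:
 *
 *   mergeHeaders({ Accept: "text/plain" }, { accept: "application/xml" });
 *   // => { Accept: "application/xml" }
 */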
/***/ 7357:
|
||
/***/ (function(__unused_webpack_module, exports) {
|
||
|
||
"use strict";
|
||
|
||
var __spreadArray = (this && this.__spreadArray) || function (to, from) {
|
||
for (var i = 0, il = from.length, j = to.length; i < il; i++, j++)
|
||
to[j] = from[i];
|
||
return to;
|
||
};
|
||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||
exports.merge = exports.cloneShallow = void 0;
|
||
function cloneShallow(obj) {
|
||
return isPlainObject(obj)
|
||
? Object.assign({}, obj)
|
||
: Object.setPrototypeOf(Object.assign({}, obj), Object.getPrototypeOf(obj));
|
||
}
|
||
exports.cloneShallow = cloneShallow;
|
||
function isPlainObject(obj) {
|
||
if (typeof obj !== "object" ||
|
||
obj === null ||
|
||
Object.prototype.toString.call(obj) != "[object Object]") {
|
||
// Not an object
|
||
return false;
|
||
}
|
||
if (Object.getPrototypeOf(obj) === null) {
|
||
return true;
|
||
}
|
||
var proto = obj;
|
||
// Find the prototype
|
||
while (Object.getPrototypeOf(proto) !== null) {
|
||
proto = Object.getPrototypeOf(proto);
|
||
}
|
||
return Object.getPrototypeOf(obj) === proto;
|
||
}
|
||
function merge() {
|
||
var args = [];
|
||
for (var _i = 0; _i < arguments.length; _i++) {
|
||
args[_i] = arguments[_i];
|
||
}
|
||
var output = null, items = __spreadArray([], args);
|
||
while (items.length > 0) {
|
||
var nextItem = items.shift();
|
||
if (!output) {
|
||
output = cloneShallow(nextItem);
|
||
}
|
||
else {
|
||
output = mergeObjects(output, nextItem);
|
||
}
|
||
}
|
||
return output;
|
||
}
|
||
exports.merge = merge;
|
||
function mergeObjects(obj1, obj2) {
|
||
var output = cloneShallow(obj1);
|
||
Object.keys(obj2).forEach(function (key) {
|
||
if (!output.hasOwnProperty(key)) {
|
||
output[key] = obj2[key];
|
||
return;
|
||
}
|
||
if (Array.isArray(obj2[key])) {
|
||
output[key] = Array.isArray(output[key])
|
||
? __spreadArray(__spreadArray([], output[key]), obj2[key]) : __spreadArray([], obj2[key]);
|
||
}
|
||
else if (typeof obj2[key] === "object" && !!obj2[key]) {
|
||
output[key] =
|
||
typeof output[key] === "object" && !!output[key]
|
||
? mergeObjects(output[key], obj2[key])
|
||
: cloneShallow(obj2[key]);
|
||
}
|
||
else {
|
||
output[key] = obj2[key];
|
||
}
|
||
});
|
||
return output;
|
||
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 2846:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.normalisePath = exports.getAllDirectories = exports.encodePath = void 0;
var path_posix_1 = __nccwpck_require__(4502);
var SEP_PATH_POSIX = "__PATH_SEPARATOR_POSIX__";
var SEP_PATH_WINDOWS = "__PATH_SEPARATOR_WINDOWS__";
function encodePath(path) {
    var replaced = path.replace(/\//g, SEP_PATH_POSIX).replace(/\\\\/g, SEP_PATH_WINDOWS);
    var formatted = encodeURIComponent(replaced);
    return formatted
        .split(SEP_PATH_WINDOWS)
        .join("\\\\")
        .split(SEP_PATH_POSIX)
        .join("/");
}
exports.encodePath = encodePath;
function getAllDirectories(path) {
    if (!path || path === "/")
        return [];
    var currentPath = path;
    var output = [];
    do {
        output.push(currentPath);
        currentPath = path_posix_1.dirname(currentPath);
    } while (currentPath && currentPath !== "/");
    return output;
}
exports.getAllDirectories = getAllDirectories;
function normalisePath(pathStr) {
    var normalisedPath = pathStr;
    if (normalisedPath[0] !== "/") {
        normalisedPath = "/" + normalisedPath;
    }
    if (/^.+\/$/.test(normalisedPath)) {
        normalisedPath = normalisedPath.substr(0, normalisedPath.length - 1);
    }
    return normalisedPath;
}
exports.normalisePath = normalisePath;


/***/ }),

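/*
 * encodePath above URI-encodes each path segment while leaving "/" separators intact, by
 * swapping separators for sentinel tokens around encodeURIComponent. Example (assumed input):
 *
 *   encodePath("build artifacts/report käse.txt");
 *   // => "build%20artifacts/report%20k%C3%A4se.txt"
 */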
/***/ 1604:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.parseQuota = void 0;
var dav_1 = __nccwpck_require__(736);
function parseQuota(result) {
    try {
        var responseItem = result.multistatus.response[0];
        var _a = responseItem.propstat.prop, quotaUsed = _a["quota-used-bytes"], quotaAvail = _a["quota-available-bytes"];
        return typeof quotaUsed !== "undefined" && typeof quotaAvail !== "undefined"
            ? {
                used: parseInt(quotaUsed, 10),
                available: dav_1.translateDiskSpace(quotaAvail)
            }
            : null;
    }
    catch (err) {
        /* ignore */
    }
    return null;
}
exports.parseQuota = parseQuota;


/***/ }),

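/*
 * parseQuota above reads the DAV "quota-used-bytes" / "quota-available-bytes" properties from
 * the PROPFIND result; translateDiskSpace maps the sentinel values (-3 => "unlimited",
 * -2/-1 => "unknown"). Illustrative output for an account with roughly 1 GiB of quota and
 * 50 MiB already used (numbers assumed):
 *
 *   { used: 52428800, available: 1021313024 }
 */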
/***/ 6767:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.calculateDataLength = void 0;
var layerr_1 = __nccwpck_require__(7546);
var arrayBuffer_1 = __nccwpck_require__(7461);
var buffer_1 = __nccwpck_require__(6201);
var types_1 = __nccwpck_require__(5187);
function calculateDataLength(data) {
    if (arrayBuffer_1.isArrayBuffer(data)) {
        return data.byteLength;
    }
    else if (buffer_1.isBuffer(data)) {
        return data.length;
    }
    else if (typeof data === "string") {
        return data.length;
    }
    throw new layerr_1.Layerr({
        info: {
            code: types_1.ErrorCode.DataTypeNoLength
        }
    }, "Cannot calculate data length: Invalid type");
}
exports.calculateDataLength = calculateDataLength;


/***/ }),

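/*
 * calculateDataLength above sizes the PUT body for the Content-Length header used by
 * putFileContents. Note that for plain strings it counts string length rather than encoded
 * bytes, so callers uploading non-ASCII text may prefer passing a Buffer. Sketch (assumed data):
 *
 *   calculateDataLength(Buffer.from("héllo", "utf8")); // => 6
 *   calculateDataLength("héllo");                      // => 5
 */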
/***/ 1805:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.normaliseHREF = exports.joinURL = exports.extractURLPath = void 0;
var url_parse_1 = __importDefault(__nccwpck_require__(1785));
var url_join_1 = __importDefault(__nccwpck_require__(1971));
var path_1 = __nccwpck_require__(2846);
function extractURLPath(fullURL) {
    var url = new url_parse_1.default(fullURL);
    var urlPath = url.pathname;
    if (urlPath.length <= 0) {
        urlPath = "/";
    }
    return path_1.normalisePath(urlPath);
}
exports.extractURLPath = extractURLPath;
function joinURL() {
    var parts = [];
    for (var _i = 0; _i < arguments.length; _i++) {
        parts[_i] = arguments[_i];
    }
    return url_join_1.default(parts.reduce(function (output, nextPart, partIndex) {
        if (partIndex === 0 ||
            nextPart !== "/" ||
            (nextPart === "/" && output[output.length - 1] !== "/")) {
            output.push(nextPart);
        }
        return output;
    }, []));
}
exports.joinURL = joinURL;
function normaliseHREF(href) {
    var normalisedHref = href.replace(/^https?:\/\/[^\/]+/, "");
    return normalisedHref;
}
exports.normaliseHREF = normaliseHREF;


/***/ }),

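/*
 * joinURL above filters out duplicate standalone "/" arguments and delegates to url-join,
 * which normalises the remaining slashes. Illustrative values (server URL and path assumed):
 *
 *   joinURL("https://cloud.example.com/remote.php/dav/files/user/", encodePath("dir/a b.txt"));
 *   // => "https://cloud.example.com/remote.php/dav/files/user/dir/a%20b.txt"
 */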
/***/ 5187:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.ErrorCode = exports.AuthType = void 0;
var AuthType;
(function (AuthType) {
    AuthType["Digest"] = "digest";
    AuthType["None"] = "none";
    AuthType["Password"] = "password";
    AuthType["Token"] = "token";
})(AuthType = exports.AuthType || (exports.AuthType = {}));
var ErrorCode;
(function (ErrorCode) {
    ErrorCode["DataTypeNoLength"] = "data-type-no-length";
    ErrorCode["InvalidAuthType"] = "invalid-auth-type";
    ErrorCode["InvalidOutputFormat"] = "invalid-output-format";
    ErrorCode["LinkUnsupportedAuthType"] = "link-unsupported-auth";
})(ErrorCode = exports.ErrorCode || (exports.ErrorCode = {}));


/***/ }),

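/*
 * The enums above are the auth/error vocabulary shared by the bundled webdav client. For
 * example, getFileDownloadLink() and getFileUploadLink() earlier in this bundle only support
 * AuthType.None and AuthType.Password, and throw a Layerr error tagged
 * ErrorCode.LinkUnsupportedAuthType for other auth types.
 */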
/***/ 682:
/***/ ((module) => {

// Returns a wrapper function that returns a wrapped callback
// The wrapper function should do some stuff, and return a
// presumably different callback function.
// This makes sure that own properties are retained, so that
// decorations and such are not lost along the way.
module.exports = wrappy
function wrappy (fn, cb) {
  if (fn && cb) return wrappy(fn)(cb)

  if (typeof fn !== 'function')
    throw new TypeError('need wrapper function')

  Object.keys(fn).forEach(function (k) {
    wrapper[k] = fn[k]
  })

  return wrapper

  function wrapper() {
    var args = new Array(arguments.length)
    for (var i = 0; i < args.length; i++) {
      args[i] = arguments[i]
    }
    var ret = fn.apply(this, args)
    var cb = args[args.length-1]
    if (typeof ret === 'function' && ret !== cb) {
      Object.keys(cb).forEach(function (k) {
        ret[k] = cb[k]
      })
    }
    return ret
  }
}


/***/ }),

/***/ 1922:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
/**
|
||
* ZipStream
|
||
*
|
||
* @ignore
|
||
* @license [MIT]{@link https://github.com/archiverjs/node-zip-stream/blob/master/LICENSE}
|
||
* @copyright (c) 2014 Chris Talkington, contributors.
|
||
*/
|
||
var inherits = __nccwpck_require__(1669).inherits;
|
||
|
||
var ZipArchiveOutputStream = __nccwpck_require__(2247).ZipArchiveOutputStream;
|
||
var ZipArchiveEntry = __nccwpck_require__(2247).ZipArchiveEntry;
|
||
|
||
var util = __nccwpck_require__(9451);
|
||
|
||
/**
|
||
* @constructor
|
||
* @extends external:ZipArchiveOutputStream
|
||
* @param {Object} [options]
|
||
* @param {String} [options.comment] Sets the zip archive comment.
|
||
* @param {Boolean} [options.forceLocalTime=false] Forces the archive to contain local file times instead of UTC.
|
||
* @param {Boolean} [options.forceZip64=false] Forces the archive to contain ZIP64 headers.
|
||
* @param {Boolean} [options.store=false] Sets the compression method to STORE.
|
||
* @param {Object} [options.zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options}
|
||
* to control compression.
|
||
*/
|
||
var ZipStream = module.exports = function(options) {
|
||
if (!(this instanceof ZipStream)) {
|
||
return new ZipStream(options);
|
||
}
|
||
|
||
options = this.options = options || {};
|
||
options.zlib = options.zlib || {};
|
||
|
||
ZipArchiveOutputStream.call(this, options);
|
||
|
||
if (typeof options.level === 'number' && options.level >= 0) {
|
||
options.zlib.level = options.level;
|
||
delete options.level;
|
||
}
|
||
|
||
if (!options.forceZip64 && typeof options.zlib.level === 'number' && options.zlib.level === 0) {
|
||
options.store = true;
|
||
}
|
||
|
||
options.namePrependSlash = options.namePrependSlash || false;
|
||
|
||
if (options.comment && options.comment.length > 0) {
|
||
this.setComment(options.comment);
|
||
}
|
||
};
|
||
|
||
inherits(ZipStream, ZipArchiveOutputStream);
|
||
|
||
/**
|
||
* Normalizes entry data with fallbacks for key properties.
|
||
*
|
||
* @private
|
||
* @param {Object} data
|
||
* @return {Object}
|
||
*/
|
||
ZipStream.prototype._normalizeFileData = function(data) {
|
||
data = util.defaults(data, {
|
||
type: 'file',
|
||
name: null,
|
||
namePrependSlash: this.options.namePrependSlash,
|
||
linkname: null,
|
||
date: null,
|
||
mode: null,
|
||
store: this.options.store,
|
||
comment: ''
|
||
});
|
||
|
||
var isDir = data.type === 'directory';
|
||
var isSymlink = data.type === 'symlink';
|
||
|
||
if (data.name) {
|
||
data.name = util.sanitizePath(data.name);
|
||
|
||
if (!isSymlink && data.name.slice(-1) === '/') {
|
||
isDir = true;
|
||
data.type = 'directory';
|
||
} else if (isDir) {
|
||
data.name += '/';
|
||
}
|
||
}
|
||
|
||
if (isDir || isSymlink) {
|
||
data.store = true;
|
||
}
|
||
|
||
data.date = util.dateify(data.date);
|
||
|
||
return data;
|
||
};
|
||
|
||
/**
|
||
* Appends an entry given an input source (text string, buffer, or stream).
|
||
*
|
||
* @param {(Buffer|Stream|String)} source The input source.
|
||
* @param {Object} data
|
||
* @param {String} data.name Sets the entry name including internal path.
|
||
* @param {String} [data.comment] Sets the entry comment.
|
||
* @param {(String|Date)} [data.date=NOW()] Sets the entry date.
|
||
* @param {Number} [data.mode=D:0755/F:0644] Sets the entry permissions.
|
||
* @param {Boolean} [data.store=options.store] Sets the compression method to STORE.
|
||
* @param {String} [data.type=file] Sets the entry type. Defaults to `directory`
|
||
* if name ends with trailing slash.
|
||
* @param {Function} callback
|
||
* @return this
|
||
*/
|
||
ZipStream.prototype.entry = function(source, data, callback) {
|
||
if (typeof callback !== 'function') {
|
||
callback = this._emitErrorCallback.bind(this);
|
||
}
|
||
|
||
data = this._normalizeFileData(data);
|
||
|
||
if (data.type !== 'file' && data.type !== 'directory' && data.type !== 'symlink') {
|
||
callback(new Error(data.type + ' entries not currently supported'));
|
||
return;
|
||
}
|
||
|
||
if (typeof data.name !== 'string' || data.name.length === 0) {
|
||
callback(new Error('entry name must be a non-empty string value'));
|
||
return;
|
||
}
|
||
|
||
if (data.type === 'symlink' && typeof data.linkname !== 'string') {
|
||
callback(new Error('entry linkname must be a non-empty string value when type equals symlink'));
|
||
return;
|
||
}
|
||
|
||
var entry = new ZipArchiveEntry(data.name);
|
||
entry.setTime(data.date, this.options.forceLocalTime);
|
||
|
||
if (data.namePrependSlash) {
|
||
entry.setName(data.name, true);
|
||
}
|
||
|
||
if (data.store) {
|
||
entry.setMethod(0);
|
||
}
|
||
|
||
if (data.comment.length > 0) {
|
||
entry.setComment(data.comment);
|
||
}
|
||
|
||
if (data.type === 'symlink' && typeof data.mode !== 'number') {
|
||
data.mode = 40960; // 0120000
|
||
}
|
||
|
||
if (typeof data.mode === 'number') {
|
||
if (data.type === 'symlink') {
|
||
data.mode |= 40960;
|
||
}
|
||
|
||
entry.setUnixMode(data.mode);
|
||
}
|
||
|
||
if (data.type === 'symlink' && typeof data.linkname === 'string') {
|
||
source = Buffer.from(data.linkname);
|
||
}
|
||
|
||
return ZipArchiveOutputStream.prototype.entry.call(this, entry, source, callback);
|
||
};
|
||
|
||
/**
|
||
* Finalizes the instance and prevents further appending to the archive
|
||
* structure (queue will continue til drained).
|
||
*
|
||
* @return void
|
||
*/
|
||
ZipStream.prototype.finalize = function() {
|
||
this.finish();
|
||
};
|
||
|
||
/**
|
||
* Returns the current number of bytes written to this stream.
|
||
* @function ZipStream#getBytesWritten
|
||
* @returns {Number}
|
||
*/
|
||
|
||
/**
|
||
* Compress Commons ZipArchiveOutputStream
|
||
* @external ZipArchiveOutputStream
|
||
* @see {@link https://github.com/archiverjs/node-compress-commons}
|
||
*/
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 882:
/***/ ((module) => {

module.exports = eval("require")("debug");


/***/ }),

/***/ 974:
/***/ ((module) => {

module.exports = eval("require")("encoding");


/***/ }),

/***/ 696:
|
||
/***/ ((module) => {
|
||
|
||
"use strict";
|
||
module.exports = JSON.parse('{"_from":"axios@^0.21.1","_id":"axios@0.21.1","_inBundle":false,"_integrity":"sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA==","_location":"/axios","_phantomChildren":{},"_requested":{"type":"range","registry":true,"raw":"axios@^0.21.1","name":"axios","escapedName":"axios","rawSpec":"^0.21.1","saveSpec":null,"fetchSpec":"^0.21.1"},"_requiredBy":["/webdav"],"_resolved":"https://registry.npmjs.org/axios/-/axios-0.21.1.tgz","_shasum":"22563481962f4d6bde9a76d516ef0e5d3c09b2b8","_spec":"axios@^0.21.1","_where":"/home/trym/source/linux/repos/nextcloud-artifacts-action/node_modules/webdav","author":{"name":"Matt Zabriskie"},"browser":{"./lib/adapters/http.js":"./lib/adapters/xhr.js"},"bugs":{"url":"https://github.com/axios/axios/issues"},"bundleDependencies":false,"bundlesize":[{"path":"./dist/axios.min.js","threshold":"5kB"}],"dependencies":{"follow-redirects":"^1.10.0"},"deprecated":false,"description":"Promise based HTTP client for the browser and node.js","devDependencies":{"bundlesize":"^0.17.0","coveralls":"^3.0.0","es6-promise":"^4.2.4","grunt":"^1.0.2","grunt-banner":"^0.6.0","grunt-cli":"^1.2.0","grunt-contrib-clean":"^1.1.0","grunt-contrib-watch":"^1.0.0","grunt-eslint":"^20.1.0","grunt-karma":"^2.0.0","grunt-mocha-test":"^0.13.3","grunt-ts":"^6.0.0-beta.19","grunt-webpack":"^1.0.18","istanbul-instrumenter-loader":"^1.0.0","jasmine-core":"^2.4.1","karma":"^1.3.0","karma-chrome-launcher":"^2.2.0","karma-coverage":"^1.1.1","karma-firefox-launcher":"^1.1.0","karma-jasmine":"^1.1.1","karma-jasmine-ajax":"^0.1.13","karma-opera-launcher":"^1.0.0","karma-safari-launcher":"^1.0.0","karma-sauce-launcher":"^1.2.0","karma-sinon":"^1.0.5","karma-sourcemap-loader":"^0.3.7","karma-webpack":"^1.7.0","load-grunt-tasks":"^3.5.2","minimist":"^1.2.0","mocha":"^5.2.0","sinon":"^4.5.0","typescript":"^2.8.1","url-search-params":"^0.10.0","webpack":"^1.13.1","webpack-dev-server":"^1.14.1"},"homepage":"https://github.com/axios/axios","jsdelivr":"dist/axios.min.js","keywords":["xhr","http","ajax","promise","node"],"license":"MIT","main":"index.js","name":"axios","repository":{"type":"git","url":"git+https://github.com/axios/axios.git"},"scripts":{"build":"NODE_ENV=production grunt build","coveralls":"cat coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js","examples":"node ./examples/server.js","fix":"eslint --fix lib/**/*.js","postversion":"git push && git push --tags","preversion":"npm test","start":"node ./sandbox/server.js","test":"grunt test && bundlesize","version":"npm run build && grunt version && git add -A dist && git add CHANGELOG.md bower.json package.json"},"typings":"./index.d.ts","unpkg":"dist/axios.min.js","version":"0.21.1"}');

/***/ }),
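
/*
 * Module 696 above inlines axios's package.json as a parsed JSON value.
 * The numbered modules that follow (2357 "assert" through 8761 "zlib") are
 * Node.js core modules left external by the bundler: each simply forwards
 * to the runtime require(), mapping the numeric webpack id to the builtin name.
 */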

/***/ 2357:
/***/ ((module) => {

"use strict";
module.exports = require("assert");;

/***/ }),

/***/ 4293:
/***/ ((module) => {

"use strict";
module.exports = require("buffer");;

/***/ }),

/***/ 7619:
/***/ ((module) => {

"use strict";
module.exports = require("constants");;

/***/ }),

/***/ 6417:
/***/ ((module) => {

"use strict";
module.exports = require("crypto");;

/***/ }),

/***/ 8614:
/***/ ((module) => {

"use strict";
module.exports = require("events");;

/***/ }),

/***/ 5747:
/***/ ((module) => {

"use strict";
module.exports = require("fs");;

/***/ }),

/***/ 8605:
/***/ ((module) => {

"use strict";
module.exports = require("http");;

/***/ }),

/***/ 7211:
/***/ ((module) => {

"use strict";
module.exports = require("https");;

/***/ }),

/***/ 2087:
/***/ ((module) => {

"use strict";
module.exports = require("os");;

/***/ }),

/***/ 5622:
/***/ ((module) => {

"use strict";
module.exports = require("path");;

/***/ }),

/***/ 2413:
/***/ ((module) => {

"use strict";
module.exports = require("stream");;

/***/ }),

/***/ 4304:
/***/ ((module) => {

"use strict";
module.exports = require("string_decoder");;

/***/ }),

/***/ 8835:
/***/ ((module) => {

"use strict";
module.exports = require("url");;

/***/ }),

/***/ 1669:
/***/ ((module) => {

"use strict";
module.exports = require("util");;

/***/ }),

/***/ 8761:
/***/ ((module) => {

"use strict";
module.exports = require("zlib");;

/***/ })

/******/ });
/************************************************************************/
/******/   // The module cache
/******/   var __webpack_module_cache__ = {};
/******/   
/******/   // The require function
/******/   function __nccwpck_require__(moduleId) {
/******/     // Check if module is in cache
/******/     var cachedModule = __webpack_module_cache__[moduleId];
/******/     if (cachedModule !== undefined) {
/******/       return cachedModule.exports;
/******/     }
/******/     // Create a new module (and put it into the cache)
/******/     var module = __webpack_module_cache__[moduleId] = {
/******/       id: moduleId,
/******/       loaded: false,
/******/       exports: {}
/******/     };
/******/   
/******/     // Execute the module function
/******/     var threw = true;
/******/     try {
/******/       __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__);
/******/       threw = false;
/******/     } finally {
/******/       if (threw) delete __webpack_module_cache__[moduleId];
/******/     }
/******/   
/******/     // Flag the module as loaded
/******/     module.loaded = true;
/******/   
/******/     // Return the exports of the module
/******/     return module.exports;
/******/   }
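/******/   // Illustrative note: the bundled modules above request their dependencies
/******/   // by numeric id rather than by package name, e.g.
/******/   //   const fs = __nccwpck_require__(5747);   // resolves to require("fs")
/******/   // A repeated call with the same id returns the cached module.exports.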
/******/   
/************************************************************************/
/******/   /* webpack/runtime/node module decorator */
/******/   (() => {
/******/     __nccwpck_require__.nmd = (module) => {
/******/       module.paths = [];
/******/       if (!module.children) module.children = [];
/******/       return module;
/******/     };
/******/   })();
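/******/   // The "node module decorator" above adds empty `paths` and `children`
/******/   // arrays to a module object so it behaves more like a regular Node.js
/******/   // module for code that inspects those properties.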
/******/   
/******/   /* webpack/runtime/compat */
/******/   
/******/   if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/";/************************************************************************/
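/******/   // Note: `__nccwpck_require__.ab` is set to the directory of this bundle;
/******/   // ncc-relocated asset references are expected to resolve against it
/******/   // (assumption based on ncc's conventions, not spelled out in this file).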
/******/   
/******/   // startup
/******/   // Load entry module and return exports
/******/   // This entry module is referenced by other modules so it can't be inlined
/******/   var __webpack_exports__ = __nccwpck_require__(6762);
/******/   module.exports = __webpack_exports__;
/******/   
/******/ })()
;