Added login page and login form functionality

Added the body-parser node module
Created login.html
Renamed index.html to main.html
Added simple login functionality in index.js
ShakedAp 2023-06-25 10:17:45 +03:00
parent 395cd7a195
commit 0b90b3a860
309 changed files with 3533 additions and 28037 deletions

View File

@ -1,5 +1,6 @@
{
"server_ip": "192.168.68.118",
"server_port": 3000,
"video_path": "videos/bigbuck.mp4"
"video_path": "videos/bigbuck.mp4",
"password": "pass"
}
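
The new "password" key is read by index.js with fs.readFileSync/JSON.parse, so a missing or misspelled key only surfaces when the login route runs. A minimal sketch (not part of the commit) of loading settings.json and failing fast at startup; the required-key list is an assumption based on the fields shown in this diff:

```js
// Hedged sketch: load settings.json and fail fast if a required key is missing.
// The required keys below are assumed from the fields visible in the diff above.
const fs = require("fs");

function loadSettings(path = "settings.json") {
  const settings = JSON.parse(fs.readFileSync(path, "utf8"));
  for (const key of ["server_ip", "server_port", "video_path", "password"]) {
    if (settings[key] === undefined) {
      throw new Error(`settings.json is missing required key "${key}"`);
    }
  }
  return settings;
}

module.exports = loadSettings;
```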

View File

@ -3,9 +3,10 @@
const fs = require("fs");
const express = require('express');
const bodyParser = require('body-parser');
const WebSocket = require('ws');
const app = express();
const server = require('http').createServer(app);
const WebSocket = require('ws');
const wss = new WebSocket.Server({ server:server });
const settings = JSON.parse(fs.readFileSync("settings.json"));
@ -69,12 +70,26 @@ wss.on('connection', function connection(ws) {
});
console.log(__dirname);
app.use(express.static(__dirname));
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());
app.get("/", function (req, res) {
res.sendFile(__dirname + "/index.html");
res.sendFile(__dirname + "/login.html");
});
app.post("/login", function (req, res)
{
const data = req.body;
if(!data)
res.sendStatus(400);
console.log(data.password);
if(data.password == settings.password)
res.sendFile(__dirname + "/main.html");
else
res.sendFile(__dirname + "/login.html");
});
app.get("/video", function (req, res) {
// Ensure there is a range given for the video
@ -112,7 +127,8 @@ app.get("/video", function (req, res) {
videoStream.pipe(res);
});
server.listen(settings.server_port, settings.server_ip, () => console.log(`Listening on port: 3000`));
server.listen(settings.server_port, settings.server_ip,
() => console.log(`Server started at ${settings.server_ip}:${settings.server_port}`));
function get_time(){
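
As committed, the /login handler calls res.sendStatus(400) when req.body is falsy but does not return, so execution continues into data.password, and the comparison uses ==. Below is a hedged, self-contained sketch of the same flow with an early return and strict equality. It is not the committed code: the plain app.listen call stands in for the http server plus WebSocket setup that index.js actually uses, and everything else mirrors the file names and settings shape shown in the diffs above.

```js
// Hedged sketch (not the committed handler): same /login flow with an early
// return on a missing body and a strict comparison.
const fs = require("fs");
const express = require("express");
const bodyParser = require("body-parser");

const settings = JSON.parse(fs.readFileSync("settings.json", "utf8"));
const app = express();

app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());

app.get("/", (req, res) => res.sendFile(__dirname + "/login.html"));

app.post("/login", (req, res) => {
  const password = req.body && req.body.password;
  if (typeof password !== "string") {
    return res.sendStatus(400); // stop here instead of falling through
  }
  if (password === settings.password) {
    return res.sendFile(__dirname + "/main.html");
  }
  return res.sendFile(__dirname + "/login.html");
});

app.listen(settings.server_port, settings.server_ip, () =>
  console.log(`Server started at ${settings.server_ip}:${settings.server_port}`));
```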

14
src/login.html Normal file
View File

@ -0,0 +1,14 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Login Page</title>
</head>
<body>
<form action="/login" method="post">
<input name="password" type="password" id="password">
<button type="submit">Log In</button>
</form>
</body>
</html>
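
The form posts its single "password" field as application/x-www-form-urlencoded to /login, where the urlencoded middleware added in index.js exposes it as req.body.password. For manual testing, a hedged sketch of the same request using Node 18+'s built-in fetch; the host, port, and password values are taken from the settings.json diff above and are not part of the commit:

```js
// Hedged manual test: reproduce the login.html form submission with fetch.
(async () => {
  const res = await fetch("http://192.168.68.118:3000/login", {
    method: "POST",
    headers: { "Content-Type": "application/x-www-form-urlencoded" },
    body: new URLSearchParams({ password: "pass" }).toString(),
  });
  // The server replies with main.html on a correct password, login.html otherwise.
  console.log(res.status, (await res.text()).slice(0, 60));
})();
```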

1
src/node_modules/.bin/nodemon generated vendored
View File

@ -1 +0,0 @@
../nodemon/bin/nodemon.js

1
src/node_modules/.bin/nodetouch generated vendored
View File

@ -1 +0,0 @@
../touch/bin/nodetouch.js

1
src/node_modules/.bin/nopt generated vendored
View File

@ -1 +0,0 @@
../nopt/bin/nopt.js

1
src/node_modules/.bin/semver generated vendored
View File

@ -1 +0,0 @@
../semver/bin/semver

364
src/node_modules/.package-lock.json generated vendored
View File

@ -1,14 +1,9 @@
{
"name": "wstest",
"name": "src",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"node_modules/abbrev": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
"integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q=="
},
"node_modules/accepts": {
"version": "1.3.8",
"resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz",
@ -21,43 +16,18 @@
"node": ">= 0.6"
}
},
"node_modules/anymatch": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
"integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
"dependencies": {
"normalize-path": "^3.0.0",
"picomatch": "^2.0.4"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/array-flatten": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
"integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg=="
},
"node_modules/balanced-match": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
},
"node_modules/binary-extensions": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
"integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==",
"engines": {
"node": ">=8"
}
},
"node_modules/body-parser": {
"version": "1.20.1",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz",
"integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==",
"version": "1.20.2",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz",
"integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==",
"dependencies": {
"bytes": "3.1.2",
"content-type": "~1.0.4",
"content-type": "~1.0.5",
"debug": "2.6.9",
"depd": "2.0.0",
"destroy": "1.2.0",
@ -65,7 +35,7 @@
"iconv-lite": "0.4.24",
"on-finished": "2.4.1",
"qs": "6.11.0",
"raw-body": "2.5.1",
"raw-body": "2.5.2",
"type-is": "~1.6.18",
"unpipe": "1.0.0"
},
@ -74,26 +44,6 @@
"npm": "1.2.8000 || >= 1.4.16"
}
},
"node_modules/brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"node_modules/braces": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
"dependencies": {
"fill-range": "^7.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/bytes": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
@ -114,37 +64,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/chokidar": {
"version": "3.5.3",
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz",
"integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==",
"funding": [
{
"type": "individual",
"url": "https://paulmillr.com/funding/"
}
],
"dependencies": {
"anymatch": "~3.1.2",
"braces": "~3.0.2",
"glob-parent": "~5.1.2",
"is-binary-path": "~2.1.0",
"is-glob": "~4.0.1",
"normalize-path": "~3.0.0",
"readdirp": "~3.6.0"
},
"engines": {
"node": ">= 8.10.0"
},
"optionalDependencies": {
"fsevents": "~2.3.2"
}
},
"node_modules/concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="
},
"node_modules/content-disposition": {
"version": "0.5.4",
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz",
@ -269,15 +188,41 @@
"node": ">= 0.10.0"
}
},
"node_modules/fill-range": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
"node_modules/express/node_modules/body-parser": {
"version": "1.20.1",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz",
"integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==",
"dependencies": {
"to-regex-range": "^5.0.1"
"bytes": "3.1.2",
"content-type": "~1.0.4",
"debug": "2.6.9",
"depd": "2.0.0",
"destroy": "1.2.0",
"http-errors": "2.0.0",
"iconv-lite": "0.4.24",
"on-finished": "2.4.1",
"qs": "6.11.0",
"raw-body": "2.5.1",
"type-is": "~1.6.18",
"unpipe": "1.0.0"
},
"engines": {
"node": ">=8"
"node": ">= 0.8",
"npm": "1.2.8000 || >= 1.4.16"
}
},
"node_modules/express/node_modules/raw-body": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz",
"integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==",
"dependencies": {
"bytes": "3.1.2",
"http-errors": "2.0.0",
"iconv-lite": "0.4.24",
"unpipe": "1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/finalhandler": {
@ -332,17 +277,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/glob-parent": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
"integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
"dependencies": {
"is-glob": "^4.0.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/has": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
@ -354,14 +288,6 @@
"node": ">= 0.4.0"
}
},
"node_modules/has-flag": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
"integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
"engines": {
"node": ">=4"
}
},
"node_modules/has-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz",
@ -410,11 +336,6 @@
"node": ">=0.10.0"
}
},
"node_modules/ignore-by-default": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz",
"integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA=="
},
"node_modules/inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
@ -428,44 +349,6 @@
"node": ">= 0.10"
}
},
"node_modules/is-binary-path": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
"integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
"dependencies": {
"binary-extensions": "^2.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/is-extglob": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/is-glob": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
"dependencies": {
"is-extglob": "^2.1.1"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/is-number": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
"engines": {
"node": ">=0.12.0"
}
},
"node_modules/media-typer": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
@ -517,17 +400,6 @@
"node": ">= 0.6"
}
},
"node_modules/minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dependencies": {
"brace-expansion": "^1.1.7"
},
"engines": {
"node": "*"
}
},
"node_modules/ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
@ -541,68 +413,6 @@
"node": ">= 0.6"
}
},
"node_modules/nodemon": {
"version": "2.0.22",
"resolved": "https://registry.npmjs.org/nodemon/-/nodemon-2.0.22.tgz",
"integrity": "sha512-B8YqaKMmyuCO7BowF1Z1/mkPqLk6cs/l63Ojtd6otKjMx47Dq1utxfRxcavH1I7VSaL8n5BUaoutadnsX3AAVQ==",
"dependencies": {
"chokidar": "^3.5.2",
"debug": "^3.2.7",
"ignore-by-default": "^1.0.1",
"minimatch": "^3.1.2",
"pstree.remy": "^1.1.8",
"semver": "^5.7.1",
"simple-update-notifier": "^1.0.7",
"supports-color": "^5.5.0",
"touch": "^3.1.0",
"undefsafe": "^2.0.5"
},
"bin": {
"nodemon": "bin/nodemon.js"
},
"engines": {
"node": ">=8.10.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/nodemon"
}
},
"node_modules/nodemon/node_modules/debug": {
"version": "3.2.7",
"resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
"integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
"dependencies": {
"ms": "^2.1.1"
}
},
"node_modules/nodemon/node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
},
"node_modules/nopt": {
"version": "1.0.10",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz",
"integrity": "sha512-NWmpvLSqUrgrAC9HCuxEvb+PSloHpqVu+FqcO4eeF2h5qYRhA7ev6KvelyQAKtegUbC6RypJnlEOhd8vloNKYg==",
"dependencies": {
"abbrev": "1"
},
"bin": {
"nopt": "bin/nopt.js"
},
"engines": {
"node": "*"
}
},
"node_modules/normalize-path": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
"integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/object-inspect": {
"version": "1.12.3",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz",
@ -635,17 +445,6 @@
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
"integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ=="
},
"node_modules/picomatch": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
"integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
"engines": {
"node": ">=8.6"
},
"funding": {
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/proxy-addr": {
"version": "2.0.7",
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",
@ -658,11 +457,6 @@
"node": ">= 0.10"
}
},
"node_modules/pstree.remy": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz",
"integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w=="
},
"node_modules/qs": {
"version": "6.11.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz",
@ -686,9 +480,9 @@
}
},
"node_modules/raw-body": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz",
"integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==",
"version": "2.5.2",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz",
"integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==",
"dependencies": {
"bytes": "3.1.2",
"http-errors": "2.0.0",
@ -699,17 +493,6 @@
"node": ">= 0.8"
}
},
"node_modules/readdirp": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
"integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
"dependencies": {
"picomatch": "^2.2.1"
},
"engines": {
"node": ">=8.10.0"
}
},
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
@ -734,14 +517,6 @@
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"node_modules/semver": {
"version": "5.7.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
"integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
"bin": {
"semver": "bin/semver"
}
},
"node_modules/send": {
"version": "0.18.0",
"resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz",
@ -802,25 +577,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/simple-update-notifier": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-1.1.0.tgz",
"integrity": "sha512-VpsrsJSUcJEseSbMHkrsrAVSdvVS5I96Qo1QAQ4FxQ9wXFcB+pjj7FB7/us9+GcgfW4ziHtYMc1J0PLczb55mg==",
"dependencies": {
"semver": "~7.0.0"
},
"engines": {
"node": ">=8.10.0"
}
},
"node_modules/simple-update-notifier/node_modules/semver": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz",
"integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==",
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/statuses": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
@ -829,28 +585,6 @@
"node": ">= 0.8"
}
},
"node_modules/supports-color": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
"integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
"dependencies": {
"has-flag": "^3.0.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/to-regex-range": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
"dependencies": {
"is-number": "^7.0.0"
},
"engines": {
"node": ">=8.0"
}
},
"node_modules/toidentifier": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
@ -859,17 +593,6 @@
"node": ">=0.6"
}
},
"node_modules/touch": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/touch/-/touch-3.1.0.tgz",
"integrity": "sha512-WBx8Uy5TLtOSRtIq+M03/sKDrXCLHxwDcquSP2c43Le03/9serjQBIztjRz6FkJez9D/hleyAXTBGLwwZUw9lA==",
"dependencies": {
"nopt": "~1.0.10"
},
"bin": {
"nodetouch": "bin/nodetouch.js"
}
},
"node_modules/type-is": {
"version": "1.6.18",
"resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
@ -882,11 +605,6 @@
"node": ">= 0.6"
}
},
"node_modules/undefsafe": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz",
"integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA=="
},
"node_modules/unpipe": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",

46
src/node_modules/abbrev/LICENSE generated vendored
View File

@ -1,46 +0,0 @@
This software is dual-licensed under the ISC and MIT licenses.
You may use this software under EITHER of the following licenses.
----------
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
----------
Copyright Isaac Z. Schlueter and Contributors
All rights reserved.
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

23
src/node_modules/abbrev/README.md generated vendored
View File

@ -1,23 +0,0 @@
# abbrev-js
Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev).
Usage:
var abbrev = require("abbrev");
abbrev("foo", "fool", "folding", "flop");
// returns:
{ fl: 'flop'
, flo: 'flop'
, flop: 'flop'
, fol: 'folding'
, fold: 'folding'
, foldi: 'folding'
, foldin: 'folding'
, folding: 'folding'
, foo: 'foo'
, fool: 'fool'
}
This is handy for command-line scripts, or other cases where you want to be able to accept shorthands.

61
src/node_modules/abbrev/abbrev.js generated vendored
View File

@ -1,61 +0,0 @@
module.exports = exports = abbrev.abbrev = abbrev
abbrev.monkeyPatch = monkeyPatch
function monkeyPatch () {
Object.defineProperty(Array.prototype, 'abbrev', {
value: function () { return abbrev(this) },
enumerable: false, configurable: true, writable: true
})
Object.defineProperty(Object.prototype, 'abbrev', {
value: function () { return abbrev(Object.keys(this)) },
enumerable: false, configurable: true, writable: true
})
}
function abbrev (list) {
if (arguments.length !== 1 || !Array.isArray(list)) {
list = Array.prototype.slice.call(arguments, 0)
}
for (var i = 0, l = list.length, args = [] ; i < l ; i ++) {
args[i] = typeof list[i] === "string" ? list[i] : String(list[i])
}
// sort them lexicographically, so that they're next to their nearest kin
args = args.sort(lexSort)
// walk through each, seeing how much it has in common with the next and previous
var abbrevs = {}
, prev = ""
for (var i = 0, l = args.length ; i < l ; i ++) {
var current = args[i]
, next = args[i + 1] || ""
, nextMatches = true
, prevMatches = true
if (current === next) continue
for (var j = 0, cl = current.length ; j < cl ; j ++) {
var curChar = current.charAt(j)
nextMatches = nextMatches && curChar === next.charAt(j)
prevMatches = prevMatches && curChar === prev.charAt(j)
if (!nextMatches && !prevMatches) {
j ++
break
}
}
prev = current
if (j === cl) {
abbrevs[current] = current
continue
}
for (var a = current.substr(0, j) ; j <= cl ; j ++) {
abbrevs[a] = current
a += current.charAt(j)
}
}
return abbrevs
}
function lexSort (a, b) {
return a === b ? 0 : a > b ? 1 : -1
}

21
src/node_modules/abbrev/package.json generated vendored
View File

@ -1,21 +0,0 @@
{
"name": "abbrev",
"version": "1.1.1",
"description": "Like ruby's abbrev module, but in js",
"author": "Isaac Z. Schlueter <i@izs.me>",
"main": "abbrev.js",
"scripts": {
"test": "tap test.js --100",
"preversion": "npm test",
"postversion": "npm publish",
"postpublish": "git push origin --all; git push origin --tags"
},
"repository": "http://github.com/isaacs/abbrev-js",
"license": "ISC",
"devDependencies": {
"tap": "^10.1"
},
"files": [
"abbrev.js"
]
}

15
src/node_modules/anymatch/LICENSE generated vendored
View File

@ -1,15 +0,0 @@
The ISC License
Copyright (c) 2019 Elan Shanker, Paul Miller (https://paulmillr.com)
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

87
src/node_modules/anymatch/README.md generated vendored
View File

@ -1,87 +0,0 @@
anymatch [![Build Status](https://travis-ci.org/micromatch/anymatch.svg?branch=master)](https://travis-ci.org/micromatch/anymatch) [![Coverage Status](https://img.shields.io/coveralls/micromatch/anymatch.svg?branch=master)](https://coveralls.io/r/micromatch/anymatch?branch=master)
======
Javascript module to match a string against a regular expression, glob, string,
or function that takes the string as an argument and returns a truthy or falsy
value. The matcher can also be an array of any or all of these. Useful for
allowing a very flexible user-defined config to define things like file paths.
__Note: This module has Bash-parity, please be aware that Windows-style backslashes are not supported as separators. See https://github.com/micromatch/micromatch#backslashes for more information.__
Usage
-----
```sh
npm install anymatch
```
#### anymatch(matchers, testString, [returnIndex], [options])
* __matchers__: (_Array|String|RegExp|Function_)
String to be directly matched, string with glob patterns, regular expression
test, function that takes the testString as an argument and returns a truthy
value if it should be matched, or an array of any number and mix of these types.
* __testString__: (_String|Array_) The string to test against the matchers. If
passed as an array, the first element of the array will be used as the
`testString` for non-function matchers, while the entire array will be applied
as the arguments for function matchers.
* __options__: (_Object_ [optional]_) Any of the [picomatch](https://github.com/micromatch/picomatch#options) options.
* __returnIndex__: (_Boolean [optional]_) If true, return the array index of
the first matcher that that testString matched, or -1 if no match, instead of a
boolean result.
```js
const anymatch = require('anymatch');
const matchers = [ 'path/to/file.js', 'path/anyjs/**/*.js', /foo.js$/, string => string.includes('bar') && string.length > 10 ] ;
anymatch(matchers, 'path/to/file.js'); // true
anymatch(matchers, 'path/anyjs/baz.js'); // true
anymatch(matchers, 'path/to/foo.js'); // true
anymatch(matchers, 'path/to/bar.js'); // true
anymatch(matchers, 'bar.js'); // false
// returnIndex = true
anymatch(matchers, 'foo.js', {returnIndex: true}); // 2
anymatch(matchers, 'path/anyjs/foo.js', {returnIndex: true}); // 1
// any picomatc
// using globs to match directories and their children
anymatch('node_modules', 'node_modules'); // true
anymatch('node_modules', 'node_modules/somelib/index.js'); // false
anymatch('node_modules/**', 'node_modules/somelib/index.js'); // true
anymatch('node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // false
anymatch('**/node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // true
const matcher = anymatch(matchers);
['foo.js', 'bar.js'].filter(matcher); // [ 'foo.js' ]
anymatch master*
```
#### anymatch(matchers)
You can also pass in only your matcher(s) to get a curried function that has
already been bound to the provided matching criteria. This can be used as an
`Array#filter` callback.
```js
var matcher = anymatch(matchers);
matcher('path/to/file.js'); // true
matcher('path/anyjs/baz.js', true); // 1
['foo.js', 'bar.js'].filter(matcher); // ['foo.js']
```
Changelog
----------
[See release notes page on GitHub](https://github.com/micromatch/anymatch/releases)
- **v3.0:** Removed `startIndex` and `endIndex` arguments. Node 8.x-only.
- **v2.0:** [micromatch](https://github.com/jonschlinkert/micromatch) moves away from minimatch-parity and inline with Bash. This includes handling backslashes differently (see https://github.com/micromatch/micromatch#backslashes for more information).
- **v1.2:** anymatch uses [micromatch](https://github.com/jonschlinkert/micromatch)
for glob pattern matching. Issues with glob pattern matching should be
reported directly to the [micromatch issue tracker](https://github.com/jonschlinkert/micromatch/issues).
License
-------
[ISC](https://raw.github.com/micromatch/anymatch/master/LICENSE)

20
src/node_modules/anymatch/index.d.ts generated vendored
View File

@ -1,20 +0,0 @@
type AnymatchFn = (testString: string) => boolean;
type AnymatchPattern = string|RegExp|AnymatchFn;
type AnymatchMatcher = AnymatchPattern|AnymatchPattern[]
type AnymatchTester = {
(testString: string|any[], returnIndex: true): number;
(testString: string|any[]): boolean;
}
type PicomatchOptions = {dot: boolean};
declare const anymatch: {
(matchers: AnymatchMatcher): AnymatchTester;
(matchers: AnymatchMatcher, testString: null, returnIndex: true | PicomatchOptions): AnymatchTester;
(matchers: AnymatchMatcher, testString: string|any[], returnIndex: true | PicomatchOptions): number;
(matchers: AnymatchMatcher, testString: string|any[]): boolean;
}
export {AnymatchMatcher as Matcher}
export {AnymatchTester as Tester}
export default anymatch

104
src/node_modules/anymatch/index.js generated vendored
View File

@ -1,104 +0,0 @@
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const picomatch = require('picomatch');
const normalizePath = require('normalize-path');
/**
* @typedef {(testString: string) => boolean} AnymatchFn
* @typedef {string|RegExp|AnymatchFn} AnymatchPattern
* @typedef {AnymatchPattern|AnymatchPattern[]} AnymatchMatcher
*/
const BANG = '!';
const DEFAULT_OPTIONS = {returnIndex: false};
const arrify = (item) => Array.isArray(item) ? item : [item];
/**
* @param {AnymatchPattern} matcher
* @param {object} options
* @returns {AnymatchFn}
*/
const createPattern = (matcher, options) => {
if (typeof matcher === 'function') {
return matcher;
}
if (typeof matcher === 'string') {
const glob = picomatch(matcher, options);
return (string) => matcher === string || glob(string);
}
if (matcher instanceof RegExp) {
return (string) => matcher.test(string);
}
return (string) => false;
};
/**
* @param {Array<Function>} patterns
* @param {Array<Function>} negPatterns
* @param {String|Array} args
* @param {Boolean} returnIndex
* @returns {boolean|number}
*/
const matchPatterns = (patterns, negPatterns, args, returnIndex) => {
const isList = Array.isArray(args);
const _path = isList ? args[0] : args;
if (!isList && typeof _path !== 'string') {
throw new TypeError('anymatch: second argument must be a string: got ' +
Object.prototype.toString.call(_path))
}
const path = normalizePath(_path, false);
for (let index = 0; index < negPatterns.length; index++) {
const nglob = negPatterns[index];
if (nglob(path)) {
return returnIndex ? -1 : false;
}
}
const applied = isList && [path].concat(args.slice(1));
for (let index = 0; index < patterns.length; index++) {
const pattern = patterns[index];
if (isList ? pattern(...applied) : pattern(path)) {
return returnIndex ? index : true;
}
}
return returnIndex ? -1 : false;
};
/**
* @param {AnymatchMatcher} matchers
* @param {Array|string} testString
* @param {object} options
* @returns {boolean|number|Function}
*/
const anymatch = (matchers, testString, options = DEFAULT_OPTIONS) => {
if (matchers == null) {
throw new TypeError('anymatch: specify first argument');
}
const opts = typeof options === 'boolean' ? {returnIndex: options} : options;
const returnIndex = opts.returnIndex || false;
// Early cache for matchers.
const mtchers = arrify(matchers);
const negatedGlobs = mtchers
.filter(item => typeof item === 'string' && item.charAt(0) === BANG)
.map(item => item.slice(1))
.map(item => picomatch(item, opts));
const patterns = mtchers
.filter(item => typeof item !== 'string' || (typeof item === 'string' && item.charAt(0) !== BANG))
.map(matcher => createPattern(matcher, opts));
if (testString == null) {
return (testString, ri = false) => {
const returnIndex = typeof ri === 'boolean' ? ri : false;
return matchPatterns(patterns, negatedGlobs, testString, returnIndex);
}
}
return matchPatterns(patterns, negatedGlobs, testString, returnIndex);
};
anymatch.default = anymatch;
module.exports = anymatch;

View File

@ -1,48 +0,0 @@
{
"name": "anymatch",
"version": "3.1.3",
"description": "Matches strings against configurable strings, globs, regular expressions, and/or functions",
"files": [
"index.js",
"index.d.ts"
],
"dependencies": {
"normalize-path": "^3.0.0",
"picomatch": "^2.0.4"
},
"author": {
"name": "Elan Shanker",
"url": "https://github.com/es128"
},
"license": "ISC",
"homepage": "https://github.com/micromatch/anymatch",
"repository": {
"type": "git",
"url": "https://github.com/micromatch/anymatch"
},
"keywords": [
"match",
"any",
"string",
"file",
"fs",
"list",
"glob",
"regex",
"regexp",
"regular",
"expression",
"function"
],
"scripts": {
"test": "nyc mocha",
"mocha": "mocha"
},
"devDependencies": {
"mocha": "^6.1.3",
"nyc": "^14.0.0"
},
"engines": {
"node": ">= 8"
}
}

View File

@ -1,2 +0,0 @@
tidelift: "npm/balanced-match"
patreon: juliangruber

View File

@ -1,21 +0,0 @@
(MIT)
Copyright (c) 2013 Julian Gruber &lt;julian@juliangruber.com&gt;
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -1,97 +0,0 @@
# balanced-match
Match balanced string pairs, like `{` and `}` or `<b>` and `</b>`. Supports regular expressions as well!
[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match)
[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match)
[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match)
## Example
Get the first matching pair of braces:
```js
var balanced = require('balanced-match');
console.log(balanced('{', '}', 'pre{in{nested}}post'));
console.log(balanced('{', '}', 'pre{first}between{second}post'));
console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post'));
```
The matches are:
```bash
$ node example.js
{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' }
{ start: 3,
end: 9,
pre: 'pre',
body: 'first',
post: 'between{second}post' }
{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' }
```
## API
### var m = balanced(a, b, str)
For the first non-nested matching pair of `a` and `b` in `str`, return an
object with those keys:
* **start** the index of the first match of `a`
* **end** the index of the matching `b`
* **pre** the preamble, `a` and `b` not included
* **body** the match, `a` and `b` not included
* **post** the postscript, `a` and `b` not included
If there's no match, `undefined` will be returned.
If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`.
### var r = balanced.range(a, b, str)
For the first non-nested matching pair of `a` and `b` in `str`, return an
array with indexes: `[ <a index>, <b index> ]`.
If there's no match, `undefined` will be returned.
If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`.
## Installation
With [npm](https://npmjs.org) do:
```bash
npm install balanced-match
```
## Security contact information
To report a security vulnerability, please use the
[Tidelift security contact](https://tidelift.com/security).
Tidelift will coordinate the fix and disclosure.
## License
(MIT)
Copyright (c) 2013 Julian Gruber &lt;julian@juliangruber.com&gt;
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -1,62 +0,0 @@
'use strict';
module.exports = balanced;
function balanced(a, b, str) {
if (a instanceof RegExp) a = maybeMatch(a, str);
if (b instanceof RegExp) b = maybeMatch(b, str);
var r = range(a, b, str);
return r && {
start: r[0],
end: r[1],
pre: str.slice(0, r[0]),
body: str.slice(r[0] + a.length, r[1]),
post: str.slice(r[1] + b.length)
};
}
function maybeMatch(reg, str) {
var m = str.match(reg);
return m ? m[0] : null;
}
balanced.range = range;
function range(a, b, str) {
var begs, beg, left, right, result;
var ai = str.indexOf(a);
var bi = str.indexOf(b, ai + 1);
var i = ai;
if (ai >= 0 && bi > 0) {
if(a===b) {
return [ai, bi];
}
begs = [];
left = str.length;
while (i >= 0 && !result) {
if (i == ai) {
begs.push(i);
ai = str.indexOf(a, i + 1);
} else if (begs.length == 1) {
result = [ begs.pop(), bi ];
} else {
beg = begs.pop();
if (beg < left) {
left = beg;
right = bi;
}
bi = str.indexOf(b, i + 1);
}
i = ai < bi && ai >= 0 ? ai : bi;
}
if (begs.length) {
result = [ left, right ];
}
}
return result;
}

View File

@ -1,48 +0,0 @@
{
"name": "balanced-match",
"description": "Match balanced character pairs, like \"{\" and \"}\"",
"version": "1.0.2",
"repository": {
"type": "git",
"url": "git://github.com/juliangruber/balanced-match.git"
},
"homepage": "https://github.com/juliangruber/balanced-match",
"main": "index.js",
"scripts": {
"test": "tape test/test.js",
"bench": "matcha test/bench.js"
},
"devDependencies": {
"matcha": "^0.7.0",
"tape": "^4.6.0"
},
"keywords": [
"match",
"regexp",
"test",
"balanced",
"parse"
],
"author": {
"name": "Julian Gruber",
"email": "mail@juliangruber.com",
"url": "http://juliangruber.com"
},
"license": "MIT",
"testling": {
"files": "test/*.js",
"browsers": [
"ie/8..latest",
"firefox/20..latest",
"firefox/nightly",
"chrome/25..latest",
"chrome/canary",
"opera/12..latest",
"opera/next",
"safari/5.1..latest",
"ipad/6.0..latest",
"iphone/6.0..latest",
"android-browser/4.2..latest"
]
}
}

View File

@ -1,260 +0,0 @@
[
"3dm",
"3ds",
"3g2",
"3gp",
"7z",
"a",
"aac",
"adp",
"ai",
"aif",
"aiff",
"alz",
"ape",
"apk",
"appimage",
"ar",
"arj",
"asf",
"au",
"avi",
"bak",
"baml",
"bh",
"bin",
"bk",
"bmp",
"btif",
"bz2",
"bzip2",
"cab",
"caf",
"cgm",
"class",
"cmx",
"cpio",
"cr2",
"cur",
"dat",
"dcm",
"deb",
"dex",
"djvu",
"dll",
"dmg",
"dng",
"doc",
"docm",
"docx",
"dot",
"dotm",
"dra",
"DS_Store",
"dsk",
"dts",
"dtshd",
"dvb",
"dwg",
"dxf",
"ecelp4800",
"ecelp7470",
"ecelp9600",
"egg",
"eol",
"eot",
"epub",
"exe",
"f4v",
"fbs",
"fh",
"fla",
"flac",
"flatpak",
"fli",
"flv",
"fpx",
"fst",
"fvt",
"g3",
"gh",
"gif",
"graffle",
"gz",
"gzip",
"h261",
"h263",
"h264",
"icns",
"ico",
"ief",
"img",
"ipa",
"iso",
"jar",
"jpeg",
"jpg",
"jpgv",
"jpm",
"jxr",
"key",
"ktx",
"lha",
"lib",
"lvp",
"lz",
"lzh",
"lzma",
"lzo",
"m3u",
"m4a",
"m4v",
"mar",
"mdi",
"mht",
"mid",
"midi",
"mj2",
"mka",
"mkv",
"mmr",
"mng",
"mobi",
"mov",
"movie",
"mp3",
"mp4",
"mp4a",
"mpeg",
"mpg",
"mpga",
"mxu",
"nef",
"npx",
"numbers",
"nupkg",
"o",
"odp",
"ods",
"odt",
"oga",
"ogg",
"ogv",
"otf",
"ott",
"pages",
"pbm",
"pcx",
"pdb",
"pdf",
"pea",
"pgm",
"pic",
"png",
"pnm",
"pot",
"potm",
"potx",
"ppa",
"ppam",
"ppm",
"pps",
"ppsm",
"ppsx",
"ppt",
"pptm",
"pptx",
"psd",
"pya",
"pyc",
"pyo",
"pyv",
"qt",
"rar",
"ras",
"raw",
"resources",
"rgb",
"rip",
"rlc",
"rmf",
"rmvb",
"rpm",
"rtf",
"rz",
"s3m",
"s7z",
"scpt",
"sgi",
"shar",
"snap",
"sil",
"sketch",
"slk",
"smv",
"snk",
"so",
"stl",
"suo",
"sub",
"swf",
"tar",
"tbz",
"tbz2",
"tga",
"tgz",
"thmx",
"tif",
"tiff",
"tlz",
"ttc",
"ttf",
"txz",
"udf",
"uvh",
"uvi",
"uvm",
"uvp",
"uvs",
"uvu",
"viv",
"vob",
"war",
"wav",
"wax",
"wbmp",
"wdp",
"weba",
"webm",
"webp",
"whl",
"wim",
"wm",
"wma",
"wmv",
"wmx",
"woff",
"woff2",
"wrm",
"wvx",
"xbm",
"xif",
"xla",
"xlam",
"xls",
"xlsb",
"xlsm",
"xlsx",
"xlt",
"xltm",
"xltx",
"xm",
"xmind",
"xpi",
"xpm",
"xwd",
"xz",
"z",
"zip",
"zipx"
]

View File

@ -1,3 +0,0 @@
declare const binaryExtensionsJson: readonly string[];
export = binaryExtensionsJson;

View File

@ -1,14 +0,0 @@
/**
List of binary file extensions.
@example
```
import binaryExtensions = require('binary-extensions');
console.log(binaryExtensions);
//=> ['3ds', '3g2', …]
```
*/
declare const binaryExtensions: readonly string[];
export = binaryExtensions;

View File

@ -1 +0,0 @@
module.exports = require('./binary-extensions.json');

View File

@ -1,9 +0,0 @@
MIT License
Copyright (c) 2019 Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com), Paul Miller (https://paulmillr.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -1,38 +0,0 @@
{
"name": "binary-extensions",
"version": "2.2.0",
"description": "List of binary file extensions",
"license": "MIT",
"repository": "sindresorhus/binary-extensions",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "sindresorhus.com"
},
"engines": {
"node": ">=8"
},
"scripts": {
"test": "xo && ava && tsd"
},
"files": [
"index.js",
"index.d.ts",
"binary-extensions.json",
"binary-extensions.json.d.ts"
],
"keywords": [
"binary",
"extensions",
"extension",
"file",
"json",
"list",
"array"
],
"devDependencies": {
"ava": "^1.4.1",
"tsd": "^0.7.2",
"xo": "^0.24.0"
}
}

View File

@ -1,41 +0,0 @@
# binary-extensions
> List of binary file extensions
The list is just a [JSON file](binary-extensions.json) and can be used anywhere.
## Install
```
$ npm install binary-extensions
```
## Usage
```js
const binaryExtensions = require('binary-extensions');
console.log(binaryExtensions);
//=> ['3ds', '3g2', …]
```
## Related
- [is-binary-path](https://github.com/sindresorhus/is-binary-path) - Check if a filepath is a binary file
- [text-extensions](https://github.com/sindresorhus/text-extensions) - List of text file extensions
---
<div align="center">
<b>
<a href="https://tidelift.com/subscription/pkg/npm-binary-extensions?utm_source=npm-binary-extensions&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
</b>
<br>
<sub>
Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
</sub>
</div>

View File

@ -1,3 +1,11 @@
1.20.2 / 2023-02-21
===================
* Fix strict json error message on Node.js 19+
* deps: content-type@~1.0.5
- perf: skip value escaping when unnecessary
* deps: raw-body@2.5.2
1.20.1 / 2022-10-06
===================

View File

@ -1,8 +1,8 @@
# body-parser
[![NPM Version][npm-image]][npm-url]
[![NPM Downloads][downloads-image]][downloads-url]
[![Build Status][github-actions-ci-image]][github-actions-ci-url]
[![NPM Version][npm-version-image]][npm-url]
[![NPM Downloads][npm-downloads-image]][npm-url]
[![Build Status][ci-image]][ci-url]
[![Test Coverage][coveralls-image]][coveralls-url]
Node.js body parsing middleware.
@ -454,11 +454,12 @@ app.use(bodyParser.text({ type: 'text/html' }))
[MIT](LICENSE)
[npm-image]: https://img.shields.io/npm/v/body-parser.svg
[npm-url]: https://npmjs.org/package/body-parser
[coveralls-image]: https://img.shields.io/coveralls/expressjs/body-parser/master.svg
[ci-image]: https://badgen.net/github/checks/expressjs/body-parser/master?label=ci
[ci-url]: https://github.com/expressjs/body-parser/actions/workflows/ci.yml
[coveralls-image]: https://badgen.net/coveralls/c/github/expressjs/body-parser/master
[coveralls-url]: https://coveralls.io/r/expressjs/body-parser?branch=master
[downloads-image]: https://img.shields.io/npm/dm/body-parser.svg
[downloads-url]: https://npmjs.org/package/body-parser
[github-actions-ci-image]: https://img.shields.io/github/workflow/status/expressjs/body-parser/ci/master?label=ci
[github-actions-ci-url]: https://github.com/expressjs/body-parser/actions/workflows/ci.yml
[node-version-image]: https://badgen.net/npm/node/body-parser
[node-version-url]: https://nodejs.org/en/download
[npm-downloads-image]: https://badgen.net/npm/dm/body-parser
[npm-url]: https://npmjs.org/package/body-parser
[npm-version-image]: https://badgen.net/npm/v/body-parser

View File

@ -39,6 +39,9 @@ module.exports = json
var FIRST_CHAR_REGEXP = /^[\x20\x09\x0a\x0d]*([^\x20\x09\x0a\x0d])/ // eslint-disable-line no-control-regex
var JSON_SYNTAX_CHAR = '#'
var JSON_SYNTAX_REGEXP = /#+/g
/**
* Create a middleware to parse JSON bodies.
*
@ -152,15 +155,23 @@ function json (options) {
function createStrictSyntaxError (str, char) {
var index = str.indexOf(char)
var partial = index !== -1
? str.substring(0, index) + '#'
: ''
var partial = ''
if (index !== -1) {
partial = str.substring(0, index) + JSON_SYNTAX_CHAR
for (var i = index + 1; i < str.length; i++) {
partial += JSON_SYNTAX_CHAR
}
}
try {
JSON.parse(partial); /* istanbul ignore next */ throw new SyntaxError('strict violation')
} catch (e) {
return normalizeJsonSyntaxError(e, {
message: e.message.replace('#', char),
message: e.message.replace(JSON_SYNTAX_REGEXP, function (placeholder) {
return str.substring(index, index + placeholder.length)
}),
stack: e.stack
})
}

View File

@ -1,7 +1,7 @@
{
"name": "body-parser",
"description": "Node.js body parsing middleware",
"version": "1.20.1",
"version": "1.20.2",
"contributors": [
"Douglas Christopher Wilson <doug@somethingdoug.com>",
"Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)"
@ -10,7 +10,7 @@
"repository": "expressjs/body-parser",
"dependencies": {
"bytes": "3.1.2",
"content-type": "~1.0.4",
"content-type": "~1.0.5",
"debug": "2.6.9",
"depd": "2.0.0",
"destroy": "1.2.0",
@ -18,23 +18,23 @@
"iconv-lite": "0.4.24",
"on-finished": "2.4.1",
"qs": "6.11.0",
"raw-body": "2.5.1",
"raw-body": "2.5.2",
"type-is": "~1.6.18",
"unpipe": "1.0.0"
},
"devDependencies": {
"eslint": "8.24.0",
"eslint": "8.34.0",
"eslint-config-standard": "14.1.1",
"eslint-plugin-import": "2.26.0",
"eslint-plugin-import": "2.27.5",
"eslint-plugin-markdown": "3.0.0",
"eslint-plugin-node": "11.1.0",
"eslint-plugin-promise": "6.0.1",
"eslint-plugin-promise": "6.1.1",
"eslint-plugin-standard": "4.1.0",
"methods": "1.1.2",
"mocha": "10.0.0",
"mocha": "10.2.0",
"nyc": "15.1.0",
"safe-buffer": "5.2.1",
"supertest": "6.3.0"
"supertest": "6.3.3"
},
"files": [
"lib/",

View File

@ -1,21 +0,0 @@
MIT License
Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -1,129 +0,0 @@
# brace-expansion
[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html),
as known from sh/bash, in JavaScript.
[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion)
[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion)
[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/)
[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion)
## Example
```js
var expand = require('brace-expansion');
expand('file-{a,b,c}.jpg')
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
expand('-v{,,}')
// => ['-v', '-v', '-v']
expand('file{0..2}.jpg')
// => ['file0.jpg', 'file1.jpg', 'file2.jpg']
expand('file-{a..c}.jpg')
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
expand('file{2..0}.jpg')
// => ['file2.jpg', 'file1.jpg', 'file0.jpg']
expand('file{0..4..2}.jpg')
// => ['file0.jpg', 'file2.jpg', 'file4.jpg']
expand('file-{a..e..2}.jpg')
// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg']
expand('file{00..10..5}.jpg')
// => ['file00.jpg', 'file05.jpg', 'file10.jpg']
expand('{{A..C},{a..c}}')
// => ['A', 'B', 'C', 'a', 'b', 'c']
expand('ppp{,config,oe{,conf}}')
// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf']
```
## API
```js
var expand = require('brace-expansion');
```
### var expanded = expand(str)
Return an array of all possible and valid expansions of `str`. If none are
found, `[str]` is returned.
Valid expansions are:
```js
/^(.*,)+(.+)?$/
// {a,b,...}
```
A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`.
```js
/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
// {x..y[..incr]}
```
A numeric sequence from `x` to `y` inclusive, with optional increment.
If `x` or `y` start with a leading `0`, all the numbers will be padded
to have equal length. Negative numbers and backwards iteration work too.
```js
/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
// {x..y[..incr]}
```
An alphabetic sequence from `x` to `y` inclusive, with optional increment.
`x` and `y` must be exactly one character, and if given, `incr` must be a
number.
For compatibility reasons, the string `${` is not eligible for brace expansion.
## Installation
With [npm](https://npmjs.org) do:
```bash
npm install brace-expansion
```
## Contributors
- [Julian Gruber](https://github.com/juliangruber)
- [Isaac Z. Schlueter](https://github.com/isaacs)
## Sponsors
This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)!
Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)!
## License
(MIT)
Copyright (c) 2013 Julian Gruber &lt;julian@juliangruber.com&gt;
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -1,201 +0,0 @@
var concatMap = require('concat-map');
var balanced = require('balanced-match');
module.exports = expandTop;
var escSlash = '\0SLASH'+Math.random()+'\0';
var escOpen = '\0OPEN'+Math.random()+'\0';
var escClose = '\0CLOSE'+Math.random()+'\0';
var escComma = '\0COMMA'+Math.random()+'\0';
var escPeriod = '\0PERIOD'+Math.random()+'\0';
function numeric(str) {
return parseInt(str, 10) == str
? parseInt(str, 10)
: str.charCodeAt(0);
}
function escapeBraces(str) {
return str.split('\\\\').join(escSlash)
.split('\\{').join(escOpen)
.split('\\}').join(escClose)
.split('\\,').join(escComma)
.split('\\.').join(escPeriod);
}
function unescapeBraces(str) {
return str.split(escSlash).join('\\')
.split(escOpen).join('{')
.split(escClose).join('}')
.split(escComma).join(',')
.split(escPeriod).join('.');
}
// Basically just str.split(","), but handling cases
// where we have nested braced sections, which should be
// treated as individual members, like {a,{b,c},d}
function parseCommaParts(str) {
if (!str)
return [''];
var parts = [];
var m = balanced('{', '}', str);
if (!m)
return str.split(',');
var pre = m.pre;
var body = m.body;
var post = m.post;
var p = pre.split(',');
p[p.length-1] += '{' + body + '}';
var postParts = parseCommaParts(post);
if (post.length) {
p[p.length-1] += postParts.shift();
p.push.apply(p, postParts);
}
parts.push.apply(parts, p);
return parts;
}
function expandTop(str) {
if (!str)
return [];
// I don't know why Bash 4.3 does this, but it does.
// Anything starting with {} will have the first two bytes preserved
// but *only* at the top level, so {},a}b will not expand to anything,
// but a{},b}c will be expanded to [a}c,abc].
// One could argue that this is a bug in Bash, but since the goal of
// this module is to match Bash's rules, we escape a leading {}
if (str.substr(0, 2) === '{}') {
str = '\\{\\}' + str.substr(2);
}
return expand(escapeBraces(str), true).map(unescapeBraces);
}
function identity(e) {
return e;
}
function embrace(str) {
return '{' + str + '}';
}
function isPadded(el) {
return /^-?0\d/.test(el);
}
function lte(i, y) {
return i <= y;
}
function gte(i, y) {
return i >= y;
}
function expand(str, isTop) {
var expansions = [];
var m = balanced('{', '}', str);
if (!m || /\$$/.test(m.pre)) return [str];
var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
var isSequence = isNumericSequence || isAlphaSequence;
var isOptions = m.body.indexOf(',') >= 0;
if (!isSequence && !isOptions) {
// {a},b}
if (m.post.match(/,.*\}/)) {
str = m.pre + '{' + m.body + escClose + m.post;
return expand(str);
}
return [str];
}
var n;
if (isSequence) {
n = m.body.split(/\.\./);
} else {
n = parseCommaParts(m.body);
if (n.length === 1) {
// x{{a,b}}y ==> x{a}y x{b}y
n = expand(n[0], false).map(embrace);
if (n.length === 1) {
var post = m.post.length
? expand(m.post, false)
: [''];
return post.map(function(p) {
return m.pre + n[0] + p;
});
}
}
}
// at this point, n is the parts, and we know it's not a comma set
// with a single entry.
// no need to expand pre, since it is guaranteed to be free of brace-sets
var pre = m.pre;
var post = m.post.length
? expand(m.post, false)
: [''];
var N;
if (isSequence) {
var x = numeric(n[0]);
var y = numeric(n[1]);
var width = Math.max(n[0].length, n[1].length)
var incr = n.length == 3
? Math.abs(numeric(n[2]))
: 1;
var test = lte;
var reverse = y < x;
if (reverse) {
incr *= -1;
test = gte;
}
var pad = n.some(isPadded);
N = [];
for (var i = x; test(i, y); i += incr) {
var c;
if (isAlphaSequence) {
c = String.fromCharCode(i);
if (c === '\\')
c = '';
} else {
c = String(i);
if (pad) {
var need = width - c.length;
if (need > 0) {
var z = new Array(need + 1).join('0');
if (i < 0)
c = '-' + z + c.slice(1);
else
c = z + c;
}
}
}
N.push(c);
}
} else {
N = concatMap(n, function(el) { return expand(el, false) });
}
for (var j = 0; j < N.length; j++) {
for (var k = 0; k < post.length; k++) {
var expansion = pre + N[j] + post[k];
if (!isTop || isSequence || expansion)
expansions.push(expansion);
}
}
return expansions;
}
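For reference, a minimal usage sketch of this module as it is normally consumed (through its `module.exports = expandTop` entry point); the patterns below are illustrative:

```js
const expand = require('brace-expansion');

console.log(expand('file-{a,b}.txt'));
//=> [ 'file-a.txt', 'file-b.txt' ]

console.log(expand('-v{,,}'));
//=> [ '-v', '-v', '-v' ]

console.log(expand('{1..3}'));
//=> [ '1', '2', '3' ]
```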

View File

@ -1,47 +0,0 @@
{
"name": "brace-expansion",
"description": "Brace expansion as known from sh/bash",
"version": "1.1.11",
"repository": {
"type": "git",
"url": "git://github.com/juliangruber/brace-expansion.git"
},
"homepage": "https://github.com/juliangruber/brace-expansion",
"main": "index.js",
"scripts": {
"test": "tape test/*.js",
"gentest": "bash test/generate.sh",
"bench": "matcha test/perf/bench.js"
},
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
},
"devDependencies": {
"matcha": "^0.7.0",
"tape": "^4.6.0"
},
"keywords": [],
"author": {
"name": "Julian Gruber",
"email": "mail@juliangruber.com",
"url": "http://juliangruber.com"
},
"license": "MIT",
"testling": {
"files": "test/*.js",
"browsers": [
"ie/8..latest",
"firefox/20..latest",
"firefox/nightly",
"chrome/25..latest",
"chrome/canary",
"opera/12..latest",
"opera/next",
"safari/5.1..latest",
"ipad/6.0..latest",
"iphone/6.0..latest",
"android-browser/4.2..latest"
]
}
}

184
src/node_modules/braces/CHANGELOG.md generated vendored
View File

@ -1,184 +0,0 @@
# Release history
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
<details>
<summary><strong>Guiding Principles</strong></summary>
- Changelogs are for humans, not machines.
- There should be an entry for every single version.
- The same types of changes should be grouped.
- Versions and sections should be linkable.
- The latest version comes first.
- The release date of each version is displayed.
- Mention whether you follow Semantic Versioning.
</details>
<details>
<summary><strong>Types of changes</strong></summary>
Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/)_):
- `Added` for new features.
- `Changed` for changes in existing functionality.
- `Deprecated` for soon-to-be removed features.
- `Removed` for now removed features.
- `Fixed` for any bug fixes.
- `Security` in case of vulnerabilities.
</details>
## [3.0.0] - 2018-04-08
v3.0 is a complete refactor, resulting in a faster, smaller codebase, with fewer deps, and a more accurate parser and compiler.
**Breaking Changes**
- The undocumented `.makeRe` method was removed
**Non-breaking changes**
- Caching was removed
## [2.3.2] - 2018-04-08
- start refactoring
- cover sets
- better range handling
## [2.3.1] - 2018-02-17
- Remove unnecessary escape in Regex. (#14)
## [2.3.0] - 2017-10-19
- minor code reorganization
- optimize regex
- expose `maxLength` option
## [2.2.1] - 2017-05-30
- don't condense when braces contain extglobs
## [2.2.0] - 2017-05-28
- ensure word boundaries are preserved
- fixes edge case where extglob characters precede a brace pattern
## [2.1.1] - 2017-04-27
- use snapdragon-node
- handle edge case
- optimizations, lint
## [2.0.4] - 2017-04-11
- pass opts to compiler
- minor optimization in create method
- re-write parser handlers to remove negation regex
## [2.0.3] - 2016-12-10
- use split-string
- clear queue at the end
- adds sequences example
- add unit tests
## [2.0.2] - 2016-10-21
- fix comma handling in nested extglobs
## [2.0.1] - 2016-10-20
- add comments
- more tests, ensure quotes are stripped
## [2.0.0] - 2016-10-19
- don't expand braces inside character classes
- add quantifier pattern
## [1.8.5] - 2016-05-21
- Refactor (#10)
## [1.8.4] - 2016-04-20
- fixes https://github.com/jonschlinkert/micromatch/issues/66
## [1.8.0] - 2015-03-18
- adds exponent examples, tests
- fixes the first example in https://github.com/jonschlinkert/micromatch/issues/38
## [1.6.0] - 2015-01-30
- optimizations, `bash` mode:
- improve path escaping
## [1.5.0] - 2015-01-28
- Merge pull request #5 from eush77/lib-files
## [1.4.0] - 2015-01-24
- add extglob tests
- externalize exponent function
- better whitespace handling
## [1.3.0] - 2015-01-24
- make regex patterns explicity
## [1.1.0] - 2015-01-11
- don't create a match group with `makeRe`
## [1.0.0] - 2014-12-23
- Merge commit '97b05f5544f8348736a8efaecf5c32bbe3e2ad6e'
- support empty brace syntax
- better bash coverage
- better support for regex strings
## [0.1.4] - 2014-11-14
- improve recognition of bad args, recognize mismatched argument types
- support escaping
- remove pathname-expansion
- support whitespace in patterns
## [0.1.0]
- first commit
[2.3.2]: https://github.com/micromatch/braces/compare/2.3.1...2.3.2
[2.3.1]: https://github.com/micromatch/braces/compare/2.3.0...2.3.1
[2.3.0]: https://github.com/micromatch/braces/compare/2.2.1...2.3.0
[2.2.1]: https://github.com/micromatch/braces/compare/2.2.0...2.2.1
[2.2.0]: https://github.com/micromatch/braces/compare/2.1.1...2.2.0
[2.1.1]: https://github.com/micromatch/braces/compare/2.1.0...2.1.1
[2.1.0]: https://github.com/micromatch/braces/compare/2.0.4...2.1.0
[2.0.4]: https://github.com/micromatch/braces/compare/2.0.3...2.0.4
[2.0.3]: https://github.com/micromatch/braces/compare/2.0.2...2.0.3
[2.0.2]: https://github.com/micromatch/braces/compare/2.0.1...2.0.2
[2.0.1]: https://github.com/micromatch/braces/compare/2.0.0...2.0.1
[2.0.0]: https://github.com/micromatch/braces/compare/1.8.5...2.0.0
[1.8.5]: https://github.com/micromatch/braces/compare/1.8.4...1.8.5
[1.8.4]: https://github.com/micromatch/braces/compare/1.8.0...1.8.4
[1.8.0]: https://github.com/micromatch/braces/compare/1.6.0...1.8.0
[1.6.0]: https://github.com/micromatch/braces/compare/1.5.0...1.6.0
[1.5.0]: https://github.com/micromatch/braces/compare/1.4.0...1.5.0
[1.4.0]: https://github.com/micromatch/braces/compare/1.3.0...1.4.0
[1.3.0]: https://github.com/micromatch/braces/compare/1.2.0...1.3.0
[1.2.0]: https://github.com/micromatch/braces/compare/1.1.0...1.2.0
[1.1.0]: https://github.com/micromatch/braces/compare/1.0.0...1.1.0
[1.0.0]: https://github.com/micromatch/braces/compare/0.1.4...1.0.0
[0.1.4]: https://github.com/micromatch/braces/compare/0.1.0...0.1.4
[Unreleased]: https://github.com/micromatch/braces/compare/0.1.0...HEAD
[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog

21
src/node_modules/braces/LICENSE generated vendored
View File

@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) 2014-2018, Jon Schlinkert.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

593
src/node_modules/braces/README.md generated vendored
View File

@ -1,593 +0,0 @@
# braces [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/braces.svg?style=flat)](https://www.npmjs.com/package/braces) [![NPM monthly downloads](https://img.shields.io/npm/dm/braces.svg?style=flat)](https://npmjs.org/package/braces) [![NPM total downloads](https://img.shields.io/npm/dt/braces.svg?style=flat)](https://npmjs.org/package/braces) [![Linux Build Status](https://img.shields.io/travis/micromatch/braces.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/braces)
> Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.
Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support.
## Install
Install with [npm](https://www.npmjs.com/):
```sh
$ npm install --save braces
```
## v3.0.0 Released!!
See the [changelog](CHANGELOG.md) for details.
## Why use braces?
Brace patterns make globs more powerful by adding the ability to match specific ranges and sequences of characters.
* **Accurate** - complete support for the [Bash 4.3 Brace Expansion](www.gnu.org/software/bash/) specification (passes all of the Bash braces tests)
* **[fast and performant](#benchmarks)** - Starts fast, runs fast and [scales well](#performance) as patterns increase in complexity.
* **Organized code base** - The parser and compiler are easy to maintain and update when edge cases crop up.
* **Well-tested** - Thousands of test assertions, and passes all of the Bash, minimatch, and [brace-expansion](https://github.com/juliangruber/brace-expansion) unit tests (as of the date this was written).
* **Safer** - You shouldn't have to worry about users defining aggressive or malicious brace patterns that can break your application. Braces takes measures to prevent malicious regex that can be used for DDoS attacks (see [catastrophic backtracking](https://www.regular-expressions.info/catastrophic.html)).
* [Supports lists](#lists) - (aka "sets") `a/{b,c}/d` => `['a/b/d', 'a/c/d']`
* [Supports sequences](#sequences) - (aka "ranges") `{01..03}` => `['01', '02', '03']`
* [Supports steps](#steps) - (aka "increments") `{2..10..2}` => `['2', '4', '6', '8', '10']`
* [Supports escaping](#escaping) - To prevent evaluation of special characters.
## Usage
The main export is a function that takes one or more brace `patterns` and `options`.
```js
const braces = require('braces');
// braces(patterns[, options]);
console.log(braces(['{01..05}', '{a..e}']));
//=> ['(0[1-5])', '([a-e])']
console.log(braces(['{01..05}', '{a..e}'], { expand: true }));
//=> ['01', '02', '03', '04', '05', 'a', 'b', 'c', 'd', 'e']
```
### Brace Expansion vs. Compilation
By default, brace patterns are compiled into strings that are optimized for creating regular expressions and matching.
**Compiled**
```js
console.log(braces('a/{x,y,z}/b'));
//=> ['a/(x|y|z)/b']
console.log(braces(['a/{01..20}/b', 'a/{1..5}/b']));
//=> [ 'a/(0[1-9]|1[0-9]|20)/b', 'a/([1-5])/b' ]
```
**Expanded**
Enable brace expansion by setting the `expand` option to true, or by using [braces.expand()](#expand) (returns an array similar to what you'd expect from Bash, or `echo {1..5}`, or [minimatch](https://github.com/isaacs/minimatch)):
```js
console.log(braces('a/{x,y,z}/b', { expand: true }));
//=> ['a/x/b', 'a/y/b', 'a/z/b']
console.log(braces.expand('{01..10}'));
//=> ['01','02','03','04','05','06','07','08','09','10']
```
### Lists
Expand lists (like Bash "sets"):
```js
console.log(braces('a/{foo,bar,baz}/*.js'));
//=> ['a/(foo|bar|baz)/*.js']
console.log(braces.expand('a/{foo,bar,baz}/*.js'));
//=> ['a/foo/*.js', 'a/bar/*.js', 'a/baz/*.js']
```
### Sequences
Expand ranges of characters (like Bash "sequences"):
```js
console.log(braces.expand('{1..3}')); // ['1', '2', '3']
console.log(braces.expand('a/{1..3}/b')); // ['a/1/b', 'a/2/b', 'a/3/b']
console.log(braces('{a..c}', { expand: true })); // ['a', 'b', 'c']
console.log(braces('foo/{a..c}', { expand: true })); // ['foo/a', 'foo/b', 'foo/c']
// supports zero-padded ranges
console.log(braces('a/{01..03}/b')); //=> ['a/(0[1-3])/b']
console.log(braces('a/{001..300}/b')); //=> ['a/(0{2}[1-9]|0[1-9][0-9]|[12][0-9]{2}|300)/b']
```
See [fill-range](https://github.com/jonschlinkert/fill-range) for all available range-expansion options.
### Stepped ranges
Steps, or increments, may be used with ranges:
```js
console.log(braces.expand('{2..10..2}'));
//=> ['2', '4', '6', '8', '10']
console.log(braces('{2..10..2}'));
//=> ['(2|4|6|8|10)']
```
When the [.optimize](#optimize) method is used, or [options.optimize](#optionsoptimize) is set to true, sequences are passed to [to-regex-range](https://github.com/jonschlinkert/to-regex-range) for expansion.
### Nesting
Brace patterns may be nested. The results of each expanded string are not sorted, and left to right order is preserved.
**"Expanded" braces**
```js
console.log(braces.expand('a{b,c,/{x,y}}/e'));
//=> ['ab/e', 'ac/e', 'a/x/e', 'a/y/e']
console.log(braces.expand('a/{x,{1..5},y}/c'));
//=> ['a/x/c', 'a/1/c', 'a/2/c', 'a/3/c', 'a/4/c', 'a/5/c', 'a/y/c']
```
**"Optimized" braces**
```js
console.log(braces('a{b,c,/{x,y}}/e'));
//=> ['a(b|c|/(x|y))/e']
console.log(braces('a/{x,{1..5},y}/c'));
//=> ['a/(x|([1-5])|y)/c']
```
### Escaping
**Escaping braces**
A brace pattern will not be expanded or evaluated if _either the opening or closing brace is escaped_:
```js
console.log(braces.expand('a\\{d,c,b}e'));
//=> ['a{d,c,b}e']
console.log(braces.expand('a{d,c,b\\}e'));
//=> ['a{d,c,b}e']
```
**Escaping commas**
Commas inside braces may also be escaped:
```js
console.log(braces.expand('a{b\\,c}d'));
//=> ['a{b,c}d']
console.log(braces.expand('a{d\\,c,b}e'));
//=> ['ad,ce', 'abe']
```
**Single items**
Following bash conventions, a brace pattern is also not expanded when it contains a single character:
```js
console.log(braces.expand('a{b}c'));
//=> ['a{b}c']
```
## Options
### options.maxLength
**Type**: `Number`
**Default**: `65,536`
**Description**: Limit the length of the input string. Useful when the input string is generated or your application allows users to pass a string, et cetera.
```js
console.log(braces('a/{b,c}/d', { maxLength: 3 })); //=> throws an error
```
### options.expand
**Type**: `Boolean`
**Default**: `undefined`
**Description**: Generate an "expanded" brace pattern (alternatively you can use the `braces.expand()` method, which does the same thing).
```js
console.log(braces('a/{b,c}/d', { expand: true }));
//=> [ 'a/b/d', 'a/c/d' ]
```
### options.nodupes
**Type**: `Boolean`
**Default**: `undefined`
**Description**: Remove duplicates from the returned array.
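A minimal illustration (the input pattern is chosen for this example); note that, per the handling in `index.js`, duplicates are only removed when `expand` is also true:

```js
console.log(braces('a/{b,b,c}/d', { expand: true }));
//=> ['a/b/d', 'a/b/d', 'a/c/d']

console.log(braces('a/{b,b,c}/d', { expand: true, nodupes: true }));
//=> ['a/b/d', 'a/c/d']
```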
### options.rangeLimit
**Type**: `Number`
**Default**: `1000`
**Description**: To prevent malicious patterns from being passed by users, an error is thrown when `braces.expand()` is used or `options.expand` is true and the generated range will exceed the `rangeLimit`.
You can customize `options.rangeLimit` or set it to `Infinity` to disable this altogether.
**Examples**
```js
// pattern exceeds the "rangeLimit", so it's optimized automatically
console.log(braces.expand('{1..1000}'));
//=> ['([1-9]|[1-9][0-9]{1,2}|1000)']
// pattern does not exceed "rangeLimit", so it's NOT optimized
console.log(braces.expand('{1..100}'));
//=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '100']
```
### options.transform
**Type**: `Function`
**Default**: `undefined`
**Description**: Customize range expansion.
**Example: Transforming non-numeric values**
```js
const alpha = braces.expand('x/{a..e}/y', {
transform(value, index) {
// When non-numeric values are passed, "value" is a character code.
return 'foo/' + String.fromCharCode(value) + '-' + index;
}
});
console.log(alpha);
//=> [ 'x/foo/a-0/y', 'x/foo/b-1/y', 'x/foo/c-2/y', 'x/foo/d-3/y', 'x/foo/e-4/y' ]
```
**Example: Transforming numeric values**
```js
const numeric = braces.expand('{1..5}', {
transform(value) {
// when numeric values are passed, "value" is a number
return 'foo/' + value * 2;
}
});
console.log(numeric);
//=> [ 'foo/2', 'foo/4', 'foo/6', 'foo/8', 'foo/10' ]
```
### options.quantifiers
**Type**: `Boolean`
**Default**: `undefined`
**Description**: In regular expressions, quantifiers can be used to specify how many times a token can be repeated. For example, `a{1,3}` will match the letter `a` one to three times.
Unfortunately, regex quantifiers happen to share the same syntax as [Bash lists](#lists)
The `quantifiers` option tells braces to detect when [regex quantifiers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp#quantifiers) are defined in the given pattern, and not to try to expand them as lists.
**Examples**
```js
const braces = require('braces');
console.log(braces('a/b{1,3}/{x,y,z}'));
//=> [ 'a/b(1|3)/(x|y|z)' ]
console.log(braces('a/b{1,3}/{x,y,z}', {quantifiers: true}));
//=> [ 'a/b{1,3}/(x|y|z)' ]
console.log(braces('a/b{1,3}/{x,y,z}', {quantifiers: true, expand: true}));
//=> [ 'a/b{1,3}/x', 'a/b{1,3}/y', 'a/b{1,3}/z' ]
```
### options.unescape
**Type**: `Boolean`
**Default**: `undefined`
**Description**: Strip backslashes that were used for escaping from the result.
## What is "brace expansion"?
Brace expansion is a type of parameter expansion that was made popular by unix shells for generating lists of strings, as well as regex-like matching when used alongside wildcards (globs).
In addition to "expansion", braces are also used for matching. In other words:
* [brace expansion](#brace-expansion) is for generating new lists
* [brace matching](#brace-matching) is for filtering existing lists
<details>
<summary><strong>More about brace expansion</strong> (click to expand)</summary>
There are two main types of brace expansion:
1. **lists**: which are defined using comma-separated values inside curly braces: `{a,b,c}`
2. **sequences**: which are defined using a starting value and an ending value, separated by two dots: `a{1..3}b`. Optionally, a third argument may be passed to define a "step" or increment to use: `a{1..100..10}b`. These are also sometimes referred to as "ranges".
Here are some example brace patterns to illustrate how they work:
**Sets**
```
{a,b,c} => a b c
{a,b,c}{1,2} => a1 a2 b1 b2 c1 c2
```
**Sequences**
```
{1..9} => 1 2 3 4 5 6 7 8 9
{4..-4} => 4 3 2 1 0 -1 -2 -3 -4
{1..20..3} => 1 4 7 10 13 16 19
{a..j} => a b c d e f g h i j
{j..a} => j i h g f e d c b a
{a..z..3} => a d g j m p s v y
```
**Combination**
Sets and sequences can be mixed together or used along with any other strings.
```
{a,b,c}{1..3} => a1 a2 a3 b1 b2 b3 c1 c2 c3
foo/{a,b,c}/bar => foo/a/bar foo/b/bar foo/c/bar
```
The fact that braces can be "expanded" from relatively simple patterns makes them ideal for quickly generating test fixtures, file paths, and similar use cases.
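For instance, a single pattern can sketch out a small set of fixture paths (the directory and file names here are purely illustrative):

```js
console.log(braces.expand('src/{components,utils}/{index,test}.js'));
//=> [ 'src/components/index.js',
//     'src/components/test.js',
//     'src/utils/index.js',
//     'src/utils/test.js' ]
```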
## Brace matching
In addition to _expansion_, brace patterns are also useful for performing regular-expression-like matching.
For example, the pattern `foo/{1..3}/bar` would match any of following strings:
```
foo/1/bar
foo/2/bar
foo/3/bar
```
But not:
```
baz/1/qux
baz/2/qux
baz/3/qux
```
Braces can also be combined with [glob patterns](https://github.com/jonschlinkert/micromatch) to perform more advanced wildcard matching. For example, the pattern `*/{1..3}/*` would match any of following strings:
```
foo/1/bar
foo/2/bar
foo/3/bar
baz/1/qux
baz/2/qux
baz/3/qux
```
## Brace matching pitfalls
Although brace patterns offer a user-friendly way of matching ranges or sets of strings, there are also some major disadvantages and potential risks you should be aware of.
### tldr
**"brace bombs"**
* brace expansion can eat up a huge amount of processing resources
* as brace patterns increase _linearly in size_, the system resources required to expand the pattern increase exponentially
* users can accidentally (or intentionally) exhaust your system's resources, resulting in the equivalent of a DoS attack (bonus: no programming knowledge is required!)
For a more detailed explanation with examples, see the [geometric complexity](#geometric-complexity) section.
### The solution
Jump to the [performance section](#performance) to see how Braces solves this problem in comparison to other libraries.
### Geometric complexity
At minimum, brace patterns with sets limited to two elements have quadratic or `O(n^2)` complexity. But the complexity of the algorithm increases exponentially as the number of sets, _and elements per set_, increases, which is `O(n^c)`.
For example, the following sets demonstrate quadratic (`O(n^2)`) complexity:
```
{1,2}{3,4} => (2X2) => 13 14 23 24
{1,2}{3,4}{5,6} => (2X2X2) => 135 136 145 146 235 236 245 246
```
But add an element to a set, and we get a n-fold Cartesian product with `O(n^c)` complexity:
```
{1,2,3}{4,5,6}{7,8,9} => (3X3X3) => 147 148 149 157 158 159 167 168 169 247 248
249 257 258 259 267 268 269 347 348 349 357
358 359 367 368 369
```
Now, imagine how this complexity grows given that each element is a n-tuple:
```
{1..100}{1..100} => (100X100) => 10,000 elements (38.4 kB)
{1..100}{1..100}{1..100} => (100X100X100) => 1,000,000 elements (5.76 MB)
```
Although these examples are clearly contrived, they demonstrate how brace patterns can quickly grow out of control.
**More information**
Interested in learning more about brace expansion?
* [linuxjournal/bash-brace-expansion](http://www.linuxjournal.com/content/bash-brace-expansion)
* [rosettacode/Brace_expansion](https://rosettacode.org/wiki/Brace_expansion)
* [cartesian product](https://en.wikipedia.org/wiki/Cartesian_product)
</details>
## Performance
Braces is not only screaming fast, it's also more accurate than other brace expansion libraries.
### Better algorithms
Fortunately there is a solution to the ["brace bomb" problem](#brace-matching-pitfalls): _don't expand brace patterns into an array when they're used for matching_.
Instead, convert the pattern into an optimized regular expression. This is easier said than done, and braces is the only library that does this currently.
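As a rough sketch of what that looks like in practice (the pattern and the `^`/`$` anchors are chosen for illustration; the compiled string comes from `braces()` as documented above):

```js
const braces = require('braces');

const [source] = braces('foo/{1..3}/bar'); //=> 'foo/([1-3])/bar'
const re = new RegExp(`^${source}$`);

console.log(re.test('foo/2/bar')); //=> true
console.log(re.test('foo/4/bar')); //=> false
```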
**The proof is in the numbers**
Minimatch gets exponentially slower as patterns increase in complexity, braces does not. The following results were generated using `braces()` and `minimatch.braceExpand()`, respectively.
| **Pattern** | **braces** | **[minimatch][]** |
| --- | --- | --- |
| `{1..9007199254740991}`[^1] | `298 B` (5ms 459μs)| N/A (freezes) |
| `{1..1000000000000000}` | `41 B` (1ms 15μs) | N/A (freezes) |
| `{1..100000000000000}` | `40 B` (890μs) | N/A (freezes) |
| `{1..10000000000000}` | `39 B` (2ms 49μs) | N/A (freezes) |
| `{1..1000000000000}` | `38 B` (608μs) | N/A (freezes) |
| `{1..100000000000}` | `37 B` (397μs) | N/A (freezes) |
| `{1..10000000000}` | `35 B` (983μs) | N/A (freezes) |
| `{1..1000000000}` | `34 B` (798μs) | N/A (freezes) |
| `{1..100000000}` | `33 B` (733μs) | N/A (freezes) |
| `{1..10000000}` | `32 B` (5ms 632μs) | `78.89 MB` (16s 388ms 569μs) |
| `{1..1000000}` | `31 B` (1ms 381μs) | `6.89 MB` (1s 496ms 887μs) |
| `{1..100000}` | `30 B` (950μs) | `588.89 kB` (146ms 921μs) |
| `{1..10000}` | `29 B` (1ms 114μs) | `48.89 kB` (14ms 187μs) |
| `{1..1000}` | `28 B` (760μs) | `3.89 kB` (1ms 453μs) |
| `{1..100}` | `22 B` (345μs) | `291 B` (196μs) |
| `{1..10}` | `10 B` (533μs) | `20 B` (37μs) |
| `{1..3}` | `7 B` (190μs) | `5 B` (27μs) |
### Faster algorithms
When you need expansion, braces is still much faster.
_(the following results were generated using `braces.expand()` and `minimatch.braceExpand()`, respectively)_
| **Pattern** | **braces** | **[minimatch][]** |
| --- | --- | --- |
| `{1..10000000}` | `78.89 MB` (2s 698ms 642μs) | `78.89 MB` (18s 601ms 974μs) |
| `{1..1000000}` | `6.89 MB` (458ms 576μs) | `6.89 MB` (1s 491ms 621μs) |
| `{1..100000}` | `588.89 kB` (20ms 728μs) | `588.89 kB` (156ms 919μs) |
| `{1..10000}` | `48.89 kB` (2ms 202μs) | `48.89 kB` (13ms 641μs) |
| `{1..1000}` | `3.89 kB` (1ms 796μs) | `3.89 kB` (1ms 958μs) |
| `{1..100}` | `291 B` (424μs) | `291 B` (211μs) |
| `{1..10}` | `20 B` (487μs) | `20 B` (72μs) |
| `{1..3}` | `5 B` (166μs) | `5 B` (27μs) |
If you'd like to run these comparisons yourself, see [test/support/generate.js](test/support/generate.js).
## Benchmarks
### Running benchmarks
Install dev dependencies:
```bash
npm i -d && npm run benchmark
```
### Latest results
Braces is more accurate, without sacrificing performance.
```bash
# range (expanded)
braces x 29,040 ops/sec ±3.69% (91 runs sampled)
minimatch x 4,735 ops/sec ±1.28% (90 runs sampled)
# range (optimized for regex)
braces x 382,878 ops/sec ±0.56% (94 runs sampled)
minimatch x 1,040 ops/sec ±0.44% (93 runs sampled)
# nested ranges (expanded)
braces x 19,744 ops/sec ±2.27% (92 runs sampled)
minimatch x 4,579 ops/sec ±0.50% (93 runs sampled)
# nested ranges (optimized for regex)
braces x 246,019 ops/sec ±2.02% (93 runs sampled)
minimatch x 1,028 ops/sec ±0.39% (94 runs sampled)
# set (expanded)
braces x 138,641 ops/sec ±0.53% (95 runs sampled)
minimatch x 219,582 ops/sec ±0.98% (94 runs sampled)
# set (optimized for regex)
braces x 388,408 ops/sec ±0.41% (95 runs sampled)
minimatch x 44,724 ops/sec ±0.91% (89 runs sampled)
# nested sets (expanded)
braces x 84,966 ops/sec ±0.48% (94 runs sampled)
minimatch x 140,720 ops/sec ±0.37% (95 runs sampled)
# nested sets (optimized for regex)
braces x 263,340 ops/sec ±2.06% (92 runs sampled)
minimatch x 28,714 ops/sec ±0.40% (90 runs sampled)
```
## About
<details>
<summary><strong>Contributing</strong></summary>
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
</details>
<details>
<summary><strong>Running Tests</strong></summary>
Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
```sh
$ npm install && npm test
```
</details>
<details>
<summary><strong>Building docs</strong></summary>
_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
To generate the readme, run the following command:
```sh
$ npm install -g verbose/verb#dev verb-generate-readme && verb
```
</details>
### Contributors
| **Commits** | **Contributor** |
| --- | --- |
| 197 | [jonschlinkert](https://github.com/jonschlinkert) |
| 4 | [doowb](https://github.com/doowb) |
| 1 | [es128](https://github.com/es128) |
| 1 | [eush77](https://github.com/eush77) |
| 1 | [hemanth](https://github.com/hemanth) |
| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) |
### Author
**Jon Schlinkert**
* [GitHub Profile](https://github.com/jonschlinkert)
* [Twitter Profile](https://twitter.com/jonschlinkert)
* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert)
### License
Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert).
Released under the [MIT License](LICENSE).
***
_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._

170
src/node_modules/braces/index.js generated vendored
View File

@ -1,170 +0,0 @@
'use strict';
const stringify = require('./lib/stringify');
const compile = require('./lib/compile');
const expand = require('./lib/expand');
const parse = require('./lib/parse');
/**
* Expand the given pattern or create a regex-compatible string.
*
* ```js
* const braces = require('braces');
* console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)']
* console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c']
* ```
* @param {String} `str`
* @param {Object} `options`
* @return {String}
* @api public
*/
const braces = (input, options = {}) => {
let output = [];
if (Array.isArray(input)) {
for (let pattern of input) {
let result = braces.create(pattern, options);
if (Array.isArray(result)) {
output.push(...result);
} else {
output.push(result);
}
}
} else {
output = [].concat(braces.create(input, options));
}
if (options && options.expand === true && options.nodupes === true) {
output = [...new Set(output)];
}
return output;
};
/**
* Parse the given `str` with the given `options`.
*
* ```js
* // braces.parse(pattern, [, options]);
* const ast = braces.parse('a/{b,c}/d');
* console.log(ast);
* ```
* @param {String} pattern Brace pattern to parse
* @param {Object} options
* @return {Object} Returns an AST
* @api public
*/
braces.parse = (input, options = {}) => parse(input, options);
/**
* Creates a braces string from an AST, or an AST node.
*
* ```js
* const braces = require('braces');
* let ast = braces.parse('foo/{a,b}/bar');
* console.log(stringify(ast.nodes[2])); //=> '{a,b}'
* ```
* @param {String} `input` Brace pattern or AST.
* @param {Object} `options`
* @return {Array} Returns an array of expanded values.
* @api public
*/
braces.stringify = (input, options = {}) => {
if (typeof input === 'string') {
return stringify(braces.parse(input, options), options);
}
return stringify(input, options);
};
/**
* Compiles a brace pattern into a regex-compatible, optimized string.
* This method is called by the main [braces](#braces) function by default.
*
* ```js
* const braces = require('braces');
* console.log(braces.compile('a/{b,c}/d'));
* //=> ['a/(b|c)/d']
* ```
* @param {String} `input` Brace pattern or AST.
* @param {Object} `options`
* @return {Array} Returns an array of expanded values.
* @api public
*/
braces.compile = (input, options = {}) => {
if (typeof input === 'string') {
input = braces.parse(input, options);
}
return compile(input, options);
};
/**
* Expands a brace pattern into an array. This method is called by the
* main [braces](#braces) function when `options.expand` is true. Before
* using this method it's recommended that you read the [performance notes](#performance)
* and advantages of using [.compile](#compile) instead.
*
* ```js
* const braces = require('braces');
* console.log(braces.expand('a/{b,c}/d'));
* //=> ['a/b/d', 'a/c/d'];
* ```
* @param {String} `pattern` Brace pattern
* @param {Object} `options`
* @return {Array} Returns an array of expanded values.
* @api public
*/
braces.expand = (input, options = {}) => {
if (typeof input === 'string') {
input = braces.parse(input, options);
}
let result = expand(input, options);
// filter out empty strings if specified
if (options.noempty === true) {
result = result.filter(Boolean);
}
// filter out duplicates if specified
if (options.nodupes === true) {
result = [...new Set(result)];
}
return result;
};
/**
* Processes a brace pattern and returns either an expanded array
* (if `options.expand` is true), a highly optimized regex-compatible string.
* This method is called by the main [braces](#braces) function.
*
* ```js
* const braces = require('braces');
* console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}'))
* //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'
* ```
* @param {String} `pattern` Brace pattern
* @param {Object} `options`
* @return {Array} Returns an array of expanded values.
* @api public
*/
braces.create = (input, options = {}) => {
if (input === '' || input.length < 3) {
return [input];
}
return options.expand !== true
? braces.compile(input, options)
: braces.expand(input, options);
};
/**
* Expose "braces"
*/
module.exports = braces;

View File

@ -1,57 +0,0 @@
'use strict';
const fill = require('fill-range');
const utils = require('./utils');
const compile = (ast, options = {}) => {
let walk = (node, parent = {}) => {
let invalidBlock = utils.isInvalidBrace(parent);
let invalidNode = node.invalid === true && options.escapeInvalid === true;
let invalid = invalidBlock === true || invalidNode === true;
let prefix = options.escapeInvalid === true ? '\\' : '';
let output = '';
if (node.isOpen === true) {
return prefix + node.value;
}
if (node.isClose === true) {
return prefix + node.value;
}
if (node.type === 'open') {
return invalid ? (prefix + node.value) : '(';
}
if (node.type === 'close') {
return invalid ? (prefix + node.value) : ')';
}
if (node.type === 'comma') {
return node.prev.type === 'comma' ? '' : (invalid ? node.value : '|');
}
if (node.value) {
return node.value;
}
if (node.nodes && node.ranges > 0) {
let args = utils.reduce(node.nodes);
let range = fill(...args, { ...options, wrap: false, toRegex: true });
if (range.length !== 0) {
return args.length > 1 && range.length > 1 ? `(${range})` : range;
}
}
if (node.nodes) {
for (let child of node.nodes) {
output += walk(child, node);
}
}
return output;
};
return walk(ast);
};
module.exports = compile;

View File

@ -1,57 +0,0 @@
'use strict';
module.exports = {
MAX_LENGTH: 1024 * 64,
// Digits
CHAR_0: '0', /* 0 */
CHAR_9: '9', /* 9 */
// Alphabet chars.
CHAR_UPPERCASE_A: 'A', /* A */
CHAR_LOWERCASE_A: 'a', /* a */
CHAR_UPPERCASE_Z: 'Z', /* Z */
CHAR_LOWERCASE_Z: 'z', /* z */
CHAR_LEFT_PARENTHESES: '(', /* ( */
CHAR_RIGHT_PARENTHESES: ')', /* ) */
CHAR_ASTERISK: '*', /* * */
// Non-alphabetic chars.
CHAR_AMPERSAND: '&', /* & */
CHAR_AT: '@', /* @ */
CHAR_BACKSLASH: '\\', /* \ */
CHAR_BACKTICK: '`', /* ` */
CHAR_CARRIAGE_RETURN: '\r', /* \r */
CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */
CHAR_COLON: ':', /* : */
CHAR_COMMA: ',', /* , */
CHAR_DOLLAR: '$', /* $ */
CHAR_DOT: '.', /* . */
CHAR_DOUBLE_QUOTE: '"', /* " */
CHAR_EQUAL: '=', /* = */
CHAR_EXCLAMATION_MARK: '!', /* ! */
CHAR_FORM_FEED: '\f', /* \f */
CHAR_FORWARD_SLASH: '/', /* / */
CHAR_HASH: '#', /* # */
CHAR_HYPHEN_MINUS: '-', /* - */
CHAR_LEFT_ANGLE_BRACKET: '<', /* < */
CHAR_LEFT_CURLY_BRACE: '{', /* { */
CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */
CHAR_LINE_FEED: '\n', /* \n */
CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */
CHAR_PERCENT: '%', /* % */
CHAR_PLUS: '+', /* + */
CHAR_QUESTION_MARK: '?', /* ? */
CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */
CHAR_RIGHT_CURLY_BRACE: '}', /* } */
CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */
CHAR_SEMICOLON: ';', /* ; */
CHAR_SINGLE_QUOTE: '\'', /* ' */
CHAR_SPACE: ' ', /* */
CHAR_TAB: '\t', /* \t */
CHAR_UNDERSCORE: '_', /* _ */
CHAR_VERTICAL_LINE: '|', /* | */
CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */
};

113
src/node_modules/braces/lib/expand.js generated vendored
View File

@ -1,113 +0,0 @@
'use strict';
const fill = require('fill-range');
const stringify = require('./stringify');
const utils = require('./utils');
const append = (queue = '', stash = '', enclose = false) => {
let result = [];
queue = [].concat(queue);
stash = [].concat(stash);
if (!stash.length) return queue;
if (!queue.length) {
return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash;
}
for (let item of queue) {
if (Array.isArray(item)) {
for (let value of item) {
result.push(append(value, stash, enclose));
}
} else {
for (let ele of stash) {
if (enclose === true && typeof ele === 'string') ele = `{${ele}}`;
result.push(Array.isArray(ele) ? append(item, ele, enclose) : (item + ele));
}
}
}
return utils.flatten(result);
};
const expand = (ast, options = {}) => {
let rangeLimit = options.rangeLimit === void 0 ? 1000 : options.rangeLimit;
let walk = (node, parent = {}) => {
node.queue = [];
let p = parent;
let q = parent.queue;
while (p.type !== 'brace' && p.type !== 'root' && p.parent) {
p = p.parent;
q = p.queue;
}
if (node.invalid || node.dollar) {
q.push(append(q.pop(), stringify(node, options)));
return;
}
if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) {
q.push(append(q.pop(), ['{}']));
return;
}
if (node.nodes && node.ranges > 0) {
let args = utils.reduce(node.nodes);
if (utils.exceedsLimit(...args, options.step, rangeLimit)) {
throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.');
}
let range = fill(...args, options);
if (range.length === 0) {
range = stringify(node, options);
}
q.push(append(q.pop(), range));
node.nodes = [];
return;
}
let enclose = utils.encloseBrace(node);
let queue = node.queue;
let block = node;
while (block.type !== 'brace' && block.type !== 'root' && block.parent) {
block = block.parent;
queue = block.queue;
}
for (let i = 0; i < node.nodes.length; i++) {
let child = node.nodes[i];
if (child.type === 'comma' && node.type === 'brace') {
if (i === 1) queue.push('');
queue.push('');
continue;
}
if (child.type === 'close') {
q.push(append(q.pop(), queue, enclose));
continue;
}
if (child.value && child.type !== 'open') {
queue.push(append(queue.pop(), child.value));
continue;
}
if (child.nodes) {
walk(child, node);
}
}
return queue;
};
return utils.flatten(walk(ast));
};
module.exports = expand;

333
src/node_modules/braces/lib/parse.js generated vendored
View File

@ -1,333 +0,0 @@
'use strict';
const stringify = require('./stringify');
/**
* Constants
*/
const {
MAX_LENGTH,
CHAR_BACKSLASH, /* \ */
CHAR_BACKTICK, /* ` */
CHAR_COMMA, /* , */
CHAR_DOT, /* . */
CHAR_LEFT_PARENTHESES, /* ( */
CHAR_RIGHT_PARENTHESES, /* ) */
CHAR_LEFT_CURLY_BRACE, /* { */
CHAR_RIGHT_CURLY_BRACE, /* } */
CHAR_LEFT_SQUARE_BRACKET, /* [ */
CHAR_RIGHT_SQUARE_BRACKET, /* ] */
CHAR_DOUBLE_QUOTE, /* " */
CHAR_SINGLE_QUOTE, /* ' */
CHAR_NO_BREAK_SPACE,
CHAR_ZERO_WIDTH_NOBREAK_SPACE
} = require('./constants');
/**
* parse
*/
const parse = (input, options = {}) => {
if (typeof input !== 'string') {
throw new TypeError('Expected a string');
}
let opts = options || {};
let max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
if (input.length > max) {
throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`);
}
let ast = { type: 'root', input, nodes: [] };
let stack = [ast];
let block = ast;
let prev = ast;
let brackets = 0;
let length = input.length;
let index = 0;
let depth = 0;
let value;
let memo = {};
/**
* Helpers
*/
const advance = () => input[index++];
const push = node => {
if (node.type === 'text' && prev.type === 'dot') {
prev.type = 'text';
}
if (prev && prev.type === 'text' && node.type === 'text') {
prev.value += node.value;
return;
}
block.nodes.push(node);
node.parent = block;
node.prev = prev;
prev = node;
return node;
};
push({ type: 'bos' });
while (index < length) {
block = stack[stack.length - 1];
value = advance();
/**
* Invalid chars
*/
if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) {
continue;
}
/**
* Escaped chars
*/
if (value === CHAR_BACKSLASH) {
push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() });
continue;
}
/**
* Right square bracket (literal): ']'
*/
if (value === CHAR_RIGHT_SQUARE_BRACKET) {
push({ type: 'text', value: '\\' + value });
continue;
}
/**
* Left square bracket: '['
*/
if (value === CHAR_LEFT_SQUARE_BRACKET) {
brackets++;
let closed = true;
let next;
while (index < length && (next = advance())) {
value += next;
if (next === CHAR_LEFT_SQUARE_BRACKET) {
brackets++;
continue;
}
if (next === CHAR_BACKSLASH) {
value += advance();
continue;
}
if (next === CHAR_RIGHT_SQUARE_BRACKET) {
brackets--;
if (brackets === 0) {
break;
}
}
}
push({ type: 'text', value });
continue;
}
/**
* Parentheses
*/
if (value === CHAR_LEFT_PARENTHESES) {
block = push({ type: 'paren', nodes: [] });
stack.push(block);
push({ type: 'text', value });
continue;
}
if (value === CHAR_RIGHT_PARENTHESES) {
if (block.type !== 'paren') {
push({ type: 'text', value });
continue;
}
block = stack.pop();
push({ type: 'text', value });
block = stack[stack.length - 1];
continue;
}
/**
* Quotes: '|"|`
*/
if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) {
let open = value;
let next;
if (options.keepQuotes !== true) {
value = '';
}
while (index < length && (next = advance())) {
if (next === CHAR_BACKSLASH) {
value += next + advance();
continue;
}
if (next === open) {
if (options.keepQuotes === true) value += next;
break;
}
value += next;
}
push({ type: 'text', value });
continue;
}
/**
* Left curly brace: '{'
*/
if (value === CHAR_LEFT_CURLY_BRACE) {
depth++;
let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true;
let brace = {
type: 'brace',
open: true,
close: false,
dollar,
depth,
commas: 0,
ranges: 0,
nodes: []
};
block = push(brace);
stack.push(block);
push({ type: 'open', value });
continue;
}
/**
* Right curly brace: '}'
*/
if (value === CHAR_RIGHT_CURLY_BRACE) {
if (block.type !== 'brace') {
push({ type: 'text', value });
continue;
}
let type = 'close';
block = stack.pop();
block.close = true;
push({ type, value });
depth--;
block = stack[stack.length - 1];
continue;
}
/**
* Comma: ','
*/
if (value === CHAR_COMMA && depth > 0) {
if (block.ranges > 0) {
block.ranges = 0;
let open = block.nodes.shift();
block.nodes = [open, { type: 'text', value: stringify(block) }];
}
push({ type: 'comma', value });
block.commas++;
continue;
}
/**
* Dot: '.'
*/
if (value === CHAR_DOT && depth > 0 && block.commas === 0) {
let siblings = block.nodes;
if (depth === 0 || siblings.length === 0) {
push({ type: 'text', value });
continue;
}
if (prev.type === 'dot') {
block.range = [];
prev.value += value;
prev.type = 'range';
if (block.nodes.length !== 3 && block.nodes.length !== 5) {
block.invalid = true;
block.ranges = 0;
prev.type = 'text';
continue;
}
block.ranges++;
block.args = [];
continue;
}
if (prev.type === 'range') {
siblings.pop();
let before = siblings[siblings.length - 1];
before.value += prev.value + value;
prev = before;
block.ranges--;
continue;
}
push({ type: 'dot', value });
continue;
}
/**
* Text
*/
push({ type: 'text', value });
}
// Mark imbalanced braces and brackets as invalid
do {
block = stack.pop();
if (block.type !== 'root') {
block.nodes.forEach(node => {
if (!node.nodes) {
if (node.type === 'open') node.isOpen = true;
if (node.type === 'close') node.isClose = true;
if (!node.nodes) node.type = 'text';
node.invalid = true;
}
});
// get the location of the block on parent.nodes (block's siblings)
let parent = stack[stack.length - 1];
let index = parent.nodes.indexOf(block);
// replace the (invalid) block with it's nodes
parent.nodes.splice(index, 1, ...block.nodes);
}
} while (stack.length > 0);
push({ type: 'eos' });
return ast;
};
module.exports = parse;

View File

@ -1,32 +0,0 @@
'use strict';
const utils = require('./utils');
module.exports = (ast, options = {}) => {
let stringify = (node, parent = {}) => {
let invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent);
let invalidNode = node.invalid === true && options.escapeInvalid === true;
let output = '';
if (node.value) {
if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) {
return '\\' + node.value;
}
return node.value;
}
if (node.nodes) {
for (let child of node.nodes) {
output += stringify(child);
}
}
return output;
};
return stringify(ast);
};

112
src/node_modules/braces/lib/utils.js generated vendored
View File

@ -1,112 +0,0 @@
'use strict';
exports.isInteger = num => {
if (typeof num === 'number') {
return Number.isInteger(num);
}
if (typeof num === 'string' && num.trim() !== '') {
return Number.isInteger(Number(num));
}
return false;
};
/**
* Find a node of the given type
*/
exports.find = (node, type) => node.nodes.find(node => node.type === type);
/**
* Returns true if expanding the range would exceed the given limit
*/
exports.exceedsLimit = (min, max, step = 1, limit) => {
if (limit === false) return false;
if (!exports.isInteger(min) || !exports.isInteger(max)) return false;
return ((Number(max) - Number(min)) / Number(step)) >= limit;
};
/**
* Escape the given node with '\\' before node.value
*/
exports.escapeNode = (block, n = 0, type) => {
let node = block.nodes[n];
if (!node) return;
if ((type && node.type === type) || node.type === 'open' || node.type === 'close') {
if (node.escaped !== true) {
node.value = '\\' + node.value;
node.escaped = true;
}
}
};
/**
* Returns true if the given brace node should be enclosed in literal braces
*/
exports.encloseBrace = node => {
if (node.type !== 'brace') return false;
if ((node.commas >> 0 + node.ranges >> 0) === 0) {
node.invalid = true;
return true;
}
return false;
};
/**
* Returns true if a brace node is invalid.
*/
exports.isInvalidBrace = block => {
if (block.type !== 'brace') return false;
if (block.invalid === true || block.dollar) return true;
if ((block.commas >> 0 + block.ranges >> 0) === 0) {
block.invalid = true;
return true;
}
if (block.open !== true || block.close !== true) {
block.invalid = true;
return true;
}
return false;
};
/**
* Returns true if a node is an open or close node
*/
exports.isOpenOrClose = node => {
if (node.type === 'open' || node.type === 'close') {
return true;
}
return node.open === true || node.close === true;
};
/**
* Reduce an array of text nodes.
*/
exports.reduce = nodes => nodes.reduce((acc, node) => {
if (node.type === 'text') acc.push(node.value);
if (node.type === 'range') node.type = 'text';
return acc;
}, []);
/**
* Flatten an array
*/
exports.flatten = (...args) => {
const result = [];
const flat = arr => {
for (let i = 0; i < arr.length; i++) {
let ele = arr[i];
Array.isArray(ele) ? flat(ele, result) : ele !== void 0 && result.push(ele);
}
return result;
};
flat(args);
return result;
};

77
src/node_modules/braces/package.json generated vendored
View File

@ -1,77 +0,0 @@
{
"name": "braces",
"description": "Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.",
"version": "3.0.2",
"homepage": "https://github.com/micromatch/braces",
"author": "Jon Schlinkert (https://github.com/jonschlinkert)",
"contributors": [
"Brian Woodward (https://twitter.com/doowb)",
"Elan Shanker (https://github.com/es128)",
"Eugene Sharygin (https://github.com/eush77)",
"hemanth.hm (http://h3manth.com)",
"Jon Schlinkert (http://twitter.com/jonschlinkert)"
],
"repository": "micromatch/braces",
"bugs": {
"url": "https://github.com/micromatch/braces/issues"
},
"license": "MIT",
"files": [
"index.js",
"lib"
],
"main": "index.js",
"engines": {
"node": ">=8"
},
"scripts": {
"test": "mocha",
"benchmark": "node benchmark"
},
"dependencies": {
"fill-range": "^7.0.1"
},
"devDependencies": {
"ansi-colors": "^3.2.4",
"bash-path": "^2.0.1",
"gulp-format-md": "^2.0.0",
"mocha": "^6.1.1"
},
"keywords": [
"alpha",
"alphabetical",
"bash",
"brace",
"braces",
"expand",
"expansion",
"filepath",
"fill",
"fs",
"glob",
"globbing",
"letter",
"match",
"matches",
"matching",
"number",
"numerical",
"path",
"range",
"ranges",
"sh"
],
"verb": {
"toc": false,
"layout": "default",
"tasks": [
"readme"
],
"lint": {
"reflinks": true
},
"plugins": [
"gulp-format-md"
]
}
}

21
src/node_modules/chokidar/LICENSE generated vendored
View File

@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) 2012-2019 Paul Miller (https://paulmillr.com), Elan Shanker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the “Software”), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

308
src/node_modules/chokidar/README.md generated vendored
View File

@ -1,308 +0,0 @@
# Chokidar [![Weekly downloads](https://img.shields.io/npm/dw/chokidar.svg)](https://github.com/paulmillr/chokidar) [![Yearly downloads](https://img.shields.io/npm/dy/chokidar.svg)](https://github.com/paulmillr/chokidar)
> Minimal and efficient cross-platform file watching library
[![NPM](https://nodei.co/npm/chokidar.png)](https://www.npmjs.com/package/chokidar)
## Why?
Node.js `fs.watch`:
* Doesn't report filenames on MacOS.
* Doesn't report events at all when using editors like Sublime on MacOS.
* Often reports events twice.
* Emits most changes as `rename`.
* Does not provide an easy way to recursively watch file trees.
* Does not support recursive watching on Linux.
Node.js `fs.watchFile`:
* Almost as bad at event handling.
* Also does not provide any recursive watching.
* Results in high CPU utilization.
Chokidar resolves these problems.
Initially made for **[Brunch](https://brunch.io/)** (an ultra-swift web app build tool), it is now used in
[Microsoft's Visual Studio Code](https://github.com/microsoft/vscode),
[gulp](https://github.com/gulpjs/gulp/),
[karma](https://karma-runner.github.io/),
[PM2](https://github.com/Unitech/PM2),
[browserify](http://browserify.org/),
[webpack](https://webpack.github.io/),
[BrowserSync](https://www.browsersync.io/),
and [many others](https://www.npmjs.com/browse/depended/chokidar).
It has proven itself in production environments.
Version 3 is out! Check out our blog post about it: [Chokidar 3: How to save 32TB of traffic every week](https://paulmillr.com/posts/chokidar-3-save-32tb-of-traffic/)
## How?
Chokidar does still rely on the Node.js core `fs` module, but when using
`fs.watch` and `fs.watchFile` for watching, it normalizes the events it
receives, often checking for truth by getting file stats and/or dir contents.
On MacOS, chokidar by default uses a native extension exposing the Darwin
`FSEvents` API. This provides very efficient recursive watching compared with
implementations like `kqueue` available on most \*nix platforms. Chokidar still
does have to do some work to normalize the events received that way as well.
On most other platforms, the `fs.watch`-based implementation is the default, which
avoids polling and keeps CPU usage down. Be advised that chokidar will initiate
watchers recursively for everything within scope of the paths that have been
specified, so be judicious about not wasting system resources by watching much
more than needed.
## Getting started
Install with npm:
```sh
npm install chokidar
```
Then `require` and use it in your code:
```javascript
const chokidar = require('chokidar');
// One-liner for current directory
chokidar.watch('.').on('all', (event, path) => {
console.log(event, path);
});
```
## API
```javascript
// Example of a more typical implementation structure
// Initialize watcher.
const watcher = chokidar.watch('file, dir, glob, or array', {
ignored: /(^|[\/\\])\../, // ignore dotfiles
persistent: true
});
// Something to use when events are received.
const log = console.log.bind(console);
// Add event listeners.
watcher
.on('add', path => log(`File ${path} has been added`))
.on('change', path => log(`File ${path} has been changed`))
.on('unlink', path => log(`File ${path} has been removed`));
// More possible events.
watcher
.on('addDir', path => log(`Directory ${path} has been added`))
.on('unlinkDir', path => log(`Directory ${path} has been removed`))
.on('error', error => log(`Watcher error: ${error}`))
.on('ready', () => log('Initial scan complete. Ready for changes'))
.on('raw', (event, path, details) => { // internal
log('Raw event info:', event, path, details);
});
// 'add', 'addDir' and 'change' events also receive stat() results as second
// argument when available: https://nodejs.org/api/fs.html#fs_class_fs_stats
watcher.on('change', (path, stats) => {
if (stats) console.log(`File ${path} changed size to ${stats.size}`);
});
// Watch new files.
watcher.add('new-file');
watcher.add(['new-file-2', 'new-file-3', '**/other-file*']);
// Get list of actual paths being watched on the filesystem
var watchedPaths = watcher.getWatched();
// Un-watch some files.
await watcher.unwatch('new-file*');
// Stop watching.
// The method is async!
watcher.close().then(() => console.log('closed'));
// Full list of options. See below for descriptions.
// Do not use this example!
chokidar.watch('file', {
persistent: true,
ignored: '*.txt',
ignoreInitial: false,
followSymlinks: true,
cwd: '.',
disableGlobbing: false,
usePolling: false,
interval: 100,
binaryInterval: 300,
alwaysStat: false,
depth: 99,
awaitWriteFinish: {
stabilityThreshold: 2000,
pollInterval: 100
},
ignorePermissionErrors: false,
atomic: true // or a custom 'atomicity delay', in milliseconds (default 100)
});
```
`chokidar.watch(paths, [options])`
* `paths` (string or array of strings). Paths to files, dirs to be watched
recursively, or glob patterns.
- Note: globs must not contain windows separators (`\`),
because that's how they work by the standard —
you'll need to replace them with forward slashes (`/`).
- Note 2: for additional glob documentation, check out low-level
library: [picomatch](https://github.com/micromatch/picomatch).
* `options` (object) Options object as defined below:
#### Persistence
* `persistent` (default: `true`). Indicates whether the process
should continue to run as long as files are being watched. If set to
`false` when using `fsevents` to watch, no more events will be emitted
after `ready`, even if the process continues to run.
#### Path filtering
* `ignored` ([anymatch](https://github.com/es128/anymatch)-compatible definition)
Defines files/paths to be ignored. The whole relative or absolute path is
tested, not just filename. If a function with two arguments is provided, it
gets called twice per path - once with a single argument (the path), second
time with two arguments (the path and the
[`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats)
object of that path). A short sketch of the function form follows this list.
* `ignoreInitial` (default: `false`). If set to `false` then `add`/`addDir` events are also emitted for matching paths while
instantiating the watching as chokidar discovers these file paths (before the `ready` event).
* `followSymlinks` (default: `true`). When `false`, only the
symlinks themselves will be watched for changes instead of following
the link references and bubbling events through the link's path.
* `cwd` (no default). The base directory from which watch `paths` are to be
derived. Paths emitted with events will be relative to this.
* `disableGlobbing` (default: `false`). If set to `true` then the strings passed to `.watch()` and `.add()` are treated as
literal path names, even if they look like globs.
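A minimal sketch of the two-argument function form described above (the watched path and the `.js` filter are illustrative):

```js
const chokidar = require('chokidar');

// Ignore every file that is not a .js file; directories are left alone so
// they can still be traversed. `stats` is only present on the second call.
const watcher = chokidar.watch('.', {
  ignored: (path, stats) => Boolean(stats) && stats.isFile() && !path.endsWith('.js')
});

watcher.on('change', path => console.log(`${path} changed`));
```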
#### Performance
* `usePolling` (default: `false`).
Whether to use fs.watchFile (backed by polling), or fs.watch. If polling
leads to high CPU utilization, consider setting this to `false`. It is
typically necessary to **set this to `true` to successfully watch files over
a network**, and it may be necessary to successfully watch files in other
non-standard situations. Setting to `true` explicitly on MacOS overrides the
`useFsEvents` default. You may also set the CHOKIDAR_USEPOLLING env variable
to true (1) or false (0) in order to override this option.
* _Polling-specific settings_ (effective when `usePolling: true`)
* `interval` (default: `100`). Interval of file system polling, in milliseconds. You may also
set the CHOKIDAR_INTERVAL env variable to override this option.
* `binaryInterval` (default: `300`). Interval of file system
polling for binary files.
([see list of binary extensions](https://github.com/sindresorhus/binary-extensions/blob/master/binary-extensions.json))
* `useFsEvents` (default: `true` on MacOS). Whether to use the
`fsevents` watching interface if available. When set to `true` explicitly
and `fsevents` is available, this supersedes the `usePolling` setting. When
set to `false` on MacOS, `usePolling: true` becomes the default.
* `alwaysStat` (default: `false`). If relying upon the
[`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats)
object that may get passed with `add`, `addDir`, and `change` events, set
this to `true` to ensure it is provided even in cases where it wasn't
already available from the underlying watch events.
* `depth` (default: `undefined`). If set, limits how many levels of
subdirectories will be traversed.
* `awaitWriteFinish` (default: `false`).
By default, the `add` event will fire when a file first appears on disk, before
the entire file has been written. Furthermore, in some cases some `change`
events will be emitted while the file is still being written. In some cases,
especially when watching large files, you will need to wait for the
write operation to finish before responding to a file creation or modification.
Setting `awaitWriteFinish` to `true` (or a truthy value) will poll file size,
holding the `add` and `change` events until the size does not change for a
configurable amount of time. The appropriate duration is heavily
dependent on the OS and hardware; for accurate detection it should
be relatively high, which makes file watching much less responsive.
Use with caution (see the sketch after this section).
* *`options.awaitWriteFinish` can be set to an object in order to adjust
timing params:*
* `awaitWriteFinish.stabilityThreshold` (default: 2000). Amount of time in
milliseconds for a file size to remain constant before emitting its event.
* `awaitWriteFinish.pollInterval` (default: 100). File size polling interval, in milliseconds.
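As a sketch of the polling and write-stabilisation options working together; the mount point and timing values are illustrative only:
```js
const chokidar = require('chokidar');

// Poll a network mount and hold add/change events until the file size has
// been stable for one second, so half-written uploads are not reported.
const watcher = chokidar.watch('/mnt/share/incoming', {
  usePolling: true, // typically required to watch files over a network
  interval: 200,    // poll every 200 ms (binaryInterval applies to binary files)
  awaitWriteFinish: {
    stabilityThreshold: 1000,
    pollInterval: 100
  }
});

watcher.on('add', path => console.log(`upload finished: ${path}`));
```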
#### Errors
* `ignorePermissionErrors` (default: `false`). Indicates whether to watch files
that don't have read permissions if possible. If watching fails due to `EPERM`
or `EACCES` with this set to `true`, the errors will be suppressed silently.
* `atomic` (default: `true` if `useFsEvents` and `usePolling` are `false`).
Automatically filters out artifacts that occur when using editors that use
"atomic writes" instead of writing directly to the source file. If a file is
re-added within 100 ms of being deleted, Chokidar emits a `change` event
rather than `unlink` then `add`. If the default of 100 ms does not work well
for you, you can override it by setting `atomic` to a custom value, in
milliseconds (see the short example after this section).
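For illustration, a watcher that lengthens the atomicity delay for an editor whose save cycle takes longer than the default 100 ms; the 250 ms value is arbitrary:
```js
const chokidar = require('chokidar');

// A delete followed by a re-add within 250 ms is reported as a single 'change'.
const watcher = chokidar.watch('notes', { atomic: 250 });
watcher.on('change', path => console.log(`${path} was saved`));
```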
### Methods & Events
`chokidar.watch()` produces an instance of `FSWatcher`. Methods of `FSWatcher` (a combined usage sketch follows this list):
* `.add(path / paths)`: Add files, directories, or glob patterns for tracking.
Takes an array of strings or just one string.
* `.on(event, callback)`: Listen for an FS event.
Available events: `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `ready`,
`raw`, `error`.
Additionally `all` is available, which is emitted with the underlying event
name and path for every event other than `ready`, `raw`, and `error` (the `raw` event is internal; use it carefully).
* `.unwatch(path / paths)`: Stop watching files, directories, or glob patterns.
Takes an array of strings or just one string.
* `.close()`: **async** Removes all listeners from watched files and closes the watcher. Returns a Promise; use it with `await` to make sure everything has been cleaned up before continuing.
* `.getWatched()`: Returns an object representing all the paths on the file
system being watched by this `FSWatcher` instance. The object's keys are all the
directories (using absolute paths unless the `cwd` option was used), and the
values are arrays of the names of the items contained in each directory.
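A small end-to-end sketch tying these methods together; the watched paths are placeholders:
```js
const chokidar = require('chokidar');

const watcher = chokidar.watch(['src', 'config/*.json']);

watcher
  .on('all', (event, path) => console.log(event, path))
  .on('error', error => console.error(`Watcher error: ${error}`))
  .on('ready', async () => {
    // Keys are the watched directories, values are arrays of contained names.
    console.log(watcher.getWatched());

    // Stop tracking one pattern, then shut the watcher down.
    await watcher.unwatch('config/*.json');
    await watcher.close();
    console.log('closed');
  });

// More paths can be added while the watcher is running.
watcher.add('docs/**/*.md');
```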
## CLI
If you need a CLI interface for your file watching, check out
[chokidar-cli](https://github.com/open-cli-tools/chokidar-cli), allowing you to
execute a command on each change, or get a stdio stream of change events.
## Install Troubleshooting
* `npm WARN optional dep failed, continuing fsevents@n.n.n`
* This message is a normal part of how `npm` handles optional dependencies and is
not indicative of a problem. Even if it is accompanied by other related error messages,
Chokidar should function properly.
* `TypeError: fsevents is not a constructor`
* Update chokidar by doing `rm -rf node_modules package-lock.json yarn.lock && npm install`, or update your dependency that uses chokidar.
* Chokidar is producing an `ENOSPC` error on Linux, like this:
* `bash: cannot set terminal process group (-1): Inappropriate ioctl for device bash: no job control in this shell`
`Error: watch /home/ ENOSPC`
* This means Chokidar ran out of file handles and you'll need to increase their count by executing the following command in Terminal:
`echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p`
## Changelog
For a more detailed changelog, see [`full_changelog.md`](.github/full_changelog.md).
- **v3.5 (Jan 6, 2021):** Support for ARM Macs with Apple Silicon. Fixes for deleted symlinks.
- **v3.4 (Apr 26, 2020):** Support for directory-based symlinks. Fixes for macos file replacement.
- **v3.3 (Nov 2, 2019):** `FSWatcher#close()` method became async. That fixes IO race conditions related to close method.
- **v3.2 (Oct 1, 2019):** Improve Linux RAM usage by 50%. Race condition fixes. Windows glob fixes. Improve stability by using tight range of dependency versions.
- **v3.1 (Sep 16, 2019):** dotfiles are no longer filtered out by default. Use `ignored` option if needed. Improve initial Linux scan time by 50%.
- **v3 (Apr 30, 2019):** massive CPU & RAM consumption improvements; reduces deps / package size by a factor of 17x and bumps Node.js requirement to v8.16 and higher.
- **v2 (Dec 29, 2017):** Globs are now posix-style-only; without windows support. Tons of bugfixes.
- **v1 (Apr 7, 2015):** Glob support, symlink support, tons of bugfixes. Node 0.8+ is supported
- **v0.1 (Apr 20, 2012):** Initial release, extracted from [Brunch](https://github.com/brunch/brunch/blob/9847a065aea300da99bd0753f90354cde9de1261/src/helpers.coffee#L66)
## Also
Why was chokidar named this way? What's the meaning behind it?
> Chowkidar is a transliteration of a Hindi word meaning 'watchman, gatekeeper', चौकीदार. This ultimately comes from Sanskrit _चतुष्क_ (crossway, quadrangle, consisting-of-four).
## License
MIT (c) Paul Miller (<https://paulmillr.com>), see [LICENSE](LICENSE) file.

973 src/node_modules/chokidar/index.js generated vendored
View File

@ -1,973 +0,0 @@
'use strict';
const { EventEmitter } = require('events');
const fs = require('fs');
const sysPath = require('path');
const { promisify } = require('util');
const readdirp = require('readdirp');
const anymatch = require('anymatch').default;
const globParent = require('glob-parent');
const isGlob = require('is-glob');
const braces = require('braces');
const normalizePath = require('normalize-path');
const NodeFsHandler = require('./lib/nodefs-handler');
const FsEventsHandler = require('./lib/fsevents-handler');
const {
EV_ALL,
EV_READY,
EV_ADD,
EV_CHANGE,
EV_UNLINK,
EV_ADD_DIR,
EV_UNLINK_DIR,
EV_RAW,
EV_ERROR,
STR_CLOSE,
STR_END,
BACK_SLASH_RE,
DOUBLE_SLASH_RE,
SLASH_OR_BACK_SLASH_RE,
DOT_RE,
REPLACER_RE,
SLASH,
SLASH_SLASH,
BRACE_START,
BANG,
ONE_DOT,
TWO_DOTS,
GLOBSTAR,
SLASH_GLOBSTAR,
ANYMATCH_OPTS,
STRING_TYPE,
FUNCTION_TYPE,
EMPTY_STR,
EMPTY_FN,
isWindows,
isMacos,
isIBMi
} = require('./lib/constants');
const stat = promisify(fs.stat);
const readdir = promisify(fs.readdir);
/**
* @typedef {String} Path
* @typedef {'all'|'add'|'addDir'|'change'|'unlink'|'unlinkDir'|'raw'|'error'|'ready'} EventName
* @typedef {'readdir'|'watch'|'add'|'remove'|'change'} ThrottleType
*/
/**
*
* @typedef {Object} WatchHelpers
* @property {Boolean} followSymlinks
* @property {'stat'|'lstat'} statMethod
* @property {Path} path
* @property {Path} watchPath
* @property {Function} entryPath
* @property {Boolean} hasGlob
* @property {Object} globFilter
* @property {Function} filterPath
* @property {Function} filterDir
*/
const arrify = (value = []) => Array.isArray(value) ? value : [value];
const flatten = (list, result = []) => {
list.forEach(item => {
if (Array.isArray(item)) {
flatten(item, result);
} else {
result.push(item);
}
});
return result;
};
const unifyPaths = (paths_) => {
/**
* @type {Array<String>}
*/
const paths = flatten(arrify(paths_));
if (!paths.every(p => typeof p === STRING_TYPE)) {
throw new TypeError(`Non-string provided as watch path: ${paths}`);
}
return paths.map(normalizePathToUnix);
};
// If SLASH_SLASH occurs at the beginning of path, it is not replaced
// because "//StoragePC/DrivePool/Movies" is a valid network path
const toUnix = (string) => {
let str = string.replace(BACK_SLASH_RE, SLASH);
let prepend = false;
if (str.startsWith(SLASH_SLASH)) {
prepend = true;
}
while (str.match(DOUBLE_SLASH_RE)) {
str = str.replace(DOUBLE_SLASH_RE, SLASH);
}
if (prepend) {
str = SLASH + str;
}
return str;
};
// Our version of upath.normalize
// TODO: this is not equal to path-normalize module - investigate why
const normalizePathToUnix = (path) => toUnix(sysPath.normalize(toUnix(path)));
const normalizeIgnored = (cwd = EMPTY_STR) => (path) => {
if (typeof path !== STRING_TYPE) return path;
return normalizePathToUnix(sysPath.isAbsolute(path) ? path : sysPath.join(cwd, path));
};
const getAbsolutePath = (path, cwd) => {
if (sysPath.isAbsolute(path)) {
return path;
}
if (path.startsWith(BANG)) {
return BANG + sysPath.join(cwd, path.slice(1));
}
return sysPath.join(cwd, path);
};
const undef = (opts, key) => opts[key] === undefined;
/**
* Directory entry.
* @property {Path} path
* @property {Set<Path>} items
*/
class DirEntry {
/**
* @param {Path} dir
* @param {Function} removeWatcher
*/
constructor(dir, removeWatcher) {
this.path = dir;
this._removeWatcher = removeWatcher;
/** @type {Set<Path>} */
this.items = new Set();
}
add(item) {
const {items} = this;
if (!items) return;
if (item !== ONE_DOT && item !== TWO_DOTS) items.add(item);
}
async remove(item) {
const {items} = this;
if (!items) return;
items.delete(item);
if (items.size > 0) return;
const dir = this.path;
try {
await readdir(dir);
} catch (err) {
if (this._removeWatcher) {
this._removeWatcher(sysPath.dirname(dir), sysPath.basename(dir));
}
}
}
has(item) {
const {items} = this;
if (!items) return;
return items.has(item);
}
/**
* @returns {Array<String>}
*/
getChildren() {
const {items} = this;
if (!items) return;
return [...items.values()];
}
dispose() {
this.items.clear();
delete this.path;
delete this._removeWatcher;
delete this.items;
Object.freeze(this);
}
}
const STAT_METHOD_F = 'stat';
const STAT_METHOD_L = 'lstat';
class WatchHelper {
constructor(path, watchPath, follow, fsw) {
this.fsw = fsw;
this.path = path = path.replace(REPLACER_RE, EMPTY_STR);
this.watchPath = watchPath;
this.fullWatchPath = sysPath.resolve(watchPath);
this.hasGlob = watchPath !== path;
/** @type {object|boolean} */
if (path === EMPTY_STR) this.hasGlob = false;
this.globSymlink = this.hasGlob && follow ? undefined : false;
this.globFilter = this.hasGlob ? anymatch(path, undefined, ANYMATCH_OPTS) : false;
this.dirParts = this.getDirParts(path);
this.dirParts.forEach((parts) => {
if (parts.length > 1) parts.pop();
});
this.followSymlinks = follow;
this.statMethod = follow ? STAT_METHOD_F : STAT_METHOD_L;
}
checkGlobSymlink(entry) {
// only need to resolve once
// first entry should always have entry.parentDir === EMPTY_STR
if (this.globSymlink === undefined) {
this.globSymlink = entry.fullParentDir === this.fullWatchPath ?
false : {realPath: entry.fullParentDir, linkPath: this.fullWatchPath};
}
if (this.globSymlink) {
return entry.fullPath.replace(this.globSymlink.realPath, this.globSymlink.linkPath);
}
return entry.fullPath;
}
entryPath(entry) {
return sysPath.join(this.watchPath,
sysPath.relative(this.watchPath, this.checkGlobSymlink(entry))
);
}
filterPath(entry) {
const {stats} = entry;
if (stats && stats.isSymbolicLink()) return this.filterDir(entry);
const resolvedPath = this.entryPath(entry);
const matchesGlob = this.hasGlob && typeof this.globFilter === FUNCTION_TYPE ?
this.globFilter(resolvedPath) : true;
return matchesGlob &&
this.fsw._isntIgnored(resolvedPath, stats) &&
this.fsw._hasReadPermissions(stats);
}
getDirParts(path) {
if (!this.hasGlob) return [];
const parts = [];
const expandedPath = path.includes(BRACE_START) ? braces.expand(path) : [path];
expandedPath.forEach((path) => {
parts.push(sysPath.relative(this.watchPath, path).split(SLASH_OR_BACK_SLASH_RE));
});
return parts;
}
filterDir(entry) {
if (this.hasGlob) {
const entryParts = this.getDirParts(this.checkGlobSymlink(entry));
let globstar = false;
this.unmatchedGlob = !this.dirParts.some((parts) => {
return parts.every((part, i) => {
if (part === GLOBSTAR) globstar = true;
return globstar || !entryParts[0][i] || anymatch(part, entryParts[0][i], ANYMATCH_OPTS);
});
});
}
return !this.unmatchedGlob && this.fsw._isntIgnored(this.entryPath(entry), entry.stats);
}
}
/**
* Watches files & directories for changes. Emitted events:
* `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `all`, `error`
*
* new FSWatcher()
* .add(directories)
* .on('add', path => log('File', path, 'was added'))
*/
class FSWatcher extends EventEmitter {
// Not indenting methods for history's sake; for now.
constructor(_opts) {
super();
const opts = {};
if (_opts) Object.assign(opts, _opts); // for frozen objects
/** @type {Map<String, DirEntry>} */
this._watched = new Map();
/** @type {Map<String, Array>} */
this._closers = new Map();
/** @type {Set<String>} */
this._ignoredPaths = new Set();
/** @type {Map<ThrottleType, Map>} */
this._throttled = new Map();
/** @type {Map<Path, String|Boolean>} */
this._symlinkPaths = new Map();
this._streams = new Set();
this.closed = false;
// Set up default options.
if (undef(opts, 'persistent')) opts.persistent = true;
if (undef(opts, 'ignoreInitial')) opts.ignoreInitial = false;
if (undef(opts, 'ignorePermissionErrors')) opts.ignorePermissionErrors = false;
if (undef(opts, 'interval')) opts.interval = 100;
if (undef(opts, 'binaryInterval')) opts.binaryInterval = 300;
if (undef(opts, 'disableGlobbing')) opts.disableGlobbing = false;
opts.enableBinaryInterval = opts.binaryInterval !== opts.interval;
// Enable fsevents on OS X when polling isn't explicitly enabled.
if (undef(opts, 'useFsEvents')) opts.useFsEvents = !opts.usePolling;
// If we can't use fsevents, ensure the options reflect it's disabled.
const canUseFsEvents = FsEventsHandler.canUse();
if (!canUseFsEvents) opts.useFsEvents = false;
// Use polling on Mac if not using fsevents.
// Other platforms use non-polling fs_watch.
if (undef(opts, 'usePolling') && !opts.useFsEvents) {
opts.usePolling = isMacos;
}
// Always default to polling on IBM i because fs.watch() is not available on IBM i.
if(isIBMi) {
opts.usePolling = true;
}
// Global override (useful for end-developers that need to force polling for all
// instances of chokidar, regardless of usage/dependency depth)
const envPoll = process.env.CHOKIDAR_USEPOLLING;
if (envPoll !== undefined) {
const envLower = envPoll.toLowerCase();
if (envLower === 'false' || envLower === '0') {
opts.usePolling = false;
} else if (envLower === 'true' || envLower === '1') {
opts.usePolling = true;
} else {
opts.usePolling = !!envLower;
}
}
const envInterval = process.env.CHOKIDAR_INTERVAL;
if (envInterval) {
opts.interval = Number.parseInt(envInterval, 10);
}
// Editor atomic write normalization enabled by default with fs.watch
if (undef(opts, 'atomic')) opts.atomic = !opts.usePolling && !opts.useFsEvents;
if (opts.atomic) this._pendingUnlinks = new Map();
if (undef(opts, 'followSymlinks')) opts.followSymlinks = true;
if (undef(opts, 'awaitWriteFinish')) opts.awaitWriteFinish = false;
if (opts.awaitWriteFinish === true) opts.awaitWriteFinish = {};
const awf = opts.awaitWriteFinish;
if (awf) {
if (!awf.stabilityThreshold) awf.stabilityThreshold = 2000;
if (!awf.pollInterval) awf.pollInterval = 100;
this._pendingWrites = new Map();
}
if (opts.ignored) opts.ignored = arrify(opts.ignored);
let readyCalls = 0;
this._emitReady = () => {
readyCalls++;
if (readyCalls >= this._readyCount) {
this._emitReady = EMPTY_FN;
this._readyEmitted = true;
// use process.nextTick to allow time for listener to be bound
process.nextTick(() => this.emit(EV_READY));
}
};
this._emitRaw = (...args) => this.emit(EV_RAW, ...args);
this._readyEmitted = false;
this.options = opts;
// Initialize with proper watcher.
if (opts.useFsEvents) {
this._fsEventsHandler = new FsEventsHandler(this);
} else {
this._nodeFsHandler = new NodeFsHandler(this);
}
// You're frozen when your heart's not open.
Object.freeze(opts);
}
// Public methods
/**
* Adds paths to be watched on an existing FSWatcher instance
* @param {Path|Array<Path>} paths_
* @param {String=} _origAdd private; for handling non-existent paths to be watched
* @param {Boolean=} _internal private; indicates a non-user add
* @returns {FSWatcher} for chaining
*/
add(paths_, _origAdd, _internal) {
const {cwd, disableGlobbing} = this.options;
this.closed = false;
let paths = unifyPaths(paths_);
if (cwd) {
paths = paths.map((path) => {
const absPath = getAbsolutePath(path, cwd);
// Check `path` instead of `absPath` because the cwd portion can't be a glob
if (disableGlobbing || !isGlob(path)) {
return absPath;
}
return normalizePath(absPath);
});
}
// set aside negated glob strings
paths = paths.filter((path) => {
if (path.startsWith(BANG)) {
this._ignoredPaths.add(path.slice(1));
return false;
}
// if a path is being added that was previously ignored, stop ignoring it
this._ignoredPaths.delete(path);
this._ignoredPaths.delete(path + SLASH_GLOBSTAR);
// reset the cached userIgnored anymatch fn
// to make ignoredPaths changes effective
this._userIgnored = undefined;
return true;
});
if (this.options.useFsEvents && this._fsEventsHandler) {
if (!this._readyCount) this._readyCount = paths.length;
if (this.options.persistent) this._readyCount *= 2;
paths.forEach((path) => this._fsEventsHandler._addToFsEvents(path));
} else {
if (!this._readyCount) this._readyCount = 0;
this._readyCount += paths.length;
Promise.all(
paths.map(async path => {
const res = await this._nodeFsHandler._addToNodeFs(path, !_internal, 0, 0, _origAdd);
if (res) this._emitReady();
return res;
})
).then(results => {
if (this.closed) return;
results.filter(item => item).forEach(item => {
this.add(sysPath.dirname(item), sysPath.basename(_origAdd || item));
});
});
}
return this;
}
/**
* Close watchers or start ignoring events from specified paths.
* @param {Path|Array<Path>} paths_ - string or array of strings, file/directory paths and/or globs
* @returns {FSWatcher} for chaining
*/
unwatch(paths_) {
if (this.closed) return this;
const paths = unifyPaths(paths_);
const {cwd} = this.options;
paths.forEach((path) => {
// convert to absolute path unless relative path already matches
if (!sysPath.isAbsolute(path) && !this._closers.has(path)) {
if (cwd) path = sysPath.join(cwd, path);
path = sysPath.resolve(path);
}
this._closePath(path);
this._ignoredPaths.add(path);
if (this._watched.has(path)) {
this._ignoredPaths.add(path + SLASH_GLOBSTAR);
}
// reset the cached userIgnored anymatch fn
// to make ignoredPaths changes effective
this._userIgnored = undefined;
});
return this;
}
/**
* Close watchers and remove all listeners from watched paths.
* @returns {Promise<void>}.
*/
close() {
if (this.closed) return this._closePromise;
this.closed = true;
// Memory management.
this.removeAllListeners();
const closers = [];
this._closers.forEach(closerList => closerList.forEach(closer => {
const promise = closer();
if (promise instanceof Promise) closers.push(promise);
}));
this._streams.forEach(stream => stream.destroy());
this._userIgnored = undefined;
this._readyCount = 0;
this._readyEmitted = false;
this._watched.forEach(dirent => dirent.dispose());
['closers', 'watched', 'streams', 'symlinkPaths', 'throttled'].forEach(key => {
this[`_${key}`].clear();
});
this._closePromise = closers.length ? Promise.all(closers).then(() => undefined) : Promise.resolve();
return this._closePromise;
}
/**
* Expose list of watched paths
* @returns {Object} for chaining
*/
getWatched() {
const watchList = {};
this._watched.forEach((entry, dir) => {
const key = this.options.cwd ? sysPath.relative(this.options.cwd, dir) : dir;
watchList[key || ONE_DOT] = entry.getChildren().sort();
});
return watchList;
}
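// Re-emits the event and, unless it is an error, mirrors it on the aggregate 'all' event.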
emitWithAll(event, args) {
this.emit(...args);
if (event !== EV_ERROR) this.emit(EV_ALL, ...args);
}
// Common helpers
// --------------
/**
* Normalize and emit events.
* Calling _emit DOES NOT MEAN emit() would be called!
* @param {EventName} event Type of event
* @param {Path} path File or directory path
* @param {*=} val1 arguments to be passed with event
* @param {*=} val2
* @param {*=} val3
* @returns the error if defined, otherwise the value of the FSWatcher instance's `closed` flag
*/
async _emit(event, path, val1, val2, val3) {
if (this.closed) return;
const opts = this.options;
if (isWindows) path = sysPath.normalize(path);
if (opts.cwd) path = sysPath.relative(opts.cwd, path);
/** @type Array<any> */
const args = [event, path];
if (val3 !== undefined) args.push(val1, val2, val3);
else if (val2 !== undefined) args.push(val1, val2);
else if (val1 !== undefined) args.push(val1);
const awf = opts.awaitWriteFinish;
let pw;
if (awf && (pw = this._pendingWrites.get(path))) {
pw.lastChange = new Date();
return this;
}
if (opts.atomic) {
if (event === EV_UNLINK) {
this._pendingUnlinks.set(path, args);
setTimeout(() => {
this._pendingUnlinks.forEach((entry, path) => {
this.emit(...entry);
this.emit(EV_ALL, ...entry);
this._pendingUnlinks.delete(path);
});
}, typeof opts.atomic === 'number' ? opts.atomic : 100);
return this;
}
if (event === EV_ADD && this._pendingUnlinks.has(path)) {
event = args[0] = EV_CHANGE;
this._pendingUnlinks.delete(path);
}
}
if (awf && (event === EV_ADD || event === EV_CHANGE) && this._readyEmitted) {
const awfEmit = (err, stats) => {
if (err) {
event = args[0] = EV_ERROR;
args[1] = err;
this.emitWithAll(event, args);
} else if (stats) {
// if stats doesn't exist the file must have been deleted
if (args.length > 2) {
args[2] = stats;
} else {
args.push(stats);
}
this.emitWithAll(event, args);
}
};
this._awaitWriteFinish(path, awf.stabilityThreshold, event, awfEmit);
return this;
}
if (event === EV_CHANGE) {
const isThrottled = !this._throttle(EV_CHANGE, path, 50);
if (isThrottled) return this;
}
if (opts.alwaysStat && val1 === undefined &&
(event === EV_ADD || event === EV_ADD_DIR || event === EV_CHANGE)
) {
const fullPath = opts.cwd ? sysPath.join(opts.cwd, path) : path;
let stats;
try {
stats = await stat(fullPath);
} catch (err) {}
// Suppress event when fs_stat fails, to avoid sending undefined 'stat'
if (!stats || this.closed) return;
args.push(stats);
}
this.emitWithAll(event, args);
return this;
}
/**
* Common handler for errors
* @param {Error} error
* @returns {Error|Boolean} The error if defined, otherwise the value of the FSWatcher instance's `closed` flag
*/
_handleError(error) {
const code = error && error.code;
if (error && code !== 'ENOENT' && code !== 'ENOTDIR' &&
(!this.options.ignorePermissionErrors || (code !== 'EPERM' && code !== 'EACCES'))
) {
this.emit(EV_ERROR, error);
}
return error || this.closed;
}
/**
* Helper utility for throttling
* @param {ThrottleType} actionType type being throttled
* @param {Path} path being acted upon
* @param {Number} timeout duration of time to suppress duplicate actions
* @returns {Object|false} tracking object or false if action should be suppressed
*/
_throttle(actionType, path, timeout) {
if (!this._throttled.has(actionType)) {
this._throttled.set(actionType, new Map());
}
/** @type {Map<Path, Object>} */
const action = this._throttled.get(actionType);
/** @type {Object} */
const actionPath = action.get(path);
if (actionPath) {
actionPath.count++;
return false;
}
let timeoutObject;
const clear = () => {
const item = action.get(path);
const count = item ? item.count : 0;
action.delete(path);
clearTimeout(timeoutObject);
if (item) clearTimeout(item.timeoutObject);
return count;
};
timeoutObject = setTimeout(clear, timeout);
const thr = {timeoutObject, clear, count: 0};
action.set(path, thr);
return thr;
}
_incrReadyCount() {
return this._readyCount++;
}
/**
* Awaits write operation to finish.
* Polls a newly created file for size variations. When files size does not change for 'threshold' milliseconds calls callback.
* @param {Path} path being acted upon
* @param {Number} threshold Time in milliseconds a file size must be fixed before acknowledging write OP is finished
* @param {EventName} event
* @param {Function} awfEmit Callback to be called when ready for event to be emitted.
*/
_awaitWriteFinish(path, threshold, event, awfEmit) {
let timeoutHandler;
let fullPath = path;
if (this.options.cwd && !sysPath.isAbsolute(path)) {
fullPath = sysPath.join(this.options.cwd, path);
}
const now = new Date();
const awaitWriteFinish = (prevStat) => {
fs.stat(fullPath, (err, curStat) => {
if (err || !this._pendingWrites.has(path)) {
if (err && err.code !== 'ENOENT') awfEmit(err);
return;
}
const now = Number(new Date());
if (prevStat && curStat.size !== prevStat.size) {
this._pendingWrites.get(path).lastChange = now;
}
const pw = this._pendingWrites.get(path);
const df = now - pw.lastChange;
if (df >= threshold) {
this._pendingWrites.delete(path);
awfEmit(undefined, curStat);
} else {
timeoutHandler = setTimeout(
awaitWriteFinish,
this.options.awaitWriteFinish.pollInterval,
curStat
);
}
});
};
if (!this._pendingWrites.has(path)) {
this._pendingWrites.set(path, {
lastChange: now,
cancelWait: () => {
this._pendingWrites.delete(path);
clearTimeout(timeoutHandler);
return event;
}
});
timeoutHandler = setTimeout(
awaitWriteFinish,
this.options.awaitWriteFinish.pollInterval
);
}
}
_getGlobIgnored() {
return [...this._ignoredPaths.values()];
}
/**
* Determines whether user has asked to ignore this path.
* @param {Path} path filepath or dir
* @param {fs.Stats=} stats result of fs.stat
* @returns {Boolean}
*/
_isIgnored(path, stats) {
if (this.options.atomic && DOT_RE.test(path)) return true;
if (!this._userIgnored) {
const {cwd} = this.options;
const ign = this.options.ignored;
const ignored = ign && ign.map(normalizeIgnored(cwd));
const paths = arrify(ignored)
.filter((path) => typeof path === STRING_TYPE && !isGlob(path))
.map((path) => path + SLASH_GLOBSTAR);
const list = this._getGlobIgnored().map(normalizeIgnored(cwd)).concat(ignored, paths);
this._userIgnored = anymatch(list, undefined, ANYMATCH_OPTS);
}
return this._userIgnored([path, stats]);
}
_isntIgnored(path, stat) {
return !this._isIgnored(path, stat);
}
/**
* Provides a set of common helpers and properties relating to symlink and glob handling.
* @param {Path} path file, directory, or glob pattern being watched
* @param {Number=} depth at any depth > 0, this isn't a glob
* @returns {WatchHelper} object containing helpers for this path
*/
_getWatchHelpers(path, depth) {
const watchPath = depth || this.options.disableGlobbing || !isGlob(path) ? path : globParent(path);
const follow = this.options.followSymlinks;
return new WatchHelper(path, watchPath, follow, this);
}
// Directory helpers
// -----------------
/**
* Provides directory tracking objects
* @param {String} directory path of the directory
* @returns {DirEntry} the directory's tracking object
*/
_getWatchedDir(directory) {
if (!this._boundRemove) this._boundRemove = this._remove.bind(this);
const dir = sysPath.resolve(directory);
if (!this._watched.has(dir)) this._watched.set(dir, new DirEntry(dir, this._boundRemove));
return this._watched.get(dir);
}
// File helpers
// ------------
/**
* Check for read permissions.
* Based on this answer on SO: https://stackoverflow.com/a/11781404/1358405
* @param {fs.Stats} stats - object, result of fs_stat
* @returns {Boolean} indicates whether the file can be read
*/
_hasReadPermissions(stats) {
if (this.options.ignorePermissionErrors) return true;
// stats.mode may be bigint
const md = stats && Number.parseInt(stats.mode, 10);
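// Keep only the permission bits, then test the read bit (4) of the leading
// octal digit (the owner's digit for a typical three-digit mode).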
const st = md & 0o777;
const it = Number.parseInt(st.toString(8)[0], 10);
return Boolean(4 & it);
}
/**
* Handles emitting unlink events for
* files and directories, and via recursion, for
* files and directories within directories that are unlinked
* @param {String} directory within which the following item is located
* @param {String} item base path of item/directory
* @returns {void}
*/
_remove(directory, item, isDirectory) {
// if what is being deleted is a directory, get that directory's paths
// for recursive deleting and cleaning of watched object
// if it is not a directory, nestedDirectoryChildren will be empty array
const path = sysPath.join(directory, item);
const fullPath = sysPath.resolve(path);
isDirectory = isDirectory != null
? isDirectory
: this._watched.has(path) || this._watched.has(fullPath);
// prevent duplicate handling in case of arriving here nearly simultaneously
// via multiple paths (such as _handleFile and _handleDir)
if (!this._throttle('remove', path, 100)) return;
// if the only watched file is removed, watch for its return
if (!isDirectory && !this.options.useFsEvents && this._watched.size === 1) {
this.add(directory, item, true);
}
// This will create a new entry in the watched object in either case
// so we got to do the directory check beforehand
const wp = this._getWatchedDir(path);
const nestedDirectoryChildren = wp.getChildren();
// Recursively remove children directories / files.
nestedDirectoryChildren.forEach(nested => this._remove(path, nested));
// Check if item was on the watched list and remove it
const parent = this._getWatchedDir(directory);
const wasTracked = parent.has(item);
parent.remove(item);
// Fixes issue #1042 -> Relative paths were detected and added as symlinks
// (https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L612),
// but never removed from the map in case the path was deleted.
// This leads to an incorrect state if the path was recreated:
// https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L553
if (this._symlinkPaths.has(fullPath)) {
this._symlinkPaths.delete(fullPath);
}
// If we wait for this file to be fully written, cancel the wait.
let relPath = path;
if (this.options.cwd) relPath = sysPath.relative(this.options.cwd, path);
if (this.options.awaitWriteFinish && this._pendingWrites.has(relPath)) {
const event = this._pendingWrites.get(relPath).cancelWait();
if (event === EV_ADD) return;
}
// The Entry will either be a directory that just got removed
// or a bogus entry to a file, in either case we have to remove it
this._watched.delete(path);
this._watched.delete(fullPath);
const eventName = isDirectory ? EV_UNLINK_DIR : EV_UNLINK;
if (wasTracked && !this._isIgnored(path)) this._emit(eventName, path);
// Avoid conflicts if we later create another file with the same name
if (!this.options.useFsEvents) {
this._closePath(path);
}
}
/**
* Closes all watchers for a path
* @param {Path} path
*/
_closePath(path) {
this._closeFile(path)
const dir = sysPath.dirname(path);
this._getWatchedDir(dir).remove(sysPath.basename(path));
}
/**
* Closes only file-specific watchers
* @param {Path} path
*/
_closeFile(path) {
const closers = this._closers.get(path);
if (!closers) return;
closers.forEach(closer => closer());
this._closers.delete(path);
}
/**
*
* @param {Path} path
* @param {Function} closer
*/
_addPathCloser(path, closer) {
if (!closer) return;
let list = this._closers.get(path);
if (!list) {
list = [];
this._closers.set(path, list);
}
list.push(closer);
}
_readdirp(root, opts) {
if (this.closed) return;
const options = {type: EV_ALL, alwaysStat: true, lstat: true, ...opts};
let stream = readdirp(root, options);
this._streams.add(stream);
stream.once(STR_CLOSE, () => {
stream = undefined;
});
stream.once(STR_END, () => {
if (stream) {
this._streams.delete(stream);
stream = undefined;
}
});
return stream;
}
}
// Export FSWatcher class
exports.FSWatcher = FSWatcher;
/**
* Instantiates watcher with paths to be tracked.
* @param {String|Array<String>} paths file/directory paths and/or globs
* @param {Object=} options chokidar opts
* @returns an instance of FSWatcher for chaining.
*/
const watch = (paths, options) => {
const watcher = new FSWatcher(options);
watcher.add(paths);
return watcher;
};
exports.watch = watch;

View File

@ -1,65 +0,0 @@
'use strict';
const {sep} = require('path');
const {platform} = process;
const os = require('os');
exports.EV_ALL = 'all';
exports.EV_READY = 'ready';
exports.EV_ADD = 'add';
exports.EV_CHANGE = 'change';
exports.EV_ADD_DIR = 'addDir';
exports.EV_UNLINK = 'unlink';
exports.EV_UNLINK_DIR = 'unlinkDir';
exports.EV_RAW = 'raw';
exports.EV_ERROR = 'error';
exports.STR_DATA = 'data';
exports.STR_END = 'end';
exports.STR_CLOSE = 'close';
exports.FSEVENT_CREATED = 'created';
exports.FSEVENT_MODIFIED = 'modified';
exports.FSEVENT_DELETED = 'deleted';
exports.FSEVENT_MOVED = 'moved';
exports.FSEVENT_CLONED = 'cloned';
exports.FSEVENT_UNKNOWN = 'unknown';
exports.FSEVENT_TYPE_FILE = 'file';
exports.FSEVENT_TYPE_DIRECTORY = 'directory';
exports.FSEVENT_TYPE_SYMLINK = 'symlink';
exports.KEY_LISTENERS = 'listeners';
exports.KEY_ERR = 'errHandlers';
exports.KEY_RAW = 'rawEmitters';
exports.HANDLER_KEYS = [exports.KEY_LISTENERS, exports.KEY_ERR, exports.KEY_RAW];
exports.DOT_SLASH = `.${sep}`;
exports.BACK_SLASH_RE = /\\/g;
exports.DOUBLE_SLASH_RE = /\/\//;
exports.SLASH_OR_BACK_SLASH_RE = /[/\\]/;
exports.DOT_RE = /\..*\.(sw[px])$|~$|\.subl.*\.tmp/;
exports.REPLACER_RE = /^\.[/\\]/;
exports.SLASH = '/';
exports.SLASH_SLASH = '//';
exports.BRACE_START = '{';
exports.BANG = '!';
exports.ONE_DOT = '.';
exports.TWO_DOTS = '..';
exports.STAR = '*';
exports.GLOBSTAR = '**';
exports.ROOT_GLOBSTAR = '/**/*';
exports.SLASH_GLOBSTAR = '/**';
exports.DIR_SUFFIX = 'Dir';
exports.ANYMATCH_OPTS = {dot: true};
exports.STRING_TYPE = 'string';
exports.FUNCTION_TYPE = 'function';
exports.EMPTY_STR = '';
exports.EMPTY_FN = () => {};
exports.IDENTITY_FN = val => val;
exports.isWindows = platform === 'win32';
exports.isMacos = platform === 'darwin';
exports.isLinux = platform === 'linux';
exports.isIBMi = os.type() === 'OS400';

View File

@ -1,524 +0,0 @@
'use strict';
const fs = require('fs');
const sysPath = require('path');
const { promisify } = require('util');
let fsevents;
try {
fsevents = require('fsevents');
} catch (error) {
if (process.env.CHOKIDAR_PRINT_FSEVENTS_REQUIRE_ERROR) console.error(error);
}
if (fsevents) {
// TODO: real check
const mtch = process.version.match(/v(\d+)\.(\d+)/);
if (mtch && mtch[1] && mtch[2]) {
const maj = Number.parseInt(mtch[1], 10);
const min = Number.parseInt(mtch[2], 10);
if (maj === 8 && min < 16) {
fsevents = undefined;
}
}
}
const {
EV_ADD,
EV_CHANGE,
EV_ADD_DIR,
EV_UNLINK,
EV_ERROR,
STR_DATA,
STR_END,
FSEVENT_CREATED,
FSEVENT_MODIFIED,
FSEVENT_DELETED,
FSEVENT_MOVED,
// FSEVENT_CLONED,
FSEVENT_UNKNOWN,
FSEVENT_TYPE_FILE,
FSEVENT_TYPE_DIRECTORY,
FSEVENT_TYPE_SYMLINK,
ROOT_GLOBSTAR,
DIR_SUFFIX,
DOT_SLASH,
FUNCTION_TYPE,
EMPTY_FN,
IDENTITY_FN
} = require('./constants');
const Depth = (value) => isNaN(value) ? {} : {depth: value};
const stat = promisify(fs.stat);
const lstat = promisify(fs.lstat);
const realpath = promisify(fs.realpath);
const statMethods = { stat, lstat };
/**
* @typedef {String} Path
*/
/**
* @typedef {Object} FsEventsWatchContainer
* @property {Set<Function>} listeners
* @property {Function} rawEmitter
* @property {{stop: Function}} watcher
*/
// fsevents instance helper functions
/**
* Object to hold per-process fsevents instances (may be shared across chokidar FSWatcher instances)
* @type {Map<Path,FsEventsWatchContainer>}
*/
const FSEventsWatchers = new Map();
// Threshold of duplicate path prefixes at which to start
// consolidating going forward
const consolidateThreshhold = 10;
const wrongEventFlags = new Set([
69888, 70400, 71424, 72704, 73472, 131328, 131840, 262912
]);
/**
* Instantiates the fsevents interface
* @param {Path} path path to be watched
* @param {Function} callback called when fsevents is bound and ready
* @returns {{stop: Function}} new fsevents instance
*/
const createFSEventsInstance = (path, callback) => {
const stop = fsevents.watch(path, callback);
return {stop};
};
/**
* Instantiates the fsevents interface or binds listeners to an existing one covering
* the same file tree.
* @param {Path} path - to be watched
* @param {Path} realPath - real path for symlinks
* @param {Function} listener - called when fsevents emits events
* @param {Function} rawEmitter - passes data to listeners of the 'raw' event
* @returns {Function} closer
*/
function setFSEventsListener(path, realPath, listener, rawEmitter) {
let watchPath = sysPath.extname(realPath) ? sysPath.dirname(realPath) : realPath;
const parentPath = sysPath.dirname(watchPath);
let cont = FSEventsWatchers.get(watchPath);
// If we've accumulated a substantial number of paths that
// could have been consolidated by watching one directory
// above the current one, create a watcher on the parent
// path instead, so that we do consolidate going forward.
if (couldConsolidate(parentPath)) {
watchPath = parentPath;
}
const resolvedPath = sysPath.resolve(path);
const hasSymlink = resolvedPath !== realPath;
const filteredListener = (fullPath, flags, info) => {
if (hasSymlink) fullPath = fullPath.replace(realPath, resolvedPath);
if (
fullPath === resolvedPath ||
!fullPath.indexOf(resolvedPath + sysPath.sep)
) listener(fullPath, flags, info);
};
// check if there is already a watcher on a parent path
// modifies `watchPath` to the parent path when it finds a match
let watchedParent = false;
for (const watchedPath of FSEventsWatchers.keys()) {
if (realPath.indexOf(sysPath.resolve(watchedPath) + sysPath.sep) === 0) {
watchPath = watchedPath;
cont = FSEventsWatchers.get(watchPath);
watchedParent = true;
break;
}
}
if (cont || watchedParent) {
cont.listeners.add(filteredListener);
} else {
cont = {
listeners: new Set([filteredListener]),
rawEmitter,
watcher: createFSEventsInstance(watchPath, (fullPath, flags) => {
if (!cont.listeners.size) return;
const info = fsevents.getInfo(fullPath, flags);
cont.listeners.forEach(list => {
list(fullPath, flags, info);
});
cont.rawEmitter(info.event, fullPath, info);
})
};
FSEventsWatchers.set(watchPath, cont);
}
// removes this instance's listeners and closes the underlying fsevents
// instance if there are no more listeners left
return () => {
const lst = cont.listeners;
lst.delete(filteredListener);
if (!lst.size) {
FSEventsWatchers.delete(watchPath);
if (cont.watcher) return cont.watcher.stop().then(() => {
cont.rawEmitter = cont.watcher = undefined;
Object.freeze(cont);
});
}
};
}
// Decide whether or not we should start a new higher-level
// parent watcher
const couldConsolidate = (path) => {
let count = 0;
for (const watchPath of FSEventsWatchers.keys()) {
if (watchPath.indexOf(path) === 0) {
count++;
if (count >= consolidateThreshhold) {
return true;
}
}
}
return false;
};
// returns boolean indicating whether fsevents can be used
const canUse = () => fsevents && FSEventsWatchers.size < 128;
// determines subdirectory traversal levels from root to path
const calcDepth = (path, root) => {
let i = 0;
while (!path.indexOf(root) && (path = sysPath.dirname(path)) !== root) i++;
return i;
};
// returns boolean indicating whether the fsevents' event info has the same type
// as the one returned by fs.stat
const sameTypes = (info, stats) => (
info.type === FSEVENT_TYPE_DIRECTORY && stats.isDirectory() ||
info.type === FSEVENT_TYPE_SYMLINK && stats.isSymbolicLink() ||
info.type === FSEVENT_TYPE_FILE && stats.isFile()
)
/**
* @mixin
*/
class FsEventsHandler {
/**
* @param {import('../index').FSWatcher} fsw
*/
constructor(fsw) {
this.fsw = fsw;
}
checkIgnored(path, stats) {
const ipaths = this.fsw._ignoredPaths;
if (this.fsw._isIgnored(path, stats)) {
ipaths.add(path);
if (stats && stats.isDirectory()) {
ipaths.add(path + ROOT_GLOBSTAR);
}
return true;
}
ipaths.delete(path);
ipaths.delete(path + ROOT_GLOBSTAR);
}
addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts) {
const event = watchedDir.has(item) ? EV_CHANGE : EV_ADD;
this.handleEvent(event, path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
async checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts) {
try {
const stats = await stat(path)
if (this.fsw.closed) return;
if (sameTypes(info, stats)) {
this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts);
} else {
this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
} catch (error) {
if (error.code === 'EACCES') {
this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts);
} else {
this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
}
}
handleEvent(event, path, fullPath, realPath, parent, watchedDir, item, info, opts) {
if (this.fsw.closed || this.checkIgnored(path)) return;
if (event === EV_UNLINK) {
const isDirectory = info.type === FSEVENT_TYPE_DIRECTORY
// suppress unlink events on never before seen files
if (isDirectory || watchedDir.has(item)) {
this.fsw._remove(parent, item, isDirectory);
}
} else {
if (event === EV_ADD) {
// track new directories
if (info.type === FSEVENT_TYPE_DIRECTORY) this.fsw._getWatchedDir(path);
if (info.type === FSEVENT_TYPE_SYMLINK && opts.followSymlinks) {
// push symlinks back to the top of the stack to get handled
const curDepth = opts.depth === undefined ?
undefined : calcDepth(fullPath, realPath) + 1;
return this._addToFsEvents(path, false, true, curDepth);
}
// track new paths
// (other than symlinks being followed, which will be tracked soon)
this.fsw._getWatchedDir(parent).add(item);
}
/**
* @type {'add'|'addDir'|'unlink'|'unlinkDir'}
*/
const eventName = info.type === FSEVENT_TYPE_DIRECTORY ? event + DIR_SUFFIX : event;
this.fsw._emit(eventName, path);
if (eventName === EV_ADD_DIR) this._addToFsEvents(path, false, true);
}
}
/**
* Handle symlinks encountered during directory scan
* @param {String} watchPath - file/dir path to be watched with fsevents
* @param {String} realPath - real path (in case of symlinks)
* @param {Function} transform - path transformer
* @param {Function} globFilter - path filter in case a glob pattern was provided
* @returns {Function} closer for the watcher instance
*/
_watchWithFsEvents(watchPath, realPath, transform, globFilter) {
if (this.fsw.closed || this.fsw._isIgnored(watchPath)) return;
const opts = this.fsw.options;
const watchCallback = async (fullPath, flags, info) => {
if (this.fsw.closed) return;
if (
opts.depth !== undefined &&
calcDepth(fullPath, realPath) > opts.depth
) return;
const path = transform(sysPath.join(
watchPath, sysPath.relative(watchPath, fullPath)
));
if (globFilter && !globFilter(path)) return;
// ensure directories are tracked
const parent = sysPath.dirname(path);
const item = sysPath.basename(path);
const watchedDir = this.fsw._getWatchedDir(
info.type === FSEVENT_TYPE_DIRECTORY ? path : parent
);
// correct for wrong events emitted
if (wrongEventFlags.has(flags) || info.event === FSEVENT_UNKNOWN) {
if (typeof opts.ignored === FUNCTION_TYPE) {
let stats;
try {
stats = await stat(path);
} catch (error) {}
if (this.fsw.closed) return;
if (this.checkIgnored(path, stats)) return;
if (sameTypes(info, stats)) {
this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts);
} else {
this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
} else {
this.checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
} else {
switch (info.event) {
case FSEVENT_CREATED:
case FSEVENT_MODIFIED:
return this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts);
case FSEVENT_DELETED:
case FSEVENT_MOVED:
return this.checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
}
};
const closer = setFSEventsListener(
watchPath,
realPath,
watchCallback,
this.fsw._emitRaw
);
this.fsw._emitReady();
return closer;
}
/**
* Handle symlinks encountered during directory scan
* @param {String} linkPath path to symlink
* @param {String} fullPath absolute path to the symlink
* @param {Function} transform pre-existing path transformer
* @param {Number} curDepth level of subdirectories traversed to where symlink is
* @returns {Promise<void>}
*/
async _handleFsEventsSymlink(linkPath, fullPath, transform, curDepth) {
// don't follow the same symlink more than once
if (this.fsw.closed || this.fsw._symlinkPaths.has(fullPath)) return;
this.fsw._symlinkPaths.set(fullPath, true);
this.fsw._incrReadyCount();
try {
const linkTarget = await realpath(linkPath);
if (this.fsw.closed) return;
if (this.fsw._isIgnored(linkTarget)) {
return this.fsw._emitReady();
}
this.fsw._incrReadyCount();
// add the linkTarget for watching with a wrapper for transform
// that causes emitted paths to incorporate the link's path
this._addToFsEvents(linkTarget || linkPath, (path) => {
let aliasedPath = linkPath;
if (linkTarget && linkTarget !== DOT_SLASH) {
aliasedPath = path.replace(linkTarget, linkPath);
} else if (path !== DOT_SLASH) {
aliasedPath = sysPath.join(linkPath, path);
}
return transform(aliasedPath);
}, false, curDepth);
} catch(error) {
if (this.fsw._handleError(error)) {
return this.fsw._emitReady();
}
}
}
/**
*
* @param {Path} newPath
* @param {fs.Stats} stats
*/
emitAdd(newPath, stats, processPath, opts, forceAdd) {
const pp = processPath(newPath);
const isDir = stats.isDirectory();
const dirObj = this.fsw._getWatchedDir(sysPath.dirname(pp));
const base = sysPath.basename(pp);
// ensure empty dirs get tracked
if (isDir) this.fsw._getWatchedDir(pp);
if (dirObj.has(base)) return;
dirObj.add(base);
if (!opts.ignoreInitial || forceAdd === true) {
this.fsw._emit(isDir ? EV_ADD_DIR : EV_ADD, pp, stats);
}
}
initWatch(realPath, path, wh, processPath) {
if (this.fsw.closed) return;
const closer = this._watchWithFsEvents(
wh.watchPath,
sysPath.resolve(realPath || wh.watchPath),
processPath,
wh.globFilter
);
this.fsw._addPathCloser(path, closer);
}
/**
* Handle added path with fsevents
* @param {String} path file/dir path or glob pattern
* @param {Function|Boolean=} transform converts working path to what the user expects
* @param {Boolean=} forceAdd ensure add is emitted
* @param {Number=} priorDepth Level of subdirectories already traversed.
* @returns {Promise<void>}
*/
async _addToFsEvents(path, transform, forceAdd, priorDepth) {
if (this.fsw.closed) {
return;
}
const opts = this.fsw.options;
const processPath = typeof transform === FUNCTION_TYPE ? transform : IDENTITY_FN;
const wh = this.fsw._getWatchHelpers(path);
// evaluate what is at the path we're being asked to watch
try {
const stats = await statMethods[wh.statMethod](wh.watchPath);
if (this.fsw.closed) return;
if (this.fsw._isIgnored(wh.watchPath, stats)) {
throw null;
}
if (stats.isDirectory()) {
// emit addDir unless this is a glob parent
if (!wh.globFilter) this.emitAdd(processPath(path), stats, processPath, opts, forceAdd);
// don't recurse further if it would exceed depth setting
if (priorDepth && priorDepth > opts.depth) return;
// scan the contents of the dir
this.fsw._readdirp(wh.watchPath, {
fileFilter: entry => wh.filterPath(entry),
directoryFilter: entry => wh.filterDir(entry),
...Depth(opts.depth - (priorDepth || 0))
}).on(STR_DATA, (entry) => {
// need to check filterPath on dirs b/c filterDir is less restrictive
if (this.fsw.closed) {
return;
}
if (entry.stats.isDirectory() && !wh.filterPath(entry)) return;
const joinedPath = sysPath.join(wh.watchPath, entry.path);
const {fullPath} = entry;
if (wh.followSymlinks && entry.stats.isSymbolicLink()) {
// preserve the current depth here since it can't be derived from
// real paths past the symlink
const curDepth = opts.depth === undefined ?
undefined : calcDepth(joinedPath, sysPath.resolve(wh.watchPath)) + 1;
this._handleFsEventsSymlink(joinedPath, fullPath, processPath, curDepth);
} else {
this.emitAdd(joinedPath, entry.stats, processPath, opts, forceAdd);
}
}).on(EV_ERROR, EMPTY_FN).on(STR_END, () => {
this.fsw._emitReady();
});
} else {
this.emitAdd(wh.watchPath, stats, processPath, opts, forceAdd);
this.fsw._emitReady();
}
} catch (error) {
if (!error || this.fsw._handleError(error)) {
// TODO: Strange thing: "should not choke on an ignored watch path" will be failed without 2 ready calls -__-
this.fsw._emitReady();
this.fsw._emitReady();
}
}
if (opts.persistent && forceAdd !== true) {
if (typeof transform === FUNCTION_TYPE) {
// realpath has already been resolved
this.initWatch(undefined, path, wh, processPath);
} else {
let realPath;
try {
realPath = await realpath(wh.watchPath);
} catch (e) {}
this.initWatch(realPath, path, wh, processPath);
}
}
}
}
module.exports = FsEventsHandler;
module.exports.canUse = canUse;

View File

@ -1,654 +0,0 @@
'use strict';
const fs = require('fs');
const sysPath = require('path');
const { promisify } = require('util');
const isBinaryPath = require('is-binary-path');
const {
isWindows,
isLinux,
EMPTY_FN,
EMPTY_STR,
KEY_LISTENERS,
KEY_ERR,
KEY_RAW,
HANDLER_KEYS,
EV_CHANGE,
EV_ADD,
EV_ADD_DIR,
EV_ERROR,
STR_DATA,
STR_END,
BRACE_START,
STAR
} = require('./constants');
const THROTTLE_MODE_WATCH = 'watch';
const open = promisify(fs.open);
const stat = promisify(fs.stat);
const lstat = promisify(fs.lstat);
const close = promisify(fs.close);
const fsrealpath = promisify(fs.realpath);
const statMethods = { lstat, stat };
// TODO: emit errors properly. Example: EMFILE on Macos.
const foreach = (val, fn) => {
if (val instanceof Set) {
val.forEach(fn);
} else {
fn(val);
}
};
const addAndConvert = (main, prop, item) => {
let container = main[prop];
if (!(container instanceof Set)) {
main[prop] = container = new Set([container]);
}
container.add(item);
};
const clearItem = cont => key => {
const set = cont[key];
if (set instanceof Set) {
set.clear();
} else {
delete cont[key];
}
};
const delFromSet = (main, prop, item) => {
const container = main[prop];
if (container instanceof Set) {
container.delete(item);
} else if (container === item) {
delete main[prop];
}
};
const isEmptySet = (val) => val instanceof Set ? val.size === 0 : !val;
/**
* @typedef {String} Path
*/
// fs_watch helpers
// object to hold per-process fs_watch instances
// (may be shared across chokidar FSWatcher instances)
/**
* @typedef {Object} FsWatchContainer
* @property {Set} listeners
* @property {Set} errHandlers
* @property {Set} rawEmitters
* @property {fs.FSWatcher=} watcher
* @property {Boolean=} watcherUnusable
*/
/**
* @type {Map<String,FsWatchContainer>}
*/
const FsWatchInstances = new Map();
/**
* Instantiates the fs_watch interface
* @param {String} path to be watched
* @param {Object} options to be passed to fs_watch
* @param {Function} listener main event handler
* @param {Function} errHandler emits info about errors
* @param {Function} emitRaw emits raw event data
* @returns {fs.FSWatcher} new fsevents instance
*/
function createFsWatchInstance(path, options, listener, errHandler, emitRaw) {
const handleEvent = (rawEvent, evPath) => {
listener(path);
emitRaw(rawEvent, evPath, {watchedPath: path});
// emit based on events occurring for files from a directory's watcher in
// case the file's watcher misses it (and rely on throttling to de-dupe)
if (evPath && path !== evPath) {
fsWatchBroadcast(
sysPath.resolve(path, evPath), KEY_LISTENERS, sysPath.join(path, evPath)
);
}
};
try {
return fs.watch(path, options, handleEvent);
} catch (error) {
errHandler(error);
}
}
/**
* Helper for passing fs_watch event data to a collection of listeners
* @param {Path} fullPath absolute path bound to fs_watch instance
* @param {String} type listener type
* @param {*=} val1 arguments to be passed to listeners
* @param {*=} val2
* @param {*=} val3
*/
const fsWatchBroadcast = (fullPath, type, val1, val2, val3) => {
const cont = FsWatchInstances.get(fullPath);
if (!cont) return;
foreach(cont[type], (listener) => {
listener(val1, val2, val3);
});
};
/**
* Instantiates the fs_watch interface or binds listeners
* to an existing one covering the same file system entry
* @param {String} path
* @param {String} fullPath absolute path
* @param {Object} options to be passed to fs_watch
* @param {Object} handlers container for event listener functions
*/
const setFsWatchListener = (path, fullPath, options, handlers) => {
const {listener, errHandler, rawEmitter} = handlers;
let cont = FsWatchInstances.get(fullPath);
/** @type {fs.FSWatcher=} */
let watcher;
if (!options.persistent) {
watcher = createFsWatchInstance(
path, options, listener, errHandler, rawEmitter
);
return watcher.close.bind(watcher);
}
if (cont) {
addAndConvert(cont, KEY_LISTENERS, listener);
addAndConvert(cont, KEY_ERR, errHandler);
addAndConvert(cont, KEY_RAW, rawEmitter);
} else {
watcher = createFsWatchInstance(
path,
options,
fsWatchBroadcast.bind(null, fullPath, KEY_LISTENERS),
errHandler, // no need to use broadcast here
fsWatchBroadcast.bind(null, fullPath, KEY_RAW)
);
if (!watcher) return;
watcher.on(EV_ERROR, async (error) => {
const broadcastErr = fsWatchBroadcast.bind(null, fullPath, KEY_ERR);
cont.watcherUnusable = true; // documented since Node 10.4.1
// Workaround for https://github.com/joyent/node/issues/4337
if (isWindows && error.code === 'EPERM') {
try {
const fd = await open(path, 'r');
await close(fd);
broadcastErr(error);
} catch (err) {}
} else {
broadcastErr(error);
}
});
cont = {
listeners: listener,
errHandlers: errHandler,
rawEmitters: rawEmitter,
watcher
};
FsWatchInstances.set(fullPath, cont);
}
// const index = cont.listeners.indexOf(listener);
// removes this instance's listeners and closes the underlying fs_watch
// instance if there are no more listeners left
return () => {
delFromSet(cont, KEY_LISTENERS, listener);
delFromSet(cont, KEY_ERR, errHandler);
delFromSet(cont, KEY_RAW, rawEmitter);
if (isEmptySet(cont.listeners)) {
// Check to protect against issue gh-730.
// if (cont.watcherUnusable) {
cont.watcher.close();
// }
FsWatchInstances.delete(fullPath);
HANDLER_KEYS.forEach(clearItem(cont));
cont.watcher = undefined;
Object.freeze(cont);
}
};
};
// fs_watchFile helpers
// object to hold per-process fs_watchFile instances
// (may be shared across chokidar FSWatcher instances)
const FsWatchFileInstances = new Map();
/**
* Instantiates the fs_watchFile interface or binds listeners
* to an existing one covering the same file system entry
* @param {String} path to be watched
* @param {String} fullPath absolute path
* @param {Object} options options to be passed to fs_watchFile
* @param {Object} handlers container for event listener functions
* @returns {Function} closer
*/
const setFsWatchFileListener = (path, fullPath, options, handlers) => {
const {listener, rawEmitter} = handlers;
let cont = FsWatchFileInstances.get(fullPath);
/* eslint-disable no-unused-vars, prefer-destructuring */
let listeners = new Set();
let rawEmitters = new Set();
const copts = cont && cont.options;
if (copts && (copts.persistent < options.persistent || copts.interval > options.interval)) {
// "Upgrade" the watcher to persistence or a quicker interval.
// This creates some unlikely edge case issues if the user mixes
// settings in a very weird way, but solving for those cases
// doesn't seem worthwhile for the added complexity.
listeners = cont.listeners;
rawEmitters = cont.rawEmitters;
fs.unwatchFile(fullPath);
cont = undefined;
}
/* eslint-enable no-unused-vars, prefer-destructuring */
if (cont) {
addAndConvert(cont, KEY_LISTENERS, listener);
addAndConvert(cont, KEY_RAW, rawEmitter);
} else {
// TODO
// listeners.add(listener);
// rawEmitters.add(rawEmitter);
cont = {
listeners: listener,
rawEmitters: rawEmitter,
options,
watcher: fs.watchFile(fullPath, options, (curr, prev) => {
foreach(cont.rawEmitters, (rawEmitter) => {
rawEmitter(EV_CHANGE, fullPath, {curr, prev});
});
const currmtime = curr.mtimeMs;
if (curr.size !== prev.size || currmtime > prev.mtimeMs || currmtime === 0) {
foreach(cont.listeners, (listener) => listener(path, curr));
}
})
};
FsWatchFileInstances.set(fullPath, cont);
}
// const index = cont.listeners.indexOf(listener);
// Removes this instance's listeners and closes the underlying fs_watchFile
// instance if there are no more listeners left.
return () => {
delFromSet(cont, KEY_LISTENERS, listener);
delFromSet(cont, KEY_RAW, rawEmitter);
if (isEmptySet(cont.listeners)) {
FsWatchFileInstances.delete(fullPath);
fs.unwatchFile(fullPath);
cont.options = cont.watcher = undefined;
Object.freeze(cont);
}
};
};
/**
* @mixin
*/
class NodeFsHandler {
/**
* @param {import("../index").FSWatcher} fsW
*/
constructor(fsW) {
this.fsw = fsW;
this._boundHandleError = (error) => fsW._handleError(error);
}
/**
* Watch file for changes with fs_watchFile or fs_watch.
* @param {String} path to file or dir
* @param {Function} listener on fs change
* @returns {Function} closer for the watcher instance
*/
_watchWithNodeFs(path, listener) {
const opts = this.fsw.options;
const directory = sysPath.dirname(path);
const basename = sysPath.basename(path);
const parent = this.fsw._getWatchedDir(directory);
parent.add(basename);
const absolutePath = sysPath.resolve(path);
const options = {persistent: opts.persistent};
if (!listener) listener = EMPTY_FN;
let closer;
if (opts.usePolling) {
options.interval = opts.enableBinaryInterval && isBinaryPath(basename) ?
opts.binaryInterval : opts.interval;
closer = setFsWatchFileListener(path, absolutePath, options, {
listener,
rawEmitter: this.fsw._emitRaw
});
} else {
closer = setFsWatchListener(path, absolutePath, options, {
listener,
errHandler: this._boundHandleError,
rawEmitter: this.fsw._emitRaw
});
}
return closer;
}
/**
* Watch a file and emit add event if warranted.
* @param {Path} file Path
* @param {fs.Stats} stats result of fs_stat
* @param {Boolean} initialAdd was the file added at watch instantiation?
* @returns {Function} closer for the watcher instance
*/
_handleFile(file, stats, initialAdd) {
if (this.fsw.closed) {
return;
}
const dirname = sysPath.dirname(file);
const basename = sysPath.basename(file);
const parent = this.fsw._getWatchedDir(dirname);
// stats is always present
let prevStats = stats;
// if the file is already being watched, do nothing
if (parent.has(basename)) return;
const listener = async (path, newStats) => {
if (!this.fsw._throttle(THROTTLE_MODE_WATCH, file, 5)) return;
if (!newStats || newStats.mtimeMs === 0) {
try {
const newStats = await stat(file);
if (this.fsw.closed) return;
// Check that change event was not fired because of changed only accessTime.
const at = newStats.atimeMs;
const mt = newStats.mtimeMs;
if (!at || at <= mt || mt !== prevStats.mtimeMs) {
this.fsw._emit(EV_CHANGE, file, newStats);
}
if (isLinux && prevStats.ino !== newStats.ino) {
this.fsw._closeFile(path)
prevStats = newStats;
this.fsw._addPathCloser(path, this._watchWithNodeFs(file, listener));
} else {
prevStats = newStats;
}
} catch (error) {
// Fix issues where mtime is null but file is still present
this.fsw._remove(dirname, basename);
}
// add is about to be emitted if file not already tracked in parent
} else if (parent.has(basename)) {
// Check that change event was not fired because of changed only accessTime.
const at = newStats.atimeMs;
const mt = newStats.mtimeMs;
if (!at || at <= mt || mt !== prevStats.mtimeMs) {
this.fsw._emit(EV_CHANGE, file, newStats);
}
prevStats = newStats;
}
}
// kick off the watcher
const closer = this._watchWithNodeFs(file, listener);
// emit an add event if we're supposed to
if (!(initialAdd && this.fsw.options.ignoreInitial) && this.fsw._isntIgnored(file)) {
if (!this.fsw._throttle(EV_ADD, file, 0)) return;
this.fsw._emit(EV_ADD, file, stats);
}
return closer;
}
/**
* Handle symlinks encountered while reading a dir.
* @param {Object} entry returned by readdirp
* @param {String} directory path of dir being read
* @param {String} path of this item
* @param {String} item basename of this item
* @returns {Promise<Boolean>} true if no more processing is needed for this entry.
*/
async _handleSymlink(entry, directory, path, item) {
if (this.fsw.closed) {
return;
}
const full = entry.fullPath;
const dir = this.fsw._getWatchedDir(directory);
if (!this.fsw.options.followSymlinks) {
// watch symlink directly (don't follow) and detect changes
this.fsw._incrReadyCount();
let linkPath;
try {
linkPath = await fsrealpath(path);
} catch (e) {
this.fsw._emitReady();
return true;
}
if (this.fsw.closed) return;
if (dir.has(item)) {
if (this.fsw._symlinkPaths.get(full) !== linkPath) {
this.fsw._symlinkPaths.set(full, linkPath);
this.fsw._emit(EV_CHANGE, path, entry.stats);
}
} else {
dir.add(item);
this.fsw._symlinkPaths.set(full, linkPath);
this.fsw._emit(EV_ADD, path, entry.stats);
}
this.fsw._emitReady();
return true;
}
// don't follow the same symlink more than once
if (this.fsw._symlinkPaths.has(full)) {
return true;
}
this.fsw._symlinkPaths.set(full, true);
}
_handleRead(directory, initialAdd, wh, target, dir, depth, throttler) {
// Normalize the directory name on Windows
directory = sysPath.join(directory, EMPTY_STR);
if (!wh.hasGlob) {
throttler = this.fsw._throttle('readdir', directory, 1000);
if (!throttler) return;
}
const previous = this.fsw._getWatchedDir(wh.path);
const current = new Set();
let stream = this.fsw._readdirp(directory, {
fileFilter: entry => wh.filterPath(entry),
directoryFilter: entry => wh.filterDir(entry),
depth: 0
}).on(STR_DATA, async (entry) => {
if (this.fsw.closed) {
stream = undefined;
return;
}
const item = entry.path;
let path = sysPath.join(directory, item);
current.add(item);
if (entry.stats.isSymbolicLink() && await this._handleSymlink(entry, directory, path, item)) {
return;
}
if (this.fsw.closed) {
stream = undefined;
return;
}
// Files that are present in the current directory snapshot
// but absent in the previous one are added to the watch list
// and emit an `add` event.
if (item === target || !target && !previous.has(item)) {
this.fsw._incrReadyCount();
// ensure relativeness of path is preserved in case of watcher reuse
path = sysPath.join(dir, sysPath.relative(dir, path));
this._addToNodeFs(path, initialAdd, wh, depth + 1);
}
}).on(EV_ERROR, this._boundHandleError);
return new Promise(resolve =>
stream.once(STR_END, () => {
if (this.fsw.closed) {
stream = undefined;
return;
}
const wasThrottled = throttler ? throttler.clear() : false;
resolve();
// Files that are absent in the current directory snapshot
// but present in the previous one emit a `remove` event
// and are removed from @watched[directory].
previous.getChildren().filter((item) => {
return item !== directory &&
!current.has(item) &&
// in case of intersecting globs;
// a path may have been filtered out of this readdir, but
// shouldn't be removed because it matches a different glob
(!wh.hasGlob || wh.filterPath({
fullPath: sysPath.resolve(directory, item)
}));
}).forEach((item) => {
this.fsw._remove(directory, item);
});
stream = undefined;
// one more time for any missed in case changes came in extremely quickly
if (wasThrottled) this._handleRead(directory, false, wh, target, dir, depth, throttler);
})
);
}
/**
* Read directory to add / remove files from `@watched` list and re-read it on change.
* @param {String} dir fs path
* @param {fs.Stats} stats
* @param {Boolean} initialAdd
* @param {Number} depth relative to user-supplied path
* @param {String} target child path targeted for watch
* @param {Object} wh Common watch helpers for this path
* @param {String} realpath
* @returns {Promise<Function>} closer for the watcher instance.
*/
async _handleDir(dir, stats, initialAdd, depth, target, wh, realpath) {
const parentDir = this.fsw._getWatchedDir(sysPath.dirname(dir));
const tracked = parentDir.has(sysPath.basename(dir));
if (!(initialAdd && this.fsw.options.ignoreInitial) && !target && !tracked) {
if (!wh.hasGlob || wh.globFilter(dir)) this.fsw._emit(EV_ADD_DIR, dir, stats);
}
// ensure dir is tracked (harmless if redundant)
parentDir.add(sysPath.basename(dir));
this.fsw._getWatchedDir(dir);
let throttler;
let closer;
const oDepth = this.fsw.options.depth;
if ((oDepth == null || depth <= oDepth) && !this.fsw._symlinkPaths.has(realpath)) {
if (!target) {
await this._handleRead(dir, initialAdd, wh, target, dir, depth, throttler);
if (this.fsw.closed) return;
}
closer = this._watchWithNodeFs(dir, (dirPath, stats) => {
// if current directory is removed, do nothing
if (stats && stats.mtimeMs === 0) return;
this._handleRead(dirPath, false, wh, target, dir, depth, throttler);
});
}
return closer;
}
/**
* Handle added file, directory, or glob pattern.
* Delegates call to _handleFile / _handleDir after checks.
* @param {String} path to file or dir
* @param {Boolean} initialAdd was the file added at watch instantiation?
* @param {Object} priorWh watch helpers from a prior call, if any
* @param {Number} depth relative to user-supplied path
* @param {String=} target child path actually targeted for watch
* @returns {Promise}
*/
async _addToNodeFs(path, initialAdd, priorWh, depth, target) {
const ready = this.fsw._emitReady;
if (this.fsw._isIgnored(path) || this.fsw.closed) {
ready();
return false;
}
const wh = this.fsw._getWatchHelpers(path, depth);
if (!wh.hasGlob && priorWh) {
wh.hasGlob = priorWh.hasGlob;
wh.globFilter = priorWh.globFilter;
wh.filterPath = entry => priorWh.filterPath(entry);
wh.filterDir = entry => priorWh.filterDir(entry);
}
// evaluate what is at the path we're being asked to watch
try {
const stats = await statMethods[wh.statMethod](wh.watchPath);
if (this.fsw.closed) return;
if (this.fsw._isIgnored(wh.watchPath, stats)) {
ready();
return false;
}
const follow = this.fsw.options.followSymlinks && !path.includes(STAR) && !path.includes(BRACE_START);
let closer;
if (stats.isDirectory()) {
const absPath = sysPath.resolve(path);
const targetPath = follow ? await fsrealpath(path) : path;
if (this.fsw.closed) return;
closer = await this._handleDir(wh.watchPath, stats, initialAdd, depth, target, wh, targetPath);
if (this.fsw.closed) return;
// preserve this symlink's target path
if (absPath !== targetPath && targetPath !== undefined) {
this.fsw._symlinkPaths.set(absPath, targetPath);
}
} else if (stats.isSymbolicLink()) {
const targetPath = follow ? await fsrealpath(path) : path;
if (this.fsw.closed) return;
const parent = sysPath.dirname(wh.watchPath);
this.fsw._getWatchedDir(parent).add(wh.watchPath);
this.fsw._emit(EV_ADD, wh.watchPath, stats);
closer = await this._handleDir(parent, stats, initialAdd, depth, path, wh, targetPath);
if (this.fsw.closed) return;
// preserve this symlink's target path
if (targetPath !== undefined) {
this.fsw._symlinkPaths.set(sysPath.resolve(path), targetPath);
}
} else {
closer = this._handleFile(wh.watchPath, stats, initialAdd);
}
ready();
this.fsw._addPathCloser(path, closer);
return false;
} catch (error) {
if (this.fsw._handleError(error)) {
ready();
return path;
}
}
}
}
module.exports = NodeFsHandler;

View File

@ -1,85 +0,0 @@
{
"name": "chokidar",
"description": "Minimal and efficient cross-platform file watching library",
"version": "3.5.3",
"homepage": "https://github.com/paulmillr/chokidar",
"author": "Paul Miller (https://paulmillr.com)",
"contributors": [
"Paul Miller (https://paulmillr.com)",
"Elan Shanker"
],
"engines": {
"node": ">= 8.10.0"
},
"main": "index.js",
"dependencies": {
"anymatch": "~3.1.2",
"braces": "~3.0.2",
"glob-parent": "~5.1.2",
"is-binary-path": "~2.1.0",
"is-glob": "~4.0.1",
"normalize-path": "~3.0.0",
"readdirp": "~3.6.0"
},
"optionalDependencies": {
"fsevents": "~2.3.2"
},
"devDependencies": {
"@types/node": "^14",
"chai": "^4.3",
"dtslint": "^3.3.0",
"eslint": "^7.0.0",
"mocha": "^7.0.0",
"nyc": "^15.0.0",
"rimraf": "^3.0.0",
"sinon": "^9.0.1",
"sinon-chai": "^3.3.0",
"typescript": "~4.4.3",
"upath": "^1.2.0"
},
"files": [
"index.js",
"lib/*.js",
"types/index.d.ts"
],
"repository": {
"type": "git",
"url": "git+https://github.com/paulmillr/chokidar.git"
},
"bugs": {
"url": "https://github.com/paulmillr/chokidar/issues"
},
"license": "MIT",
"scripts": {
"dtslint": "dtslint types",
"lint": "eslint --report-unused-disable-directives --ignore-path .gitignore .",
"mocha": "mocha --exit --timeout 90000",
"test": "npm run lint && npm run mocha"
},
"keywords": [
"fs",
"watch",
"watchFile",
"watcher",
"watching",
"file",
"fsevents"
],
"types": "./types/index.d.ts",
"nyc": {
"include": [
"index.js",
"lib/*.js"
],
"reporter": [
"html",
"text"
]
},
"funding": [
{
"type": "individual",
"url": "https://paulmillr.com/funding/"
}
]
}

View File

@ -1,188 +0,0 @@
// TypeScript Version: 3.0
/// <reference types="node" />
import * as fs from "fs";
import { EventEmitter } from "events";
import { Matcher } from 'anymatch';
export class FSWatcher extends EventEmitter implements fs.FSWatcher {
options: WatchOptions;
/**
* Constructs a new FSWatcher instance with optional WatchOptions parameter.
*/
constructor(options?: WatchOptions);
/**
* Add files, directories, or glob patterns for tracking. Takes an array of strings or just one
* string.
*/
add(paths: string | ReadonlyArray<string>): this;
/**
* Stop watching files, directories, or glob patterns. Takes an array of strings or just one
* string.
*/
unwatch(paths: string | ReadonlyArray<string>): this;
/**
* Returns an object representing all the paths on the file system being watched by this
* `FSWatcher` instance. The object's keys are all the directories (using absolute paths unless
* the `cwd` option was used), and the values are arrays of the names of the items contained in
* each directory.
*/
getWatched(): {
[directory: string]: string[];
};
/**
* Removes all listeners from watched files.
*/
close(): Promise<void>;
on(event: 'add'|'addDir'|'change', listener: (path: string, stats?: fs.Stats) => void): this;
on(event: 'all', listener: (eventName: 'add'|'addDir'|'change'|'unlink'|'unlinkDir', path: string, stats?: fs.Stats) => void): this;
/**
* Error occurred
*/
on(event: 'error', listener: (error: Error) => void): this;
/**
* Exposes the native Node `fs.FSWatcher events`
*/
on(event: 'raw', listener: (eventName: string, path: string, details: any) => void): this;
/**
* Fires when the initial scan is complete
*/
on(event: 'ready', listener: () => void): this;
on(event: 'unlink'|'unlinkDir', listener: (path: string) => void): this;
on(event: string, listener: (...args: any[]) => void): this;
}
export interface WatchOptions {
/**
* Indicates whether the process should continue to run as long as files are being watched. If
* set to `false` when using `fsevents` to watch, no more events will be emitted after `ready`,
* even if the process continues to run.
*/
persistent?: boolean;
/**
* ([anymatch](https://github.com/micromatch/anymatch)-compatible definition) Defines files/paths to
* be ignored. The whole relative or absolute path is tested, not just filename. If a function
* with two arguments is provided, it gets called twice per path - once with a single argument
* (the path), second time with two arguments (the path and the
* [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) object of that path).
*/
ignored?: Matcher;
/**
* If set to `false` then `add`/`addDir` events are also emitted for matching paths while
* instantiating the watching as chokidar discovers these file paths (before the `ready` event).
*/
ignoreInitial?: boolean;
/**
* When `false`, only the symlinks themselves will be watched for changes instead of following
* the link references and bubbling events through the link's path.
*/
followSymlinks?: boolean;
/**
* The base directory from which watch `paths` are to be derived. Paths emitted with events will
* be relative to this.
*/
cwd?: string;
/**
* If set to true then the strings passed to .watch() and .add() are treated as literal path
* names, even if they look like globs. Default: false.
*/
disableGlobbing?: boolean;
/**
* Whether to use fs.watchFile (backed by polling), or fs.watch. If polling leads to high CPU
* utilization, consider setting this to `false`. It is typically necessary to **set this to
* `true` to successfully watch files over a network**, and it may be necessary to successfully
* watch files in other non-standard situations. Setting to `true` explicitly on OS X overrides
* the `useFsEvents` default.
*/
usePolling?: boolean;
/**
* Whether to use the `fsevents` watching interface if available. When set to `true` explicitly
* and `fsevents` is available, this supersedes the `usePolling` setting. When set to `false` on
* OS X, `usePolling: true` becomes the default.
*/
useFsEvents?: boolean;
/**
* If relying upon the [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) object that
* may get passed with `add`, `addDir`, and `change` events, set this to `true` to ensure it is
* provided even in cases where it wasn't already available from the underlying watch events.
*/
alwaysStat?: boolean;
/**
* If set, limits how many levels of subdirectories will be traversed.
*/
depth?: number;
/**
* Interval of file system polling.
*/
interval?: number;
/**
* Interval of file system polling for binary files.
* ([see list of binary extensions](https://github.com/sindresorhus/binary-extensions/blob/master/binary-extensions.json))
*/
binaryInterval?: number;
/**
* Indicates whether to watch files that don't have read permissions if possible. If watching
* fails due to `EPERM` or `EACCES` with this set to `true`, the errors will be suppressed
* silently.
*/
ignorePermissionErrors?: boolean;
/**
* Default: `true` if `useFsEvents` and `usePolling` are `false`. Automatically filters out artifacts
* that occur when using editors that use "atomic writes" instead of writing directly to the
* source file. If a file is re-added within 100 ms of being deleted, Chokidar emits a `change`
* event rather than `unlink` then `add`. If the default of 100 ms does not work well for you,
* you can override it by setting `atomic` to a custom value, in milliseconds.
*/
atomic?: boolean | number;
/**
* can be set to an object in order to adjust timing params:
*/
awaitWriteFinish?: AwaitWriteFinishOptions | boolean;
}
export interface AwaitWriteFinishOptions {
/**
* Amount of time in milliseconds for a file size to remain constant before emitting its event.
*/
stabilityThreshold?: number;
/**
* File size polling interval.
*/
pollInterval?: number;
}
/**
* produces an instance of `FSWatcher`.
*/
export function watch(
paths: string | ReadonlyArray<string>,
options?: WatchOptions
): FSWatcher;
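
For orientation, a minimal usage sketch of the interface declared above; the watched path and option values are illustrative, not defaults:

```js
var chokidar = require('chokidar')

// watch a directory with a few of the options documented above
var watcher = chokidar.watch('media/', {
  persistent: true,     // keep the process alive while watching
  ignoreInitial: true,  // do not emit add events for files already present
  depth: 2              // only descend two directory levels
})

watcher
  .on('add', function (path, stats) { console.log('added', path) })
  .on('change', function (path) { console.log('changed', path) })
  .on('unlink', function (path) { console.log('removed', path) })
  .on('error', function (err) { console.error('watcher error', err) })
```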

View File

@ -1,4 +0,0 @@
language: node_js
node_js:
- 0.4
- 0.6

18 src/node_modules/concat-map/LICENSE generated vendored
View File

@ -1,18 +0,0 @@
This software is released under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -1,62 +0,0 @@
concat-map
==========
Concatenative mapdashery.
[![browser support](http://ci.testling.com/substack/node-concat-map.png)](http://ci.testling.com/substack/node-concat-map)
[![build status](https://secure.travis-ci.org/substack/node-concat-map.png)](http://travis-ci.org/substack/node-concat-map)
example
=======
``` js
var concatMap = require('concat-map');
var xs = [ 1, 2, 3, 4, 5, 6 ];
var ys = concatMap(xs, function (x) {
return x % 2 ? [ x - 0.1, x, x + 0.1 ] : [];
});
console.dir(ys);
```
***
```
[ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ]
```
methods
=======
``` js
var concatMap = require('concat-map')
```
concatMap(xs, fn)
-----------------
Return an array of concatenated elements by calling `fn(x, i)` for each element
`x` and each index `i` in the array `xs`.
When `fn(x, i)` returns an array, its result will be concatenated with the
result array. If `fn(x, i)` returns anything else, that value will be pushed
onto the end of the result array.
install
=======
With [npm](http://npmjs.org) do:
```
npm install concat-map
```
license
=======
MIT
notes
=====
This module was written while sitting high above the ground in a tree.

View File

@ -1,6 +0,0 @@
var concatMap = require('../');
var xs = [ 1, 2, 3, 4, 5, 6 ];
var ys = concatMap(xs, function (x) {
return x % 2 ? [ x - 0.1, x, x + 0.1 ] : [];
});
console.dir(ys);

13 src/node_modules/concat-map/index.js generated vendored
View File

@ -1,13 +0,0 @@
module.exports = function (xs, fn) {
var res = [];
for (var i = 0; i < xs.length; i++) {
var x = fn(xs[i], i);
if (isArray(x)) res.push.apply(res, x);
else res.push(x);
}
return res;
};
var isArray = Array.isArray || function (xs) {
return Object.prototype.toString.call(xs) === '[object Array]';
};

View File

@ -1,43 +0,0 @@
{
"name" : "concat-map",
"description" : "concatenative mapdashery",
"version" : "0.0.1",
"repository" : {
"type" : "git",
"url" : "git://github.com/substack/node-concat-map.git"
},
"main" : "index.js",
"keywords" : [
"concat",
"concatMap",
"map",
"functional",
"higher-order"
],
"directories" : {
"example" : "example",
"test" : "test"
},
"scripts" : {
"test" : "tape test/*.js"
},
"devDependencies" : {
"tape" : "~2.4.0"
},
"license" : "MIT",
"author" : {
"name" : "James Halliday",
"email" : "mail@substack.net",
"url" : "http://substack.net"
},
"testling" : {
"files" : "test/*.js",
"browsers" : {
"ie" : [ 6, 7, 8, 9 ],
"ff" : [ 3.5, 10, 15.0 ],
"chrome" : [ 10, 22 ],
"safari" : [ 5.1 ],
"opera" : [ 12 ]
}
}
}

View File

@ -1,39 +0,0 @@
var concatMap = require('../');
var test = require('tape');
test('empty or not', function (t) {
var xs = [ 1, 2, 3, 4, 5, 6 ];
var ixes = [];
var ys = concatMap(xs, function (x, ix) {
ixes.push(ix);
return x % 2 ? [ x - 0.1, x, x + 0.1 ] : [];
});
t.same(ys, [ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ]);
t.same(ixes, [ 0, 1, 2, 3, 4, 5 ]);
t.end();
});
test('always something', function (t) {
var xs = [ 'a', 'b', 'c', 'd' ];
var ys = concatMap(xs, function (x) {
return x === 'b' ? [ 'B', 'B', 'B' ] : [ x ];
});
t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]);
t.end();
});
test('scalars', function (t) {
var xs = [ 'a', 'b', 'c', 'd' ];
var ys = concatMap(xs, function (x) {
return x === 'b' ? [ 'B', 'B', 'B' ] : x;
});
t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]);
t.end();
});
test('undefs', function (t) {
var xs = [ 'a', 'b', 'c', 'd' ];
var ys = concatMap(xs, function () {});
t.same(ys, [ undefined, undefined, undefined, undefined ]);
t.end();
});

View File

@ -0,0 +1,657 @@
1.20.1 / 2022-10-06
===================
* deps: qs@6.11.0
* perf: remove unnecessary object clone
1.20.0 / 2022-04-02
===================
* Fix error message for json parse whitespace in `strict`
* Fix internal error when inflated body exceeds limit
* Prevent loss of async hooks context
* Prevent hanging when request already read
* deps: depd@2.0.0
- Replace internal `eval` usage with `Function` constructor
- Use instance methods on `process` to check for listeners
* deps: http-errors@2.0.0
- deps: depd@2.0.0
- deps: statuses@2.0.1
* deps: on-finished@2.4.1
* deps: qs@6.10.3
* deps: raw-body@2.5.1
- deps: http-errors@2.0.0
1.19.2 / 2022-02-15
===================
* deps: bytes@3.1.2
* deps: qs@6.9.7
* Fix handling of `__proto__` keys
* deps: raw-body@2.4.3
- deps: bytes@3.1.2
1.19.1 / 2021-12-10
===================
* deps: bytes@3.1.1
* deps: http-errors@1.8.1
- deps: inherits@2.0.4
- deps: toidentifier@1.0.1
- deps: setprototypeof@1.2.0
* deps: qs@6.9.6
* deps: raw-body@2.4.2
- deps: bytes@3.1.1
- deps: http-errors@1.8.1
* deps: safe-buffer@5.2.1
* deps: type-is@~1.6.18
1.19.0 / 2019-04-25
===================
* deps: bytes@3.1.0
- Add petabyte (`pb`) support
* deps: http-errors@1.7.2
- Set constructor name when possible
- deps: setprototypeof@1.1.1
- deps: statuses@'>= 1.5.0 < 2'
* deps: iconv-lite@0.4.24
- Added encoding MIK
* deps: qs@6.7.0
- Fix parsing array brackets after index
* deps: raw-body@2.4.0
- deps: bytes@3.1.0
- deps: http-errors@1.7.2
- deps: iconv-lite@0.4.24
* deps: type-is@~1.6.17
- deps: mime-types@~2.1.24
- perf: prevent internal `throw` on invalid type
1.18.3 / 2018-05-14
===================
* Fix stack trace for strict json parse error
* deps: depd@~1.1.2
- perf: remove argument reassignment
* deps: http-errors@~1.6.3
- deps: depd@~1.1.2
- deps: setprototypeof@1.1.0
- deps: statuses@'>= 1.3.1 < 2'
* deps: iconv-lite@0.4.23
- Fix loading encoding with year appended
- Fix deprecation warnings on Node.js 10+
* deps: qs@6.5.2
* deps: raw-body@2.3.3
- deps: http-errors@1.6.3
- deps: iconv-lite@0.4.23
* deps: type-is@~1.6.16
- deps: mime-types@~2.1.18
1.18.2 / 2017-09-22
===================
* deps: debug@2.6.9
* perf: remove argument reassignment
1.18.1 / 2017-09-12
===================
* deps: content-type@~1.0.4
- perf: remove argument reassignment
- perf: skip parameter parsing when no parameters
* deps: iconv-lite@0.4.19
- Fix ISO-8859-1 regression
- Update Windows-1255
* deps: qs@6.5.1
- Fix parsing & compacting very deep objects
* deps: raw-body@2.3.2
- deps: iconv-lite@0.4.19
1.18.0 / 2017-09-08
===================
* Fix JSON strict violation error to match native parse error
* Include the `body` property on verify errors
* Include the `type` property on all generated errors
* Use `http-errors` to set status code on errors
* deps: bytes@3.0.0
* deps: debug@2.6.8
* deps: depd@~1.1.1
- Remove unnecessary `Buffer` loading
* deps: http-errors@~1.6.2
- deps: depd@1.1.1
* deps: iconv-lite@0.4.18
- Add support for React Native
- Add a warning if not loaded as utf-8
- Fix CESU-8 decoding in Node.js 8
- Improve speed of ISO-8859-1 encoding
* deps: qs@6.5.0
* deps: raw-body@2.3.1
- Use `http-errors` for standard emitted errors
- deps: bytes@3.0.0
- deps: iconv-lite@0.4.18
- perf: skip buffer decoding on overage chunk
* perf: prevent internal `throw` when missing charset
1.17.2 / 2017-05-17
===================
* deps: debug@2.6.7
- Fix `DEBUG_MAX_ARRAY_LENGTH`
- deps: ms@2.0.0
* deps: type-is@~1.6.15
- deps: mime-types@~2.1.15
1.17.1 / 2017-03-06
===================
* deps: qs@6.4.0
- Fix regression parsing keys starting with `[`
1.17.0 / 2017-03-01
===================
* deps: http-errors@~1.6.1
- Make `message` property enumerable for `HttpError`s
- deps: setprototypeof@1.0.3
* deps: qs@6.3.1
- Fix compacting nested arrays
1.16.1 / 2017-02-10
===================
* deps: debug@2.6.1
- Fix deprecation messages in WebStorm and other editors
- Undeprecate `DEBUG_FD` set to `1` or `2`
1.16.0 / 2017-01-17
===================
* deps: debug@2.6.0
- Allow colors in workers
- Deprecated `DEBUG_FD` environment variable
- Fix error when running under React Native
- Use same color for same namespace
- deps: ms@0.7.2
* deps: http-errors@~1.5.1
- deps: inherits@2.0.3
- deps: setprototypeof@1.0.2
- deps: statuses@'>= 1.3.1 < 2'
* deps: iconv-lite@0.4.15
- Added encoding MS-31J
- Added encoding MS-932
- Added encoding MS-936
- Added encoding MS-949
- Added encoding MS-950
- Fix GBK/GB18030 handling of Euro character
* deps: qs@6.2.1
- Fix array parsing from skipping empty values
* deps: raw-body@~2.2.0
- deps: iconv-lite@0.4.15
* deps: type-is@~1.6.14
- deps: mime-types@~2.1.13
1.15.2 / 2016-06-19
===================
* deps: bytes@2.4.0
* deps: content-type@~1.0.2
- perf: enable strict mode
* deps: http-errors@~1.5.0
- Use `setprototypeof` module to replace `__proto__` setting
- deps: statuses@'>= 1.3.0 < 2'
- perf: enable strict mode
* deps: qs@6.2.0
* deps: raw-body@~2.1.7
- deps: bytes@2.4.0
- perf: remove double-cleanup on happy path
* deps: type-is@~1.6.13
- deps: mime-types@~2.1.11
1.15.1 / 2016-05-05
===================
* deps: bytes@2.3.0
- Drop partial bytes on all parsed units
- Fix parsing byte string that looks like hex
* deps: raw-body@~2.1.6
- deps: bytes@2.3.0
* deps: type-is@~1.6.12
- deps: mime-types@~2.1.10
1.15.0 / 2016-02-10
===================
* deps: http-errors@~1.4.0
- Add `HttpError` export, for `err instanceof createError.HttpError`
- deps: inherits@2.0.1
- deps: statuses@'>= 1.2.1 < 2'
* deps: qs@6.1.0
* deps: type-is@~1.6.11
- deps: mime-types@~2.1.9
1.14.2 / 2015-12-16
===================
* deps: bytes@2.2.0
* deps: iconv-lite@0.4.13
* deps: qs@5.2.0
* deps: raw-body@~2.1.5
- deps: bytes@2.2.0
- deps: iconv-lite@0.4.13
* deps: type-is@~1.6.10
- deps: mime-types@~2.1.8
1.14.1 / 2015-09-27
===================
* Fix issue where invalid charset results in 400 when `verify` used
* deps: iconv-lite@0.4.12
- Fix CESU-8 decoding in Node.js 4.x
* deps: raw-body@~2.1.4
- Fix masking critical errors from `iconv-lite`
- deps: iconv-lite@0.4.12
* deps: type-is@~1.6.9
- deps: mime-types@~2.1.7
1.14.0 / 2015-09-16
===================
* Fix JSON strict parse error to match syntax errors
* Provide static `require` analysis in `urlencoded` parser
* deps: depd@~1.1.0
- Support web browser loading
* deps: qs@5.1.0
* deps: raw-body@~2.1.3
- Fix sync callback when attaching data listener causes sync read
* deps: type-is@~1.6.8
- Fix type error when given invalid type to match against
- deps: mime-types@~2.1.6
1.13.3 / 2015-07-31
===================
* deps: type-is@~1.6.6
- deps: mime-types@~2.1.4
1.13.2 / 2015-07-05
===================
* deps: iconv-lite@0.4.11
* deps: qs@4.0.0
- Fix dropping parameters like `hasOwnProperty`
- Fix user-visible incompatibilities from 3.1.0
- Fix various parsing edge cases
* deps: raw-body@~2.1.2
- Fix error stack traces to skip `makeError`
- deps: iconv-lite@0.4.11
* deps: type-is@~1.6.4
- deps: mime-types@~2.1.2
- perf: enable strict mode
- perf: remove argument reassignment
1.13.1 / 2015-06-16
===================
* deps: qs@2.4.2
- Downgraded from 3.1.0 because of user-visible incompatibilities
1.13.0 / 2015-06-14
===================
* Add `statusCode` property on `Error`s, in addition to `status`
* Change `type` default to `application/json` for JSON parser
* Change `type` default to `application/x-www-form-urlencoded` for urlencoded parser
* Provide static `require` analysis
* Use the `http-errors` module to generate errors
* deps: bytes@2.1.0
- Slight optimizations
* deps: iconv-lite@0.4.10
- The encoding UTF-16 without BOM now defaults to UTF-16LE when detection fails
- Leading BOM is now removed when decoding
* deps: on-finished@~2.3.0
- Add defined behavior for HTTP `CONNECT` requests
- Add defined behavior for HTTP `Upgrade` requests
- deps: ee-first@1.1.1
* deps: qs@3.1.0
- Fix dropping parameters like `hasOwnProperty`
- Fix various parsing edge cases
- Parsed object now has `null` prototype
* deps: raw-body@~2.1.1
- Use `unpipe` module for unpiping requests
- deps: iconv-lite@0.4.10
* deps: type-is@~1.6.3
- deps: mime-types@~2.1.1
- perf: reduce try block size
- perf: remove bitwise operations
* perf: enable strict mode
* perf: remove argument reassignment
* perf: remove delete call
1.12.4 / 2015-05-10
===================
* deps: debug@~2.2.0
* deps: qs@2.4.2
- Fix allowing parameters like `constructor`
* deps: on-finished@~2.2.1
* deps: raw-body@~2.0.1
- Fix a false-positive when unpiping in Node.js 0.8
- deps: bytes@2.0.1
* deps: type-is@~1.6.2
- deps: mime-types@~2.0.11
1.12.3 / 2015-04-15
===================
* Slight efficiency improvement when not debugging
* deps: depd@~1.0.1
* deps: iconv-lite@0.4.8
- Add encoding alias UNICODE-1-1-UTF-7
* deps: raw-body@1.3.4
- Fix hanging callback if request aborts during read
- deps: iconv-lite@0.4.8
1.12.2 / 2015-03-16
===================
* deps: qs@2.4.1
- Fix error when parameter `hasOwnProperty` is present
1.12.1 / 2015-03-15
===================
* deps: debug@~2.1.3
- Fix high intensity foreground color for bold
- deps: ms@0.7.0
* deps: type-is@~1.6.1
- deps: mime-types@~2.0.10
1.12.0 / 2015-02-13
===================
* add `debug` messages
* accept a function for the `type` option
* use `content-type` to parse `Content-Type` headers
* deps: iconv-lite@0.4.7
- Gracefully support enumerables on `Object.prototype`
* deps: raw-body@1.3.3
- deps: iconv-lite@0.4.7
* deps: type-is@~1.6.0
- fix argument reassignment
- fix false-positives in `hasBody` `Transfer-Encoding` check
- support wildcard for both type and subtype (`*/*`)
- deps: mime-types@~2.0.9
1.11.0 / 2015-01-30
===================
* make internal `extended: true` depth limit infinity
* deps: type-is@~1.5.6
- deps: mime-types@~2.0.8
1.10.2 / 2015-01-20
===================
* deps: iconv-lite@0.4.6
- Fix rare aliases of single-byte encodings
* deps: raw-body@1.3.2
- deps: iconv-lite@0.4.6
1.10.1 / 2015-01-01
===================
* deps: on-finished@~2.2.0
* deps: type-is@~1.5.5
- deps: mime-types@~2.0.7
1.10.0 / 2014-12-02
===================
* make internal `extended: true` array limit dynamic
1.9.3 / 2014-11-21
==================
* deps: iconv-lite@0.4.5
- Fix Windows-31J and X-SJIS encoding support
* deps: qs@2.3.3
- Fix `arrayLimit` behavior
* deps: raw-body@1.3.1
- deps: iconv-lite@0.4.5
* deps: type-is@~1.5.3
- deps: mime-types@~2.0.3
1.9.2 / 2014-10-27
==================
* deps: qs@2.3.2
- Fix parsing of mixed objects and values
1.9.1 / 2014-10-22
==================
* deps: on-finished@~2.1.1
- Fix handling of pipelined requests
* deps: qs@2.3.0
- Fix parsing of mixed implicit and explicit arrays
* deps: type-is@~1.5.2
- deps: mime-types@~2.0.2
1.9.0 / 2014-09-24
==================
* include the charset in "unsupported charset" error message
* include the encoding in "unsupported content encoding" error message
* deps: depd@~1.0.0
1.8.4 / 2014-09-23
==================
* fix content encoding to be case-insensitive
1.8.3 / 2014-09-19
==================
* deps: qs@2.2.4
- Fix issue with object keys starting with numbers truncated
1.8.2 / 2014-09-15
==================
* deps: depd@0.4.5
1.8.1 / 2014-09-07
==================
* deps: media-typer@0.3.0
* deps: type-is@~1.5.1
1.8.0 / 2014-09-05
==================
* make empty-body-handling consistent between chunked requests
- empty `json` produces `{}`
- empty `raw` produces `new Buffer(0)`
- empty `text` produces `''`
- empty `urlencoded` produces `{}`
* deps: qs@2.2.3
- Fix issue where first empty value in array is discarded
* deps: type-is@~1.5.0
- fix `hasbody` to be true for `content-length: 0`
1.7.0 / 2014-09-01
==================
* add `parameterLimit` option to `urlencoded` parser
* change `urlencoded` extended array limit to 100
* respond with 413 when over `parameterLimit` in `urlencoded`
1.6.7 / 2014-08-29
==================
* deps: qs@2.2.2
- Remove unnecessary cloning
1.6.6 / 2014-08-27
==================
* deps: qs@2.2.0
- Array parsing fix
- Performance improvements
1.6.5 / 2014-08-16
==================
* deps: on-finished@2.1.0
1.6.4 / 2014-08-14
==================
* deps: qs@1.2.2
1.6.3 / 2014-08-10
==================
* deps: qs@1.2.1
1.6.2 / 2014-08-07
==================
* deps: qs@1.2.0
- Fix parsing array of objects
1.6.1 / 2014-08-06
==================
* deps: qs@1.1.0
- Accept urlencoded square brackets
- Accept empty values in implicit array notation
1.6.0 / 2014-08-05
==================
* deps: qs@1.0.2
- Complete rewrite
- Limits array length to 20
- Limits object depth to 5
- Limits parameters to 1,000
1.5.2 / 2014-07-27
==================
* deps: depd@0.4.4
- Work-around v8 generating empty stack traces
1.5.1 / 2014-07-26
==================
* deps: depd@0.4.3
- Fix exception when global `Error.stackTraceLimit` is too low
1.5.0 / 2014-07-20
==================
* deps: depd@0.4.2
- Add `TRACE_DEPRECATION` environment variable
- Remove non-standard grey color from color output
- Support `--no-deprecation` argument
- Support `--trace-deprecation` argument
* deps: iconv-lite@0.4.4
- Added encoding UTF-7
* deps: raw-body@1.3.0
- deps: iconv-lite@0.4.4
- Added encoding UTF-7
- Fix `Cannot switch to old mode now` error on Node.js 0.10+
* deps: type-is@~1.3.2
1.4.3 / 2014-06-19
==================
* deps: type-is@1.3.1
- fix global variable leak
1.4.2 / 2014-06-19
==================
* deps: type-is@1.3.0
- improve type parsing
1.4.1 / 2014-06-19
==================
* fix urlencoded extended deprecation message
1.4.0 / 2014-06-19
==================
* add `text` parser
* add `raw` parser
* check accepted charset in content-type (accepts utf-8)
* check accepted encoding in content-encoding (accepts identity)
* deprecate `bodyParser()` middleware; use `.json()` and `.urlencoded()` as needed
* deprecate `urlencoded()` without provided `extended` option
* lazy-load urlencoded parsers
* parsers split into files for reduced mem usage
* support gzip and deflate bodies
- set `inflate: false` to turn off
* deps: raw-body@1.2.2
- Support all encodings from `iconv-lite`
1.3.1 / 2014-06-11
==================
* deps: type-is@1.2.1
- Switch dependency from mime to mime-types@1.0.0
1.3.0 / 2014-05-31
==================
* add `extended` option to urlencoded parser
1.2.2 / 2014-05-27
==================
* deps: raw-body@1.1.6
- assert stream encoding on node.js 0.8
- assert stream encoding on node.js < 0.10.6
- deps: bytes@1
1.2.1 / 2014-05-26
==================
* invoke `next(err)` after request fully read
- prevents hung responses and socket hang ups
1.2.0 / 2014-05-11
==================
* add `verify` option
* deps: type-is@1.2.0
- support suffix matching
1.1.2 / 2014-05-11
==================
* improve json parser speed
1.1.1 / 2014-05-11
==================
* fix repeated limit parsing with every request
1.1.0 / 2014-05-10
==================
* add `type` option
* deps: pin for safety and consistency
1.0.2 / 2014-04-14
==================
* use `type-is` module
1.0.1 / 2014-03-20
==================
* lower default limits to 100kb

View File

@ -0,0 +1,23 @@
(The MIT License)
Copyright (c) 2014 Jonathan Ong <me@jongleberry.com>
Copyright (c) 2014-2015 Douglas Christopher Wilson <doug@somethingdoug.com>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,464 @@
# body-parser
[![NPM Version][npm-image]][npm-url]
[![NPM Downloads][downloads-image]][downloads-url]
[![Build Status][github-actions-ci-image]][github-actions-ci-url]
[![Test Coverage][coveralls-image]][coveralls-url]
Node.js body parsing middleware.
Parse incoming request bodies in a middleware before your handlers, available
under the `req.body` property.
**Note** As `req.body`'s shape is based on user-controlled input, all
properties and values in this object are untrusted and should be validated
before trusting. For example, `req.body.foo.toString()` may fail in multiple
ways: the `foo` property may not be there or may not be a string, and
`toString` may not be a function but instead a string or other user input.
[Learn about the anatomy of an HTTP transaction in Node.js](https://nodejs.org/en/docs/guides/anatomy-of-an-http-transaction/).
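To make that warning concrete, a handler might defend itself roughly like this (a sketch; the `/profile` route and `username` field are placeholders, not part of body-parser):

```js
app.post('/profile', function (req, res) {
  var username = req.body && req.body.username
  if (typeof username !== 'string' || username.length === 0) {
    return res.sendStatus(400) // reject missing or non-string input
  }
  // from here on it is safe to treat username as a string
  res.send('hello ' + username)
})
```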
_This does not handle multipart bodies_, due to their complex and typically
large nature. For multipart bodies, you may be interested in the following
modules:
* [busboy](https://www.npmjs.org/package/busboy#readme) and
[connect-busboy](https://www.npmjs.org/package/connect-busboy#readme)
* [multiparty](https://www.npmjs.org/package/multiparty#readme) and
[connect-multiparty](https://www.npmjs.org/package/connect-multiparty#readme)
* [formidable](https://www.npmjs.org/package/formidable#readme)
* [multer](https://www.npmjs.org/package/multer#readme)
This module provides the following parsers:
* [JSON body parser](#bodyparserjsonoptions)
* [Raw body parser](#bodyparserrawoptions)
* [Text body parser](#bodyparsertextoptions)
* [URL-encoded form body parser](#bodyparserurlencodedoptions)
Other body parsers you might be interested in:
- [body](https://www.npmjs.org/package/body#readme)
- [co-body](https://www.npmjs.org/package/co-body#readme)
## Installation
```sh
$ npm install body-parser
```
## API
```js
var bodyParser = require('body-parser')
```
The `bodyParser` object exposes various factories to create middlewares. All
middlewares will populate the `req.body` property with the parsed body when
the `Content-Type` request header matches the `type` option, or an empty
object (`{}`) if there was no body to parse, the `Content-Type` was not matched,
or an error occurred.
The various errors returned by this module are described in the
[errors section](#errors).
### bodyParser.json([options])
Returns middleware that only parses `json` and only looks at requests where
the `Content-Type` header matches the `type` option. This parser accepts any
Unicode encoding of the body and supports automatic inflation of `gzip` and
`deflate` encodings.
A new `body` object containing the parsed data is populated on the `request`
object after the middleware (i.e. `req.body`).
#### Options
The `json` function takes an optional `options` object that may contain any of
the following keys:
##### inflate
When set to `true`, then deflated (compressed) bodies will be inflated; when
`false`, deflated bodies are rejected. Defaults to `true`.
##### limit
Controls the maximum request body size. If this is a number, then the value
specifies the number of bytes; if it is a string, the value is passed to the
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
to `'100kb'`.
##### reviver
The `reviver` option is passed directly to `JSON.parse` as the second
argument. You can find more information on this argument
[in the MDN documentation about JSON.parse](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#Example.3A_Using_the_reviver_parameter).
##### strict
When set to `true`, will only accept arrays and objects; when `false` will
accept anything `JSON.parse` accepts. Defaults to `true`.
##### type
The `type` option is used to determine what media type the middleware will
parse. This option can be a string, array of strings, or a function. If not a
function, `type` option is passed directly to the
[type-is](https://www.npmjs.org/package/type-is#readme) library and this can
be an extension name (like `json`), a mime type (like `application/json`), or
a mime type with a wildcard (like `*/*` or `*/json`). If a function, the `type`
option is called as `fn(req)` and the request is parsed if it returns a truthy
value. Defaults to `application/json`.
##### verify
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
where `buf` is a `Buffer` of the raw request body and `encoding` is the
encoding of the request. The parsing can be aborted by throwing an error.
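
A brief sketch combining several of these options; the limit and the raw-body capture in `verify` are illustrative choices, not defaults:

```js
var express = require('express')
var bodyParser = require('body-parser')

var app = express()

// parse JSON bodies up to 10kb and keep the raw bytes for later use
app.use(bodyParser.json({
  limit: '10kb',
  strict: true,
  verify: function (req, res, buf, encoding) {
    // stash the raw body, e.g. for webhook signature checks
    req.rawBody = buf
  }
}))
```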
### bodyParser.raw([options])
Returns middleware that parses all bodies as a `Buffer` and only looks at
requests where the `Content-Type` header matches the `type` option. This
parser supports automatic inflation of `gzip` and `deflate` encodings.
A new `body` object containing the parsed data is populated on the `request`
object after the middleware (i.e. `req.body`). This will be a `Buffer` object
of the body.
#### Options
The `raw` function takes an optional `options` object that may contain any of
the following keys:
##### inflate
When set to `true`, then deflated (compressed) bodies will be inflated; when
`false`, deflated bodies are rejected. Defaults to `true`.
##### limit
Controls the maximum request body size. If this is a number, then the value
specifies the number of bytes; if it is a string, the value is passed to the
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
to `'100kb'`.
##### type
The `type` option is used to determine what media type the middleware will
parse. This option can be a string, array of strings, or a function.
If not a function, `type` option is passed directly to the
[type-is](https://www.npmjs.org/package/type-is#readme) library and this
can be an extension name (like `bin`), a mime type (like
`application/octet-stream`), or a mime type with a wildcard (like `*/*` or
`application/*`). If a function, the `type` option is called as `fn(req)`
and the request is parsed if it returns a truthy value. Defaults to
`application/octet-stream`.
##### verify
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
where `buf` is a `Buffer` of the raw request body and `encoding` is the
encoding of the request. The parsing can be aborted by throwing an error.
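
For example, a binary upload endpoint could use the raw parser along these lines (route, type, and limit are illustrative):

```js
var express = require('express')
var bodyParser = require('body-parser')

var app = express()

// req.body will be a Buffer for matching requests on this route
var rawParser = bodyParser.raw({ type: 'application/octet-stream', limit: '1mb' })

app.post('/upload', rawParser, function (req, res) {
  var size = Buffer.isBuffer(req.body) ? req.body.length : 0
  res.send('received ' + size + ' bytes')
})
```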
### bodyParser.text([options])
Returns middleware that parses all bodies as a string and only looks at
requests where the `Content-Type` header matches the `type` option. This
parser supports automatic inflation of `gzip` and `deflate` encodings.
A new `body` string containing the parsed data is populated on the `request`
object after the middleware (i.e. `req.body`). This will be a string of the
body.
#### Options
The `text` function takes an optional `options` object that may contain any of
the following keys:
##### defaultCharset
Specify the default character set for the text content if the charset is not
specified in the `Content-Type` header of the request. Defaults to `utf-8`.
##### inflate
When set to `true`, then deflated (compressed) bodies will be inflated; when
`false`, deflated bodies are rejected. Defaults to `true`.
##### limit
Controls the maximum request body size. If this is a number, then the value
specifies the number of bytes; if it is a string, the value is passed to the
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
to `'100kb'`.
##### type
The `type` option is used to determine what media type the middleware will
parse. This option can be a string, array of strings, or a function. If not
a function, `type` option is passed directly to the
[type-is](https://www.npmjs.org/package/type-is#readme) library and this can
be an extension name (like `txt`), a mime type (like `text/plain`), or a mime
type with a wildcard (like `*/*` or `text/*`). If a function, the `type`
option is called as `fn(req)` and the request is parsed if it returns a
truthy value. Defaults to `text/plain`.
##### verify
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
where `buf` is a `Buffer` of the raw request body and `encoding` is the
encoding of the request. The parsing can be aborted by throwing an error.
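
A matching sketch for the text parser (route and limit are illustrative):

```js
var express = require('express')
var bodyParser = require('body-parser')

var app = express()

// req.body will be a string for text/plain requests on this route
app.post('/echo', bodyParser.text({ limit: '50kb' }), function (req, res) {
  res.send(typeof req.body === 'string' ? req.body : '')
})
```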
### bodyParser.urlencoded([options])
Returns middleware that only parses `urlencoded` bodies and only looks at
requests where the `Content-Type` header matches the `type` option. This
parser accepts only UTF-8 encoding of the body and supports automatic
inflation of `gzip` and `deflate` encodings.
A new `body` object containing the parsed data is populated on the `request`
object after the middleware (i.e. `req.body`). This object will contain
key-value pairs, where the value can be a string or array (when `extended` is
`false`), or any type (when `extended` is `true`).
#### Options
The `urlencoded` function takes an optional `options` object that may contain
any of the following keys:
##### extended
The `extended` option allows you to choose between parsing the URL-encoded data
with the `querystring` library (when `false`) or the `qs` library (when
`true`). The "extended" syntax allows rich objects and arrays to be
encoded into the URL-encoded format, allowing for a JSON-like experience
with URL-encoded. For more information, please
[see the qs library](https://www.npmjs.org/package/qs#readme).
Defaults to `true`, but using the default has been deprecated. Please
research the difference between `qs` and `querystring` and choose the
appropriate setting.
##### inflate
When set to `true`, then deflated (compressed) bodies will be inflated; when
`false`, deflated bodies are rejected. Defaults to `true`.
##### limit
Controls the maximum request body size. If this is a number, then the value
specifies the number of bytes; if it is a string, the value is passed to the
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
to `'100kb'`.
##### parameterLimit
The `parameterLimit` option controls the maximum number of parameters that
are allowed in the URL-encoded data. If a request contains more parameters
than this value, a 413 will be returned to the client. Defaults to `1000`.
##### type
The `type` option is used to determine what media type the middleware will
parse. This option can be a string, array of strings, or a function. If not
a function, `type` option is passed directly to the
[type-is](https://www.npmjs.org/package/type-is#readme) library and this can
be an extension name (like `urlencoded`), a mime type (like
`application/x-www-form-urlencoded`), or a mime type with a wildcard (like
`*/x-www-form-urlencoded`). If a function, the `type` option is called as
`fn(req)` and the request is parsed if it returns a truthy value. Defaults
to `application/x-www-form-urlencoded`.
##### verify
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
where `buf` is a `Buffer` of the raw request body and `encoding` is the
encoding of the request. The parsing can be aborted by throwing an error.
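
Putting the urlencoded options together, a simple HTML form could be handled roughly like this (field names and limits are illustrative):

```js
var express = require('express')
var bodyParser = require('body-parser')

var app = express()

// simple key/value pairs only (querystring semantics), capped at 20 parameters
var formParser = bodyParser.urlencoded({ extended: false, parameterLimit: 20, limit: '2kb' })

app.post('/form', formParser, function (req, res) {
  res.send('hello ' + String(req.body.name || 'anonymous'))
})
```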
## Errors
The middlewares provided by this module create errors using the
[`http-errors` module](https://www.npmjs.com/package/http-errors). The errors
will typically have a `status`/`statusCode` property that contains the suggested
HTTP response code, an `expose` property to determine if the `message` property
should be displayed to the client, a `type` property to determine the type of
error without matching against the `message`, and a `body` property containing
the read body, if available.
The following are the common errors created, though any error can come through
for various reasons.
### content encoding unsupported
This error will occur when the request had a `Content-Encoding` header that
contained an encoding but the "inflation" option was set to `false`. The
`status` property is set to `415`, the `type` property is set to
`'encoding.unsupported'`, and the `charset` property will be set to the
encoding that is unsupported.
### entity parse failed
This error will occur when the request contained an entity that could not be
parsed by the middleware. The `status` property is set to `400`, the `type`
property is set to `'entity.parse.failed'`, and the `body` property is set to
the entity value that failed parsing.
### entity verify failed
This error will occur when the request contained an entity that failed
verification by the defined `verify` option. The `status` property is
set to `403`, the `type` property is set to `'entity.verify.failed'`, and the
`body` property is set to the entity value that failed verification.
### request aborted
This error will occur when the request is aborted by the client before reading
the body has finished. The `received` property will be set to the number of
bytes received before the request was aborted and the `expected` property is
set to the number of expected bytes. The `status` property is set to `400`
and `type` property is set to `'request.aborted'`.
### request entity too large
This error will occur when the request body's size is larger than the "limit"
option. The `limit` property will be set to the byte limit and the `length`
property will be set to the request body's length. The `status` property is
set to `413` and the `type` property is set to `'entity.too.large'`.
### request size did not match content length
This error will occur when the request's length did not match the length from
the `Content-Length` header. This typically occurs when the request is malformed,
usually because the `Content-Length` header was calculated based on characters
instead of bytes. The `status` property is set to `400` and the `type` property
is set to `'request.size.invalid'`.
### stream encoding should not be set
This error will occur when something called the `req.setEncoding` method prior
to this middleware. This module operates directly on bytes only and you cannot
call `req.setEncoding` when using this module. The `status` property is set to
`500` and the `type` property is set to `'stream.encoding.set'`.
### stream is not readable
This error will occur when the request is no longer readable when this middleware
attempts to read it. This typically means something other than a middleware from
this module read the request body already and the middleware was also configured to
read the same request. The `status` property is set to `500` and the `type`
property is set to `'stream.not.readable'`.
### too many parameters
This error will occur when the content of the request exceeds the configured
`parameterLimit` for the `urlencoded` parser. The `status` property is set to
`413` and the `type` property is set to `'parameters.too.many'`.
### unsupported charset "BOGUS"
This error will occur when the request had a charset parameter in the
`Content-Type` header, but the `iconv-lite` module does not support it OR the
parser does not support it. The charset is contained in the message as well
as in the `charset` property. The `status` property is set to `415`, the
`type` property is set to `'charset.unsupported'`, and the `charset` property
is set to the charset that is unsupported.
### unsupported content encoding "bogus"
This error will occur when the request had a `Content-Encoding` header that
contained an unsupported encoding. The encoding is contained in the message
as well as in the `encoding` property. The `status` property is set to `415`,
the `type` property is set to `'encoding.unsupported'`, and the `encoding`
property is set to the encoding that is unsupported.
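Because these errors expose `status`/`statusCode` and `type`, an application can translate them in one Express error handler; a sketch under those assumptions:

```js
// map body-parser errors to client responses; pass anything else along
app.use(function (err, req, res, next) {
  if (err.type === 'entity.too.large') {
    return res.status(413).send('payload too large')
  }
  if (err.type === 'entity.parse.failed') {
    return res.status(400).send('malformed body')
  }
  if (err.statusCode) {
    return res.status(err.statusCode).send('request could not be processed')
  }
  next(err)
})
```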
## Examples
### Express/Connect top-level generic
This example demonstrates adding a generic JSON and URL-encoded parser as a
top-level middleware, which will parse the bodies of all incoming requests.
This is the simplest setup.
```js
var express = require('express')
var bodyParser = require('body-parser')
var app = express()
// parse application/x-www-form-urlencoded
app.use(bodyParser.urlencoded({ extended: false }))
// parse application/json
app.use(bodyParser.json())
app.use(function (req, res) {
res.setHeader('Content-Type', 'text/plain')
res.write('you posted:\n')
res.end(JSON.stringify(req.body, null, 2))
})
```
### Express route-specific
This example demonstrates adding body parsers specifically to the routes that
need them. In general, this is the most recommended way to use body-parser with
Express.
```js
var express = require('express')
var bodyParser = require('body-parser')
var app = express()
// create application/json parser
var jsonParser = bodyParser.json()
// create application/x-www-form-urlencoded parser
var urlencodedParser = bodyParser.urlencoded({ extended: false })
// POST /login gets urlencoded bodies
app.post('/login', urlencodedParser, function (req, res) {
res.send('welcome, ' + req.body.username)
})
// POST /api/users gets JSON bodies
app.post('/api/users', jsonParser, function (req, res) {
// create user in req.body
})
```
### Change accepted type for parsers
All the parsers accept a `type` option which allows you to change the
`Content-Type` that the middleware will parse.
```js
var express = require('express')
var bodyParser = require('body-parser')
var app = express()
// parse various different custom JSON types as JSON
app.use(bodyParser.json({ type: 'application/*+json' }))
// parse some custom thing into a Buffer
app.use(bodyParser.raw({ type: 'application/vnd.custom-type' }))
// parse an HTML body into a string
app.use(bodyParser.text({ type: 'text/html' }))
```
## License
[MIT](LICENSE)
[npm-image]: https://img.shields.io/npm/v/body-parser.svg
[npm-url]: https://npmjs.org/package/body-parser
[coveralls-image]: https://img.shields.io/coveralls/expressjs/body-parser/master.svg
[coveralls-url]: https://coveralls.io/r/expressjs/body-parser?branch=master
[downloads-image]: https://img.shields.io/npm/dm/body-parser.svg
[downloads-url]: https://npmjs.org/package/body-parser
[github-actions-ci-image]: https://img.shields.io/github/workflow/status/expressjs/body-parser/ci/master?label=ci
[github-actions-ci-url]: https://github.com/expressjs/body-parser/actions/workflows/ci.yml

View File

@ -0,0 +1,25 @@
# Security Policies and Procedures
## Reporting a Bug
The Express team and community take all security bugs seriously. Thank you
for improving the security of Express. We appreciate your efforts and
responsible disclosure and will make every effort to acknowledge your
contributions.
Report security bugs by emailing the current owner(s) of `body-parser`. This
information can be found in the npm registry using the command
`npm owner ls body-parser`.
If unsure or unable to get the information from the above, open an issue
in the [project issue tracker](https://github.com/expressjs/body-parser/issues)
asking for the current contact information.
To ensure a timely response to your report, please ensure that the entirety
of the report is contained within the email body and not solely behind a web
link or an attachment.
At least one owner will acknowledge your email within 48 hours, and will send a
more detailed response within 48 hours indicating the next steps in handling
your report. After the initial reply to your report, the owners will
endeavor to keep you informed of the progress towards a fix and full
announcement, and may ask for additional information or guidance.

View File

@ -0,0 +1,156 @@
/*!
* body-parser
* Copyright(c) 2014-2015 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* Module dependencies.
* @private
*/
var deprecate = require('depd')('body-parser')
/**
* Cache of loaded parsers.
* @private
*/
var parsers = Object.create(null)
/**
* @typedef Parsers
* @type {function}
* @property {function} json
* @property {function} raw
* @property {function} text
* @property {function} urlencoded
*/
/**
* Module exports.
* @type {Parsers}
*/
exports = module.exports = deprecate.function(bodyParser,
'bodyParser: use individual json/urlencoded middlewares')
/**
* JSON parser.
* @public
*/
Object.defineProperty(exports, 'json', {
configurable: true,
enumerable: true,
get: createParserGetter('json')
})
/**
* Raw parser.
* @public
*/
Object.defineProperty(exports, 'raw', {
configurable: true,
enumerable: true,
get: createParserGetter('raw')
})
/**
* Text parser.
* @public
*/
Object.defineProperty(exports, 'text', {
configurable: true,
enumerable: true,
get: createParserGetter('text')
})
/**
* URL-encoded parser.
* @public
*/
Object.defineProperty(exports, 'urlencoded', {
configurable: true,
enumerable: true,
get: createParserGetter('urlencoded')
})
/**
* Create a middleware to parse json and urlencoded bodies.
*
* @param {object} [options]
* @return {function}
* @deprecated
* @public
*/
function bodyParser (options) {
// use default type for parsers
var opts = Object.create(options || null, {
type: {
configurable: true,
enumerable: true,
value: undefined,
writable: true
}
})
var _urlencoded = exports.urlencoded(opts)
var _json = exports.json(opts)
return function bodyParser (req, res, next) {
_json(req, res, function (err) {
if (err) return next(err)
_urlencoded(req, res, next)
})
}
}
/**
* Create a getter for loading a parser.
* @private
*/
function createParserGetter (name) {
return function get () {
return loadParser(name)
}
}
/**
* Load a parser module.
* @private
*/
function loadParser (parserName) {
var parser = parsers[parserName]
if (parser !== undefined) {
return parser
}
// this uses a switch for static require analysis
switch (parserName) {
case 'json':
parser = require('./lib/types/json')
break
case 'raw':
parser = require('./lib/types/raw')
break
case 'text':
parser = require('./lib/types/text')
break
case 'urlencoded':
parser = require('./lib/types/urlencoded')
break
}
// store to prevent invoking require()
return (parsers[parserName] = parser)
}

View File

@ -0,0 +1,205 @@
/*!
* body-parser
* Copyright(c) 2014-2015 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* Module dependencies.
* @private
*/
var createError = require('http-errors')
var destroy = require('destroy')
var getBody = require('raw-body')
var iconv = require('iconv-lite')
var onFinished = require('on-finished')
var unpipe = require('unpipe')
var zlib = require('zlib')
/**
* Module exports.
*/
module.exports = read
/**
* Read a request into a buffer and parse.
*
* @param {object} req
* @param {object} res
* @param {function} next
* @param {function} parse
* @param {function} debug
* @param {object} options
* @private
*/
function read (req, res, next, parse, debug, options) {
var length
var opts = options
var stream
// flag as parsed
req._body = true
// read options
var encoding = opts.encoding !== null
? opts.encoding
: null
var verify = opts.verify
try {
// get the content stream
stream = contentstream(req, debug, opts.inflate)
length = stream.length
stream.length = undefined
} catch (err) {
return next(err)
}
// set raw-body options
opts.length = length
opts.encoding = verify
? null
: encoding
// assert charset is supported
if (opts.encoding === null && encoding !== null && !iconv.encodingExists(encoding)) {
return next(createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', {
charset: encoding.toLowerCase(),
type: 'charset.unsupported'
}))
}
// read body
debug('read body')
getBody(stream, opts, function (error, body) {
if (error) {
var _error
if (error.type === 'encoding.unsupported') {
// echo back charset
_error = createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', {
charset: encoding.toLowerCase(),
type: 'charset.unsupported'
})
} else {
// set status code on error
_error = createError(400, error)
}
// unpipe from stream and destroy
if (stream !== req) {
unpipe(req)
destroy(stream, true)
}
// read off entire request
dump(req, function onfinished () {
next(createError(400, _error))
})
return
}
// verify
if (verify) {
try {
debug('verify body')
verify(req, res, body, encoding)
} catch (err) {
next(createError(403, err, {
body: body,
type: err.type || 'entity.verify.failed'
}))
return
}
}
// parse
var str = body
try {
debug('parse body')
str = typeof body !== 'string' && encoding !== null
? iconv.decode(body, encoding)
: body
req.body = parse(str)
} catch (err) {
next(createError(400, err, {
body: str,
type: err.type || 'entity.parse.failed'
}))
return
}
next()
})
}
/**
* Get the content stream of the request.
*
* @param {object} req
* @param {function} debug
* @param {boolean} [inflate=true]
* @return {object}
* @api private
*/
function contentstream (req, debug, inflate) {
var encoding = (req.headers['content-encoding'] || 'identity').toLowerCase()
var length = req.headers['content-length']
var stream
debug('content-encoding "%s"', encoding)
if (inflate === false && encoding !== 'identity') {
throw createError(415, 'content encoding unsupported', {
encoding: encoding,
type: 'encoding.unsupported'
})
}
switch (encoding) {
case 'deflate':
stream = zlib.createInflate()
debug('inflate body')
req.pipe(stream)
break
case 'gzip':
stream = zlib.createGunzip()
debug('gunzip body')
req.pipe(stream)
break
case 'identity':
stream = req
stream.length = length
break
default:
throw createError(415, 'unsupported content encoding "' + encoding + '"', {
encoding: encoding,
type: 'encoding.unsupported'
})
}
return stream
}
/**
* Dump the contents of a request.
*
* @param {object} req
* @param {function} callback
* @api private
*/
function dump (req, callback) {
if (onFinished.isFinished(req)) {
callback(null)
} else {
onFinished(req, callback)
req.resume()
}
}

View File

@ -0,0 +1,236 @@
/*!
* body-parser
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2014-2015 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* Module dependencies.
* @private
*/
var bytes = require('bytes')
var contentType = require('content-type')
var createError = require('http-errors')
var debug = require('debug')('body-parser:json')
var read = require('../read')
var typeis = require('type-is')
/**
* Module exports.
*/
module.exports = json
/**
* RegExp to match the first non-space in a string.
*
* Allowed whitespace is defined in RFC 7159:
*
* ws = *(
* %x20 / ; Space
* %x09 / ; Horizontal tab
* %x0A / ; Line feed or New line
* %x0D ) ; Carriage return
*/
var FIRST_CHAR_REGEXP = /^[\x20\x09\x0a\x0d]*([^\x20\x09\x0a\x0d])/ // eslint-disable-line no-control-regex
/**
* Create a middleware to parse JSON bodies.
*
* @param {object} [options]
* @return {function}
* @public
*/
function json (options) {
var opts = options || {}
var limit = typeof opts.limit !== 'number'
? bytes.parse(opts.limit || '100kb')
: opts.limit
var inflate = opts.inflate !== false
var reviver = opts.reviver
var strict = opts.strict !== false
var type = opts.type || 'application/json'
var verify = opts.verify || false
if (verify !== false && typeof verify !== 'function') {
throw new TypeError('option verify must be function')
}
// create the appropriate type checking function
var shouldParse = typeof type !== 'function'
? typeChecker(type)
: type
function parse (body) {
if (body.length === 0) {
// special-case empty json body, as it's a common client-side mistake
// TODO: maybe make this configurable or part of "strict" option
return {}
}
if (strict) {
var first = firstchar(body)
if (first !== '{' && first !== '[') {
debug('strict violation')
throw createStrictSyntaxError(body, first)
}
}
try {
debug('parse json')
return JSON.parse(body, reviver)
} catch (e) {
throw normalizeJsonSyntaxError(e, {
message: e.message,
stack: e.stack
})
}
}
return function jsonParser (req, res, next) {
if (req._body) {
debug('body already parsed')
next()
return
}
req.body = req.body || {}
// skip requests without bodies
if (!typeis.hasBody(req)) {
debug('skip empty body')
next()
return
}
debug('content-type %j', req.headers['content-type'])
// determine if request should be parsed
if (!shouldParse(req)) {
debug('skip parsing')
next()
return
}
// assert charset per RFC 7159 sec 8.1
var charset = getCharset(req) || 'utf-8'
if (charset.slice(0, 4) !== 'utf-') {
debug('invalid charset')
next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', {
charset: charset,
type: 'charset.unsupported'
}))
return
}
// read
read(req, res, next, parse, debug, {
encoding: charset,
inflate: inflate,
limit: limit,
verify: verify
})
}
}
/**
* Create strict violation syntax error matching native error.
*
* @param {string} str
* @param {string} char
* @return {Error}
* @private
*/
function createStrictSyntaxError (str, char) {
var index = str.indexOf(char)
var partial = index !== -1
? str.substring(0, index) + '#'
: ''
try {
JSON.parse(partial); /* istanbul ignore next */ throw new SyntaxError('strict violation')
} catch (e) {
return normalizeJsonSyntaxError(e, {
message: e.message.replace('#', char),
stack: e.stack
})
}
}
/**
* Get the first non-whitespace character in a string.
*
* @param {string} str
* @return {function}
* @private
*/
function firstchar (str) {
var match = FIRST_CHAR_REGEXP.exec(str)
return match
? match[1]
: undefined
}
/**
* Get the charset of a request.
*
* @param {object} req
* @api private
*/
function getCharset (req) {
try {
return (contentType.parse(req).parameters.charset || '').toLowerCase()
} catch (e) {
return undefined
}
}
/**
* Normalize a SyntaxError for JSON.parse.
*
* @param {SyntaxError} error
* @param {object} obj
* @return {SyntaxError}
*/
function normalizeJsonSyntaxError (error, obj) {
var keys = Object.getOwnPropertyNames(error)
for (var i = 0; i < keys.length; i++) {
var key = keys[i]
if (key !== 'stack' && key !== 'message') {
delete error[key]
}
}
// replace stack before message for Node.js 0.10 and below
error.stack = obj.stack.replace(error.message, obj.message)
error.message = obj.message
return error
}
/**
* Get the simple type checker.
*
* @param {string} type
* @return {function}
*/
function typeChecker (type) {
return function checkType (req) {
return Boolean(typeis(req, type))
}
}

View File

@ -0,0 +1,101 @@
/*!
* body-parser
* Copyright(c) 2014-2015 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* Module dependencies.
*/
var bytes = require('bytes')
var debug = require('debug')('body-parser:raw')
var read = require('../read')
var typeis = require('type-is')
/**
* Module exports.
*/
module.exports = raw
/**
* Create a middleware to parse raw bodies.
*
* @param {object} [options]
* @return {function}
* @api public
*/
function raw (options) {
var opts = options || {}
var inflate = opts.inflate !== false
var limit = typeof opts.limit !== 'number'
? bytes.parse(opts.limit || '100kb')
: opts.limit
var type = opts.type || 'application/octet-stream'
var verify = opts.verify || false
if (verify !== false && typeof verify !== 'function') {
throw new TypeError('option verify must be function')
}
// create the appropriate type checking function
var shouldParse = typeof type !== 'function'
? typeChecker(type)
: type
function parse (buf) {
return buf
}
return function rawParser (req, res, next) {
if (req._body) {
debug('body already parsed')
next()
return
}
req.body = req.body || {}
// skip requests without bodies
if (!typeis.hasBody(req)) {
debug('skip empty body')
next()
return
}
debug('content-type %j', req.headers['content-type'])
// determine if request should be parsed
if (!shouldParse(req)) {
debug('skip parsing')
next()
return
}
// read
read(req, res, next, parse, debug, {
encoding: null,
inflate: inflate,
limit: limit,
verify: verify
})
}
}
/**
* Get the simple type checker.
*
* @param {string} type
* @return {function}
*/
function typeChecker (type) {
return function checkType (req) {
return Boolean(typeis(req, type))
}
}

View File

@ -0,0 +1,121 @@
/*!
* body-parser
* Copyright(c) 2014-2015 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* Module dependencies.
*/
var bytes = require('bytes')
var contentType = require('content-type')
var debug = require('debug')('body-parser:text')
var read = require('../read')
var typeis = require('type-is')
/**
* Module exports.
*/
module.exports = text
/**
* Create a middleware to parse text bodies.
*
* @param {object} [options]
* @return {function}
* @api public
*/
function text (options) {
var opts = options || {}
var defaultCharset = opts.defaultCharset || 'utf-8'
var inflate = opts.inflate !== false
var limit = typeof opts.limit !== 'number'
? bytes.parse(opts.limit || '100kb')
: opts.limit
var type = opts.type || 'text/plain'
var verify = opts.verify || false
if (verify !== false && typeof verify !== 'function') {
throw new TypeError('option verify must be function')
}
// create the appropriate type checking function
var shouldParse = typeof type !== 'function'
? typeChecker(type)
: type
function parse (buf) {
return buf
}
return function textParser (req, res, next) {
if (req._body) {
debug('body already parsed')
next()
return
}
req.body = req.body || {}
// skip requests without bodies
if (!typeis.hasBody(req)) {
debug('skip empty body')
next()
return
}
debug('content-type %j', req.headers['content-type'])
// determine if request should be parsed
if (!shouldParse(req)) {
debug('skip parsing')
next()
return
}
// get charset
var charset = getCharset(req) || defaultCharset
// read
read(req, res, next, parse, debug, {
encoding: charset,
inflate: inflate,
limit: limit,
verify: verify
})
}
}
/**
* Get the charset of a request.
*
* @param {object} req
* @api private
*/
function getCharset (req) {
try {
return (contentType.parse(req).parameters.charset || '').toLowerCase()
} catch (e) {
return undefined
}
}
/**
* Get the simple type checker.
*
* @param {string} type
* @return {function}
*/
function typeChecker (type) {
return function checkType (req) {
return Boolean(typeis(req, type))
}
}

View File

@ -0,0 +1,284 @@
/*!
* body-parser
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2014-2015 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* Module dependencies.
* @private
*/
var bytes = require('bytes')
var contentType = require('content-type')
var createError = require('http-errors')
var debug = require('debug')('body-parser:urlencoded')
var deprecate = require('depd')('body-parser')
var read = require('../read')
var typeis = require('type-is')
/**
* Module exports.
*/
module.exports = urlencoded
/**
* Cache of parser modules.
*/
var parsers = Object.create(null)
/**
* Create a middleware to parse urlencoded bodies.
*
* @param {object} [options]
* @return {function}
* @public
*/
function urlencoded (options) {
var opts = options || {}
// notice because option default will flip in next major
if (opts.extended === undefined) {
deprecate('undefined extended: provide extended option')
}
var extended = opts.extended !== false
var inflate = opts.inflate !== false
var limit = typeof opts.limit !== 'number'
? bytes.parse(opts.limit || '100kb')
: opts.limit
var type = opts.type || 'application/x-www-form-urlencoded'
var verify = opts.verify || false
if (verify !== false && typeof verify !== 'function') {
throw new TypeError('option verify must be function')
}
// create the appropriate query parser
var queryparse = extended
? extendedparser(opts)
: simpleparser(opts)
// create the appropriate type checking function
var shouldParse = typeof type !== 'function'
? typeChecker(type)
: type
function parse (body) {
return body.length
? queryparse(body)
: {}
}
return function urlencodedParser (req, res, next) {
if (req._body) {
debug('body already parsed')
next()
return
}
req.body = req.body || {}
// skip requests without bodies
if (!typeis.hasBody(req)) {
debug('skip empty body')
next()
return
}
debug('content-type %j', req.headers['content-type'])
// determine if request should be parsed
if (!shouldParse(req)) {
debug('skip parsing')
next()
return
}
// assert charset
var charset = getCharset(req) || 'utf-8'
if (charset !== 'utf-8') {
debug('invalid charset')
next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', {
charset: charset,
type: 'charset.unsupported'
}))
return
}
// read
read(req, res, next, parse, debug, {
debug: debug,
encoding: charset,
inflate: inflate,
limit: limit,
verify: verify
})
}
}
/**
* Get the extended query parser.
*
* @param {object} options
*/
function extendedparser (options) {
var parameterLimit = options.parameterLimit !== undefined
? options.parameterLimit
: 1000
var parse = parser('qs')
if (isNaN(parameterLimit) || parameterLimit < 1) {
throw new TypeError('option parameterLimit must be a positive number')
}
if (isFinite(parameterLimit)) {
parameterLimit = parameterLimit | 0
}
return function queryparse (body) {
var paramCount = parameterCount(body, parameterLimit)
if (paramCount === undefined) {
debug('too many parameters')
throw createError(413, 'too many parameters', {
type: 'parameters.too.many'
})
}
var arrayLimit = Math.max(100, paramCount)
debug('parse extended urlencoding')
return parse(body, {
allowPrototypes: true,
arrayLimit: arrayLimit,
depth: Infinity,
parameterLimit: parameterLimit
})
}
}
/**
* Get the charset of a request.
*
* @param {object} req
* @api private
*/
function getCharset (req) {
try {
return (contentType.parse(req).parameters.charset || '').toLowerCase()
} catch (e) {
return undefined
}
}
/**
* Count the number of parameters, stopping once limit reached
*
* @param {string} body
* @param {number} limit
* @api private
*/
function parameterCount (body, limit) {
var count = 0
var index = 0
while ((index = body.indexOf('&', index)) !== -1) {
count++
index++
if (count === limit) {
return undefined
}
}
return count
}
/**
* Get parser for module name dynamically.
*
* @param {string} name
* @return {function}
* @api private
*/
function parser (name) {
var mod = parsers[name]
if (mod !== undefined) {
return mod.parse
}
// this uses a switch for static require analysis
switch (name) {
case 'qs':
mod = require('qs')
break
case 'querystring':
mod = require('querystring')
break
}
// store to prevent invoking require()
parsers[name] = mod
return mod.parse
}
/**
* Get the simple query parser.
*
* @param {object} options
*/
function simpleparser (options) {
var parameterLimit = options.parameterLimit !== undefined
? options.parameterLimit
: 1000
var parse = parser('querystring')
if (isNaN(parameterLimit) || parameterLimit < 1) {
throw new TypeError('option parameterLimit must be a positive number')
}
if (isFinite(parameterLimit)) {
parameterLimit = parameterLimit | 0
}
return function queryparse (body) {
var paramCount = parameterCount(body, parameterLimit)
if (paramCount === undefined) {
debug('too many parameters')
throw createError(413, 'too many parameters', {
type: 'parameters.too.many'
})
}
debug('parse urlencoding')
return parse(body, undefined, undefined, { maxKeys: parameterLimit })
}
}
/**
* Get the simple type checker.
*
* @param {string} type
* @return {function}
*/
function typeChecker (type) {
return function checkType (req) {
return Boolean(typeis(req, type))
}
}

View File

@ -0,0 +1,56 @@
{
"name": "body-parser",
"description": "Node.js body parsing middleware",
"version": "1.20.1",
"contributors": [
"Douglas Christopher Wilson <doug@somethingdoug.com>",
"Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)"
],
"license": "MIT",
"repository": "expressjs/body-parser",
"dependencies": {
"bytes": "3.1.2",
"content-type": "~1.0.4",
"debug": "2.6.9",
"depd": "2.0.0",
"destroy": "1.2.0",
"http-errors": "2.0.0",
"iconv-lite": "0.4.24",
"on-finished": "2.4.1",
"qs": "6.11.0",
"raw-body": "2.5.1",
"type-is": "~1.6.18",
"unpipe": "1.0.0"
},
"devDependencies": {
"eslint": "8.24.0",
"eslint-config-standard": "14.1.1",
"eslint-plugin-import": "2.26.0",
"eslint-plugin-markdown": "3.0.0",
"eslint-plugin-node": "11.1.0",
"eslint-plugin-promise": "6.0.1",
"eslint-plugin-standard": "4.1.0",
"methods": "1.1.2",
"mocha": "10.0.0",
"nyc": "15.1.0",
"safe-buffer": "5.2.1",
"supertest": "6.3.0"
},
"files": [
"lib/",
"LICENSE",
"HISTORY.md",
"SECURITY.md",
"index.js"
],
"engines": {
"node": ">= 0.8",
"npm": "1.2.8000 || >= 1.4.16"
},
"scripts": {
"lint": "eslint .",
"test": "mocha --require test/support/env --reporter spec --check-leaks --bail test/",
"test-ci": "nyc --reporter=lcov --reporter=text npm test",
"test-cov": "nyc --reporter=html --reporter=text npm test"
}
}

View File

@ -0,0 +1,303 @@
2.5.1 / 2022-02-28
==================
* Fix error on early async hooks implementations
2.5.0 / 2022-02-21
==================
* Prevent loss of async hooks context
* Prevent hanging when stream is not readable
* deps: http-errors@2.0.0
- deps: depd@2.0.0
- deps: statuses@2.0.1
2.4.3 / 2022-02-14
==================
* deps: bytes@3.1.2
2.4.2 / 2021-11-16
==================
* deps: bytes@3.1.1
* deps: http-errors@1.8.1
- deps: setprototypeof@1.2.0
- deps: toidentifier@1.0.1
2.4.1 / 2019-06-25
==================
* deps: http-errors@1.7.3
- deps: inherits@2.0.4
2.4.0 / 2019-04-17
==================
* deps: bytes@3.1.0
- Add petabyte (`pb`) support
* deps: http-errors@1.7.2
- Set constructor name when possible
- deps: setprototypeof@1.1.1
- deps: statuses@'>= 1.5.0 < 2'
* deps: iconv-lite@0.4.24
- Added encoding MIK
2.3.3 / 2018-05-08
==================
* deps: http-errors@1.6.3
- deps: depd@~1.1.2
- deps: setprototypeof@1.1.0
- deps: statuses@'>= 1.3.1 < 2'
* deps: iconv-lite@0.4.23
- Fix loading encoding with year appended
- Fix deprecation warnings on Node.js 10+
2.3.2 / 2017-09-09
==================
* deps: iconv-lite@0.4.19
- Fix ISO-8859-1 regression
- Update Windows-1255
2.3.1 / 2017-09-07
==================
* deps: bytes@3.0.0
* deps: http-errors@1.6.2
- deps: depd@1.1.1
* perf: skip buffer decoding on overage chunk
2.3.0 / 2017-08-04
==================
* Add TypeScript definitions
* Use `http-errors` for standard emitted errors
* deps: bytes@2.5.0
* deps: iconv-lite@0.4.18
- Add support for React Native
- Add a warning if not loaded as utf-8
- Fix CESU-8 decoding in Node.js 8
- Improve speed of ISO-8859-1 encoding
2.2.0 / 2017-01-02
==================
* deps: iconv-lite@0.4.15
- Added encoding MS-31J
- Added encoding MS-932
- Added encoding MS-936
- Added encoding MS-949
- Added encoding MS-950
- Fix GBK/GB18030 handling of Euro character
2.1.7 / 2016-06-19
==================
* deps: bytes@2.4.0
* perf: remove double-cleanup on happy path
2.1.6 / 2016-03-07
==================
* deps: bytes@2.3.0
- Drop partial bytes on all parsed units
- Fix parsing byte string that looks like hex
2.1.5 / 2015-11-30
==================
* deps: bytes@2.2.0
* deps: iconv-lite@0.4.13
2.1.4 / 2015-09-27
==================
* Fix masking critical errors from `iconv-lite`
* deps: iconv-lite@0.4.12
- Fix CESU-8 decoding in Node.js 4.x
2.1.3 / 2015-09-12
==================
* Fix sync callback when attaching data listener causes sync read
- Node.js 0.10 compatibility issue
2.1.2 / 2015-07-05
==================
* Fix error stack traces to skip `makeError`
* deps: iconv-lite@0.4.11
- Add encoding CESU-8
2.1.1 / 2015-06-14
==================
* Use `unpipe` module for unpiping requests
2.1.0 / 2015-05-28
==================
* deps: iconv-lite@0.4.10
- Improved UTF-16 endianness detection
- Leading BOM is now removed when decoding
- The encoding UTF-16 without BOM now defaults to UTF-16LE when detection fails
2.0.2 / 2015-05-21
==================
* deps: bytes@2.1.0
- Slight optimizations
2.0.1 / 2015-05-10
==================
* Fix a false-positive when unpiping in Node.js 0.8
2.0.0 / 2015-05-08
==================
* Return a promise without callback instead of thunk
* deps: bytes@2.0.1
- units no longer case sensitive when parsing
1.3.4 / 2015-04-15
==================
* Fix hanging callback if request aborts during read
* deps: iconv-lite@0.4.8
- Add encoding alias UNICODE-1-1-UTF-7
1.3.3 / 2015-02-08
==================
* deps: iconv-lite@0.4.7
- Gracefully support enumerables on `Object.prototype`
1.3.2 / 2015-01-20
==================
* deps: iconv-lite@0.4.6
- Fix rare aliases of single-byte encodings
1.3.1 / 2014-11-21
==================
* deps: iconv-lite@0.4.5
- Fix Windows-31J and X-SJIS encoding support
1.3.0 / 2014-07-20
==================
* Fully unpipe the stream on error
- Fixes `Cannot switch to old mode now` error on Node.js 0.10+
1.2.3 / 2014-07-20
==================
* deps: iconv-lite@0.4.4
- Added encoding UTF-7
1.2.2 / 2014-06-19
==================
* Send invalid encoding error to callback
1.2.1 / 2014-06-15
==================
* deps: iconv-lite@0.4.3
- Added encodings UTF-16BE and UTF-16 with BOM
1.2.0 / 2014-06-13
==================
* Passing string as `options` interpreted as encoding
* Support all encodings from `iconv-lite`
1.1.7 / 2014-06-12
==================
* use `string_decoder` module from npm
1.1.6 / 2014-05-27
==================
* check encoding for old streams1
* support node.js < 0.10.6
1.1.5 / 2014-05-14
==================
* bump bytes
1.1.4 / 2014-04-19
==================
* allow true as an option
* bump bytes
1.1.3 / 2014-03-02
==================
* fix case when length=null
1.1.2 / 2013-12-01
==================
* be less strict on state.encoding check
1.1.1 / 2013-11-27
==================
* add engines
1.1.0 / 2013-11-27
==================
* add err.statusCode and err.type
* allow for encoding option to be true
* pause the stream instead of dumping on error
* throw if the stream's encoding is set
1.0.1 / 2013-11-19
==================
* dont support streams1, throw if dev set encoding
1.0.0 / 2013-11-17
==================
* rename `expected` option to `length`
0.2.0 / 2013-11-15
==================
* republish
0.1.1 / 2013-11-15
==================
* use bytes
0.1.0 / 2013-11-11
==================
* generator support
0.0.3 / 2013-10-10
==================
* update repo
0.0.2 / 2013-09-14
==================
* dump stream on bad headers
* listen to events after defining received and buffers
0.0.1 / 2013-09-14
==================
* Initial release

View File

@ -1,6 +1,7 @@
The MIT License (MIT)
Copyright (c) 2014-present, Jon Schlinkert.
Copyright (c) 2013-2014 Jonathan Ong <me@jongleberry.com>
Copyright (c) 2014-2022 Douglas Christopher Wilson <doug@somethingdoug.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@ -0,0 +1,223 @@
# raw-body
[![NPM Version][npm-image]][npm-url]
[![NPM Downloads][downloads-image]][downloads-url]
[![Node.js Version][node-version-image]][node-version-url]
[![Build status][github-actions-ci-image]][github-actions-ci-url]
[![Test coverage][coveralls-image]][coveralls-url]
Gets the entire buffer of a stream either as a `Buffer` or a string.
Validates the stream's length against an expected length and maximum limit.
Ideal for parsing request bodies.
## Install
This is a [Node.js](https://nodejs.org/en/) module available through the
[npm registry](https://www.npmjs.com/). Installation is done using the
[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally):
```sh
$ npm install raw-body
```
### TypeScript
This module includes a [TypeScript](https://www.typescriptlang.org/)
declaration file to enable auto complete in compatible editors and type
information for TypeScript projects. This module depends on the Node.js
types, so install `@types/node`:
```sh
$ npm install @types/node
```
## API
```js
var getRawBody = require('raw-body')
```
### getRawBody(stream, [options], [callback])
**Returns a promise if no callback specified and global `Promise` exists.**
Options:
- `length` - The length of the stream.
If the contents of the stream do not add up to this length,
a `400` error code is returned.
- `limit` - The byte limit of the body.
This is the number of bytes or any string format supported by
[bytes](https://www.npmjs.com/package/bytes),
for example `1000`, `'500kb'` or `'3mb'`.
If the body ends up being larger than this limit,
a `413` error code is returned.
- `encoding` - The encoding to use to decode the body into a string.
By default, a `Buffer` instance will be returned when no encoding is specified.
Most likely, you want `utf-8`, so setting `encoding` to `true` will decode as `utf-8`.
You can use any type of encoding supported by [iconv-lite](https://www.npmjs.org/package/iconv-lite#readme).
You can also pass a string in place of options to just specify the encoding.
If an error occurs, the stream will be paused, everything unpiped,
and you are responsible for correctly disposing the stream.
For HTTP requests, you may need to finish consuming the stream if
you want to keep the socket open for future requests. For streams
that use file descriptors, you should `stream.destroy()` or
`stream.close()` to prevent leaks.
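As a small, hedged sketch of that clean-up responsibility (the file name and limit are made up for illustration), a file-descriptor-backed stream can be destroyed when `getRawBody` reports an error:
```js
var fs = require('fs')
var getRawBody = require('raw-body')

var stream = fs.createReadStream('upload.tmp') // hypothetical file
getRawBody(stream, { limit: '1mb' }, function (err, buf) {
  if (err) {
    // the stream is left paused and unpiped; close the descriptor ourselves
    stream.destroy()
    console.error(err.type, err.message)
    return
  }
  console.log('read ' + buf.length + ' bytes')
})
```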
## Errors
This module creates errors depending on the error condition during reading.
The error may be an error from the underlying Node.js implementation, but is
otherwise an error created by this module, which has the following attributes:
* `limit` - the limit in bytes
* `length` and `expected` - the expected length of the stream
* `received` - the received bytes
* `encoding` - the invalid encoding
* `status` and `statusCode` - the corresponding status code for the error
* `type` - the error type
### Types
The errors from this module have a `type` property which allows for the programmatic
determination of the type of error returned.
#### encoding.unsupported
This error will occur when the `encoding` option is specified, but the value does
not map to an encoding supported by the [iconv-lite](https://www.npmjs.org/package/iconv-lite#readme)
module.
#### entity.too.large
This error will occur when the `limit` option is specified, but the stream has
an entity that is larger.
#### request.aborted
This error will occur when the request stream is aborted by the client before
reading the body has finished.
#### request.size.invalid
This error will occur when the `length` option is specified, but the stream
emits a number of bytes that does not match that length.
#### stream.encoding.set
This error will occur when the given stream has an encoding set on it, making it
a decoded stream. The stream should not have an encoding set and is expected to
emit `Buffer` objects.
#### stream.not.readable
This error will occur when the given stream is not readable.
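A minimal sketch of using the `type` property programmatically (the status mapping here is only an example; `status`/`statusCode` on the error already carry a sensible code for most types). It assumes a plain Node.js `req`/`res` pair like the examples in the next section.
```js
// hedged sketch: check err.type after a failed read
getRawBody(req, { limit: '1mb' }, function (err, buf) {
  if (err) {
    if (err.type === 'entity.too.large') {
      res.statusCode = 413
      res.end('payload exceeds ' + err.limit + ' bytes')
    } else if (err.type === 'request.aborted') {
      // the client went away; there is nothing useful to send back
    } else {
      res.statusCode = err.statusCode || 500
      res.end(err.message)
    }
    return
  }
  // buf is the full request body as a Buffer
})
```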
## Examples
### Simple Express example
```js
var contentType = require('content-type')
var express = require('express')
var getRawBody = require('raw-body')
var app = express()
app.use(function (req, res, next) {
getRawBody(req, {
length: req.headers['content-length'],
limit: '1mb',
encoding: contentType.parse(req).parameters.charset
}, function (err, string) {
if (err) return next(err)
req.text = string
next()
})
})
// now access req.text
```
### Simple Koa example
```js
var contentType = require('content-type')
var getRawBody = require('raw-body')
var koa = require('koa')
var app = koa()
app.use(function * (next) {
this.text = yield getRawBody(this.req, {
length: this.req.headers['content-length'],
limit: '1mb',
encoding: contentType.parse(this.req).parameters.charset
})
yield next
})
// now access this.text
```
### Using as a promise
To use this library as a promise, simply omit the `callback` and a promise is
returned, provided that a global `Promise` is defined.
```js
var getRawBody = require('raw-body')
var http = require('http')
var server = http.createServer(function (req, res) {
getRawBody(req)
.then(function (buf) {
res.statusCode = 200
res.end(buf.length + ' bytes submitted')
})
.catch(function (err) {
res.statusCode = 500
res.end(err.message)
})
})
server.listen(3000)
```
### Using with TypeScript
```ts
import * as getRawBody from 'raw-body';
import * as http from 'http';
const server = http.createServer((req, res) => {
getRawBody(req)
.then((buf) => {
res.statusCode = 200;
res.end(buf.length + ' bytes submitted');
})
.catch((err) => {
res.statusCode = err.statusCode;
res.end(err.message);
});
});
server.listen(3000);
```
## License
[MIT](LICENSE)
[npm-image]: https://img.shields.io/npm/v/raw-body.svg
[npm-url]: https://npmjs.org/package/raw-body
[node-version-image]: https://img.shields.io/node/v/raw-body.svg
[node-version-url]: https://nodejs.org/en/download/
[coveralls-image]: https://img.shields.io/coveralls/stream-utils/raw-body/master.svg
[coveralls-url]: https://coveralls.io/r/stream-utils/raw-body?branch=master
[downloads-image]: https://img.shields.io/npm/dm/raw-body.svg
[downloads-url]: https://npmjs.org/package/raw-body
[github-actions-ci-image]: https://img.shields.io/github/workflow/status/stream-utils/raw-body/ci/master?label=ci
[github-actions-ci-url]: https://github.com/stream-utils/raw-body/actions?query=workflow%3Aci

View File

@ -0,0 +1,24 @@
# Security Policies and Procedures
## Reporting a Bug
The `raw-body` team and community take all security bugs seriously. Thank you
for improving the security of `raw-body`. We appreciate your efforts and
responsible disclosure and will make every effort to acknowledge your
contributions.
Report security bugs by emailing the current owners of `raw-body`. This information
can be found in the npm registry using the command `npm owner ls raw-body`.
If unsure or unable to get the information from the above, open an issue
in the [project issue tracker](https://github.com/stream-utils/raw-body/issues)
asking for the current contact information.
To ensure a timely response to your report, please ensure that the entirety
of the report is contained within the email body and not solely behind a web
link or an attachment.
At least one owner will acknowledge your email within 48 hours, and will send a
more detailed response within 48 hours indicating the next steps in handling
your report. After the initial reply to your report, the owners will
endeavor to keep you informed of the progress towards a fix and full
announcement, and may ask for additional information or guidance.

View File

@ -0,0 +1,87 @@
import { Readable } from 'stream';
declare namespace getRawBody {
export type Encoding = string | true;
export interface Options {
/**
* The expected length of the stream.
*/
length?: number | string | null;
/**
* The byte limit of the body. This is the number of bytes or any string
* format supported by `bytes`, for example `1000`, `'500kb'` or `'3mb'`.
*/
limit?: number | string | null;
/**
* The encoding to use to decode the body into a string. By default, a
* `Buffer` instance will be returned when no encoding is specified. Most
* likely, you want `utf-8`, so setting encoding to `true` will decode as
* `utf-8`. You can use any type of encoding supported by `iconv-lite`.
*/
encoding?: Encoding | null;
}
export interface RawBodyError extends Error {
/**
* The limit in bytes.
*/
limit?: number;
/**
* The expected length of the stream.
*/
length?: number;
expected?: number;
/**
* The received bytes.
*/
received?: number;
/**
* The encoding.
*/
encoding?: string;
/**
* The corresponding status code for the error.
*/
status: number;
statusCode: number;
/**
* The error type.
*/
type: string;
}
}
/**
* Gets the entire buffer of a stream either as a `Buffer` or a string.
* Validates the stream's length against an expected length and maximum
* limit. Ideal for parsing request bodies.
*/
declare function getRawBody(
stream: Readable,
callback: (err: getRawBody.RawBodyError, body: Buffer) => void
): void;
declare function getRawBody(
stream: Readable,
options: (getRawBody.Options & { encoding: getRawBody.Encoding }) | getRawBody.Encoding,
callback: (err: getRawBody.RawBodyError, body: string) => void
): void;
declare function getRawBody(
stream: Readable,
options: getRawBody.Options,
callback: (err: getRawBody.RawBodyError, body: Buffer) => void
): void;
declare function getRawBody(
stream: Readable,
options: (getRawBody.Options & { encoding: getRawBody.Encoding }) | getRawBody.Encoding
): Promise<string>;
declare function getRawBody(
stream: Readable,
options?: getRawBody.Options
): Promise<Buffer>;
export = getRawBody;

329
src/node_modules/express/node_modules/raw-body/index.js generated vendored Normal file
View File

@ -0,0 +1,329 @@
/*!
* raw-body
* Copyright(c) 2013-2014 Jonathan Ong
* Copyright(c) 2014-2022 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* Module dependencies.
* @private
*/
var asyncHooks = tryRequireAsyncHooks()
var bytes = require('bytes')
var createError = require('http-errors')
var iconv = require('iconv-lite')
var unpipe = require('unpipe')
/**
* Module exports.
* @public
*/
module.exports = getRawBody
/**
* Module variables.
* @private
*/
var ICONV_ENCODING_MESSAGE_REGEXP = /^Encoding not recognized: /
/**
* Get the decoder for a given encoding.
*
* @param {string} encoding
* @private
*/
function getDecoder (encoding) {
if (!encoding) return null
try {
return iconv.getDecoder(encoding)
} catch (e) {
// error getting decoder
if (!ICONV_ENCODING_MESSAGE_REGEXP.test(e.message)) throw e
// the encoding was not found
throw createError(415, 'specified encoding unsupported', {
encoding: encoding,
type: 'encoding.unsupported'
})
}
}
/**
* Get the raw body of a stream (typically HTTP).
*
* @param {object} stream
* @param {object|string|function} [options]
* @param {function} [callback]
* @public
*/
function getRawBody (stream, options, callback) {
var done = callback
var opts = options || {}
if (options === true || typeof options === 'string') {
// short cut for encoding
opts = {
encoding: options
}
}
if (typeof options === 'function') {
done = options
opts = {}
}
// validate callback is a function, if provided
if (done !== undefined && typeof done !== 'function') {
throw new TypeError('argument callback must be a function')
}
// require the callback without promises
if (!done && !global.Promise) {
throw new TypeError('argument callback is required')
}
// get encoding
var encoding = opts.encoding !== true
? opts.encoding
: 'utf-8'
// convert the limit to an integer
var limit = bytes.parse(opts.limit)
// convert the expected length to an integer
var length = opts.length != null && !isNaN(opts.length)
? parseInt(opts.length, 10)
: null
if (done) {
// classic callback style
return readStream(stream, encoding, length, limit, wrap(done))
}
return new Promise(function executor (resolve, reject) {
readStream(stream, encoding, length, limit, function onRead (err, buf) {
if (err) return reject(err)
resolve(buf)
})
})
}
/**
* Halt a stream.
*
* @param {Object} stream
* @private
*/
function halt (stream) {
// unpipe everything from the stream
unpipe(stream)
// pause stream
if (typeof stream.pause === 'function') {
stream.pause()
}
}
/**
* Read the data from the stream.
*
* @param {object} stream
* @param {string} encoding
* @param {number} length
* @param {number} limit
* @param {function} callback
* @public
*/
function readStream (stream, encoding, length, limit, callback) {
var complete = false
var sync = true
// check the length and limit options.
// note: we intentionally leave the stream paused,
// so users should handle the stream themselves.
if (limit !== null && length !== null && length > limit) {
return done(createError(413, 'request entity too large', {
expected: length,
length: length,
limit: limit,
type: 'entity.too.large'
}))
}
// streams1: assert request encoding is buffer.
// streams2+: assert the stream encoding is buffer.
// stream._decoder: streams1
// state.encoding: streams2
// state.decoder: streams2, specifically < 0.10.6
var state = stream._readableState
if (stream._decoder || (state && (state.encoding || state.decoder))) {
// developer error
return done(createError(500, 'stream encoding should not be set', {
type: 'stream.encoding.set'
}))
}
if (typeof stream.readable !== 'undefined' && !stream.readable) {
return done(createError(500, 'stream is not readable', {
type: 'stream.not.readable'
}))
}
var received = 0
var decoder
try {
decoder = getDecoder(encoding)
} catch (err) {
return done(err)
}
var buffer = decoder
? ''
: []
// attach listeners
stream.on('aborted', onAborted)
stream.on('close', cleanup)
stream.on('data', onData)
stream.on('end', onEnd)
stream.on('error', onEnd)
// mark sync section complete
sync = false
function done () {
var args = new Array(arguments.length)
// copy arguments
for (var i = 0; i < args.length; i++) {
args[i] = arguments[i]
}
// mark complete
complete = true
if (sync) {
process.nextTick(invokeCallback)
} else {
invokeCallback()
}
function invokeCallback () {
cleanup()
if (args[0]) {
// halt the stream on error
halt(stream)
}
callback.apply(null, args)
}
}
function onAborted () {
if (complete) return
done(createError(400, 'request aborted', {
code: 'ECONNABORTED',
expected: length,
length: length,
received: received,
type: 'request.aborted'
}))
}
function onData (chunk) {
if (complete) return
received += chunk.length
if (limit !== null && received > limit) {
done(createError(413, 'request entity too large', {
limit: limit,
received: received,
type: 'entity.too.large'
}))
} else if (decoder) {
buffer += decoder.write(chunk)
} else {
buffer.push(chunk)
}
}
function onEnd (err) {
if (complete) return
if (err) return done(err)
if (length !== null && received !== length) {
done(createError(400, 'request size did not match content length', {
expected: length,
length: length,
received: received,
type: 'request.size.invalid'
}))
} else {
var string = decoder
? buffer + (decoder.end() || '')
: Buffer.concat(buffer)
done(null, string)
}
}
function cleanup () {
buffer = null
stream.removeListener('aborted', onAborted)
stream.removeListener('data', onData)
stream.removeListener('end', onEnd)
stream.removeListener('error', onEnd)
stream.removeListener('close', cleanup)
}
}
/**
* Try to require async_hooks
* @private
*/
function tryRequireAsyncHooks () {
try {
return require('async_hooks')
} catch (e) {
return {}
}
}
/**
* Wrap function with async resource, if possible.
* AsyncResource.bind static method backported.
* @private
*/
function wrap (fn) {
var res
// create anonymous resource
if (asyncHooks.AsyncResource) {
res = new asyncHooks.AsyncResource(fn.name || 'bound-anonymous-fn')
}
// incompatible node.js
if (!res || !res.runInAsyncScope) {
return fn
}
// return bound function
return res.runInAsyncScope.bind(res, fn, null)
}

View File

@ -0,0 +1,49 @@
{
"name": "raw-body",
"description": "Get and validate the raw body of a readable stream.",
"version": "2.5.1",
"author": "Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)",
"contributors": [
"Douglas Christopher Wilson <doug@somethingdoug.com>",
"Raynos <raynos2@gmail.com>"
],
"license": "MIT",
"repository": "stream-utils/raw-body",
"dependencies": {
"bytes": "3.1.2",
"http-errors": "2.0.0",
"iconv-lite": "0.4.24",
"unpipe": "1.0.0"
},
"devDependencies": {
"bluebird": "3.7.2",
"eslint": "7.32.0",
"eslint-config-standard": "14.1.1",
"eslint-plugin-import": "2.25.4",
"eslint-plugin-markdown": "2.2.1",
"eslint-plugin-node": "11.1.0",
"eslint-plugin-promise": "5.2.0",
"eslint-plugin-standard": "4.1.0",
"mocha": "9.2.1",
"nyc": "15.1.0",
"readable-stream": "2.3.7",
"safe-buffer": "5.2.1"
},
"engines": {
"node": ">= 0.8"
},
"files": [
"HISTORY.md",
"LICENSE",
"README.md",
"SECURITY.md",
"index.d.ts",
"index.js"
],
"scripts": {
"lint": "eslint .",
"test": "mocha --trace-deprecation --reporter spec --bail --check-leaks test/",
"test-ci": "nyc --reporter=lcovonly --reporter=text npm test",
"test-cov": "nyc --reporter=html --reporter=text npm test"
}
}

237
src/node_modules/fill-range/README.md generated vendored
View File

@ -1,237 +0,0 @@
# fill-range [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/fill-range.svg?style=flat)](https://www.npmjs.com/package/fill-range) [![NPM monthly downloads](https://img.shields.io/npm/dm/fill-range.svg?style=flat)](https://npmjs.org/package/fill-range) [![NPM total downloads](https://img.shields.io/npm/dt/fill-range.svg?style=flat)](https://npmjs.org/package/fill-range) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/fill-range.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/fill-range)
> Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`
Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support.
## Install
Install with [npm](https://www.npmjs.com/):
```sh
$ npm install --save fill-range
```
## Usage
Expands numbers and letters, optionally using a `step` as the last argument. _(Numbers may be defined as JavaScript numbers or strings)_.
```js
const fill = require('fill-range');
// fill(from, to[, step, options]);
console.log(fill('1', '10')); //=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10']
console.log(fill('1', '10', { toRegex: true })); //=> [1-9]|10
```
**Params**
* `from`: **{String|Number}** the number or letter to start with
* `to`: **{String|Number}** the number or letter to end with
* `step`: **{String|Number|Object|Function}** Optionally pass a [step](#optionsstep) to use.
* `options`: **{Object|Function}**: See all available [options](#options)
## Examples
By default, an array of values is returned.
**Alphabetical ranges**
```js
console.log(fill('a', 'e')); //=> ['a', 'b', 'c', 'd', 'e']
console.log(fill('A', 'E')); //=> [ 'A', 'B', 'C', 'D', 'E' ]
```
**Numerical ranges**
Numbers can be defined as actual numbers or strings.
```js
console.log(fill(1, 5)); //=> [ 1, 2, 3, 4, 5 ]
console.log(fill('1', '5')); //=> [ 1, 2, 3, 4, 5 ]
```
**Negative ranges**
Numbers can be defined as actual numbers or strings.
```js
console.log(fill('-5', '-1')); //=> [ '-5', '-4', '-3', '-2', '-1' ]
console.log(fill('-5', '5')); //=> [ '-5', '-4', '-3', '-2', '-1', '0', '1', '2', '3', '4', '5' ]
```
**Steps (increments)**
```js
// numerical ranges with increments
console.log(fill('0', '25', 4)); //=> [ '0', '4', '8', '12', '16', '20', '24' ]
console.log(fill('0', '25', 5)); //=> [ '0', '5', '10', '15', '20', '25' ]
console.log(fill('0', '25', 6)); //=> [ '0', '6', '12', '18', '24' ]
// alphabetical ranges with increments
console.log(fill('a', 'z', 4)); //=> [ 'a', 'e', 'i', 'm', 'q', 'u', 'y' ]
console.log(fill('a', 'z', 5)); //=> [ 'a', 'f', 'k', 'p', 'u', 'z' ]
console.log(fill('a', 'z', 6)); //=> [ 'a', 'g', 'm', 's', 'y' ]
```
## Options
### options.step
**Type**: `number` (formatted as a string or number)
**Default**: `undefined`
**Description**: The increment to use for the range. Can be used with letters or numbers.
**Example(s)**
```js
// numbers
console.log(fill('1', '10', 2)); //=> [ '1', '3', '5', '7', '9' ]
console.log(fill('1', '10', 3)); //=> [ '1', '4', '7', '10' ]
console.log(fill('1', '10', 4)); //=> [ '1', '5', '9' ]
// letters
console.log(fill('a', 'z', 5)); //=> [ 'a', 'f', 'k', 'p', 'u', 'z' ]
console.log(fill('a', 'z', 7)); //=> [ 'a', 'h', 'o', 'v' ]
console.log(fill('a', 'z', 9)); //=> [ 'a', 'j', 's' ]
```
### options.strictRanges
**Type**: `boolean`
**Default**: `false`
**Description**: By default, an empty array is returned when an invalid range is passed. Enable this option to throw a `RangeError` on invalid ranges.
**Example(s)**
The following are all invalid:
```js
fill('1.1', '2'); // decimals not supported in ranges
fill('a', '2'); // incompatible range values
fill(1, 10, 'foo'); // invalid "step" argument
```
### options.stringify
**Type**: `boolean`
**Default**: `undefined`
**Description**: Cast all returned values to strings. By default, integers are returned as numbers.
**Example(s)**
```js
console.log(fill(1, 5)); //=> [ 1, 2, 3, 4, 5 ]
console.log(fill(1, 5, { stringify: true })); //=> [ '1', '2', '3', '4', '5' ]
```
### options.toRegex
**Type**: `boolean`
**Default**: `undefined`
**Description**: Create a regex-compatible source string, instead of expanding values to an array.
**Example(s)**
```js
// alphabetical range
console.log(fill('a', 'e', { toRegex: true })); //=> '[a-e]'
// alphabetical with step
console.log(fill('a', 'z', 3, { toRegex: true })); //=> 'a|d|g|j|m|p|s|v|y'
// numerical range
console.log(fill('1', '100', { toRegex: true })); //=> '[1-9]|[1-9][0-9]|100'
// numerical range with zero padding
console.log(fill('000001', '100000', { toRegex: true }));
//=> '0{5}[1-9]|0{4}[1-9][0-9]|0{3}[1-9][0-9]{2}|0{2}[1-9][0-9]{3}|0[1-9][0-9]{4}|100000'
```
### options.transform
**Type**: `function`
**Default**: `undefined`
**Description**: Customize each value in the returned array (or [string](#optionstoRegex)). _(you can also pass this function as the last argument to `fill()`)_.
**Example(s)**
```js
// add zero padding
console.log(fill(1, 5, value => String(value).padStart(4, '0')));
//=> ['0001', '0002', '0003', '0004', '0005']
```
## About
<details>
<summary><strong>Contributing</strong></summary>
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
</details>
<details>
<summary><strong>Running Tests</strong></summary>
Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
```sh
$ npm install && npm test
```
</details>
<details>
<summary><strong>Building docs</strong></summary>
_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
To generate the readme, run the following command:
```sh
$ npm install -g verbose/verb#dev verb-generate-readme && verb
```
</details>
### Contributors
| **Commits** | **Contributor** |
| --- | --- |
| 116 | [jonschlinkert](https://github.com/jonschlinkert) |
| 4 | [paulmillr](https://github.com/paulmillr) |
| 2 | [realityking](https://github.com/realityking) |
| 2 | [bluelovers](https://github.com/bluelovers) |
| 1 | [edorivai](https://github.com/edorivai) |
| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) |
### Author
**Jon Schlinkert**
* [GitHub Profile](https://github.com/jonschlinkert)
* [Twitter Profile](https://twitter.com/jonschlinkert)
* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert)
Please consider supporting me on Patreon, or [start your own Patreon page](https://patreon.com/invite/bxpbvm)!
<a href="https://www.patreon.com/jonschlinkert">
<img src="https://c5.patreon.com/external/logo/become_a_patron_button@2x.png" height="50">
</a>
### License
Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert).
Released under the [MIT License](LICENSE).
***
_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._

249
src/node_modules/fill-range/index.js generated vendored
View File

@ -1,249 +0,0 @@
/*!
* fill-range <https://github.com/jonschlinkert/fill-range>
*
* Copyright (c) 2014-present, Jon Schlinkert.
* Licensed under the MIT License.
*/
'use strict';
const util = require('util');
const toRegexRange = require('to-regex-range');
const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
const transform = toNumber => {
return value => toNumber === true ? Number(value) : String(value);
};
const isValidValue = value => {
return typeof value === 'number' || (typeof value === 'string' && value !== '');
};
const isNumber = num => Number.isInteger(+num);
const zeros = input => {
let value = `${input}`;
let index = -1;
if (value[0] === '-') value = value.slice(1);
if (value === '0') return false;
while (value[++index] === '0');
return index > 0;
};
const stringify = (start, end, options) => {
if (typeof start === 'string' || typeof end === 'string') {
return true;
}
return options.stringify === true;
};
const pad = (input, maxLength, toNumber) => {
if (maxLength > 0) {
let dash = input[0] === '-' ? '-' : '';
if (dash) input = input.slice(1);
input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0'));
}
if (toNumber === false) {
return String(input);
}
return input;
};
const toMaxLen = (input, maxLength) => {
let negative = input[0] === '-' ? '-' : '';
if (negative) {
input = input.slice(1);
maxLength--;
}
while (input.length < maxLength) input = '0' + input;
return negative ? ('-' + input) : input;
};
const toSequence = (parts, options) => {
parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);
parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);
let prefix = options.capture ? '' : '?:';
let positives = '';
let negatives = '';
let result;
if (parts.positives.length) {
positives = parts.positives.join('|');
}
if (parts.negatives.length) {
negatives = `-(${prefix}${parts.negatives.join('|')})`;
}
if (positives && negatives) {
result = `${positives}|${negatives}`;
} else {
result = positives || negatives;
}
if (options.wrap) {
return `(${prefix}${result})`;
}
return result;
};
const toRange = (a, b, isNumbers, options) => {
if (isNumbers) {
return toRegexRange(a, b, { wrap: false, ...options });
}
let start = String.fromCharCode(a);
if (a === b) return start;
let stop = String.fromCharCode(b);
return `[${start}-${stop}]`;
};
const toRegex = (start, end, options) => {
if (Array.isArray(start)) {
let wrap = options.wrap === true;
let prefix = options.capture ? '' : '?:';
return wrap ? `(${prefix}${start.join('|')})` : start.join('|');
}
return toRegexRange(start, end, options);
};
const rangeError = (...args) => {
return new RangeError('Invalid range arguments: ' + util.inspect(...args));
};
const invalidRange = (start, end, options) => {
if (options.strictRanges === true) throw rangeError([start, end]);
return [];
};
const invalidStep = (step, options) => {
if (options.strictRanges === true) {
throw new TypeError(`Expected step "${step}" to be a number`);
}
return [];
};
const fillNumbers = (start, end, step = 1, options = {}) => {
let a = Number(start);
let b = Number(end);
if (!Number.isInteger(a) || !Number.isInteger(b)) {
if (options.strictRanges === true) throw rangeError([start, end]);
return [];
}
// fix negative zero
if (a === 0) a = 0;
if (b === 0) b = 0;
let descending = a > b;
let startString = String(start);
let endString = String(end);
let stepString = String(step);
step = Math.max(Math.abs(step), 1);
let padded = zeros(startString) || zeros(endString) || zeros(stepString);
let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0;
let toNumber = padded === false && stringify(start, end, options) === false;
let format = options.transform || transform(toNumber);
if (options.toRegex && step === 1) {
return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options);
}
let parts = { negatives: [], positives: [] };
let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num));
let range = [];
let index = 0;
while (descending ? a >= b : a <= b) {
if (options.toRegex === true && step > 1) {
push(a);
} else {
range.push(pad(format(a, index), maxLen, toNumber));
}
a = descending ? a - step : a + step;
index++;
}
if (options.toRegex === true) {
return step > 1
? toSequence(parts, options)
: toRegex(range, null, { wrap: false, ...options });
}
return range;
};
const fillLetters = (start, end, step = 1, options = {}) => {
if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) {
return invalidRange(start, end, options);
}
let format = options.transform || (val => String.fromCharCode(val));
let a = `${start}`.charCodeAt(0);
let b = `${end}`.charCodeAt(0);
let descending = a > b;
let min = Math.min(a, b);
let max = Math.max(a, b);
if (options.toRegex && step === 1) {
return toRange(min, max, false, options);
}
let range = [];
let index = 0;
while (descending ? a >= b : a <= b) {
range.push(format(a, index));
a = descending ? a - step : a + step;
index++;
}
if (options.toRegex === true) {
return toRegex(range, null, { wrap: false, options });
}
return range;
};
const fill = (start, end, step, options = {}) => {
if (end == null && isValidValue(start)) {
return [start];
}
if (!isValidValue(start) || !isValidValue(end)) {
return invalidRange(start, end, options);
}
if (typeof step === 'function') {
return fill(start, end, 1, { transform: step });
}
if (isObject(step)) {
return fill(start, end, 0, step);
}
let opts = { ...options };
if (opts.capture === true) opts.wrap = true;
step = step || opts.step || 1;
if (!isNumber(step)) {
if (step != null && !isObject(step)) return invalidStep(step, opts);
return fill(start, end, 1, step);
}
if (isNumber(start) && isNumber(end)) {
return fillNumbers(start, end, step, opts);
}
return fillLetters(start, end, Math.max(Math.abs(step), 1), opts);
};
module.exports = fill;
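
A quick usage sketch for the `fill` export above, assuming the file is required as the published `fill-range` package; the expected results follow from the numeric, letter, padding, and `toRegex` branches shown in the source:

```js
// Usage sketch (assumption: this file is loaded as the 'fill-range' package).
const fill = require('fill-range');

fill('1', '5');                //=> ['1', '2', '3', '4', '5']
fill('a', 'e');                //=> ['a', 'b', 'c', 'd', 'e']
fill('01', '05');              //=> ['01', '02', '03', '04', '05']  (zero-padding is preserved)
fill(1, 10, { step: 2 });      //=> [1, 3, 5, 7, 9]
fill(1, 5, { toRegex: true }); //=> '[1-5]'  (regex-compatible range via to-regex-range)
```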

View File

@ -1,69 +0,0 @@
{
"name": "fill-range",
"description": "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`",
"version": "7.0.1",
"homepage": "https://github.com/jonschlinkert/fill-range",
"author": "Jon Schlinkert (https://github.com/jonschlinkert)",
"contributors": [
"Edo Rivai (edo.rivai.nl)",
"Jon Schlinkert (http://twitter.com/jonschlinkert)",
"Paul Miller (paulmillr.com)",
"Rouven Weßling (www.rouvenwessling.de)",
"(https://github.com/wtgtybhertgeghgtwtg)"
],
"repository": "jonschlinkert/fill-range",
"bugs": {
"url": "https://github.com/jonschlinkert/fill-range/issues"
},
"license": "MIT",
"files": [
"index.js"
],
"main": "index.js",
"engines": {
"node": ">=8"
},
"scripts": {
"test": "mocha"
},
"dependencies": {
"to-regex-range": "^5.0.1"
},
"devDependencies": {
"gulp-format-md": "^2.0.0",
"mocha": "^6.1.1"
},
"keywords": [
"alpha",
"alphabetical",
"array",
"bash",
"brace",
"expand",
"expansion",
"fill",
"glob",
"match",
"matches",
"matching",
"number",
"numerical",
"range",
"ranges",
"regex",
"sh"
],
"verb": {
"toc": false,
"layout": "default",
"tasks": [
"readme"
],
"plugins": [
"gulp-format-md"
],
"lint": {
"reflinks": true
}
}
}

View File

@ -1,110 +0,0 @@
### [5.1.2](https://github.com/gulpjs/glob-parent/compare/v5.1.1...v5.1.2) (2021-03-06)
### Bug Fixes
* eliminate ReDoS ([#36](https://github.com/gulpjs/glob-parent/issues/36)) ([f923116](https://github.com/gulpjs/glob-parent/commit/f9231168b0041fea3f8f954b3cceb56269fc6366))
### [5.1.1](https://github.com/gulpjs/glob-parent/compare/v5.1.0...v5.1.1) (2021-01-27)
### Bug Fixes
* unescape exclamation mark ([#26](https://github.com/gulpjs/glob-parent/issues/26)) ([a98874f](https://github.com/gulpjs/glob-parent/commit/a98874f1a59e407f4fb1beb0db4efa8392da60bb))
## [5.1.0](https://github.com/gulpjs/glob-parent/compare/v5.0.0...v5.1.0) (2021-01-27)
### Features
* add `flipBackslashes` option to disable auto conversion of slashes (closes [#24](https://github.com/gulpjs/glob-parent/issues/24)) ([#25](https://github.com/gulpjs/glob-parent/issues/25)) ([eecf91d](https://github.com/gulpjs/glob-parent/commit/eecf91d5e3834ed78aee39c4eaaae654d76b87b3))
## [5.0.0](https://github.com/gulpjs/glob-parent/compare/v4.0.0...v5.0.0) (2021-01-27)
### ⚠ BREAKING CHANGES
* Drop support for node <6 & bump dependencies
### Miscellaneous Chores
* Drop support for node <6 & bump dependencies ([896c0c0](https://github.com/gulpjs/glob-parent/commit/896c0c00b4e7362f60b96e7fc295ae929245255a))
## [4.0.0](https://github.com/gulpjs/glob-parent/compare/v3.1.0...v4.0.0) (2021-01-27)
### ⚠ BREAKING CHANGES
* question marks are valid path characters on Windows so avoid flagging as a glob when alone
* Update is-glob dependency
### Features
* hoist regexps and strings for performance gains ([4a80667](https://github.com/gulpjs/glob-parent/commit/4a80667c69355c76a572a5892b0f133c8e1f457e))
* question marks are valid path characters on Windows so avoid flagging as a glob when alone ([2a551dd](https://github.com/gulpjs/glob-parent/commit/2a551dd0dc3235e78bf3c94843d4107072d17841))
* Update is-glob dependency ([e41fcd8](https://github.com/gulpjs/glob-parent/commit/e41fcd895d1f7bc617dba45c9d935a7949b9c281))
## [3.1.0](https://github.com/gulpjs/glob-parent/compare/v3.0.1...v3.1.0) (2021-01-27)
### Features
* allow basic win32 backslash use ([272afa5](https://github.com/gulpjs/glob-parent/commit/272afa5fd070fc0f796386a5993d4ee4a846988b))
* handle extglobs (parentheses) containing separators ([7db1bdb](https://github.com/gulpjs/glob-parent/commit/7db1bdb0756e55fd14619e8ce31aa31b17b117fd))
* new approach to braces/brackets handling ([8269bd8](https://github.com/gulpjs/glob-parent/commit/8269bd89290d99fac9395a354fb56fdcdb80f0be))
* pre-process braces/brackets sections ([9ef8a87](https://github.com/gulpjs/glob-parent/commit/9ef8a87f66b1a43d0591e7a8e4fc5a18415ee388))
* preserve escaped brace/bracket at end of string ([8cfb0ba](https://github.com/gulpjs/glob-parent/commit/8cfb0ba84202d51571340dcbaf61b79d16a26c76))
### Bug Fixes
* trailing escaped square brackets ([99ec9fe](https://github.com/gulpjs/glob-parent/commit/99ec9fecc60ee488ded20a94dd4f18b4f55c4ccf))
### [3.0.1](https://github.com/gulpjs/glob-parent/compare/v3.0.0...v3.0.1) (2021-01-27)
### Features
* use path-dirname ponyfill ([cdbea5f](https://github.com/gulpjs/glob-parent/commit/cdbea5f32a58a54e001a75ddd7c0fccd4776aacc))
### Bug Fixes
* unescape glob-escaped dirnames on output ([598c533](https://github.com/gulpjs/glob-parent/commit/598c533bdf49c1428bc063aa9b8db40c5a86b030))
## [3.0.0](https://github.com/gulpjs/glob-parent/compare/v2.0.0...v3.0.0) (2021-01-27)
### ⚠ BREAKING CHANGES
* update is-glob dependency
### Features
* update is-glob dependency ([5c5f8ef](https://github.com/gulpjs/glob-parent/commit/5c5f8efcee362a8e7638cf8220666acd8784f6bd))
## [2.0.0](https://github.com/gulpjs/glob-parent/compare/v1.3.0...v2.0.0) (2021-01-27)
### Features
* move up to dirname regardless of glob characters ([f97fb83](https://github.com/gulpjs/glob-parent/commit/f97fb83be2e0a9fc8d3b760e789d2ecadd6aa0c2))
## [1.3.0](https://github.com/gulpjs/glob-parent/compare/v1.2.0...v1.3.0) (2021-01-27)
## [1.2.0](https://github.com/gulpjs/glob-parent/compare/v1.1.0...v1.2.0) (2021-01-27)
### Reverts
* feat: make regex test strings smaller ([dc80fa9](https://github.com/gulpjs/glob-parent/commit/dc80fa9658dca20549cfeba44bbd37d5246fcce0))
## [1.1.0](https://github.com/gulpjs/glob-parent/compare/v1.0.0...v1.1.0) (2021-01-27)
### Features
* make regex test strings smaller ([cd83220](https://github.com/gulpjs/glob-parent/commit/cd832208638f45169f986d80fcf66e401f35d233))
## 1.0.0 (2021-01-27)

15
src/node_modules/glob-parent/LICENSE generated vendored
View File

@ -1,15 +0,0 @@
The ISC License
Copyright (c) 2015, 2019 Elan Shanker
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

View File

@ -1,137 +0,0 @@
<p align="center">
<a href="https://gulpjs.com">
<img height="257" width="114" src="https://raw.githubusercontent.com/gulpjs/artwork/master/gulp-2x.png">
</a>
</p>
# glob-parent
[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Azure Pipelines Build Status][azure-pipelines-image]][azure-pipelines-url] [![Travis Build Status][travis-image]][travis-url] [![AppVeyor Build Status][appveyor-image]][appveyor-url] [![Coveralls Status][coveralls-image]][coveralls-url] [![Gitter chat][gitter-image]][gitter-url]
Extract the non-magic parent path from a glob string.
## Usage
```js
var globParent = require('glob-parent');
globParent('path/to/*.js'); // 'path/to'
globParent('/root/path/to/*.js'); // '/root/path/to'
globParent('/*.js'); // '/'
globParent('*.js'); // '.'
globParent('**/*.js'); // '.'
globParent('path/{to,from}'); // 'path'
globParent('path/!(to|from)'); // 'path'
globParent('path/?(to|from)'); // 'path'
globParent('path/+(to|from)'); // 'path'
globParent('path/*(to|from)'); // 'path'
globParent('path/@(to|from)'); // 'path'
globParent('path/**/*'); // 'path'
// if provided a non-glob path, returns the nearest dir
globParent('path/foo/bar.js'); // 'path/foo'
globParent('path/foo/'); // 'path/foo'
globParent('path/foo'); // 'path' (see issue #3 for details)
```
## API
### `globParent(maybeGlobString, [options])`
Takes a string and returns the part of the path before the glob begins. Be aware of Escaping rules and Limitations below.
#### options
```js
{
// Disables the automatic conversion of slashes for Windows
flipBackslashes: true
}
```
## Escaping
The following characters have special significance in glob patterns and must be escaped if you want them to be treated as regular path characters:
- `?` (question mark) unless used as a path segment alone
- `*` (asterisk)
- `|` (pipe)
- `(` (opening parenthesis)
- `)` (closing parenthesis)
- `{` (opening curly brace)
- `}` (closing curly brace)
- `[` (opening bracket)
- `]` (closing bracket)
**Example**
```js
globParent('foo/[bar]/') // 'foo'
globParent('foo/\\[bar]/') // 'foo/[bar]'
```
## Limitations
### Braces & Brackets
This library attempts a quick and imperfect method of determining which path
parts have glob magic without fully parsing/lexing the pattern. There are some
advanced use cases that can trip it up, such as nested braces where the outer
pair is escaped and the inner one contains a path separator. If you find
yourself in the unlikely circumstance of being affected by this or need to
ensure higher-fidelity glob handling in your library, it is recommended that you
pre-process your input with [expand-braces] and/or [expand-brackets].
### Windows
Backslashes are not valid path separators for globs. If a path with backslashes
is provided anyway, for simple cases, glob-parent will replace the path
separator for you and return the non-glob parent path (now with
forward-slashes, which are still valid as Windows path separators).
This cannot be used in conjunction with escape characters.
```js
// BAD
globParent('C:\\Program Files \\(x86\\)\\*.ext') // 'C:/Program Files /(x86/)'
// GOOD
globParent('C:/Program Files\\(x86\\)/*.ext') // 'C:/Program Files (x86)'
```
If you are using escape characters for a pattern without path parts (i.e.
relative to `cwd`), prefix with `./` to avoid confusing glob-parent.
```js
// BAD
globParent('foo \\[bar]') // 'foo '
globParent('foo \\[bar]*') // 'foo '
// GOOD
globParent('./foo \\[bar]') // 'foo [bar]'
globParent('./foo \\[bar]*') // '.'
```
## License
ISC
[expand-braces]: https://github.com/jonschlinkert/expand-braces
[expand-brackets]: https://github.com/jonschlinkert/expand-brackets
[downloads-image]: https://img.shields.io/npm/dm/glob-parent.svg
[npm-url]: https://www.npmjs.com/package/glob-parent
[npm-image]: https://img.shields.io/npm/v/glob-parent.svg
[azure-pipelines-url]: https://dev.azure.com/gulpjs/gulp/_build/latest?definitionId=2&branchName=master
[azure-pipelines-image]: https://dev.azure.com/gulpjs/gulp/_apis/build/status/glob-parent?branchName=master
[travis-url]: https://travis-ci.org/gulpjs/glob-parent
[travis-image]: https://img.shields.io/travis/gulpjs/glob-parent.svg?label=travis-ci
[appveyor-url]: https://ci.appveyor.com/project/gulpjs/glob-parent
[appveyor-image]: https://img.shields.io/appveyor/ci/gulpjs/glob-parent.svg?label=appveyor
[coveralls-url]: https://coveralls.io/r/gulpjs/glob-parent
[coveralls-image]: https://img.shields.io/coveralls/gulpjs/glob-parent/master.svg
[gitter-url]: https://gitter.im/gulpjs/gulp
[gitter-image]: https://badges.gitter.im/gulpjs/gulp.svg

View File

@ -1,42 +0,0 @@
'use strict';
var isGlob = require('is-glob');
var pathPosixDirname = require('path').posix.dirname;
var isWin32 = require('os').platform() === 'win32';
var slash = '/';
var backslash = /\\/g;
var enclosure = /[\{\[].*[\}\]]$/;
var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/;
var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g;
/**
* @param {string} str
* @param {Object} opts
* @param {boolean} [opts.flipBackslashes=true]
* @returns {string}
*/
module.exports = function globParent(str, opts) {
var options = Object.assign({ flipBackslashes: true }, opts);
// flip windows path separators
if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) {
str = str.replace(backslash, slash);
}
// special case for strings ending in enclosure containing path separator
if (enclosure.test(str)) {
str += slash;
}
// preserves full path in case of trailing path separator
str += 'a';
// remove path parts that are globby
do {
str = pathPosixDirname(str);
} while (isGlob(str) || globby.test(str));
// remove escape chars and return result
return str.replace(escaped, '$1');
};
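
A short behaviour sketch for the module above, assuming it is required as `glob-parent`; the expected outputs mirror the examples in the README earlier in this diff:

```js
// Behaviour sketch (assumption: this file is loaded as the 'glob-parent' package).
const globParent = require('glob-parent');

globParent('path/to/*.js');    //=> 'path/to'  (dirname applied until no glob magic remains)
globParent('path/{to,from}');  //=> 'path'     (trailing enclosure gets an extra '/' first)
globParent('path/foo/bar.js'); //=> 'path/foo' (non-glob input: nearest directory)
```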

View File

@ -1,48 +0,0 @@
{
"name": "glob-parent",
"version": "5.1.2",
"description": "Extract the non-magic parent path from a glob string.",
"author": "Gulp Team <team@gulpjs.com> (https://gulpjs.com/)",
"contributors": [
"Elan Shanker (https://github.com/es128)",
"Blaine Bublitz <blaine.bublitz@gmail.com>"
],
"repository": "gulpjs/glob-parent",
"license": "ISC",
"engines": {
"node": ">= 6"
},
"main": "index.js",
"files": [
"LICENSE",
"index.js"
],
"scripts": {
"lint": "eslint .",
"pretest": "npm run lint",
"test": "nyc mocha --async-only",
"azure-pipelines": "nyc mocha --async-only --reporter xunit -O output=test.xunit",
"coveralls": "nyc report --reporter=text-lcov | coveralls"
},
"dependencies": {
"is-glob": "^4.0.1"
},
"devDependencies": {
"coveralls": "^3.0.11",
"eslint": "^2.13.1",
"eslint-config-gulp": "^3.0.1",
"expect": "^1.20.2",
"mocha": "^6.0.2",
"nyc": "^13.3.0"
},
"keywords": [
"glob",
"parent",
"strip",
"path",
"dirname",
"directory",
"base",
"wildcard"
]
}

8
src/node_modules/has-flag/index.js generated vendored
View File

@ -1,8 +0,0 @@
'use strict';
module.exports = (flag, argv) => {
argv = argv || process.argv;
const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--');
const pos = argv.indexOf(prefix + flag);
const terminatorPos = argv.indexOf('--');
return pos !== -1 && (terminatorPos === -1 ? true : pos < terminatorPos);
};
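
A minimal sketch of the `--` terminator handling above, with `argv` passed explicitly so the position check is visible:

```js
// Terminator sketch (assumption: this file is loaded as the 'has-flag' package).
const hasFlag = require('has-flag');

hasFlag('unicorn', ['--unicorn', '--', '--rainbow']); //=> true  (flag appears before '--')
hasFlag('rainbow', ['--unicorn', '--', '--rainbow']); //=> false (flag only appears after '--')
```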

9
src/node_modules/has-flag/license generated vendored
View File

@ -1,9 +0,0 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -1,44 +0,0 @@
{
"name": "has-flag",
"version": "3.0.0",
"description": "Check if argv has a specific flag",
"license": "MIT",
"repository": "sindresorhus/has-flag",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "sindresorhus.com"
},
"engines": {
"node": ">=4"
},
"scripts": {
"test": "xo && ava"
},
"files": [
"index.js"
],
"keywords": [
"has",
"check",
"detect",
"contains",
"find",
"flag",
"cli",
"command-line",
"argv",
"process",
"arg",
"args",
"argument",
"arguments",
"getopt",
"minimist",
"optimist"
],
"devDependencies": {
"ava": "*",
"xo": "*"
}
}

70
src/node_modules/has-flag/readme.md generated vendored
View File

@ -1,70 +0,0 @@
# has-flag [![Build Status](https://travis-ci.org/sindresorhus/has-flag.svg?branch=master)](https://travis-ci.org/sindresorhus/has-flag)
> Check if [`argv`](https://nodejs.org/docs/latest/api/process.html#process_process_argv) has a specific flag
Correctly stops looking after an `--` argument terminator.
## Install
```
$ npm install has-flag
```
## Usage
```js
// foo.js
const hasFlag = require('has-flag');
hasFlag('unicorn');
//=> true
hasFlag('--unicorn');
//=> true
hasFlag('f');
//=> true
hasFlag('-f');
//=> true
hasFlag('foo=bar');
//=> true
hasFlag('foo');
//=> false
hasFlag('rainbow');
//=> false
```
```
$ node foo.js -f --unicorn --foo=bar -- --rainbow
```
## API
### hasFlag(flag, [argv])
Returns a boolean for whether the flag exists.
#### flag
Type: `string`
CLI flag to look for. The `--` prefix is optional.
#### argv
Type: `string[]`<br>
Default: `process.argv`
CLI arguments.
## License
MIT © [Sindre Sorhus](https://sindresorhus.com)

View File

@ -1,14 +0,0 @@
ISC License (ISC)
Copyright (c) 2016, Mark Wubben
Permission to use, copy, modify, and/or distribute this software for any purpose
with or without fee is hereby granted, provided that the above copyright notice
and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.

View File

@ -1,26 +0,0 @@
# ignore-by-default
This is a package aimed at Node.js development tools. It provides a list of
directories that should probably be ignored by such tools, e.g. when watching
for file changes.
It's used by [AVA](https://www.npmjs.com/package/ava) and
[nodemon](https://www.npmjs.com/package/nodemon).
[Please contribute!](./CONTRIBUTING.md)
## Installation
```
npm install --save ignore-by-default
```
## Usage
The `ignore-by-default` module exports a `directories()` function, which will
return an array of directory names. These are the ones you should ignore.
```js
// ['.git', '.sass_cache', …]
var ignoredDirectories = require('ignore-by-default').directories()
```

View File

@ -1,12 +0,0 @@
'use strict'
exports.directories = function () {
return [
'.git', // Git repository files, see <https://git-scm.com/>
'.nyc_output', // Temporary directory where nyc stores coverage data, see <https://github.com/bcoe/nyc>
'.sass-cache', // Cache folder for node-sass, see <https://github.com/sass/node-sass>
'bower_components', // Where Bower packages are installed, see <http://bower.io/>
'coverage', // Standard output directory for code coverage reports, see <https://github.com/gotwarlost/istanbul>
'node_modules' // Where Node modules are installed, see <https://nodejs.org/>
]
}
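
One common way the directory list above is consumed is to turn it into ignore globs for a file watcher; the `**/<dir>/**` pattern shape below is an illustration, not part of this package:

```js
// Sketch only: mapping the default directory names to ignore globs
// (the '**/<dir>/**' shape is an assumption, not defined by ignore-by-default).
const ignoreByDefault = require('ignore-by-default');

const ignoredGlobs = ignoreByDefault.directories().map(dir => `**/${dir}/**`);
// e.g. ['**/.git/**', '**/.nyc_output/**', '**/.sass-cache/**',
//       '**/bower_components/**', '**/coverage/**', '**/node_modules/**']
```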

View File

@ -1,34 +0,0 @@
{
"name": "ignore-by-default",
"version": "1.0.1",
"description": "A list of directories you should ignore by default",
"main": "index.js",
"files": [
"index.js"
],
"scripts": {
"test": "standard && node test.js"
},
"repository": {
"type": "git",
"url": "git+https://github.com/novemberborn/ignore-by-default.git"
},
"keywords": [
"ignore",
"chokidar",
"watcher",
"exclude",
"glob",
"pattern"
],
"author": "Mark Wubben (https://novemberborn.net/)",
"license": "ISC",
"bugs": {
"url": "https://github.com/novemberborn/ignore-by-default/issues"
},
"homepage": "https://github.com/novemberborn/ignore-by-default#readme",
"devDependencies": {
"figures": "^1.4.0",
"standard": "^6.0.4"
}
}

View File

@ -1,17 +0,0 @@
/**
Check if a file path is a binary file.
@example
```
import isBinaryPath = require('is-binary-path');
isBinaryPath('source/unicorn.png');
//=> true
isBinaryPath('source/unicorn.txt');
//=> false
```
*/
declare function isBinaryPath(filePath: string): boolean;
export = isBinaryPath;

View File

@ -1,7 +0,0 @@
'use strict';
const path = require('path');
const binaryExtensions = require('binary-extensions');
const extensions = new Set(binaryExtensions);
module.exports = filePath => extensions.has(path.extname(filePath).slice(1).toLowerCase());
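
A brief sketch of the check above: the extension is lowercased before the Set lookup, so mixed-case file names still match the `binary-extensions` list:

```js
// Case-insensitivity sketch (assumption: this file is loaded as the 'is-binary-path' package).
const isBinaryPath = require('is-binary-path');

isBinaryPath('photos/holiday.JPG'); //=> true  ('.JPG' -> 'jpg' is in binary-extensions)
isBinaryPath('notes/todo.txt');     //=> false
```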

Some files were not shown because too many files have changed in this diff.