downgrade node-fetch

Mylloon 2022-06-23 02:06:38 +02:00
parent 9411af827f
commit d2cd39dd6d
Signed by: Anri
GPG key ID: A82D63DFF8D1317F
152 changed files with 7852 additions and 59339 deletions
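
What the lockfile diff below records: node-fetch moves from 3.2.6 back to 2.6.7, its v3-only dependency tree (data-uri-to-buffer, fetch-blob, formdata-polyfill, node-domexception, web-streams-polyfill) disappears from node_modules, and the v2 chain (whatwg-url, tr46, webidl-conversions) is added. The commit message does not state the motivation, but the practical difference between the two majors is that node-fetch 3.x is ESM-only while 2.x can still be loaded from CommonJS; a rough sketch of that difference (illustrative only, not code from this repository):

```js
// node-fetch 2.x can be required directly from CommonJS code:
const fetch = require('node-fetch')

fetch('https://example.com')
  .then(res => res.text())
  .then(body => console.log(body.length))

// node-fetch 3.x is ESM-only, so CommonJS callers have to fall back to a dynamic import:
// const fetch = (...args) => import('node-fetch').then(({ default: f }) => f(...args))
```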

104 node_modules/.package-lock.json generated vendored

@@ -165,14 +165,6 @@
       "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
       "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ=="
     },
-    "node_modules/data-uri-to-buffer": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.0.tgz",
-      "integrity": "sha512-Vr3mLBA8qWmcuschSLAOogKgQ/Jwxulv3RNE4FXnYWRGujzrRWQI4m12fQqRkwX06C0KanhLr4hK+GydchZsaA==",
-      "engines": {
-        "node": ">= 12"
-      }
-    },
     "node_modules/debug": {
       "version": "2.6.9",
       "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
@@ -279,28 +271,6 @@
         "node": ">= 0.10.0"
       }
     },
-    "node_modules/fetch-blob": {
-      "version": "3.1.5",
-      "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.1.5.tgz",
-      "integrity": "sha512-N64ZpKqoLejlrwkIAnb9iLSA3Vx/kjgzpcDhygcqJ2KKjky8nCgUQ+dzXtbrLaWZGZNmNfQTsiQ0weZ1svglHg==",
-      "funding": [
-        {
-          "type": "github",
-          "url": "https://github.com/sponsors/jimmywarting"
-        },
-        {
-          "type": "paypal",
-          "url": "https://paypal.me/jimmywarting"
-        }
-      ],
-      "dependencies": {
-        "node-domexception": "^1.0.0",
-        "web-streams-polyfill": "^3.0.3"
-      },
-      "engines": {
-        "node": "^12.20 || >= 14.13"
-      }
-    },
     "node_modules/filelist": {
       "version": "1.0.4",
       "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz",
@@ -345,17 +315,6 @@
         "node": ">= 0.8"
       }
     },
-    "node_modules/formdata-polyfill": {
-      "version": "4.0.10",
-      "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz",
-      "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==",
-      "dependencies": {
-        "fetch-blob": "^3.1.2"
-      },
-      "engines": {
-        "node": ">=12.20.0"
-      }
-    },
     "node_modules/forwarded": {
       "version": "0.2.0",
       "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
@@ -562,39 +521,23 @@
         "node": ">= 0.6"
       }
     },
-    "node_modules/node-domexception": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz",
-      "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==",
-      "funding": [
-        {
-          "type": "github",
-          "url": "https://github.com/sponsors/jimmywarting"
-        },
-        {
-          "type": "github",
-          "url": "https://paypal.me/jimmywarting"
-        }
-      ],
-      "engines": {
-        "node": ">=10.5.0"
-      }
-    },
     "node_modules/node-fetch": {
-      "version": "3.2.6",
-      "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.2.6.tgz",
-      "integrity": "sha512-LAy/HZnLADOVkVPubaxHDft29booGglPFDr2Hw0J1AercRh01UiVFm++KMDnJeH9sHgNB4hsXPii7Sgym/sTbw==",
+      "version": "2.6.7",
+      "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz",
+      "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==",
       "dependencies": {
-        "data-uri-to-buffer": "^4.0.0",
-        "fetch-blob": "^3.1.4",
-        "formdata-polyfill": "^4.0.10"
+        "whatwg-url": "^5.0.0"
       },
       "engines": {
-        "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
+        "node": "4.x || >=6.0.0"
       },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/node-fetch"
+      "peerDependencies": {
+        "encoding": "^0.1.0"
+      },
+      "peerDependenciesMeta": {
+        "encoding": {
+          "optional": true
+        }
       }
     },
     "node_modules/object-inspect": {
@@ -788,6 +731,11 @@
         "node": ">=0.6"
       }
     },
+    "node_modules/tr46": {
+      "version": "0.0.3",
+      "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
+      "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="
+    },
     "node_modules/type-is": {
       "version": "1.6.18",
       "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
@@ -824,12 +772,18 @@
         "node": ">= 0.8"
       }
     },
-    "node_modules/web-streams-polyfill": {
-      "version": "3.2.1",
-      "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz",
-      "integrity": "sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==",
-      "engines": {
-        "node": ">= 8"
+    "node_modules/webidl-conversions": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
+      "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="
+    },
+    "node_modules/whatwg-url": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
+      "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
+      "dependencies": {
+        "tr46": "~0.0.3",
+        "webidl-conversions": "^3.0.0"
      }
     }
   }
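
In the last hunk, whatwg-url is node-fetch 2.6.7's only runtime dependency, and tr46 and webidl-conversions are whatwg-url's own dependencies, which is why all three entries appear. A hypothetical snippet (not taken from this repository) of what that stack provides - a spec-compliant URL implementation for Node versions that predate the built-in one:

```js
const { URL } = require('whatwg-url')

// Parses per the WHATWG URL Standard, independently of Node's global URL.
const u = new URL('https://example.com/path?q=1')
console.log(u.hostname) // 'example.com'
console.log(u.pathname) // '/path'
```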


@@ -1,88 +0,0 @@
data-uri-to-buffer
==================
### Generate a Buffer instance from a [Data URI][rfc] string
[![Build Status](https://travis-ci.org/TooTallNate/node-data-uri-to-buffer.svg?branch=master)](https://travis-ci.org/TooTallNate/node-data-uri-to-buffer)
This module accepts a ["data" URI][rfc] String of data, and returns a
node.js `Buffer` instance with the decoded data.
Installation
------------
Install with `npm`:
``` bash
$ npm install data-uri-to-buffer
```
Example
-------
``` js
var dataUriToBuffer = require('data-uri-to-buffer');
// plain-text data is supported
var uri = 'data:,Hello%2C%20World!';
var decoded = dataUriToBuffer(uri);
console.log(decoded.toString());
// 'Hello, World!'
// base64-encoded data is supported
uri = 'data:text/plain;base64,SGVsbG8sIFdvcmxkIQ%3D%3D';
decoded = dataUriToBuffer(uri);
console.log(decoded.toString());
// 'Hello, World!'
```
API
---
### dataUriToBuffer(String uri) → Buffer
The `type` property on the Buffer instance gets set to the main type portion of
the "mediatype" portion of the "data" URI, or defaults to `"text/plain"` if not
specified.
The `typeFull` property on the Buffer instance gets set to the entire
"mediatype" portion of the "data" URI (including all parameters), or defaults
to `"text/plain;charset=US-ASCII"` if not specified.
The `charset` property on the Buffer instance gets set to the Charset portion of
the "mediatype" portion of the "data" URI, or defaults to `"US-ASCII"` if the
entire type is not specified, or defaults to `""` otherwise.
*Note*: If only the main type is specified but not the charset, e.g.
`"data:text/plain,abc"`, the charset is set to the empty string. The spec only
defaults to US-ASCII as the charset if the entire type is not specified.
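Putting those properties together in one short snippet (an illustrative addition, following the require-style example above):
``` js
var dataUriToBuffer = require('data-uri-to-buffer');

var buf = dataUriToBuffer('data:text/html;charset=utf-8,<h1>hi</h1>');
console.log(buf.type);       // 'text/html'
console.log(buf.typeFull);   // 'text/html;charset=utf-8'
console.log(buf.charset);    // 'utf-8'
console.log(buf.toString()); // '<h1>hi</h1>'
```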
License
-------
(The MIT License)
Copyright (c) 2014 Nathan Rajlich <nathan@tootallnate.net>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
[rfc]: http://tools.ietf.org/html/rfc2397


@@ -1,15 +0,0 @@
/// <reference types="node" />
export interface MimeBuffer extends Buffer {
type: string;
typeFull: string;
charset: string;
}
/**
* Returns a `Buffer` instance from the given data URI `uri`.
*
* @param {String} uri Data URI to turn into a Buffer instance
* @returns {Buffer} Buffer instance from Data URI
* @api public
*/
export declare function dataUriToBuffer(uri: string): MimeBuffer;
export default dataUriToBuffer;


@@ -1,53 +0,0 @@
/**
* Returns a `Buffer` instance from the given data URI `uri`.
*
* @param {String} uri Data URI to turn into a Buffer instance
* @returns {Buffer} Buffer instance from Data URI
* @api public
*/
export function dataUriToBuffer(uri) {
if (!/^data:/i.test(uri)) {
throw new TypeError('`uri` does not appear to be a Data URI (must begin with "data:")');
}
// strip newlines
uri = uri.replace(/\r?\n/g, '');
// split the URI up into the "metadata" and the "data" portions
const firstComma = uri.indexOf(',');
if (firstComma === -1 || firstComma <= 4) {
throw new TypeError('malformed data: URI');
}
// remove the "data:" scheme and parse the metadata
const meta = uri.substring(5, firstComma).split(';');
let charset = '';
let base64 = false;
const type = meta[0] || 'text/plain';
let typeFull = type;
for (let i = 1; i < meta.length; i++) {
if (meta[i] === 'base64') {
base64 = true;
}
else {
typeFull += `;${meta[i]}`;
if (meta[i].indexOf('charset=') === 0) {
charset = meta[i].substring(8);
}
}
}
// defaults to US-ASCII only if type is not provided
if (!meta[0] && !charset.length) {
typeFull += ';charset=US-ASCII';
charset = 'US-ASCII';
}
// get the encoded data portion and decode URI-encoded chars
const encoding = base64 ? 'base64' : 'ascii';
const data = unescape(uri.substring(firstComma + 1));
const buffer = Buffer.from(data, encoding);
// set `.type` and `.typeFull` properties to MIME type
buffer.type = type;
buffer.typeFull = typeFull;
// set the `.charset` property
buffer.charset = charset;
return buffer;
}
export default dataUriToBuffer;
//# sourceMappingURL=index.js.map


@@ -1 +0,0 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAMA;;;;;;GAMG;AACH,MAAM,UAAU,eAAe,CAAC,GAAW;IAC1C,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QACzB,MAAM,IAAI,SAAS,CAClB,kEAAkE,CAClE,CAAC;KACF;IAED,iBAAiB;IACjB,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAC;IAEhC,+DAA+D;IAC/D,MAAM,UAAU,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACpC,IAAI,UAAU,KAAK,CAAC,CAAC,IAAI,UAAU,IAAI,CAAC,EAAE;QACzC,MAAM,IAAI,SAAS,CAAC,qBAAqB,CAAC,CAAC;KAC3C;IAED,mDAAmD;IACnD,MAAM,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAErD,IAAI,OAAO,GAAG,EAAE,CAAC;IACjB,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,MAAM,IAAI,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,YAAY,CAAC;IACrC,IAAI,QAAQ,GAAG,IAAI,CAAC;IACpB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACrC,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,QAAQ,EAAE;YACzB,MAAM,GAAG,IAAI,CAAC;SACd;aAAM;YACN,QAAQ,IAAI,IAAM,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;YAC5B,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;gBACtC,OAAO,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;aAC/B;SACD;KACD;IACD,oDAAoD;IACpD,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE;QAChC,QAAQ,IAAI,mBAAmB,CAAC;QAChC,OAAO,GAAG,UAAU,CAAC;KACrB;IAED,4DAA4D;IAC5D,MAAM,QAAQ,GAAG,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC;IAC7C,MAAM,IAAI,GAAG,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IACrD,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAe,CAAC;IAEzD,sDAAsD;IACtD,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC;IACnB,MAAM,CAAC,QAAQ,GAAG,QAAQ,CAAC;IAE3B,8BAA8B;IAC9B,MAAM,CAAC,OAAO,GAAG,OAAO,CAAC;IAEzB,OAAO,MAAM,CAAC;AACf,CAAC;AAED,eAAe,eAAe,CAAC"}


@@ -1,15 +0,0 @@
/// <reference types="node" />
export interface MimeBuffer extends Buffer {
type: string;
typeFull: string;
charset: string;
}
/**
* Returns a `Buffer` instance from the given data URI `uri`.
*
* @param {String} uri Data URI to turn into a Buffer instance
* @returns {Buffer} Buffer instance from Data URI
* @api public
*/
export declare function dataUriToBuffer(uri: string): MimeBuffer;
export default dataUriToBuffer;


@@ -1,53 +0,0 @@
/**
* Returns a `Buffer` instance from the given data URI `uri`.
*
* @param {String} uri Data URI to turn into a Buffer instance
* @returns {Buffer} Buffer instance from Data URI
* @api public
*/
export function dataUriToBuffer(uri) {
if (!/^data:/i.test(uri)) {
throw new TypeError('`uri` does not appear to be a Data URI (must begin with "data:")');
}
// strip newlines
uri = uri.replace(/\r?\n/g, '');
// split the URI up into the "metadata" and the "data" portions
const firstComma = uri.indexOf(',');
if (firstComma === -1 || firstComma <= 4) {
throw new TypeError('malformed data: URI');
}
// remove the "data:" scheme and parse the metadata
const meta = uri.substring(5, firstComma).split(';');
let charset = '';
let base64 = false;
const type = meta[0] || 'text/plain';
let typeFull = type;
for (let i = 1; i < meta.length; i++) {
if (meta[i] === 'base64') {
base64 = true;
}
else {
typeFull += `;${meta[i]}`;
if (meta[i].indexOf('charset=') === 0) {
charset = meta[i].substring(8);
}
}
}
// defaults to US-ASCII only if type is not provided
if (!meta[0] && !charset.length) {
typeFull += ';charset=US-ASCII';
charset = 'US-ASCII';
}
// get the encoded data portion and decode URI-encoded chars
const encoding = base64 ? 'base64' : 'ascii';
const data = unescape(uri.substring(firstComma + 1));
const buffer = Buffer.from(data, encoding);
// set `.type` and `.typeFull` properties to MIME type
buffer.type = type;
buffer.typeFull = typeFull;
// set the `.charset` property
buffer.charset = charset;
return buffer;
}
export default dataUriToBuffer;
//# sourceMappingURL=index.js.map


@@ -1 +0,0 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAMA;;;;;;GAMG;AACH,MAAM,UAAU,eAAe,CAAC,GAAW;IAC1C,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QACzB,MAAM,IAAI,SAAS,CAClB,kEAAkE,CAClE,CAAC;KACF;IAED,iBAAiB;IACjB,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAC;IAEhC,+DAA+D;IAC/D,MAAM,UAAU,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACpC,IAAI,UAAU,KAAK,CAAC,CAAC,IAAI,UAAU,IAAI,CAAC,EAAE;QACzC,MAAM,IAAI,SAAS,CAAC,qBAAqB,CAAC,CAAC;KAC3C;IAED,mDAAmD;IACnD,MAAM,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAErD,IAAI,OAAO,GAAG,EAAE,CAAC;IACjB,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,MAAM,IAAI,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,YAAY,CAAC;IACrC,IAAI,QAAQ,GAAG,IAAI,CAAC;IACpB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACrC,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,QAAQ,EAAE;YACzB,MAAM,GAAG,IAAI,CAAC;SACd;aAAM;YACN,QAAQ,IAAI,IAAM,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;YAC5B,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;gBACtC,OAAO,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;aAC/B;SACD;KACD;IACD,oDAAoD;IACpD,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE;QAChC,QAAQ,IAAI,mBAAmB,CAAC;QAChC,OAAO,GAAG,UAAU,CAAC;KACrB;IAED,4DAA4D;IAC5D,MAAM,QAAQ,GAAG,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC;IAC7C,MAAM,IAAI,GAAG,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IACrD,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAe,CAAC;IAEzD,sDAAsD;IACtD,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC;IACnB,MAAM,CAAC,QAAQ,GAAG,QAAQ,CAAC;IAE3B,8BAA8B;IAC9B,MAAM,CAAC,OAAO,GAAG,OAAO,CAAC;IAEzB,OAAO,MAAM,CAAC;AACf,CAAC;AAED,eAAe,eAAe,CAAC"}


@@ -1,4 +0,0 @@
/**
* Module dependencies.
*/
export {};


@@ -1,144 +0,0 @@
/**
* Module dependencies.
*/
import assert from 'assert';
import dataUriToBuffer from '../src';
describe('data-uri-to-buffer', function () {
it('should decode bare-bones Data URIs', function () {
var uri = 'data:,Hello%2C%20World!';
var buf = dataUriToBuffer(uri);
assert.equal('text/plain', buf.type);
assert.equal('text/plain;charset=US-ASCII', buf.typeFull);
assert.equal('US-ASCII', buf.charset);
assert.equal('Hello, World!', buf.toString());
});
it('should decode bare-bones "base64" Data URIs', function () {
var uri = 'data:text/plain;base64,SGVsbG8sIFdvcmxkIQ%3D%3D';
var buf = dataUriToBuffer(uri);
assert.equal('text/plain', buf.type);
assert.equal('Hello, World!', buf.toString());
});
it('should decode plain-text Data URIs', function () {
var html = '<!DOCTYPE html>' +
'<html lang="en">' +
'<head><title>Embedded Window</title></head>' +
'<body><h1>42</h1></body>' +
'</html>';
// Escape the HTML for URL formatting
var uri = 'data:text/html;charset=utf-8,' + encodeURIComponent(html);
var buf = dataUriToBuffer(uri);
assert.equal('text/html', buf.type);
assert.equal('utf-8', buf.charset);
assert.equal(html, buf.toString());
});
// the next 4 tests are from:
// https://bug161965.bugzilla.mozilla.org/attachment.cgi?id=94670&action=view
it('should decode "ISO-8859-8 in Base64" URIs', function () {
var uri = 'data:text/plain;charset=iso-8859-8-i;base64,+ezl7Q==';
var buf = dataUriToBuffer(uri);
assert.equal('text/plain', buf.type);
assert.equal('iso-8859-8-i', buf.charset);
assert.equal(4, buf.length);
assert.equal(0xf9, buf[0]);
assert.equal(0xec, buf[1]);
assert.equal(0xe5, buf[2]);
assert.equal(0xed, buf[3]);
});
it('should decode "ISO-8859-8 in URL-encoding" URIs', function () {
var uri = 'data:text/plain;charset=iso-8859-8-i,%f9%ec%e5%ed';
var buf = dataUriToBuffer(uri);
assert.equal('text/plain', buf.type);
assert.equal('iso-8859-8-i', buf.charset);
assert.equal(4, buf.length);
assert.equal(0xf9, buf[0]);
assert.equal(0xec, buf[1]);
assert.equal(0xe5, buf[2]);
assert.equal(0xed, buf[3]);
});
it('should decode "UTF-8 in Base64" URIs', function () {
var uri = 'data:text/plain;charset=UTF-8;base64,16nXnNeV150=';
var buf = dataUriToBuffer(uri);
assert.equal('text/plain', buf.type);
assert.equal('UTF-8', buf.charset);
assert.equal(8, buf.length);
assert.equal('שלום', buf.toString('utf8'));
});
it('should decode "UTF-8 in URL-encoding" URIs', function () {
var uri = 'data:text/plain;charset=UTF-8,%d7%a9%d7%9c%d7%95%d7%9d';
var buf = dataUriToBuffer(uri);
assert.equal('text/plain', buf.type);
assert.equal('UTF-8', buf.charset);
assert.equal(8, buf.length);
assert.equal('שלום', buf.toString('utf8'));
});
// this next one is from Wikipedia IIRC
it('should decode "base64" Data URIs with newlines', function () {
var uri = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUA\n' +
'AAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO\n' +
'9TXL0Y4OHwAAAABJRU5ErkJggg==';
var buf = dataUriToBuffer(uri);
assert.equal('image/png', buf.type);
assert.equal('iVBORw0KGgoAAAANSUhEUgAAAAUA' +
'AAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO' +
'9TXL0Y4OHwAAAABJRU5ErkJggg==', buf.toString('base64'));
});
it('should decode a plain-text URI with a space character in it', function () {
var uri = 'data:,foo bar';
var buf = dataUriToBuffer(uri);
assert.equal('text/plain', buf.type);
assert.equal('foo bar', buf.toString());
});
it('should decode data with a "," comma char', function () {
var uri = 'data:,a,b';
var buf = dataUriToBuffer(uri);
assert.equal('text/plain', buf.type);
assert.equal('a,b', buf.toString());
});
it('should decode data with traditionally reserved characters like ";"', function () {
var uri = 'data:,;test';
var buf = dataUriToBuffer(uri);
assert.equal('text/plain', buf.type);
assert.equal(';test', buf.toString());
});
it('should not default to US-ASCII if main type is provided', function () {
var uri = 'data:text/plain,abc';
var buf = dataUriToBuffer(uri);
assert.equal('text/plain', buf.type);
assert.equal('text/plain', buf.typeFull);
assert.equal('', buf.charset);
assert.equal('abc', buf.toString());
});
it('should default to text/plain if main type is not provided', function () {
var uri = 'data:;charset=UTF-8,abc';
var buf = dataUriToBuffer(uri);
assert.equal('text/plain', buf.type);
assert.equal('text/plain;charset=UTF-8', buf.typeFull);
assert.equal('UTF-8', buf.charset);
assert.equal('abc', buf.toString());
});
it('should not allow charset without a leading ;', function () {
var uri = 'data:charset=UTF-8,abc';
var buf = dataUriToBuffer(uri);
assert.equal('charset=UTF-8', buf.type);
assert.equal('charset=UTF-8', buf.typeFull);
assert.equal('', buf.charset);
assert.equal('abc', buf.toString());
});
it('should allow custom media type parameters', function () {
var uri = 'data:application/javascript;version=1.8;charset=UTF-8,abc';
var buf = dataUriToBuffer(uri);
assert.equal('application/javascript', buf.type);
assert.equal('application/javascript;version=1.8;charset=UTF-8', buf.typeFull);
assert.equal('UTF-8', buf.charset);
assert.equal('abc', buf.toString());
});
it('should allow base64 annotation anywhere', function () {
var uri = 'data:text/plain;base64;charset=UTF-8,YWJj';
var buf = dataUriToBuffer(uri);
assert.equal('text/plain', buf.type);
assert.equal('text/plain;charset=UTF-8', buf.typeFull);
assert.equal('UTF-8', buf.charset);
assert.equal('abc', buf.toString());
});
});
//# sourceMappingURL=test.js.map

File diff suppressed because one or more lines are too long


@@ -1,62 +0,0 @@
{
"name": "data-uri-to-buffer",
"version": "4.0.0",
"description": "Generate a Buffer instance from a Data URI string",
"type": "module",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"files": [
"dist",
"src"
],
"scripts": {
"build": "tsc",
"test": "jest",
"prepublishOnly": "npm run build"
},
"repository": {
"type": "git",
"url": "git://github.com/TooTallNate/node-data-uri-to-buffer.git"
},
"engines": {
"node": ">= 12"
},
"keywords": [
"data",
"uri",
"datauri",
"data-uri",
"buffer",
"convert",
"rfc2397",
"2397"
],
"author": "Nathan Rajlich <nathan@tootallnate.net> (http://n8.io/)",
"license": "MIT",
"bugs": {
"url": "https://github.com/TooTallNate/node-data-uri-to-buffer/issues"
},
"homepage": "https://github.com/TooTallNate/node-data-uri-to-buffer",
"devDependencies": {
"@types/es6-promisify": "^5.0.0",
"@types/mocha": "^9.0.0",
"@types/node": "^10.5.3",
"jest": "^27.2.2",
"ts-jest": "^27.0.5",
"typescript": "^4.4.3"
},
"jest": {
"preset": "ts-jest",
"globals": {
"ts-jest": {
"diagnostics": false,
"isolatedModules": true
}
},
"verbose": false,
"testEnvironment": "node",
"testMatch": [
"<rootDir>/test/**/*.test.ts"
]
}
}


@@ -1,68 +0,0 @@
export interface MimeBuffer extends Buffer {
type: string;
typeFull: string;
charset: string;
}
/**
* Returns a `Buffer` instance from the given data URI `uri`.
*
* @param {String} uri Data URI to turn into a Buffer instance
* @returns {Buffer} Buffer instance from Data URI
* @api public
*/
export function dataUriToBuffer(uri: string): MimeBuffer {
if (!/^data:/i.test(uri)) {
throw new TypeError(
'`uri` does not appear to be a Data URI (must begin with "data:")'
);
}
// strip newlines
uri = uri.replace(/\r?\n/g, '');
// split the URI up into the "metadata" and the "data" portions
const firstComma = uri.indexOf(',');
if (firstComma === -1 || firstComma <= 4) {
throw new TypeError('malformed data: URI');
}
// remove the "data:" scheme and parse the metadata
const meta = uri.substring(5, firstComma).split(';');
let charset = '';
let base64 = false;
const type = meta[0] || 'text/plain';
let typeFull = type;
for (let i = 1; i < meta.length; i++) {
if (meta[i] === 'base64') {
base64 = true;
} else {
typeFull += `;${ meta[i]}`;
if (meta[i].indexOf('charset=') === 0) {
charset = meta[i].substring(8);
}
}
}
// defaults to US-ASCII only if type is not provided
if (!meta[0] && !charset.length) {
typeFull += ';charset=US-ASCII';
charset = 'US-ASCII';
}
// get the encoded data portion and decode URI-encoded chars
const encoding = base64 ? 'base64' : 'ascii';
const data = unescape(uri.substring(firstComma + 1));
const buffer = Buffer.from(data, encoding) as MimeBuffer;
// set `.type` and `.typeFull` properties to MIME type
buffer.type = type;
buffer.typeFull = typeFull;
// set the `.charset` property
buffer.charset = charset;
return buffer;
}
export default dataUriToBuffer;

21 node_modules/fetch-blob/LICENSE generated vendored

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2019 David Frank
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

106 node_modules/fetch-blob/README.md generated vendored

@@ -1,106 +0,0 @@
# fetch-blob
[![npm version][npm-image]][npm-url]
[![build status][ci-image]][ci-url]
[![coverage status][codecov-image]][codecov-url]
[![install size][install-size-image]][install-size-url]
A Blob implementation in Node.js, originally from [node-fetch](https://github.com/node-fetch/node-fetch).
## Installation
```sh
npm install fetch-blob
```
<details>
<summary>Upgrading from 2x to 3x</summary>
Updating from 2.x to 3.x should be a breeze since there are not many changes to the blob specification.
The main reason for the major version bump is coding standards.
- internal WeakMaps were replaced with private fields
- internal Buffer.from was replaced with TextEncoder/Decoder
- internal buffers were replaced with Uint8Arrays
- CommonJS was replaced with ESM
- The node stream returned by calling `blob.stream()` was replaced with whatwg streams
- (Read "Differences from other blobs" for more info.)
</details>
<details>
<summary>Differences from other Blobs</summary>
- Unlike NodeJS `buffer.Blob` (Added in: v15.7.0) and the browser-native Blob, this polyfilled version can't be sent via PostMessage
- This blob version is more permissive: it can be constructed with blob parts that aren't instances of itself;
a part only has to look and behave like a blob to be accepted as a blob part.
- The benefit of this is that you can create other types of blobs that don't contain any internal data that has to be read in other ways, such as the `BlobDataItem` created in `from.js` that wraps a file path into a blob-like item and reads it lazily (Node.js plans to [implement this][fs-blobs] as well)
- `blob.stream()` is the most noticeable difference: it now returns a WHATWG stream. To keep it as a node stream you would have to do:
```js
import {Readable} from 'stream'
const stream = Readable.from(blob.stream())
```
</details>
## Usage
```js
// Ways to import
// (PS it's dependency free ESM package so regular http-import from CDN works too)
import Blob from 'fetch-blob'
import File from 'fetch-blob/file.js'
import {Blob} from 'fetch-blob'
import {File} from 'fetch-blob/file.js'
const {Blob} = await import('fetch-blob')
// Ways to read the blob:
const blob = new Blob(['hello, world'])
await blob.text()
await blob.arrayBuffer()
for await (let chunk of blob.stream()) { ... }
blob.stream().getReader().read()
blob.stream().getReader({mode: 'byob'}).read(view)
```
### Blob part backed up by filesystem
`fetch-blob/from.js` comes packed with tools to convert any filepath into either a Blob or a File
It will not read the content into memory. It will only stat the file for last modified date and file size.
```js
// The default export is sync and use fs.stat to retrieve size & last modified as a blob
import blobFromSync from 'fetch-blob/from.js'
import {File, Blob, blobFrom, blobFromSync, fileFrom, fileFromSync} from 'fetch-blob/from.js'
const fsFile = fileFromSync('./2-GiB-file.bin', 'application/octet-stream')
const fsBlob = await blobFrom('./2-GiB-file.mp4')
// Not a 4 GiB memory snapshot, just holds references
// points to where data is located on the disk
const blob = new Blob([fsFile, fsBlob, 'memory', new Uint8Array(10)])
console.log(blob.size) // ~4 GiB
```
`blobFrom|blobFromSync|fileFrom|fileFromSync(path, [mimetype])`
### Creating Blobs backed up by other async sources
Our Blob & File classes are more generic than other polyfills in that they can accept any blob look-a-like item.
For example, our blob implementation can be constructed with parts coming from [BlobDataItem](https://github.com/node-fetch/fetch-blob/blob/8ef89adad40d255a3bbd55cf38b88597c1cd5480/from.js#L32) (aka a filepath) or from [buffer.Blob](https://nodejs.org/api/buffer.html#buffer_new_buffer_blob_sources_options). It does not have to implement all the methods - just enough that it can be read/understood by our Blob implementation. The minimum requirement is that it has `Symbol.toStringTag`, `size`, `slice()` and either a `stream()` or an `arrayBuffer()` method. If you then wrap it in our Blob or File with `new Blob([blobDataItem])`, you get all of the other methods that should be implemented in a blob or file.
An example of this could be a file- or blob-like item coming from a remote HTTP request, or from a database.
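As a minimal sketch of those requirements (hypothetical code, not from the package's docs or tests), a plain object can serve as a blob part as long as it carries `Symbol.toStringTag`, `size`, `slice()` and a `stream()` or `arrayBuffer()` method:
```js
import { Blob } from 'fetch-blob'

// Hypothetical blob look-a-like backed by an in-memory string.
const part = {
  size: 5,
  [Symbol.toStringTag]: 'Blob',
  slice () { return this },
  async arrayBuffer () {
    return new TextEncoder().encode('hello').buffer
  }
}

const blob = new Blob([part])
console.log(blob.size) // 5
blob.text().then(console.log) // 'hello'
```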
See the [MDN documentation](https://developer.mozilla.org/en-US/docs/Web/API/Blob) and [tests](https://github.com/node-fetch/fetch-blob/blob/master/test.js) for more details of how to use the Blob.
[npm-image]: https://flat.badgen.net/npm/v/fetch-blob
[npm-url]: https://www.npmjs.com/package/fetch-blob
[ci-image]: https://github.com/node-fetch/fetch-blob/workflows/CI/badge.svg
[ci-url]: https://github.com/node-fetch/fetch-blob/actions
[codecov-image]: https://flat.badgen.net/codecov/c/github/node-fetch/fetch-blob/master
[codecov-url]: https://codecov.io/gh/node-fetch/fetch-blob
[install-size-image]: https://flat.badgen.net/packagephobia/install/fetch-blob
[install-size-url]: https://packagephobia.now.sh/result?p=fetch-blob
[fs-blobs]: https://github.com/nodejs/node/issues/37340

2 node_modules/fetch-blob/file.d.ts generated vendored

@@ -1,2 +0,0 @@
/** @type {typeof globalThis.File} */ export const File: typeof globalThis.File;
export default File;

49 node_modules/fetch-blob/file.js generated vendored

@@ -1,49 +0,0 @@
import Blob from './index.js'
const _File = class File extends Blob {
#lastModified = 0
#name = ''
/**
* @param {*[]} fileBits
* @param {string} fileName
* @param {{lastModified?: number, type?: string}} options
*/// @ts-ignore
constructor (fileBits, fileName, options = {}) {
if (arguments.length < 2) {
throw new TypeError(`Failed to construct 'File': 2 arguments required, but only ${arguments.length} present.`)
}
super(fileBits, options)
if (options === null) options = {}
// Simulate WebIDL type casting for NaN value in lastModified option.
const lastModified = options.lastModified === undefined ? Date.now() : Number(options.lastModified)
if (!Number.isNaN(lastModified)) {
this.#lastModified = lastModified
}
this.#name = String(fileName)
}
get name () {
return this.#name
}
get lastModified () {
return this.#lastModified
}
get [Symbol.toStringTag] () {
return 'File'
}
static [Symbol.hasInstance] (object) {
return !!object && object instanceof Blob &&
/^(File)$/.test(object[Symbol.toStringTag])
}
}
/** @type {typeof globalThis.File} */// @ts-ignore
export const File = _File
export default File

26 node_modules/fetch-blob/from.d.ts generated vendored

@@ -1,26 +0,0 @@
export default blobFromSync;
/**
* @param {string} path filepath on the disk
* @param {string} [type] mimetype to use
*/
export function blobFromSync(path: string, type?: string): Blob;
import File from "./file.js";
import Blob from "./index.js";
/**
* @param {string} path filepath on the disk
* @param {string} [type] mimetype to use
* @returns {Promise<Blob>}
*/
export function blobFrom(path: string, type?: string): Promise<Blob>;
/**
* @param {string} path filepath on the disk
* @param {string} [type] mimetype to use
* @returns {Promise<File>}
*/
export function fileFrom(path: string, type?: string): Promise<File>;
/**
* @param {string} path filepath on the disk
* @param {string} [type] mimetype to use
*/
export function fileFromSync(path: string, type?: string): File;
export { File, Blob };

106 node_modules/fetch-blob/from.js generated vendored

@@ -1,106 +0,0 @@
import { statSync, createReadStream, promises as fs } from 'node:fs'
import { basename } from 'node:path'
import DOMException from 'node-domexception'
import File from './file.js'
import Blob from './index.js'
const { stat } = fs
/**
* @param {string} path filepath on the disk
* @param {string} [type] mimetype to use
*/
const blobFromSync = (path, type) => fromBlob(statSync(path), path, type)
/**
* @param {string} path filepath on the disk
* @param {string} [type] mimetype to use
* @returns {Promise<Blob>}
*/
const blobFrom = (path, type) => stat(path).then(stat => fromBlob(stat, path, type))
/**
* @param {string} path filepath on the disk
* @param {string} [type] mimetype to use
* @returns {Promise<File>}
*/
const fileFrom = (path, type) => stat(path).then(stat => fromFile(stat, path, type))
/**
* @param {string} path filepath on the disk
* @param {string} [type] mimetype to use
*/
const fileFromSync = (path, type) => fromFile(statSync(path), path, type)
// @ts-ignore
const fromBlob = (stat, path, type = '') => new Blob([new BlobDataItem({
path,
size: stat.size,
lastModified: stat.mtimeMs,
start: 0
})], { type })
// @ts-ignore
const fromFile = (stat, path, type = '') => new File([new BlobDataItem({
path,
size: stat.size,
lastModified: stat.mtimeMs,
start: 0
})], basename(path), { type, lastModified: stat.mtimeMs })
/**
* This is a blob backed up by a file on the disk
* with minium requirement. Its wrapped around a Blob as a blobPart
* so you have no direct access to this.
*
* @private
*/
class BlobDataItem {
#path
#start
constructor (options) {
this.#path = options.path
this.#start = options.start
this.size = options.size
this.lastModified = options.lastModified
this.originalSize = options.originalSize === undefined
? options.size
: options.originalSize
}
/**
* Slicing arguments is first validated and formatted
* to not be out of range by Blob.prototype.slice
*/
slice (start, end) {
return new BlobDataItem({
path: this.#path,
lastModified: this.lastModified,
originalSize: this.originalSize,
size: end - start,
start: this.#start + start
})
}
async * stream () {
const { mtimeMs, size } = await stat(this.#path)
if (mtimeMs > this.lastModified || this.originalSize !== size) {
throw new DOMException('The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.', 'NotReadableError')
}
yield * createReadStream(this.#path, {
start: this.#start,
end: this.#start + this.size - 1
})
}
get [Symbol.toStringTag] () {
return 'Blob'
}
}
export default blobFromSync
export { File, Blob, blobFrom, blobFromSync, fileFrom, fileFromSync }

3 node_modules/fetch-blob/index.d.ts generated vendored

@@ -1,3 +0,0 @@
/** @type {typeof globalThis.Blob} */
export const Blob: typeof globalThis.Blob;
export default Blob;

254 node_modules/fetch-blob/index.js generated vendored

@@ -1,254 +0,0 @@
/*! fetch-blob. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
// TODO (jimmywarting): in the feature use conditional loading with top level await (requires 14.x)
// Node has recently added whatwg stream into core
import './streams.cjs'
// 64 KiB (same size chrome slice theirs blob into Uint8array's)
const POOL_SIZE = 65536
/** @param {(Blob | Uint8Array)[]} parts */
async function * toIterator (parts, clone = true) {
for (const part of parts) {
if ('stream' in part) {
yield * (/** @type {AsyncIterableIterator<Uint8Array>} */ (part.stream()))
} else if (ArrayBuffer.isView(part)) {
if (clone) {
let position = part.byteOffset
const end = part.byteOffset + part.byteLength
while (position !== end) {
const size = Math.min(end - position, POOL_SIZE)
const chunk = part.buffer.slice(position, position + size)
position += chunk.byteLength
yield new Uint8Array(chunk)
}
} else {
yield part
}
/* c8 ignore next 10 */
} else {
// For blobs that have arrayBuffer but no stream method (nodes buffer.Blob)
let position = 0, b = (/** @type {Blob} */ (part))
while (position !== b.size) {
const chunk = b.slice(position, Math.min(b.size, position + POOL_SIZE))
const buffer = await chunk.arrayBuffer()
position += buffer.byteLength
yield new Uint8Array(buffer)
}
}
}
}
const _Blob = class Blob {
/** @type {Array.<(Blob|Uint8Array)>} */
#parts = []
#type = ''
#size = 0
#endings = 'transparent'
/**
* The Blob() constructor returns a new Blob object. The content
* of the blob consists of the concatenation of the values given
* in the parameter array.
*
* @param {*} blobParts
* @param {{ type?: string, endings?: string }} [options]
*/
constructor (blobParts = [], options = {}) {
if (typeof blobParts !== 'object' || blobParts === null) {
throw new TypeError('Failed to construct \'Blob\': The provided value cannot be converted to a sequence.')
}
if (typeof blobParts[Symbol.iterator] !== 'function') {
throw new TypeError('Failed to construct \'Blob\': The object must have a callable @@iterator property.')
}
if (typeof options !== 'object' && typeof options !== 'function') {
throw new TypeError('Failed to construct \'Blob\': parameter 2 cannot convert to dictionary.')
}
if (options === null) options = {}
const encoder = new TextEncoder()
for (const element of blobParts) {
let part
if (ArrayBuffer.isView(element)) {
part = new Uint8Array(element.buffer.slice(element.byteOffset, element.byteOffset + element.byteLength))
} else if (element instanceof ArrayBuffer) {
part = new Uint8Array(element.slice(0))
} else if (element instanceof Blob) {
part = element
} else {
part = encoder.encode(`${element}`)
}
const size = ArrayBuffer.isView(part) ? part.byteLength : part.size
// Avoid pushing empty parts into the array to better GC them
if (size) {
this.#size += size
this.#parts.push(part)
}
}
this.#endings = `${options.endings === undefined ? 'transparent' : options.endings}`
const type = options.type === undefined ? '' : String(options.type)
this.#type = /^[\x20-\x7E]*$/.test(type) ? type : ''
}
/**
* The Blob interface's size property returns the
* size of the Blob in bytes.
*/
get size () {
return this.#size
}
/**
* The type property of a Blob object returns the MIME type of the file.
*/
get type () {
return this.#type
}
/**
* The text() method in the Blob interface returns a Promise
* that resolves with a string containing the contents of
* the blob, interpreted as UTF-8.
*
* @return {Promise<string>}
*/
async text () {
// More optimized than using this.arrayBuffer()
// that requires twice as much ram
const decoder = new TextDecoder()
let str = ''
for await (const part of toIterator(this.#parts, false)) {
str += decoder.decode(part, { stream: true })
}
// Remaining
str += decoder.decode()
return str
}
/**
* The arrayBuffer() method in the Blob interface returns a
* Promise that resolves with the contents of the blob as
* binary data contained in an ArrayBuffer.
*
* @return {Promise<ArrayBuffer>}
*/
async arrayBuffer () {
// Easier way... Just a unnecessary overhead
// const view = new Uint8Array(this.size);
// await this.stream().getReader({mode: 'byob'}).read(view);
// return view.buffer;
const data = new Uint8Array(this.size)
let offset = 0
for await (const chunk of toIterator(this.#parts, false)) {
data.set(chunk, offset)
offset += chunk.length
}
return data.buffer
}
stream () {
const it = toIterator(this.#parts, true)
return new globalThis.ReadableStream({
// @ts-ignore
type: 'bytes',
async pull (ctrl) {
const chunk = await it.next()
chunk.done ? ctrl.close() : ctrl.enqueue(chunk.value)
},
async cancel () {
await it.return()
}
})
}
/**
* The Blob interface's slice() method creates and returns a
* new Blob object which contains data from a subset of the
* blob on which it's called.
*
* @param {number} [start]
* @param {number} [end]
* @param {string} [type]
*/
slice (start = 0, end = this.size, type = '') {
const { size } = this
let relativeStart = start < 0 ? Math.max(size + start, 0) : Math.min(start, size)
let relativeEnd = end < 0 ? Math.max(size + end, 0) : Math.min(end, size)
const span = Math.max(relativeEnd - relativeStart, 0)
const parts = this.#parts
const blobParts = []
let added = 0
for (const part of parts) {
// don't add the overflow to new blobParts
if (added >= span) {
break
}
const size = ArrayBuffer.isView(part) ? part.byteLength : part.size
if (relativeStart && size <= relativeStart) {
// Skip the beginning and change the relative
// start & end position as we skip the unwanted parts
relativeStart -= size
relativeEnd -= size
} else {
let chunk
if (ArrayBuffer.isView(part)) {
chunk = part.subarray(relativeStart, Math.min(size, relativeEnd))
added += chunk.byteLength
} else {
chunk = part.slice(relativeStart, Math.min(size, relativeEnd))
added += chunk.size
}
relativeEnd -= size
blobParts.push(chunk)
relativeStart = 0 // All next sequential parts should start at 0
}
}
const blob = new Blob([], { type: String(type).toLowerCase() })
blob.#size = span
blob.#parts = blobParts
return blob
}
get [Symbol.toStringTag] () {
return 'Blob'
}
static [Symbol.hasInstance] (object) {
return (
object &&
typeof object === 'object' &&
typeof object.constructor === 'function' &&
(
typeof object.stream === 'function' ||
typeof object.arrayBuffer === 'function'
) &&
/^(Blob|File)$/.test(object[Symbol.toStringTag])
)
}
}
Object.defineProperties(_Blob.prototype, {
size: { enumerable: true },
type: { enumerable: true },
slice: { enumerable: true }
})
/** @type {typeof globalThis.Blob} */
export const Blob = _Blob
export default Blob

56 node_modules/fetch-blob/package.json generated vendored

@@ -1,56 +0,0 @@
{
"name": "fetch-blob",
"version": "3.1.5",
"description": "Blob & File implementation in Node.js, originally from node-fetch.",
"main": "index.js",
"type": "module",
"files": [
"from.js",
"file.js",
"file.d.ts",
"index.js",
"index.d.ts",
"from.d.ts",
"streams.cjs"
],
"scripts": {
"test": "node --experimental-loader ./test/http-loader.js ./test/test-wpt-in-node.js",
"report": "c8 --reporter json --reporter text npm run test",
"coverage": "npm run report && codecov -f coverage/coverage-final.json",
"prepublishOnly": "tsc --declaration --emitDeclarationOnly --allowJs index.js from.js"
},
"repository": "https://github.com/node-fetch/fetch-blob.git",
"keywords": [
"blob",
"file",
"node-fetch"
],
"engines": {
"node": "^12.20 || >= 14.13"
},
"author": "Jimmy Wärting <jimmy@warting.se> (https://jimmy.warting.se)",
"license": "MIT",
"bugs": {
"url": "https://github.com/node-fetch/fetch-blob/issues"
},
"homepage": "https://github.com/node-fetch/fetch-blob#readme",
"devDependencies": {
"@types/node": "^17.0.9",
"c8": "^7.11.0",
"typescript": "^4.5.4"
},
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/jimmywarting"
},
{
"type": "paypal",
"url": "https://paypal.me/jimmywarting"
}
],
"dependencies": {
"node-domexception": "^1.0.0",
"web-streams-polyfill": "^3.0.3"
}
}

51 node_modules/fetch-blob/streams.cjs generated vendored

@@ -1,51 +0,0 @@
/* c8 ignore start */
// 64 KiB (same size chrome slice theirs blob into Uint8array's)
const POOL_SIZE = 65536
if (!globalThis.ReadableStream) {
// `node:stream/web` got introduced in v16.5.0 as experimental
// and it's preferred over the polyfilled version. So we also
// suppress the warning that gets emitted by NodeJS for using it.
try {
const process = require('node:process')
const { emitWarning } = process
try {
process.emitWarning = () => {}
Object.assign(globalThis, require('node:stream/web'))
process.emitWarning = emitWarning
} catch (error) {
process.emitWarning = emitWarning
throw error
}
} catch (error) {
// fallback to polyfill implementation
Object.assign(globalThis, require('web-streams-polyfill/dist/ponyfill.es2018.js'))
}
}
try {
// Don't use node: prefix for this, require+node: is not supported until node v14.14
// Only `import()` can use prefix in 12.20 and later
const { Blob } = require('buffer')
if (Blob && !Blob.prototype.stream) {
Blob.prototype.stream = function name (params) {
let position = 0
const blob = this
return new ReadableStream({
type: 'bytes',
async pull (ctrl) {
const chunk = blob.slice(position, Math.min(blob.size, position + POOL_SIZE))
const buffer = await chunk.arrayBuffer()
position += buffer.byteLength
ctrl.enqueue(new Uint8Array(buffer))
if (position === blob.size) {
ctrl.close()
}
}
})
}
}
} catch (error) {}
/* c8 ignore end */


@@ -1,441 +0,0 @@
/* formdata-polyfill. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
/* global FormData self Blob File */
/* eslint-disable no-inner-declarations */
if (typeof Blob !== 'undefined' && (typeof FormData === 'undefined' || !FormData.prototype.keys)) {
const global = typeof globalThis === 'object'
? globalThis
: typeof window === 'object'
? window
: typeof self === 'object' ? self : this
// keep a reference to native implementation
const _FormData = global.FormData
// To be monkey patched
const _send = global.XMLHttpRequest && global.XMLHttpRequest.prototype.send
const _fetch = global.Request && global.fetch
const _sendBeacon = global.navigator && global.navigator.sendBeacon
// Might be a worker thread...
const _match = global.Element && global.Element.prototype
// Unable to patch Request/Response constructor correctly #109
// only way is to use ES6 class extend
// https://github.com/babel/babel/issues/1966
const stringTag = global.Symbol && Symbol.toStringTag
// Add missing stringTags to blob and files
if (stringTag) {
if (!Blob.prototype[stringTag]) {
Blob.prototype[stringTag] = 'Blob'
}
if ('File' in global && !File.prototype[stringTag]) {
File.prototype[stringTag] = 'File'
}
}
// Fix so you can construct your own File
try {
new File([], '') // eslint-disable-line
} catch (a) {
global.File = function File (b, d, c) {
const blob = new Blob(b, c || {})
const t = c && void 0 !== c.lastModified ? new Date(c.lastModified) : new Date()
Object.defineProperties(blob, {
name: {
value: d
},
lastModified: {
value: +t
},
toString: {
value () {
return '[object File]'
}
}
})
if (stringTag) {
Object.defineProperty(blob, stringTag, {
value: 'File'
})
}
return blob
}
}
function ensureArgs (args, expected) {
if (args.length < expected) {
throw new TypeError(`${expected} argument required, but only ${args.length} present.`)
}
}
/**
* @param {string} name
* @param {string | undefined} filename
* @returns {[string, File|string]}
*/
function normalizeArgs (name, value, filename) {
if (value instanceof Blob) {
filename = filename !== undefined
? String(filename + '')
: typeof value.name === 'string'
? value.name
: 'blob'
if (value.name !== filename || Object.prototype.toString.call(value) === '[object Blob]') {
value = new File([value], filename)
}
return [String(name), value]
}
return [String(name), String(value)]
}
// normalize line feeds for textarea
// https://html.spec.whatwg.org/multipage/form-elements.html#textarea-line-break-normalisation-transformation
function normalizeLinefeeds (value) {
return value.replace(/\r?\n|\r/g, '\r\n')
}
/**
* @template T
* @param {ArrayLike<T>} arr
* @param {{ (elm: T): void; }} cb
*/
function each (arr, cb) {
for (let i = 0; i < arr.length; i++) {
cb(arr[i])
}
}
const escape = str => str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22')
/**
* @implements {Iterable}
*/
class FormDataPolyfill {
/**
* FormData class
*
* @param {HTMLFormElement=} form
*/
constructor (form) {
/** @type {[string, string|File][]} */
this._data = []
const self = this
form && each(form.elements, (/** @type {HTMLInputElement} */ elm) => {
if (
!elm.name ||
elm.disabled ||
elm.type === 'submit' ||
elm.type === 'button' ||
elm.matches('form fieldset[disabled] *')
) return
if (elm.type === 'file') {
const files = elm.files && elm.files.length
? elm.files
: [new File([], '', { type: 'application/octet-stream' })] // #78
each(files, file => {
self.append(elm.name, file)
})
} else if (elm.type === 'select-multiple' || elm.type === 'select-one') {
each(elm.options, opt => {
!opt.disabled && opt.selected && self.append(elm.name, opt.value)
})
} else if (elm.type === 'checkbox' || elm.type === 'radio') {
if (elm.checked) self.append(elm.name, elm.value)
} else {
const value = elm.type === 'textarea' ? normalizeLinefeeds(elm.value) : elm.value
self.append(elm.name, value)
}
})
}
/**
* Append a field
*
* @param {string} name field name
* @param {string|Blob|File} value string / blob / file
* @param {string=} filename filename to use with blob
* @return {undefined}
*/
append (name, value, filename) {
ensureArgs(arguments, 2)
this._data.push(normalizeArgs(name, value, filename))
}
/**
* Delete all fields values given name
*
* @param {string} name Field name
* @return {undefined}
*/
delete (name) {
ensureArgs(arguments, 1)
const result = []
name = String(name)
each(this._data, entry => {
entry[0] !== name && result.push(entry)
})
this._data = result
}
/**
* Iterate over all fields as [name, value]
*
* @return {Iterator}
*/
* entries () {
for (var i = 0; i < this._data.length; i++) {
yield this._data[i]
}
}
/**
* Iterate over all fields
*
* @param {Function} callback Executed for each item with parameters (value, name, thisArg)
* @param {Object=} thisArg `this` context for callback function
*/
forEach (callback, thisArg) {
ensureArgs(arguments, 1)
for (const [name, value] of this) {
callback.call(thisArg, value, name, this)
}
}
/**
* Return first field value given name
* or null if non existent
*
* @param {string} name Field name
* @return {string|File|null} value Fields value
*/
get (name) {
ensureArgs(arguments, 1)
const entries = this._data
name = String(name)
for (let i = 0; i < entries.length; i++) {
if (entries[i][0] === name) {
return entries[i][1]
}
}
return null
}
/**
* Return all fields values given name
*
* @param {string} name Fields name
* @return {Array} [{String|File}]
*/
getAll (name) {
ensureArgs(arguments, 1)
const result = []
name = String(name)
each(this._data, data => {
data[0] === name && result.push(data[1])
})
return result
}
/**
* Check for field name existence
*
* @param {string} name Field name
* @return {boolean}
*/
has (name) {
ensureArgs(arguments, 1)
name = String(name)
for (let i = 0; i < this._data.length; i++) {
if (this._data[i][0] === name) {
return true
}
}
return false
}
/**
* Iterate over all fields name
*
* @return {Iterator}
*/
* keys () {
for (const [name] of this) {
yield name
}
}
/**
* Overwrite all values given name
*
* @param {string} name Filed name
* @param {string} value Field value
* @param {string=} filename Filename (optional)
*/
set (name, value, filename) {
ensureArgs(arguments, 2)
name = String(name)
/** @type {[string, string|File][]} */
const result = []
const args = normalizeArgs(name, value, filename)
let replace = true
// - replace the first occurrence with same name
// - discards the remaining with same name
// - while keeping the same order items where added
each(this._data, data => {
data[0] === name
? replace && (replace = !result.push(args))
: result.push(data)
})
replace && result.push(args)
this._data = result
}
/**
* Iterate over all fields
*
* @return {Iterator}
*/
* values () {
for (const [, value] of this) {
yield value
}
}
/**
* Return a native (perhaps degraded) FormData with only a `append` method
* Can throw if it's not supported
*
* @return {FormData}
*/
['_asNative'] () {
const fd = new _FormData()
for (const [name, value] of this) {
fd.append(name, value)
}
return fd
}
/**
* [_blob description]
*
* @return {Blob} [description]
*/
['_blob'] () {
const boundary = '----formdata-polyfill-' + Math.random(),
chunks = [],
p = `--${boundary}\r\nContent-Disposition: form-data; name="`
this.forEach((value, name) => typeof value == 'string'
? chunks.push(p + escape(normalizeLinefeeds(name)) + `"\r\n\r\n${normalizeLinefeeds(value)}\r\n`)
: chunks.push(p + escape(normalizeLinefeeds(name)) + `"; filename="${escape(value.name)}"\r\nContent-Type: ${value.type||"application/octet-stream"}\r\n\r\n`, value, `\r\n`))
chunks.push(`--${boundary}--`)
return new Blob(chunks, {
type: "multipart/form-data; boundary=" + boundary
})
}
/**
* The class itself is iterable
* alias for formdata.entries()
*
* @return {Iterator}
*/
[Symbol.iterator] () {
return this.entries()
}
/**
* Create the default string description.
*
* @return {string} [object FormData]
*/
toString () {
return '[object FormData]'
}
}
if (_match && !_match.matches) {
_match.matches =
_match.matchesSelector ||
_match.mozMatchesSelector ||
_match.msMatchesSelector ||
_match.oMatchesSelector ||
_match.webkitMatchesSelector ||
function (s) {
var matches = (this.document || this.ownerDocument).querySelectorAll(s)
var i = matches.length
while (--i >= 0 && matches.item(i) !== this) {}
return i > -1
}
}
if (stringTag) {
/**
* Create the default string description.
* It is accessed internally by the Object.prototype.toString().
*/
FormDataPolyfill.prototype[stringTag] = 'FormData'
}
// Patch xhr's send method to call _blob transparently
if (_send) {
const setRequestHeader = global.XMLHttpRequest.prototype.setRequestHeader
global.XMLHttpRequest.prototype.setRequestHeader = function (name, value) {
setRequestHeader.call(this, name, value)
if (name.toLowerCase() === 'content-type') this._hasContentType = true
}
global.XMLHttpRequest.prototype.send = function (data) {
// need to patch send b/c old IE don't send blob's type (#44)
if (data instanceof FormDataPolyfill) {
const blob = data['_blob']()
if (!this._hasContentType) this.setRequestHeader('Content-Type', blob.type)
_send.call(this, blob)
} else {
_send.call(this, data)
}
}
}
// Patch fetch's function to call _blob transparently
if (_fetch) {
global.fetch = function (input, init) {
if (init && init.body && init.body instanceof FormDataPolyfill) {
init.body = init.body['_blob']()
}
return _fetch.call(this, input, init)
}
}
// Patch navigator.sendBeacon to use native FormData
if (_sendBeacon) {
global.navigator.sendBeacon = function (url, data) {
if (data instanceof FormDataPolyfill) {
data = data['_asNative']()
}
return _sendBeacon.call(this, url, data)
}
}
global['FormData'] = FormDataPolyfill
}


@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2016 Jimmy Karl Roland Wärting
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -1,145 +0,0 @@
### A `FormData` polyfill for the browser ...and a module for NodeJS (`New!`)
```bash
npm install formdata-polyfill
```
The browser polyfill will likely have done its part already, and I hope you can stop supporting old browsers c",)<br>
But NodeJS still lacks a proper FormData<br>The good old form-data package is very old, isn't spec compatible, and does some abnormal stuff to construct and read FormData instances that other http libraries are not happy about when it comes to following the spec.
### The NodeJS / ESM version
- The modular (~2.3 KiB minified uncompressed) version of this package is independent of any browser stuff and doesn't patch anything
- It's as pure/spec-compatible as it possibly gets; the tests are run by WPT.
- It's compatible with [node-fetch](https://github.com/node-fetch/node-fetch).
- It has higher platform requirements, as it uses classes, symbols, ESM & private fields
- Its only dependency is [fetch-blob](https://github.com/node-fetch/fetch-blob)
```js
// Node example
import fetch from 'node-fetch'
import File from 'fetch-blob/file.js'
import { fileFromSync } from 'fetch-blob/from.js'
import { FormData } from 'formdata-polyfill/esm.min.js'
const file = fileFromSync('./README.md')
const fd = new FormData()
fd.append('file-upload', new File(['abc'], 'hello-world.txt'))
fd.append('file-upload', file)
// it's also possible to append file/blob look-a-like items
// if you have streams coming from other destinations
fd.append('file-upload', {
size: 123,
type: '',
name: 'cat-video.mp4',
stream() { return stream },
[Symbol.toStringTag]: 'File'
})
fetch('https://httpbin.org/post', { method: 'POST', body: fd })
```
----
It also comes with a way to convert FormData into Blobs - it's not something that every developer should have to deal with.
It's mainly for [node-fetch](https://github.com/node-fetch/node-fetch) and other HTTP libraries, to ease the process of serializing a FormData into a Blob for those that just wish to deal with Blobs instead (both Deno and Undici adopted a version of this [formDataToBlob](https://github.com/jimmywarting/FormData/blob/5ddea9e0de2fc5e246ab1b2f9d404dee0c319c02/formdata-to-blob.js) into their core and pass all WPT tests run by the browser itself)
```js
import http from 'node:http'
import { Readable } from 'node:stream'
import fetch from 'node-fetch'
import { FormData, formDataToBlob } from 'formdata-polyfill/esm.min.js'
const blob = formDataToBlob(new FormData())
fetch('https://httpbin.org/post', { method: 'POST', body: blob })
// node's built-in http and other similar HTTP libraries have to do:
const stream = Readable.from(blob.stream())
const req = http.request('http://httpbin.org/post', {
method: 'post',
headers: {
'Content-Length': blob.size,
'Content-Type': blob.type
}
})
stream.pipe(req)
```
PS: blobs & files that are appended to the FormData will not be read until one of the serialized Blob's read methods gets called
...so uploading very large files is no biggie
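A minimal sketch of that lazy behaviour (the file path is only an illustrative assumption):
```js
import { fileFromSync } from 'fetch-blob/from.js'
import { FormData, formDataToBlob } from 'formdata-polyfill/esm.min.js'

const fd = new FormData()
// Appending a file-backed File only records metadata (name, size, type);
// no bytes are read from disk at this point.
fd.append('upload', fileFromSync('./big-video.mp4'))

// Serializing is still lazy: the multipart Blob merely references its parts.
const blob = formDataToBlob(fd)
console.log(blob.size) // known without reading the file's content

// Bytes are only pulled from disk once a read method is used,
// e.g. when blob.stream() is piped into an HTTP request.
```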
### Browser polyfill
usage:
```js
import 'formdata-polyfill' // that's it
```
The browser polyfill conditionally replaces the native implementation rather than fixing the missing functions,
since otherwise there is no way to get or delete existing values in the FormData object.
Therefore this also patches `XMLHttpRequest.prototype.send` and `fetch` to send the `FormData` as a blob,
and `navigator.sendBeacon` to send native `FormData`.
I was unable to patch the Response/Request constructor
so if you are constructing them with FormData then you need to call `fd._blob()` manually.
```js
new Request(url, {
method: 'post',
body: fd._blob ? fd._blob() : fd
})
```
Dependencies
---
If you need to support IE <= 9 then I recommend including eligrey's [blob.js]
(which I hope you don't - since IE is now dead)
<details>
<summary>Updating from 2.x to 3.x</summary>
Previously you had to import the polyfill and use that,
since it didn't replace the global (existing) FormData implementation.
But now it transparently calls `_blob()` for you when you are sending something with fetch or XHR,
by way of monkey-patching the `XMLHttpRequest.prototype.send` and `fetch` functions.
So you may have had something like this:
```javascript
var FormData = require('formdata-polyfill')
var fd = new FormData(form)
xhr.send(fd._blob())
```
There is no longer anything exported from the module
(though you of course still need to import it to install the polyfill),
so you can now use the FormData object as normal:
```javascript
require('formdata-polyfill')
var fd = new FormData(form)
xhr.send(fd)
```
</details>
Native Browser compatibility (as of 2021-05-08)
---
Based on this you can decide for yourself if you need this polyfill.
[![screenshot](https://user-images.githubusercontent.com/1148376/117550329-0993aa80-b040-11eb-976c-14e31f1a3ba4.png)](https://developer.mozilla.org/en-US/docs/Web/API/FormData#Browser_compatibility)
This normalizes support for the FormData API (a short usage sketch follows the list):
- `append` with filename
- `delete()`, `get()`, `getAll()`, `has()`, `set()`
- `entries()`, `keys()`, `values()`, and support for `for...of`
- Available in web workers (just include the polyfill)
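A brief sketch of those normalized methods, using only the standard `FormData` API:
```js
const fd = new FormData()
fd.append('avatar', new Blob(['fake image bytes']), 'avatar.png') // append with a filename
fd.set('name', 'readme-demo')                                     // set() overwrites existing values
fd.append('tag', 'a')
fd.append('tag', 'b')

console.log(fd.has('name'))    // true
console.log(fd.get('name'))    // 'readme-demo'
console.log(fd.getAll('tag'))  // ['a', 'b']
fd.delete('avatar')

for (const [key, value] of fd.entries()) { // entries()/keys()/values() work with for...of
  console.log(key, value)
}
```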
[npm-image]: https://img.shields.io/npm/v/formdata-polyfill.svg
[npm-url]: https://www.npmjs.com/package/formdata-polyfill
[blob.js]: https://github.com/eligrey/Blob.js

View file

@ -1,5 +0,0 @@
export declare const FormData: {
new (): FormData;
prototype: FormData;
};
export declare function formDataToBlob(formData: FormData): Blob;

View file

@ -1,40 +0,0 @@
/*! formdata-polyfill. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
import C from 'fetch-blob'
import F from 'fetch-blob/file.js'
var {toStringTag:t,iterator:i,hasInstance:h}=Symbol,
r=Math.random,
m='append,set,get,getAll,delete,keys,values,entries,forEach,constructor'.split(','),
f=(a,b,c)=>(a+='',/^(Blob|File)$/.test(b && b[t])?[(c=c!==void 0?c+'':b[t]=='File'?b.name:'blob',a),b.name!==c||b[t]=='blob'?new F([b],c,b):b]:[a,b+'']),
e=(c,f)=>(f?c:c.replace(/\r?\n|\r/g,'\r\n')).replace(/\n/g,'%0A').replace(/\r/g,'%0D').replace(/"/g,'%22'),
x=(n, a, e)=>{if(a.length<e){throw new TypeError(`Failed to execute '${n}' on 'FormData': ${e} arguments required, but only ${a.length} present.`)}}
export const File = F
/** @type {typeof globalThis.FormData} */
export const FormData = class FormData {
#d=[];
constructor(...a){if(a.length)throw new TypeError(`Failed to construct 'FormData': parameter 1 is not of type 'HTMLFormElement'.`)}
get [t]() {return 'FormData'}
[i](){return this.entries()}
static [h](o) {return o&&typeof o==='object'&&o[t]==='FormData'&&!m.some(m=>typeof o[m]!='function')}
append(...a){x('append',arguments,2);this.#d.push(f(...a))}
delete(a){x('delete',arguments,1);a+='';this.#d=this.#d.filter(([b])=>b!==a)}
get(a){x('get',arguments,1);a+='';for(var b=this.#d,l=b.length,c=0;c<l;c++)if(b[c][0]===a)return b[c][1];return null}
getAll(a,b){x('getAll',arguments,1);b=[];a+='';this.#d.forEach(c=>c[0]===a&&b.push(c[1]));return b}
has(a){x('has',arguments,1);a+='';return this.#d.some(b=>b[0]===a)}
forEach(a,b){x('forEach',arguments,1);for(var [c,d]of this)a.call(b,d,c,this)}
set(...a){x('set',arguments,2);var b=[],c=!0;a=f(...a);this.#d.forEach(d=>{d[0]===a[0]?c&&(c=!b.push(a)):b.push(d)});c&&b.push(a);this.#d=b}
*entries(){yield*this.#d}
*keys(){for(var[a]of this)yield a}
*values(){for(var[,a]of this)yield a}}
/** @param {FormData} F */
export function formDataToBlob (F,B=C){
var b=`${r()}${r()}`.replace(/\./g, '').slice(-28).padStart(32, '-'),c=[],p=`--${b}\r\nContent-Disposition: form-data; name="`
F.forEach((v,n)=>typeof v=='string'
?c.push(p+e(n)+`"\r\n\r\n${v.replace(/\r(?!\n)|(?<!\r)\n/g, '\r\n')}\r\n`)
:c.push(p+e(n)+`"; filename="${e(v.name, 1)}"\r\nContent-Type: ${v.type||"application/octet-stream"}\r\n\r\n`, v, '\r\n'))
c.push(`--${b}--`)
return new B(c,{type:"multipart/form-data; boundary="+b})}

View file

@ -1,39 +0,0 @@
/*! formdata-polyfill. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
const escape = (str, filename) =>
(filename ? str : str.replace(/\r?\n|\r/g, '\r\n'))
.replace(/\n/g, '%0A')
.replace(/\r/g, '%0D')
.replace(/"/g, '%22')
/**
* pure function to convert any formData instance to a Blob
* instances synchronous without reading all of the files
*
* @param {FormData|*} formData an instance of a formData Class
* @param {Blob|*} [BlobClass=Blob] the Blob class to use when constructing it
*/
export function formDataToBlob (formData, BlobClass = Blob) {
const boundary = ('----formdata-polyfill-' + Math.random())
const chunks = []
const prefix = `--${boundary}\r\nContent-Disposition: form-data; name="`
for (let [name, value] of formData) {
if (typeof value === 'string') {
chunks.push(prefix + escape(name) + `"\r\n\r\n${value.replace(/\r(?!\n)|(?<!\r)\n/g, '\r\n')}\r\n`)
} else {
chunks.push(
prefix + escape(name) + `"; filename="${escape(value.name, 1)}"\r\n` +
`Content-Type: ${value.type || 'application/octet-stream'}\r\n\r\n`,
value,
'\r\n'
)
}
}
chunks.push(`--${boundary}--`)
return new BlobClass(chunks, {
type: 'multipart/form-data; boundary=' + boundary
})
}

View file

@ -1,21 +0,0 @@
/*! formdata-polyfill. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
;(function(){var h;function l(a){var b=0;return function(){return b<a.length?{done:!1,value:a[b++]}:{done:!0}}}var m="function"==typeof Object.defineProperties?Object.defineProperty:function(a,b,c){if(a==Array.prototype||a==Object.prototype)return a;a[b]=c.value;return a};
function n(a){a=["object"==typeof globalThis&&globalThis,a,"object"==typeof window&&window,"object"==typeof self&&self,"object"==typeof global&&global];for(var b=0;b<a.length;++b){var c=a[b];if(c&&c.Math==Math)return c}throw Error("Cannot find global object");}var q=n(this);function r(a,b){if(b)a:{var c=q;a=a.split(".");for(var d=0;d<a.length-1;d++){var e=a[d];if(!(e in c))break a;c=c[e]}a=a[a.length-1];d=c[a];b=b(d);b!=d&&null!=b&&m(c,a,{configurable:!0,writable:!0,value:b})}}
r("Symbol",function(a){function b(f){if(this instanceof b)throw new TypeError("Symbol is not a constructor");return new c(d+(f||"")+"_"+e++,f)}function c(f,g){this.A=f;m(this,"description",{configurable:!0,writable:!0,value:g})}if(a)return a;c.prototype.toString=function(){return this.A};var d="jscomp_symbol_"+(1E9*Math.random()>>>0)+"_",e=0;return b});
r("Symbol.iterator",function(a){if(a)return a;a=Symbol("Symbol.iterator");for(var b="Array Int8Array Uint8Array Uint8ClampedArray Int16Array Uint16Array Int32Array Uint32Array Float32Array Float64Array".split(" "),c=0;c<b.length;c++){var d=q[b[c]];"function"===typeof d&&"function"!=typeof d.prototype[a]&&m(d.prototype,a,{configurable:!0,writable:!0,value:function(){return u(l(this))}})}return a});function u(a){a={next:a};a[Symbol.iterator]=function(){return this};return a}
function v(a){var b="undefined"!=typeof Symbol&&Symbol.iterator&&a[Symbol.iterator];return b?b.call(a):{next:l(a)}}var w;if("function"==typeof Object.setPrototypeOf)w=Object.setPrototypeOf;else{var y;a:{var z={a:!0},A={};try{A.__proto__=z;y=A.a;break a}catch(a){}y=!1}w=y?function(a,b){a.__proto__=b;if(a.__proto__!==b)throw new TypeError(a+" is not extensible");return a}:null}var B=w;function C(){this.m=!1;this.j=null;this.v=void 0;this.h=1;this.u=this.C=0;this.l=null}
function D(a){if(a.m)throw new TypeError("Generator is already running");a.m=!0}C.prototype.o=function(a){this.v=a};C.prototype.s=function(a){this.l={D:a,F:!0};this.h=this.C||this.u};C.prototype.return=function(a){this.l={return:a};this.h=this.u};function E(a,b){a.h=3;return{value:b}}function F(a){this.g=new C;this.G=a}F.prototype.o=function(a){D(this.g);if(this.g.j)return G(this,this.g.j.next,a,this.g.o);this.g.o(a);return H(this)};
function I(a,b){D(a.g);var c=a.g.j;if(c)return G(a,"return"in c?c["return"]:function(d){return{value:d,done:!0}},b,a.g.return);a.g.return(b);return H(a)}F.prototype.s=function(a){D(this.g);if(this.g.j)return G(this,this.g.j["throw"],a,this.g.o);this.g.s(a);return H(this)};
function G(a,b,c,d){try{var e=b.call(a.g.j,c);if(!(e instanceof Object))throw new TypeError("Iterator result "+e+" is not an object");if(!e.done)return a.g.m=!1,e;var f=e.value}catch(g){return a.g.j=null,a.g.s(g),H(a)}a.g.j=null;d.call(a.g,f);return H(a)}function H(a){for(;a.g.h;)try{var b=a.G(a.g);if(b)return a.g.m=!1,{value:b.value,done:!1}}catch(c){a.g.v=void 0,a.g.s(c)}a.g.m=!1;if(a.g.l){b=a.g.l;a.g.l=null;if(b.F)throw b.D;return{value:b.return,done:!0}}return{value:void 0,done:!0}}
function J(a){this.next=function(b){return a.o(b)};this.throw=function(b){return a.s(b)};this.return=function(b){return I(a,b)};this[Symbol.iterator]=function(){return this}}function K(a,b){b=new J(new F(b));B&&a.prototype&&B(b,a.prototype);return b}function L(a,b){a instanceof String&&(a+="");var c=0,d=!1,e={next:function(){if(!d&&c<a.length){var f=c++;return{value:b(f,a[f]),done:!1}}d=!0;return{done:!0,value:void 0}}};e[Symbol.iterator]=function(){return e};return e}
r("Array.prototype.entries",function(a){return a?a:function(){return L(this,function(b,c){return[b,c]})}});
if("undefined"!==typeof Blob&&("undefined"===typeof FormData||!FormData.prototype.keys)){var M=function(a,b){for(var c=0;c<a.length;c++)b(a[c])},N=function(a){return a.replace(/\r?\n|\r/g,"\r\n")},O=function(a,b,c){if(b instanceof Blob){c=void 0!==c?String(c+""):"string"===typeof b.name?b.name:"blob";if(b.name!==c||"[object Blob]"===Object.prototype.toString.call(b))b=new File([b],c);return[String(a),b]}return[String(a),String(b)]},P=function(a,b){if(a.length<b)throw new TypeError(b+" argument required, but only "+
a.length+" present.");},Q="object"===typeof globalThis?globalThis:"object"===typeof window?window:"object"===typeof self?self:this,R=Q.FormData,S=Q.XMLHttpRequest&&Q.XMLHttpRequest.prototype.send,T=Q.Request&&Q.fetch,U=Q.navigator&&Q.navigator.sendBeacon,V=Q.Element&&Q.Element.prototype,W=Q.Symbol&&Symbol.toStringTag;W&&(Blob.prototype[W]||(Blob.prototype[W]="Blob"),"File"in Q&&!File.prototype[W]&&(File.prototype[W]="File"));try{new File([],"")}catch(a){Q.File=function(b,c,d){b=new Blob(b,d||{});
Object.defineProperties(b,{name:{value:c},lastModified:{value:+(d&&void 0!==d.lastModified?new Date(d.lastModified):new Date)},toString:{value:function(){return"[object File]"}}});W&&Object.defineProperty(b,W,{value:"File"});return b}}var escape=function(a){return a.replace(/\n/g,"%0A").replace(/\r/g,"%0D").replace(/"/g,"%22")},X=function(a){this.i=[];var b=this;a&&M(a.elements,function(c){if(c.name&&!c.disabled&&"submit"!==c.type&&"button"!==c.type&&!c.matches("form fieldset[disabled] *"))if("file"===
c.type){var d=c.files&&c.files.length?c.files:[new File([],"",{type:"application/octet-stream"})];M(d,function(e){b.append(c.name,e)})}else"select-multiple"===c.type||"select-one"===c.type?M(c.options,function(e){!e.disabled&&e.selected&&b.append(c.name,e.value)}):"checkbox"===c.type||"radio"===c.type?c.checked&&b.append(c.name,c.value):(d="textarea"===c.type?N(c.value):c.value,b.append(c.name,d))})};h=X.prototype;h.append=function(a,b,c){P(arguments,2);this.i.push(O(a,b,c))};h.delete=function(a){P(arguments,
1);var b=[];a=String(a);M(this.i,function(c){c[0]!==a&&b.push(c)});this.i=b};h.entries=function b(){var c,d=this;return K(b,function(e){1==e.h&&(c=0);if(3!=e.h)return c<d.i.length?e=E(e,d.i[c]):(e.h=0,e=void 0),e;c++;e.h=2})};h.forEach=function(b,c){P(arguments,1);for(var d=v(this),e=d.next();!e.done;e=d.next()){var f=v(e.value);e=f.next().value;f=f.next().value;b.call(c,f,e,this)}};h.get=function(b){P(arguments,1);var c=this.i;b=String(b);for(var d=0;d<c.length;d++)if(c[d][0]===b)return c[d][1];
return null};h.getAll=function(b){P(arguments,1);var c=[];b=String(b);M(this.i,function(d){d[0]===b&&c.push(d[1])});return c};h.has=function(b){P(arguments,1);b=String(b);for(var c=0;c<this.i.length;c++)if(this.i[c][0]===b)return!0;return!1};h.keys=function c(){var d=this,e,f,g,k,p;return K(c,function(t){1==t.h&&(e=v(d),f=e.next());if(3!=t.h){if(f.done){t.h=0;return}g=f.value;k=v(g);p=k.next().value;return E(t,p)}f=e.next();t.h=2})};h.set=function(c,d,e){P(arguments,2);c=String(c);var f=[],g=O(c,
d,e),k=!0;M(this.i,function(p){p[0]===c?k&&(k=!f.push(g)):f.push(p)});k&&f.push(g);this.i=f};h.values=function d(){var e=this,f,g,k,p,t;return K(d,function(x){1==x.h&&(f=v(e),g=f.next());if(3!=x.h){if(g.done){x.h=0;return}k=g.value;p=v(k);p.next();t=p.next().value;return E(x,t)}g=f.next();x.h=2})};X.prototype._asNative=function(){for(var d=new R,e=v(this),f=e.next();!f.done;f=e.next()){var g=v(f.value);f=g.next().value;g=g.next().value;d.append(f,g)}return d};X.prototype._blob=function(){var d="----formdata-polyfill-"+
Math.random(),e=[],f="--"+d+'\r\nContent-Disposition: form-data; name="';this.forEach(function(g,k){return"string"==typeof g?e.push(f+escape(N(k))+('"\r\n\r\n'+N(g)+"\r\n")):e.push(f+escape(N(k))+('"; filename="'+escape(g.name)+'"\r\nContent-Type: '+(g.type||"application/octet-stream")+"\r\n\r\n"),g,"\r\n")});e.push("--"+d+"--");return new Blob(e,{type:"multipart/form-data; boundary="+d})};X.prototype[Symbol.iterator]=function(){return this.entries()};X.prototype.toString=function(){return"[object FormData]"};
V&&!V.matches&&(V.matches=V.matchesSelector||V.mozMatchesSelector||V.msMatchesSelector||V.oMatchesSelector||V.webkitMatchesSelector||function(d){d=(this.document||this.ownerDocument).querySelectorAll(d);for(var e=d.length;0<=--e&&d.item(e)!==this;);return-1<e});W&&(X.prototype[W]="FormData");if(S){var Y=Q.XMLHttpRequest.prototype.setRequestHeader;Q.XMLHttpRequest.prototype.setRequestHeader=function(d,e){Y.call(this,d,e);"content-type"===d.toLowerCase()&&(this.B=!0)};Q.XMLHttpRequest.prototype.send=
function(d){d instanceof X?(d=d._blob(),this.B||this.setRequestHeader("Content-Type",d.type),S.call(this,d)):S.call(this,d)}}T&&(Q.fetch=function(d,e){e&&e.body&&e.body instanceof X&&(e.body=e.body._blob());return T.call(this,d,e)});U&&(Q.navigator.sendBeacon=function(d,e){e instanceof X&&(e=e._asNative());return U.call(this,d,e)});Q.FormData=X};})();

View file

@ -1,50 +0,0 @@
{
"name": "formdata-polyfill",
"version": "4.0.10",
"description": "HTML5 `FormData` for Browsers and Node.",
"type": "module",
"main": "formdata.min.js",
"scripts": {
"build": "node build.js",
"test": "node test/test-esm.js",
"test-wpt": "node --experimental-loader ./test/http-loader.js ./test/test-wpt-in-node.js",
"test-polyfill": "php -S localhost:4445 & open http://localhost:4445/test/test-polyfill.html"
},
"repository": {
"type": "git",
"url": "git+https://jimmywarting@github.com/jimmywarting/FormData.git"
},
"files": [
"esm.min.js",
"esm.min.d.ts",
"FormData.js",
"formdata-to-blob.js",
"formdata.min.js",
"README.md"
],
"engines": {
"node": ">=12.20.0"
},
"keywords": [
"formdata",
"fetch",
"node-fetch",
"html5",
"browser",
"polyfill"
],
"author": "Jimmy Wärting",
"license": "MIT",
"bugs": {
"url": "https://github.com/jimmywarting/FormData/issues"
},
"homepage": "https://github.com/jimmywarting/FormData#readme",
"dependencies": {
"fetch-blob": "^3.1.2"
},
"devDependencies": {
"@types/google-closure-compiler": "^0.0.19",
"@types/node": "^16.7.10",
"google-closure-compiler": "^20210808.0.0"
}
}

View file

@ -1,2 +0,0 @@
# node-domexception
An implementation of the DOMException class from NodeJS

View file

@ -1,41 +0,0 @@
# DOMException
An implementation of the DOMException class from NodeJS
This package implements the [`DOMException`](https://developer.mozilla.org/en-US/docs/Web/API/DOMException) class, from NodeJS itself.
NodeJS has DOMException built in, but it's not globally available, and you can't require/import it from anywhere.
The only possible way is to use some web-ish tools that have been introduced into NodeJS that throw an error, and to catch the constructor.
This way you will have the same class that NodeJS has, and you can check if an error is an instance of DOMException.
The instanceof check would not have worked with a custom class such as the DOMException provided by domenic, which is also much larger in size.
```js
import DOMException from 'node-domexception'
hello().catch(err => {
if (err instanceof DOMException) {
...
}
})
const e1 = new DOMException("Something went wrong", "BadThingsError");
console.assert(e1.name === "BadThingsError");
console.assert(e1.code === 0);
const e2 = new DOMException("Another exciting error message", "NoModificationAllowedError");
console.assert(e2.name === "NoModificationAllowedError");
console.assert(e2.code === 7);
console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10);
```
## APIs
This package exposes two flavors of the `DOMException` interface depending on the imported module.
### `domexception` module
This module default-exports the `DOMException` interface constructor.
### `domexception/webidl2js-wrapper` module
This module exports the `DOMException` [interface wrapper API](https://github.com/jsdom/webidl2js#for-interfaces) generated by [webidl2js](https://github.com/jsdom/webidl2js).

View file

@ -1,36 +0,0 @@
# DOMException
An implementation of the DOMException class from NodeJS
This package implements the [`DOMException`](https://developer.mozilla.org/en-US/docs/Web/API/DOMException) class, from NodeJS itself. (including the legacy codes)
NodeJS has DOMException built in, but it's not globally available, and you can't require/import it from anywhere.
The only possible way is to use some web-ish tools that have been introduced into NodeJS that throw an error, and to catch the constructor.
This way you will have the same class that NodeJS has, and you can check if an error is an instance of DOMException.
The instanceof check would not have worked with a custom class such as the DOMException provided by domenic, which is also much larger in size.
```js
import DOMException from 'node-domexception'
import { MessageChannel } from 'worker_threads'
async function hello() {
const port = new MessageChannel().port1
const ab = new ArrayBuffer()
port.postMessage(ab, [ab, ab])
}
hello().catch(err => {
console.assert(err.name === 'DataCloneError')
console.assert(err.code === 25)
console.assert(err instanceof DOMException)
})
const e1 = new DOMException('Something went wrong', 'BadThingsError')
console.assert(e1.name === 'BadThingsError')
console.assert(e1.code === 0)
const e2 = new DOMException('Another exciting error message', 'NoModificationAllowedError')
console.assert(e2.name === 'NoModificationAllowedError')
console.assert(e2.code === 7)
console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10)
```

View file

@ -1,36 +0,0 @@
# DOMException
An implementation of the DOMException class from NodeJS
This package implements the [`DOMException`](https://developer.mozilla.org/en-US/docs/Web/API/DOMException) class that comes from NodeJS itself. (including the legacy codes)
NodeJS has DOMException built in, but it's not globally available, and you can't require/import it from anywhere.
The only possible way is to use some web-ish tools that have been introduced into NodeJS that throw an error, and to catch the constructor.
This way you will have the same class that NodeJS has, and you can check if an error is an instance of DOMException.
The instanceof check would not have worked with a custom class such as the DOMException provided by domenic, which is also much larger in size.
```js
import DOMException from 'node-domexception'
import { MessageChannel } from 'worker_threads'
async function hello() {
const port = new MessageChannel().port1
const ab = new ArrayBuffer()
port.postMessage(ab, [ab, ab])
}
hello().catch(err => {
console.assert(err.name === 'DataCloneError')
console.assert(err.code === 25)
console.assert(err instanceof DOMException)
})
const e1 = new DOMException('Something went wrong', 'BadThingsError')
console.assert(e1.name === 'BadThingsError')
console.assert(e1.code === 0)
const e2 = new DOMException('Another exciting error message', 'NoModificationAllowedError')
console.assert(e2.name === 'NoModificationAllowedError')
console.assert(e2.code === 7)
console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10)
```

View file

@ -1,36 +0,0 @@
# DOMException
An implementation of the DOMException class from NodeJS
This package exposes the [`DOMException`](https://developer.mozilla.org/en-US/docs/Web/API/DOMException) class that comes from NodeJS itself. (including all of the deprecated legacy codes)
NodeJS has it built in, but it's not globally available, and you can't require/import it from anywhere.
The only possible way is to use some web-ish tools that have been introduced into NodeJS that throw an error, and to catch the constructor.
This way you will have the same class that NodeJS has, and you can check if an error is an instance of DOMException.
The instanceof check would not have worked with a custom class such as the DOMException provided by domenic, which is also much larger in size since it has to re-construct the whole class from the ground up.
```js
import DOMException from 'node-domexception'
import { MessageChannel } from 'worker_threads'
async function hello() {
const port = new MessageChannel().port1
const ab = new ArrayBuffer()
port.postMessage(ab, [ab, ab])
}
hello().catch(err => {
console.assert(err.name === 'DataCloneError')
console.assert(err.code === 25)
console.assert(err instanceof DOMException)
})
const e1 = new DOMException('Something went wrong', 'BadThingsError')
console.assert(e1.name === 'BadThingsError')
console.assert(e1.code === 0)
const e2 = new DOMException('Another exciting error message', 'NoModificationAllowedError')
console.assert(e2.name === 'NoModificationAllowedError')
console.assert(e2.code === 7)
console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10)
```

View file

@ -1,38 +0,0 @@
# DOMException
An implementation of the DOMException class from NodeJS
This package exposes the [`DOMException`](https://developer.mozilla.org/en-US/docs/Web/API/DOMException) class that comes from NodeJS itself. (including all of the deprecated legacy codes)
NodeJS has it built in, but it's not globally available, and you can't require/import it from anywhere.
The only possible way is to use some web-ish tools that have been introduced into NodeJS that throw an error, and to catch the constructor.
This way you will have the same class that NodeJS has, and you can check if an error is an instance of DOMException.
The instanceof check would not have worked with a custom class such as the DOMException provided by domenic, which is also much larger in size since it has to re-construct the whole class from the ground up.
(plz don't depend on this package in any environment other than node >=10.5)
```js
import DOMException from 'node-domexception'
import { MessageChannel } from 'worker_threads'
async function hello() {
const port = new MessageChannel().port1
const ab = new ArrayBuffer()
port.postMessage(ab, [ab, ab])
}
hello().catch(err => {
console.assert(err.name === 'DataCloneError')
console.assert(err.code === 25)
console.assert(err instanceof DOMException)
})
const e1 = new DOMException('Something went wrong', 'BadThingsError')
console.assert(e1.name === 'BadThingsError')
console.assert(e1.code === 0)
const e2 = new DOMException('Another exciting error message', 'NoModificationAllowedError')
console.assert(e2.name === 'NoModificationAllowedError')
console.assert(e2.code === 7)
console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10)
```

View file

@ -1,38 +0,0 @@
# DOMException
An implementation of the DOMException class from NodeJS
This package exposes the [`DOMException`](https://developer.mozilla.org/en-US/docs/Web/API/DOMException) class that comes from NodeJS itself. (including all of the deprecated legacy codes)
NodeJS has it built in, but it's not globally available, and you can't require/import it from anywhere.
The only possible way is to use some web-ish tools that have been introduced into NodeJS that throw an error, and to catch the constructor.
This way you will have the same class that NodeJS has, and you can check if an error is an instance of DOMException.
The instanceof check would not have worked with a custom class such as the DOMException provided by domenic, which is also much larger in size since it has to re-construct the whole class from the ground up.
(plz don't depend on this package in any environment other than node >=10.5)
```js
import DOMException from 'node-domexception'
import { MessageChannel } from 'worker_threads'
async function hello() {
const port = new MessageChannel().port1
const ab = new ArrayBuffer()
port.postMessage(ab, [ab, ab])
}
hello().catch(err => {
console.assert(err.name === 'DataCloneError')
console.assert(err.code === 25)
console.assert(err instanceof DOMException)
})
const e1 = new DOMException('Something went wrong', 'BadThingsError')
console.assert(e1.name === 'BadThingsError')
console.assert(e1.code === 0)
const e2 = new DOMException('Another exciting error message', 'NoModificationAllowedError')
console.assert(e2.name === 'NoModificationAllowedError')
console.assert(e2.code === 7)
console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10)
```

View file

@ -1,8 +0,0 @@
const { MessageChannel } = require('worker_threads')
if (!globalThis.DOMException) {
const port = new MessageChannel().port1
const ab = new ArrayBuffer()
try { port.postMessage(ab, [ab, ab]) }
catch (err) { globalThis.DOMException = err.constructor }
}

View file

@ -1,9 +0,0 @@
if (!globalThis.DOMException) {
const { MessageChannel } = require('worker_threads')
const port = new MessageChannel().port1
const ab = new ArrayBuffer()
try { port.postMessage(ab, [ab, ab]) }
catch (err) { globalThis.DOMException = err.constructor }
}
module.exports

View file

@ -1,9 +0,0 @@
if (!globalThis.DOMException) {
const { MessageChannel } = require('worker_threads')
const port = new MessageChannel().port1
const ab = new ArrayBuffer()
try { port.postMessage(ab, [ab, ab]) }
catch (err) { globalThis.DOMException = err.constructor }
}
module.exports = globalThis.DOMException

View file

@ -1,11 +0,0 @@
/*! blob-to-buffer. MIT License. Jimmy Wärting <https://jimmy.warting.se> */
if (!globalThis.DOMException) {
const { MessageChannel } = require('worker_threads')
const port = new MessageChannel().port1
const ab = new ArrayBuffer()
try { port.postMessage(ab, [ab, ab]) }
catch (err) { globalThis.DOMException = err.constructor }
}
module.exports = globalThis.DOMException

View file

@ -1,11 +0,0 @@
/*! blob-to-buffer. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if (!globalThis.DOMException) {
const { MessageChannel } = require('worker_threads')
const port = new MessageChannel().port1
const ab = new ArrayBuffer()
try { port.postMessage(ab, [ab, ab]) }
catch (err) { globalThis.DOMException = err.constructor }
}
module.exports = globalThis.DOMException

View file

@ -1,15 +0,0 @@
/*! blob-to-buffer. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if (!globalThis.DOMException) {
var { MessageChannel } = require('worker_threads'),
port = new MessageChannel().port1,
ab = new ArrayBuffer()
try { port.postMessage(ab, [ab, ab]) }
catch (err) {
err.constructor.name === 'DOMException' && (
globalThis.DOMException = err.constructor
)
}
}
module.exports = globalThis.DOMException

View file

@ -1,15 +0,0 @@
/*! blob-to-buffer. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if (!globalThis.DOMException) {
const { MessageChannel } = require('worker_threads'),
port = new MessageChannel().port1,
ab = new ArrayBuffer()
try { port.postMessage(ab, [ab, ab]) }
catch (err) {
err.constructor.name === 'DOMException' && (
globalThis.DOMException = err.constructor
)
}
}
module.exports = globalThis.DOMException

View file

@ -1,23 +0,0 @@
/*! blob-to-buffer. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if (!globalThis.DOMException) {
const { MessageChannel } = require('worker_threads'),
port = new MessageChannel().port1,
ab = new ArrayBuffer()
try { port.postMessage(ab, [ab, ab]) }
catch (err) {
err.constructor.name === 'DOMException' && (
globalThis.DOMException = err.constructor
)
}
}
module.exports = globalThis.DOMException
const e1 = new DOMException("Something went wrong", "BadThingsError");
console.assert(e1.name === "BadThingsError");
console.assert(e1.code === 0);
const e2 = new DOMException("Another exciting error message", "NoModificationAllowedError");
console.assert(e2.name === "NoModificationAllowedError");
console.assert(e2.code === 7);

View file

@ -1,23 +0,0 @@
/*! blob-to-buffer. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if (!globalThis.DOMException) {
const { MessageChannel } = require('worker_threads'),
port = new MessageChannel().port1,
ab = new ArrayBuffer()
try { port.postMessage(ab, [ab, ab]) }
catch (err) {
err.constructor.name === 'DOMException' && (
globalThis.DOMException = err.constructor
)
}
}
module.exports = globalThis.DOMException
const e1 = new DOMException("Something went wrong", "BadThingsError");
console.assert(e1.name === "BadThingsError");
console.assert(e1.code === 0);
const e2 = new DOMException("Another exciting error message", "NoModificationAllowedError");
console.assert(e2.name === "NoModificationAllowedError");
console.assert(e2.code === 2);

View file

@ -1,15 +0,0 @@
/*! blob-to-buffer. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if (!globalThis.DOMException) {
const { MessageChannel } = require('worker_threads'),
port = new MessageChannel().port1,
ab = new ArrayBuffer()
try { port.postMessage(ab, [ab, ab]) }
catch (err) {
err.constructor.name === 'DOMException' && (
globalThis.DOMException = err.constructor
)
}
}
module.exports = globalThis.DOMException

View file

@ -1,16 +0,0 @@
/*! blob-to-buffer. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if (!globalThis.DOMException) {
const { MessageChannel } = require('worker_threads'),
port = new MessageChannel().port1,
ab = new ArrayBuffer()
try { port.postMessage(ab, [ab, ab]) }
catch (err) {
console.log(err.code)
err.constructor.name === 'DOMException' && (
globalThis.DOMException = err.constructor
)
}
}
module.exports = globalThis.DOMException

View file

@ -1,16 +0,0 @@
/*! blob-to-buffer. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if (!globalThis.DOMException) {
const { MessageChannel } = require('worker_threads'),
port = new MessageChannel().port1,
ab = new ArrayBuffer()
try { port.postMessage(ab, [ab, ab]) }
catch (err) {
console.log(err.code, err.name, err.message)
err.constructor.name === 'DOMException' && (
globalThis.DOMException = err.constructor
)
}
}
module.exports = globalThis.DOMException

View file

@ -1,16 +0,0 @@
/*! node-DOMException. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if (!globalThis.DOMException) {
const { MessageChannel } = require('worker_threads'),
port = new MessageChannel().port1,
ab = new ArrayBuffer()
try { port.postMessage(ab, [ab, ab]) }
catch (err) {
console.log(err.code, err.name, err.message)
err.constructor.name === 'DOMException' && (
globalThis.DOMException = err.constructor
)
}
}
module.exports = globalThis.DOMException

View file

@ -1,17 +0,0 @@
/*! node-DOMException. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if (!globalThis.DOMException) {
try {
const { MessageChannel } = require('worker_threads'),
port = new MessageChannel().port1,
ab = new ArrayBuffer()
port.postMessage(ab, [ab, ab])
catch (err) {
console.log(err.code, err.name, err.message)
err.constructor.name === 'DOMException' && (
globalThis.DOMException = err.constructor
)
}
}
module.exports = globalThis.DOMException

View file

@ -1,17 +0,0 @@
/*! node-DOMException. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if (!globalThis.DOMException) {
try {
const { MessageChannel } = require('worker_threads'),
port = new MessageChannel().port1,
ab = new ArrayBuffer()
port.postMessage(ab, [ab, ab])
} catch (err) {
console.log(err.code, err.name, err.message)
err.constructor.name === 'DOMException' && (
globalThis.DOMException = err.constructor
)
}
}
module.exports = globalThis.DOMException

View file

@ -1,16 +0,0 @@
/*! node-DOMException. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if (!globalThis.DOMException) {
try {
const { MessageChannel } = require('worker_threads'),
port = new MessageChannel().port1,
ab = new ArrayBuffer()
port.postMessage(ab, [ab, ab])
} catch (err) {
err.constructor.name === 'DOMException' && (
globalThis.DOMException = err.constructor
)
}
}
module.exports = globalThis.DOMException

View file

@ -1,16 +0,0 @@
/*! node-domexception. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if (!globalThis.DOMException) {
try {
const { MessageChannel } = require('worker_threads'),
port = new MessageChannel().port1,
ab = new ArrayBuffer()
port.postMessage(ab, [ab, ab])
} catch (err) {
err.constructor.name === 'DOMException' && (
globalThis.DOMException = err.constructor
)
}
}
module.exports = globalThis.DOMException

View file

@ -1,41 +0,0 @@
/*! node-domexception. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if (!globalThis.DOMException) {
try {
const { MessageChannel } = require('worker_threads'),
port = new MessageChannel().port1,
ab = new ArrayBuffer()
port.postMessage(ab, [ab, ab])
} catch (err) {
err.constructor.name === 'DOMException' && (
globalThis.DOMException = err.constructor
)
}
}
module.exports = globalThis.DOMException
const { MessageChannel } = require('worker_threads')
async function hello() {
const port = new MessageChannel().port1
const ab = new ArrayBuffer()
port.postMessage(ab, [ab, ab])
}
hello().catch(err => {
console.assert(err.name === 'DataCloneError')
console.assert(err.code === 25)
console.assert(err instanceof DOMException)
})
const e1 = new DOMException('Something went wrong', 'BadThingsError')
console.assert(e1.name === 'BadThingsError')
console.assert(e1.code === 0)
const e2 = new DOMException('Another exciting error message', 'NoModificationAllowedError')
console.assert(e2.name === 'NoModificationAllowedError')
console.assert(e2.code === 7)
console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10)

View file

@ -1,41 +0,0 @@
/*! node-domexception. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if (!globalThis.DOMException) {
try {
const { MessageChannel } = require('worker_threads'),
port = new MessageChannel().port1,
ab = new ArrayBuffer()
port.postMessage(ab, [ab, ab])
} catch (err) {
err.constructor.name === 'DOMException' && (
globalThis.DOMException = err.constructor
)
}
}
module.exports = globalThis.DOMException
const { MessageChannel } = require('worker_threads')
async function hello() {
const port = new MessageChannel().port1
const ab = new ArrayBuffer()
port.postMessage(ab, [ab, ab])
}
hello().catch(err => {
console.assert(err.name === 'DataCloneError')
console.assert(err.code === 21)
console.assert(err instanceof DOMException)
})
const e1 = new DOMException('Something went wrong', 'BadThingsError')
console.assert(e1.name === 'BadThingsError')
console.assert(e1.code === 0)
const e2 = new DOMException('Another exciting error message', 'NoModificationAllowedError')
console.assert(e2.name === 'NoModificationAllowedError')
console.assert(e2.code === 7)
console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10)

View file

@ -1,16 +0,0 @@
/*! node-domexception. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if (!globalThis.DOMException) {
try {
const { MessageChannel } = require('worker_threads'),
port = new MessageChannel().port1,
ab = new ArrayBuffer()
port.postMessage(ab, [ab, ab])
} catch (err) {
err.constructor.name === 'DOMException' && (
globalThis.DOMException = err.constructor
)
}
}
module.exports = globalThis.DOMException

View file

@ -1,19 +0,0 @@
{
"name": "domexception",
"version": "1.0.0",
"description": "An implementation of the DOMException class from NodeJS",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
"type": "git",
"url": "git+https://github.com/jimmywarting/node-domexception.git"
},
"author": "",
"license": "ISC",
"bugs": {
"url": "https://github.com/jimmywarting/node-domexception/issues"
},
"homepage": "https://github.com/jimmywarting/node-domexception#readme"
}

View file

@ -1,16 +0,0 @@
{
"name": "node-domexception",
"version": "1.0.0",
"description": "An implementation of the DOMException class from NodeJS",
"main": "index.js",
"repository": {
"type": "git",
"url": "git+https://github.com/jimmywarting/node-domexception.git"
},
"author": "Jimmy Wärting",
"license": "MIT",
"bugs": {
"url": "https://github.com/jimmywarting/node-domexception/issues"
},
"homepage": "https://github.com/jimmywarting/node-domexception#readme"
}

View file

@ -1,19 +0,0 @@
{
"name": "node-domexception",
"version": "1.0.0",
"description": "An implementation of the DOMException class from NodeJS",
"main": "index.js",
"repository": {
"type": "git",
"url": "git+https://github.com/jimmywarting/node-domexception.git"
},
"engines": {
"node": ">=10.5.0"
},
"author": "Jimmy Wärting",
"license": "MIT",
"bugs": {
"url": "https://github.com/jimmywarting/node-domexception/issues"
},
"homepage": "https://github.com/jimmywarting/node-domexception#readme"
}

View file

@ -1,25 +0,0 @@
{
"name": "node-domexception",
"version": "1.0.0",
"description": "An implementation of the DOMException class from NodeJS",
"main": "index.js",
"repository": {
"type": "git",
"url": "git+https://github.com/jimmywarting/node-domexception.git"
},
"engines": {
"node": ">=10.5.0"
},
"author": "Jimmy Wärting",
"license": "MIT",
"bugs": {
"url": "https://github.com/jimmywarting/node-domexception/issues"
},
"homepage": "https://github.com/jimmywarting/node-domexception#readme",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/jimmywarting"
}
]
}

View file

@ -1,25 +0,0 @@
{
"name": "node-domexception",
"version": "1.0.0",
"description": "An implementation of the DOMException class from NodeJS",
"main": "index.js",
"repository": {
"type": "git",
"url": "git+https://github.com/jimmywarting/node-domexception.git"
},
"engines": {
"node": ">=10.5.0"
},
"author": "Jimmy Wärting",
"license": "MIT",
"bugs": {
"url": "https://github.com/jimmywarting/node-domexception/issues"
},
"homepage": "https://github.com/jimmywarting/node-domexception#readme",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/jimmywarting"
}
]
}

View file

@ -1,29 +0,0 @@
{
"name": "node-domexception",
"version": "1.0.0",
"description": "An implementation of the DOMException class from NodeJS",
"main": "index.js",
"repository": {
"type": "git",
"url": "git+https://github.com/jimmywarting/node-domexception.git"
},
"engines": {
"node": ">=10.5.0"
},
"author": "Jimmy Wärting",
"license": "MIT",
"bugs": {
"url": "https://github.com/jimmywarting/node-domexception/issues"
},
"homepage": "https://github.com/jimmywarting/node-domexception#readme",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/jimmywarting"
},
{
"type": "github",
"url": "https://paypal.me/jimmywarting"
}
]
}

View file

@ -1,29 +0,0 @@
{
"name": "node-domexception",
"version": "1.0.0",
"description": "An implementation of the DOMException class from NodeJS",
"main": "index.js",
"repository": {
"type": "git",
"url": "git+https://github.com/jimmywarting/node-domexception.git"
},
"engines": {
"node": ">=10.5.0"
},
"author": "Jimmy Wärting",
"license": "MIT",
"bugs": {
"url": "https://github.com/jimmywarting/node-domexception/issues"
},
"homepage": "https://github.com/jimmywarting/node-domexception#readme",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/jimmywarting"
},
{
"type": "github",
"url": "https://paypal.me/jimmywarting"
}
]
}

View file

@ -1,3 +0,0 @@
require('./index.js')
console.log(DOMException.INDEX_SIZE_ERR)

View file

@ -1,3 +0,0 @@
const e = require('./index.js')
console.log(e.INDEX_SIZE_ERR)

View file

@ -1,46 +0,0 @@
# DOMException
An implementation of the DOMException class from NodeJS
NodeJS has DOMException built in, but it's not globally available, and you can't require/import it from anywhere.
This package exposes the [`DOMException`](https://developer.mozilla.org/en-US/docs/Web/API/DOMException) class that comes from NodeJS itself. (including all of the legacy codes)
<sub>(plz don't depend on this package in any environment other than node >=10.5)</sub>
```js
import DOMException from 'node-domexception'
import { MessageChannel } from 'worker_threads'
async function hello() {
const port = new MessageChannel().port1
const ab = new ArrayBuffer()
port.postMessage(ab, [ab, ab])
}
hello().catch(err => {
console.assert(err.name === 'DataCloneError')
console.assert(err.code === 25)
console.assert(err instanceof DOMException)
})
const e1 = new DOMException('Something went wrong', 'BadThingsError')
console.assert(e1.name === 'BadThingsError')
console.assert(e1.code === 0)
const e2 = new DOMException('Another exciting error message', 'NoModificationAllowedError')
console.assert(e2.name === 'NoModificationAllowedError')
console.assert(e2.code === 7)
console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10)
```
# Background
The only possible way is to use some web-ish tools that have been introduced into NodeJS that throw a DOMException, and to catch the constructor. This is exactly what this package does for you and exposes it.<br>
This way you will have the same class that NodeJS has, and you can check if an error is an instance of DOMException.<br>
The instanceof check would not have worked with a custom class such as the DOMException provided by domenic, which is also much larger in size since it has to re-construct the whole class from the ground up.
The DOMException is used in many places such as the Fetch API, File & Blobs, PostMessaging and more.<br>
Why they decided to call it **DOM**, I don't know.
Please consider sponsoring if you find this helpful

View file

@ -1,16 +0,0 @@
/*! node-domexception. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if (!globalThis.DOMException) {
try {
const { MessageChannel } = require('worker_threads'),
port = new MessageChannel().port1,
ab = new ArrayBuffer()
port.postMessage(ab, [ab, ab])
} catch (err) {
err.constructor.name === 'DOMException' && (
globalThis.DOMException = err.constructor
)
}
}
module.exports = globalThis.DOMException

View file

@ -1,29 +0,0 @@
{
"name": "node-domexception",
"version": "1.0.0",
"description": "An implementation of the DOMException class from NodeJS",
"main": "index.js",
"repository": {
"type": "git",
"url": "git+https://github.com/jimmywarting/node-domexception.git"
},
"engines": {
"node": ">=10.5.0"
},
"author": "Jimmy Wärting",
"license": "MIT",
"bugs": {
"url": "https://github.com/jimmywarting/node-domexception/issues"
},
"homepage": "https://github.com/jimmywarting/node-domexception#readme",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/jimmywarting"
},
{
"type": "github",
"url": "https://paypal.me/jimmywarting"
}
]
}

View file

@ -1,219 +0,0 @@
/// <reference types="node" />
/// <reference lib="dom" />
import {RequestOptions} from 'http';
import {FormData} from 'formdata-polyfill/esm.min.js';
import {
Blob,
blobFrom,
blobFromSync,
File,
fileFrom,
fileFromSync
} from 'fetch-blob/from.js';
type AbortSignal = {
readonly aborted: boolean;
addEventListener: (type: 'abort', listener: (this: AbortSignal) => void) => void;
removeEventListener: (type: 'abort', listener: (this: AbortSignal) => void) => void;
};
export type HeadersInit = Headers | Record<string, string> | Iterable<readonly [string, string]> | Iterable<Iterable<string>>;
export {
FormData,
Blob,
blobFrom,
blobFromSync,
File,
fileFrom,
fileFromSync
};
/**
* This Fetch API interface allows you to perform various actions on HTTP request and response headers.
* These actions include retrieving, setting, adding to, and removing.
* A Headers object has an associated header list, which is initially empty and consists of zero or more name and value pairs.
* You can add to this using methods like append() (see Examples.)
* In all methods of this interface, header names are matched by case-insensitive byte sequence.
* */
export class Headers {
constructor(init?: HeadersInit);
append(name: string, value: string): void;
delete(name: string): void;
get(name: string): string | null;
has(name: string): boolean;
set(name: string, value: string): void;
forEach(
callbackfn: (value: string, key: string, parent: Headers) => void,
thisArg?: any
): void;
[Symbol.iterator](): IterableIterator<[string, string]>;
/**
* Returns an iterator allowing to go through all key/value pairs contained in this object.
*/
entries(): IterableIterator<[string, string]>;
/**
* Returns an iterator allowing to go through all keys of the key/value pairs contained in this object.
*/
keys(): IterableIterator<string>;
/**
* Returns an iterator allowing to go through all values of the key/value pairs contained in this object.
*/
values(): IterableIterator<string>;
/** Node-fetch extension */
raw(): Record<string, string[]>;
}
export interface RequestInit {
/**
* A BodyInit object or null to set request's body.
*/
body?: BodyInit | null;
/**
* A Headers object, an object literal, or an array of two-item arrays to set request's headers.
*/
headers?: HeadersInit;
/**
* A string to set request's method.
*/
method?: string;
/**
* A string indicating whether request follows redirects, results in an error upon encountering a redirect, or returns the redirect (in an opaque fashion). Sets request's redirect.
*/
redirect?: RequestRedirect;
/**
* An AbortSignal to set request's signal.
*/
signal?: AbortSignal | null;
/**
* A string whose value is a same-origin URL, "about:client", or the empty string, to set request's referrer.
*/
referrer?: string;
/**
* A referrer policy to set request's referrerPolicy.
*/
referrerPolicy?: ReferrerPolicy;
// Node-fetch extensions to the whatwg/fetch spec
agent?: RequestOptions['agent'] | ((parsedUrl: URL) => RequestOptions['agent']);
compress?: boolean;
counter?: number;
follow?: number;
hostname?: string;
port?: number;
protocol?: string;
size?: number;
highWaterMark?: number;
insecureHTTPParser?: boolean;
}
export interface ResponseInit {
headers?: HeadersInit;
status?: number;
statusText?: string;
}
export type BodyInit =
| Blob
| Buffer
| URLSearchParams
| FormData
| NodeJS.ReadableStream
| string;
declare class BodyMixin {
constructor(body?: BodyInit, options?: {size?: number});
readonly body: NodeJS.ReadableStream | null;
readonly bodyUsed: boolean;
readonly size: number;
/** @deprecated Use `body.arrayBuffer()` instead. */
buffer(): Promise<Buffer>;
arrayBuffer(): Promise<ArrayBuffer>;
formData(): Promise<FormData>;
blob(): Promise<Blob>;
json(): Promise<unknown>;
text(): Promise<string>;
}
// `Body` must not be exported as a class since it's not exported from the JavaScript code.
export interface Body extends Pick<BodyMixin, keyof BodyMixin> {}
export type RequestRedirect = 'error' | 'follow' | 'manual';
export type ReferrerPolicy = '' | 'no-referrer' | 'no-referrer-when-downgrade' | 'same-origin' | 'origin' | 'strict-origin' | 'origin-when-cross-origin' | 'strict-origin-when-cross-origin' | 'unsafe-url';
export type RequestInfo = string | Request;
export class Request extends BodyMixin {
constructor(input: RequestInfo, init?: RequestInit);
/**
* Returns a Headers object consisting of the headers associated with request. Note that headers added in the network layer by the user agent will not be accounted for in this object, e.g., the "Host" header.
*/
readonly headers: Headers;
/**
* Returns request's HTTP method, which is "GET" by default.
*/
readonly method: string;
/**
* Returns the redirect mode associated with request, which is a string indicating how redirects for the request will be handled during fetching. A request will follow redirects by default.
*/
readonly redirect: RequestRedirect;
/**
* Returns the signal associated with request, which is an AbortSignal object indicating whether or not request has been aborted, and its abort event handler.
*/
readonly signal: AbortSignal;
/**
* Returns the URL of request as a string.
*/
readonly url: string;
/**
* A string whose value is a same-origin URL, "about:client", or the empty string, to set request's referrer.
*/
readonly referrer: string;
/**
* A referrer policy to set request's referrerPolicy.
*/
readonly referrerPolicy: ReferrerPolicy;
clone(): Request;
}
type ResponseType = 'basic' | 'cors' | 'default' | 'error' | 'opaque' | 'opaqueredirect';
export class Response extends BodyMixin {
constructor(body?: BodyInit | null, init?: ResponseInit);
readonly headers: Headers;
readonly ok: boolean;
readonly redirected: boolean;
readonly status: number;
readonly statusText: string;
readonly type: ResponseType;
readonly url: string;
clone(): Response;
static error(): Response;
static redirect(url: string, status?: number): Response;
}
export class FetchError extends Error {
constructor(message: string, type: string, systemError?: Record<string, unknown>);
name: 'FetchError';
[Symbol.toStringTag]: 'FetchError';
type: string;
code?: string;
errno?: string;
}
export class AbortError extends Error {
type: string;
name: 'AbortError';
[Symbol.toStringTag]: 'AbortError';
}
export function isRedirect(code: number): boolean;
export default function fetch(url: RequestInfo, init?: RequestInit): Promise<Response>;
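A short usage sketch of the `fetch` and `Headers` declarations above (the URL and header values are just examples):
```js
import fetch, { Headers } from 'node-fetch'

const headers = new Headers({ accept: 'application/json' })
headers.append('x-example', 'demo')  // header names are matched case-insensitively
console.log(headers.get('Accept'))   // 'application/json'

const res = await fetch('https://example.com/', { headers })
console.log(res.ok, res.status)
console.log(res.headers.raw())       // node-fetch extension: Record<string, string[]>
console.log(await res.text())
```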

2
node_modules/node-fetch/LICENSE.md generated vendored
View file

@ -1,6 +1,6 @@
The MIT License (MIT) The MIT License (MIT)
Copyright (c) 2016 - 2020 Node Fetch Team Copyright (c) 2016 David Frank
Permission is hereby granted, free of charge, to any person obtaining a copy Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal of this software and associated documentation files (the "Software"), to deal

739
node_modules/node-fetch/README.md generated vendored

File diff suppressed because it is too large

25
node_modules/node-fetch/browser.js generated vendored Normal file
View file

@ -0,0 +1,25 @@
"use strict";
// ref: https://github.com/tc39/proposal-global
var getGlobal = function () {
// the only reliable means to get the global object is
// `Function('return this')()`
// However, this causes CSP violations in Chrome apps.
if (typeof self !== 'undefined') { return self; }
if (typeof window !== 'undefined') { return window; }
if (typeof global !== 'undefined') { return global; }
throw new Error('unable to locate global object');
}
var global = getGlobal();
module.exports = exports = global.fetch;
// Needed for TypeScript and Webpack.
if (global.fetch) {
exports.default = global.fetch.bind(global);
}
exports.Headers = global.Headers;
exports.Request = global.Request;
exports.Response = global.Response;

1688
node_modules/node-fetch/lib/index.es.js generated vendored Normal file

File diff suppressed because it is too large

1697
node_modules/node-fetch/lib/index.js generated vendored Normal file

File diff suppressed because it is too large

1686
node_modules/node-fetch/lib/index.mjs generated vendored Normal file

File diff suppressed because it is too large

159
node_modules/node-fetch/package.json generated vendored
View file

@ -1,131 +1,76 @@
Removed (node-fetch 3.2.6):
{
  "name": "node-fetch",
  "version": "3.2.6",
  "description": "A light-weight module that brings Fetch API to node.js",
  "main": "./src/index.js",
  "sideEffects": false,
  "type": "module",
  "files": [
    "src",
    "@types/index.d.ts"
  ],
  "types": "./@types/index.d.ts",
  "engines": {
    "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
  },
  "scripts": {
    "test": "mocha",
    "coverage": "c8 report --reporter=text-lcov | coveralls",
    "test-types": "tsd",
    "lint": "xo"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/node-fetch/node-fetch.git"
  },
  "keywords": [
    "fetch",
    "http",
    "promise",
    "request",
    "curl",
    "wget",
    "xhr",
    "whatwg"
  ],
  "author": "David Frank",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/node-fetch/node-fetch/issues"
  },
  "homepage": "https://github.com/node-fetch/node-fetch",
  "funding": {
    "type": "opencollective",
    "url": "https://opencollective.com/node-fetch"
  },
  "devDependencies": {
    "abort-controller": "^3.0.0",
    "abortcontroller-polyfill": "^1.7.1",
    "busboy": "^1.4.0",
    "c8": "^7.7.2",
    "chai": "^4.3.4",
    "chai-as-promised": "^7.1.1",
    "chai-iterator": "^3.0.2",
    "chai-string": "^1.5.0",
    "coveralls": "^3.1.0",
    "form-data": "^4.0.0",
    "formdata-node": "^4.2.4",
    "mocha": "^9.1.3",
    "p-timeout": "^5.0.0",
    "stream-consumers": "^1.0.1",
    "tsd": "^0.14.0",
    "xo": "^0.39.1"
  },
  "dependencies": {
    "data-uri-to-buffer": "^4.0.0",
    "fetch-blob": "^3.1.4",
    "formdata-polyfill": "^4.0.10"
  },
  "tsd": {
    "cwd": "@types",
    "compilerOptions": {
      "esModuleInterop": true
    }
  },
  "xo": {
    "envs": [
      "node",
      "browser"
    ],
    "ignores": [
      "example.js"
    ],
    "rules": {
      "complexity": 0,
      "import/extensions": 0,
      "import/no-useless-path-segments": 0,
      "import/no-anonymous-default-export": 0,
      "import/no-named-as-default": 0,
      "unicorn/import-index": 0,
      "unicorn/no-array-reduce": 0,
      "unicorn/prefer-node-protocol": 0,
      "unicorn/numeric-separators-style": 0,
      "unicorn/explicit-length-check": 0,
      "capitalized-comments": 0,
      "node/no-unsupported-features/es-syntax": 0,
      "@typescript-eslint/member-ordering": 0
    },
    "overrides": [
      {
        "files": "test/**/*.js",
        "envs": [
          "node",
          "mocha"
        ],
        "rules": {
          "max-nested-callbacks": 0,
          "no-unused-expressions": 0,
          "no-warning-comments": 0,
          "new-cap": 0,
          "guard-for-in": 0,
          "unicorn/no-array-for-each": 0,
          "unicorn/prevent-abbreviations": 0,
          "promise/prefer-await-to-then": 0,
          "ava/no-import-test-files": 0
        }
      }
    ]
  },
  "runkitExampleFilename": "example.js",
  "release": {
    "branches": [
      "+([0-9]).x",
      "main",
      "next",
      {
        "name": "beta",
        "prerelease": true
      }
    ]
  }
}
Added (node-fetch 2.6.7):
{
  "name": "node-fetch",
  "version": "2.6.7",
  "description": "A light-weight module that brings window.fetch to node.js",
  "main": "lib/index.js",
  "browser": "./browser.js",
  "module": "lib/index.mjs",
  "files": [
    "lib/index.js",
    "lib/index.mjs",
    "lib/index.es.js",
    "browser.js"
  ],
  "engines": {
    "node": "4.x || >=6.0.0"
  },
  "scripts": {
    "build": "cross-env BABEL_ENV=rollup rollup -c",
    "prepare": "npm run build",
    "test": "cross-env BABEL_ENV=test mocha --require babel-register --throw-deprecation test/test.js",
    "report": "cross-env BABEL_ENV=coverage nyc --reporter lcov --reporter text mocha -R spec test/test.js",
    "coverage": "cross-env BABEL_ENV=coverage nyc --reporter json --reporter text mocha -R spec test/test.js && codecov -f coverage/coverage-final.json"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/bitinn/node-fetch.git"
  },
  "keywords": [
    "fetch",
    "http",
    "promise"
  ],
  "author": "David Frank",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/bitinn/node-fetch/issues"
  },
  "homepage": "https://github.com/bitinn/node-fetch",
  "dependencies": {
    "whatwg-url": "^5.0.0"
  },
  "peerDependencies": {
    "encoding": "^0.1.0"
  },
  "peerDependenciesMeta": {
    "encoding": {
      "optional": true
    }
  },
  "devDependencies": {
    "@ungap/url-search-params": "^0.1.2",
    "abort-controller": "^1.1.0",
    "abortcontroller-polyfill": "^1.3.0",
    "babel-core": "^6.26.3",
    "babel-plugin-istanbul": "^4.1.6",
    "babel-preset-env": "^1.6.1",
    "babel-register": "^6.16.3",
    "chai": "^3.5.0",
    "chai-as-promised": "^7.1.1",
    "chai-iterator": "^1.1.1",
    "chai-string": "~1.3.0",
    "codecov": "3.3.0",
    "cross-env": "^5.2.0",
    "form-data": "^2.3.3",
    "is-builtin-module": "^1.0.0",
    "mocha": "^5.0.0",
    "nyc": "11.9.0",
    "parted": "^0.1.1",
    "promise": "^8.0.3",
    "resumer": "0.0.0",
    "rollup": "^0.63.4",
    "rollup-plugin-babel": "^3.0.7",
    "string-to-arraybuffer": "^1.0.2",
    "teeny-request": "3.7.0"
  }
}
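The practical effect of this swap is the module format: 3.2.6 is ESM-only ("type": "module", main pointing at src/), while 2.6.7 ships a CommonJS build at lib/index.js. A minimal consumer-side sketch of the two loading styles (the URL is a placeholder):
// With node-fetch@2.6.7 (this commit), plain require() works from CommonJS code.
const fetch = require('node-fetch');
fetch('https://example.com').then(res => console.log(res.status)).catch(console.error);

// With node-fetch@3.x, the same CommonJS file would have had to use a dynamic import:
// const {default: fetch} = await import('node-fetch');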

397
node_modules/node-fetch/src/body.js generated vendored
View file

@ -1,397 +0,0 @@
/**
* Body.js
*
* Body interface provides common methods for Request and Response
*/
import Stream, {PassThrough} from 'node:stream';
import {types, deprecate, promisify} from 'node:util';
import {Buffer} from 'node:buffer';
import Blob from 'fetch-blob';
import {FormData, formDataToBlob} from 'formdata-polyfill/esm.min.js';
import {FetchError} from './errors/fetch-error.js';
import {FetchBaseError} from './errors/base.js';
import {isBlob, isURLSearchParameters} from './utils/is.js';
const pipeline = promisify(Stream.pipeline);
const INTERNALS = Symbol('Body internals');
/**
* Body mixin
*
* Ref: https://fetch.spec.whatwg.org/#body
*
* @param Stream body Readable stream
* @param Object opts Response options
* @return Void
*/
export default class Body {
constructor(body, {
size = 0
} = {}) {
let boundary = null;
if (body === null) {
// Body is undefined or null
body = null;
} else if (isURLSearchParameters(body)) {
// Body is a URLSearchParams
body = Buffer.from(body.toString());
} else if (isBlob(body)) {
// Body is blob
} else if (Buffer.isBuffer(body)) {
// Body is Buffer
} else if (types.isAnyArrayBuffer(body)) {
// Body is ArrayBuffer
body = Buffer.from(body);
} else if (ArrayBuffer.isView(body)) {
// Body is ArrayBufferView
body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
} else if (body instanceof Stream) {
// Body is stream
} else if (body instanceof FormData) {
// Body is FormData
body = formDataToBlob(body);
boundary = body.type.split('=')[1];
} else {
// None of the above
// coerce to string then buffer
body = Buffer.from(String(body));
}
let stream = body;
if (Buffer.isBuffer(body)) {
stream = Stream.Readable.from(body);
} else if (isBlob(body)) {
stream = Stream.Readable.from(body.stream());
}
this[INTERNALS] = {
body,
stream,
boundary,
disturbed: false,
error: null
};
this.size = size;
if (body instanceof Stream) {
body.on('error', error_ => {
const error = error_ instanceof FetchBaseError ?
error_ :
new FetchError(`Invalid response body while trying to fetch ${this.url}: ${error_.message}`, 'system', error_);
this[INTERNALS].error = error;
});
}
}
get body() {
return this[INTERNALS].stream;
}
get bodyUsed() {
return this[INTERNALS].disturbed;
}
/**
* Decode response as ArrayBuffer
*
* @return Promise
*/
async arrayBuffer() {
const {buffer, byteOffset, byteLength} = await consumeBody(this);
return buffer.slice(byteOffset, byteOffset + byteLength);
}
async formData() {
const ct = this.headers.get('content-type');
if (ct.startsWith('application/x-www-form-urlencoded')) {
const formData = new FormData();
const parameters = new URLSearchParams(await this.text());
for (const [name, value] of parameters) {
formData.append(name, value);
}
return formData;
}
const {toFormData} = await import('./utils/multipart-parser.js');
return toFormData(this.body, ct);
}
/**
* Return raw response as Blob
*
* @return Promise
*/
async blob() {
const ct = (this.headers && this.headers.get('content-type')) || (this[INTERNALS].body && this[INTERNALS].body.type) || '';
const buf = await this.arrayBuffer();
return new Blob([buf], {
type: ct
});
}
/**
* Decode response as json
*
* @return Promise
*/
async json() {
const text = await this.text();
return JSON.parse(text);
}
/**
* Decode response as text
*
* @return Promise
*/
async text() {
const buffer = await consumeBody(this);
return new TextDecoder().decode(buffer);
}
/**
* Decode response as buffer (non-spec api)
*
* @return Promise
*/
buffer() {
return consumeBody(this);
}
}
Body.prototype.buffer = deprecate(Body.prototype.buffer, 'Please use \'response.arrayBuffer()\' instead of \'response.buffer()\'', 'node-fetch#buffer');
// In browsers, all properties are enumerable.
Object.defineProperties(Body.prototype, {
body: {enumerable: true},
bodyUsed: {enumerable: true},
arrayBuffer: {enumerable: true},
blob: {enumerable: true},
json: {enumerable: true},
text: {enumerable: true},
data: {get: deprecate(() => {},
'data doesn\'t exist, use json(), text(), arrayBuffer(), or body instead',
'https://github.com/node-fetch/node-fetch/issues/1000 (response)')}
});
/**
* Consume and convert an entire Body to a Buffer.
*
* Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
*
* @return Promise
*/
async function consumeBody(data) {
if (data[INTERNALS].disturbed) {
throw new TypeError(`body used already for: ${data.url}`);
}
data[INTERNALS].disturbed = true;
if (data[INTERNALS].error) {
throw data[INTERNALS].error;
}
const {body} = data;
// Body is null
if (body === null) {
return Buffer.alloc(0);
}
/* c8 ignore next 3 */
if (!(body instanceof Stream)) {
return Buffer.alloc(0);
}
// Body is stream
// get ready to actually consume the body
const accum = [];
let accumBytes = 0;
try {
for await (const chunk of body) {
if (data.size > 0 && accumBytes + chunk.length > data.size) {
const error = new FetchError(`content size at ${data.url} over limit: ${data.size}`, 'max-size');
body.destroy(error);
throw error;
}
accumBytes += chunk.length;
accum.push(chunk);
}
} catch (error) {
const error_ = error instanceof FetchBaseError ? error : new FetchError(`Invalid response body while trying to fetch ${data.url}: ${error.message}`, 'system', error);
throw error_;
}
if (body.readableEnded === true || body._readableState.ended === true) {
try {
if (accum.every(c => typeof c === 'string')) {
return Buffer.from(accum.join(''));
}
return Buffer.concat(accum, accumBytes);
} catch (error) {
throw new FetchError(`Could not create Buffer from response body for ${data.url}: ${error.message}`, 'system', error);
}
} else {
throw new FetchError(`Premature close of server response while trying to fetch ${data.url}`);
}
}
/**
* Clone body given Res/Req instance
*
* @param Mixed instance Response or Request instance
* @param String highWaterMark highWaterMark for both PassThrough body streams
* @return Mixed
*/
export const clone = (instance, highWaterMark) => {
let p1;
let p2;
let {body} = instance[INTERNALS];
// Don't allow cloning a used body
if (instance.bodyUsed) {
throw new Error('cannot clone body after it is used');
}
// Check that body is a stream and not form-data object
// note: we can't clone the form-data object without having it as a dependency
if ((body instanceof Stream) && (typeof body.getBoundary !== 'function')) {
// Tee instance body
p1 = new PassThrough({highWaterMark});
p2 = new PassThrough({highWaterMark});
body.pipe(p1);
body.pipe(p2);
// Set instance body to teed body and return the other teed body
instance[INTERNALS].stream = p1;
body = p2;
}
return body;
};
const getNonSpecFormDataBoundary = deprecate(
body => body.getBoundary(),
'form-data doesn\'t follow the spec and requires special treatment. Use alternative package',
'https://github.com/node-fetch/node-fetch/issues/1167'
);
/**
* Performs the operation "extract a `Content-Type` value from |object|" as
* specified in the specification:
* https://fetch.spec.whatwg.org/#concept-bodyinit-extract
*
* This function assumes that instance.body is present.
*
* @param {any} body Any options.body input
* @returns {string | null}
*/
export const extractContentType = (body, request) => {
// Body is null or undefined
if (body === null) {
return null;
}
// Body is string
if (typeof body === 'string') {
return 'text/plain;charset=UTF-8';
}
// Body is a URLSearchParams
if (isURLSearchParameters(body)) {
return 'application/x-www-form-urlencoded;charset=UTF-8';
}
// Body is blob
if (isBlob(body)) {
return body.type || null;
}
// Body is a Buffer (Buffer, ArrayBuffer or ArrayBufferView)
if (Buffer.isBuffer(body) || types.isAnyArrayBuffer(body) || ArrayBuffer.isView(body)) {
return null;
}
if (body instanceof FormData) {
return `multipart/form-data; boundary=${request[INTERNALS].boundary}`;
}
// Detect form data input from form-data module
if (body && typeof body.getBoundary === 'function') {
return `multipart/form-data;boundary=${getNonSpecFormDataBoundary(body)}`;
}
// Body is stream - can't really do much about this
if (body instanceof Stream) {
return null;
}
// Body constructor defaults other things to string
return 'text/plain;charset=UTF-8';
};
/**
* The Fetch Standard treats this as if "total bytes" is a property on the body.
* For us, we have to explicitly get it with a function.
*
* ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes
*
* @param {any} obj.body Body object from the Body instance.
* @returns {number | null}
*/
export const getTotalBytes = request => {
const {body} = request[INTERNALS];
// Body is null or undefined
if (body === null) {
return 0;
}
// Body is Blob
if (isBlob(body)) {
return body.size;
}
// Body is Buffer
if (Buffer.isBuffer(body)) {
return body.length;
}
// Detect form data input from form-data module
if (body && typeof body.getLengthSync === 'function') {
return body.hasKnownLength && body.hasKnownLength() ? body.getLengthSync() : null;
}
// Body is stream
return null;
};
/**
* Write a Body to a Node.js WritableStream (e.g. http.Request) object.
*
* @param {Stream.Writable} dest The stream to write to.
* @param obj.body Body object from the Body instance.
* @returns {Promise<void>}
*/
export const writeToStream = async (dest, {body}) => {
if (body === null) {
// Body is null
dest.end();
} else {
// Body is stream
await pipeline(body, dest);
}
};
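The Body mixin above backs both Request and Response, so json(), text(), arrayBuffer(), blob() and the deprecated buffer() behave the same on each, and a body can only be consumed once. A small usage sketch (the URL is a placeholder):
const fetch = require('node-fetch');

async function readOnce(url) {
  const res = await fetch(url);
  const data = await res.json();   // consumes the underlying stream
  console.log(res.bodyUsed);       // true once the body has been read
  try {
    await res.text();              // a second read rejects: "body used already for: <url>"
  } catch (error) {
    console.error(error.message);
  }
  return data;
}

readOnce('https://api.example.com/data.json').catch(console.error);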

10
node_modules/node-fetch/src/errors/abort-error.js generated vendored
View file

@ -1,10 +0,0 @@
import {FetchBaseError} from './base.js';
/**
* AbortError interface for cancelled requests
*/
export class AbortError extends FetchBaseError {
constructor(message, type = 'aborted') {
super(message, type);
}
}

17
node_modules/node-fetch/src/errors/base.js generated vendored
View file

@ -1,17 +0,0 @@
export class FetchBaseError extends Error {
constructor(message, type) {
super(message);
// Hide custom error implementation details from end-users
Error.captureStackTrace(this, this.constructor);
this.type = type;
}
get name() {
return this.constructor.name;
}
get [Symbol.toStringTag]() {
return this.constructor.name;
}
}

26
node_modules/node-fetch/src/errors/fetch-error.js generated vendored
View file

@ -1,26 +0,0 @@
import {FetchBaseError} from './base.js';
/**
* @typedef {{ address?: string, code: string, dest?: string, errno: number, info?: object, message: string, path?: string, port?: number, syscall: string}} SystemError
*/
/**
* FetchError interface for operational errors
*/
export class FetchError extends FetchBaseError {
/**
* @param {string} message - Error message for human
* @param {string} [type] - Error type for machine
* @param {SystemError} [systemError] - For Node.js system error
*/
constructor(message, type, systemError) {
super(message, type);
// When err.type is `system`, err.erroredSysCall contains system error and err.code contains system error code
if (systemError) {
// eslint-disable-next-line no-multi-assign
this.code = this.errno = systemError.code;
this.erroredSysCall = systemError.syscall;
}
}
}
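FetchError is what a failed fetch() rejects with for operational problems (DNS failures, connection resets, redirect limits, body-size limits). A sketch of inspecting it; the unreachable port is deliberate and assumed closed:
const fetch = require('node-fetch');

fetch('http://127.0.0.1:1/')        // assumed closed port, forces a system error
  .catch(error => {
    console.log(error.name);         // 'FetchError'
    console.log(error.type);         // 'system'
    console.log(error.code);         // e.g. 'ECONNREFUSED'
    console.log(error.errno);        // mirrors the code for system errors
  });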

267
node_modules/node-fetch/src/headers.js generated vendored
View file

@ -1,267 +0,0 @@
/**
* Headers.js
*
* Headers class offers convenient helpers
*/
import {types} from 'node:util';
import http from 'node:http';
/* c8 ignore next 9 */
const validateHeaderName = typeof http.validateHeaderName === 'function' ?
http.validateHeaderName :
name => {
if (!/^[\^`\-\w!#$%&'*+.|~]+$/.test(name)) {
const error = new TypeError(`Header name must be a valid HTTP token [${name}]`);
Object.defineProperty(error, 'code', {value: 'ERR_INVALID_HTTP_TOKEN'});
throw error;
}
};
/* c8 ignore next 9 */
const validateHeaderValue = typeof http.validateHeaderValue === 'function' ?
http.validateHeaderValue :
(name, value) => {
if (/[^\t\u0020-\u007E\u0080-\u00FF]/.test(value)) {
const error = new TypeError(`Invalid character in header content ["${name}"]`);
Object.defineProperty(error, 'code', {value: 'ERR_INVALID_CHAR'});
throw error;
}
};
/**
* @typedef {Headers | Record<string, string> | Iterable<readonly [string, string]> | Iterable<Iterable<string>>} HeadersInit
*/
/**
* This Fetch API interface allows you to perform various actions on HTTP request and response headers.
* These actions include retrieving, setting, adding to, and removing.
* A Headers object has an associated header list, which is initially empty and consists of zero or more name and value pairs.
* You can add to this using methods like append() (see Examples.)
* In all methods of this interface, header names are matched by case-insensitive byte sequence.
*
*/
export default class Headers extends URLSearchParams {
/**
* Headers class
*
* @constructor
* @param {HeadersInit} [init] - Response headers
*/
constructor(init) {
// Validate and normalize init object in [name, value(s)][]
/** @type {string[][]} */
let result = [];
if (init instanceof Headers) {
const raw = init.raw();
for (const [name, values] of Object.entries(raw)) {
result.push(...values.map(value => [name, value]));
}
} else if (init == null) { // eslint-disable-line no-eq-null, eqeqeq
// No op
} else if (typeof init === 'object' && !types.isBoxedPrimitive(init)) {
const method = init[Symbol.iterator];
// eslint-disable-next-line no-eq-null, eqeqeq
if (method == null) {
// Record<ByteString, ByteString>
result.push(...Object.entries(init));
} else {
if (typeof method !== 'function') {
throw new TypeError('Header pairs must be iterable');
}
// Sequence<sequence<ByteString>>
// Note: per spec we have to first exhaust the lists then process them
result = [...init]
.map(pair => {
if (
typeof pair !== 'object' || types.isBoxedPrimitive(pair)
) {
throw new TypeError('Each header pair must be an iterable object');
}
return [...pair];
}).map(pair => {
if (pair.length !== 2) {
throw new TypeError('Each header pair must be a name/value tuple');
}
return [...pair];
});
}
} else {
throw new TypeError('Failed to construct \'Headers\': The provided value is not of type \'(sequence<sequence<ByteString>> or record<ByteString, ByteString>)');
}
// Validate and lowercase
result =
result.length > 0 ?
result.map(([name, value]) => {
validateHeaderName(name);
validateHeaderValue(name, String(value));
return [String(name).toLowerCase(), String(value)];
}) :
undefined;
super(result);
// Returning a Proxy that will lowercase key names, validate parameters and sort keys
// eslint-disable-next-line no-constructor-return
return new Proxy(this, {
get(target, p, receiver) {
switch (p) {
case 'append':
case 'set':
return (name, value) => {
validateHeaderName(name);
validateHeaderValue(name, String(value));
return URLSearchParams.prototype[p].call(
target,
String(name).toLowerCase(),
String(value)
);
};
case 'delete':
case 'has':
case 'getAll':
return name => {
validateHeaderName(name);
return URLSearchParams.prototype[p].call(
target,
String(name).toLowerCase()
);
};
case 'keys':
return () => {
target.sort();
return new Set(URLSearchParams.prototype.keys.call(target)).keys();
};
default:
return Reflect.get(target, p, receiver);
}
}
});
/* c8 ignore next */
}
get [Symbol.toStringTag]() {
return this.constructor.name;
}
toString() {
return Object.prototype.toString.call(this);
}
get(name) {
const values = this.getAll(name);
if (values.length === 0) {
return null;
}
let value = values.join(', ');
if (/^content-encoding$/i.test(name)) {
value = value.toLowerCase();
}
return value;
}
forEach(callback, thisArg = undefined) {
for (const name of this.keys()) {
Reflect.apply(callback, thisArg, [this.get(name), name, this]);
}
}
* values() {
for (const name of this.keys()) {
yield this.get(name);
}
}
/**
* @type {() => IterableIterator<[string, string]>}
*/
* entries() {
for (const name of this.keys()) {
yield [name, this.get(name)];
}
}
[Symbol.iterator]() {
return this.entries();
}
/**
* Node-fetch non-spec method
* returning all headers and their values as array
* @returns {Record<string, string[]>}
*/
raw() {
return [...this.keys()].reduce((result, key) => {
result[key] = this.getAll(key);
return result;
}, {});
}
/**
* For better console.log(headers) and also to convert Headers into Node.js Request compatible format
*/
[Symbol.for('nodejs.util.inspect.custom')]() {
return [...this.keys()].reduce((result, key) => {
const values = this.getAll(key);
// Http.request() only supports string as Host header.
// This hack makes specifying custom Host header possible.
if (key === 'host') {
result[key] = values[0];
} else {
result[key] = values.length > 1 ? values : values[0];
}
return result;
}, {});
}
}
/**
* Re-shaping object for Web IDL tests
* Only need to do it for overridden methods
*/
Object.defineProperties(
Headers.prototype,
['get', 'entries', 'forEach', 'values'].reduce((result, property) => {
result[property] = {enumerable: true};
return result;
}, {})
);
/**
* Create a Headers object from an http.IncomingMessage.rawHeaders, ignoring those that do
* not conform to HTTP grammar productions.
* @param {import('http').IncomingMessage['rawHeaders']} headers
*/
export function fromRawHeaders(headers = []) {
return new Headers(
headers
// Split into pairs
.reduce((result, value, index, array) => {
if (index % 2 === 0) {
result.push(array.slice(index, index + 2));
}
return result;
}, [])
.filter(([name, value]) => {
try {
validateHeaderName(name);
validateHeaderValue(name, String(value));
return true;
} catch {
return false;
}
})
);
}
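Header names are matched case-insensitively, get() joins repeated values, and the non-spec raw() exposes them grouped into arrays. A short sketch using the classes re-exported by the package (exact key casing in raw() differs between the 2.x and 3.x implementations):
const fetch = require('node-fetch');
const {Headers} = fetch;

const headers = new Headers({Accept: 'application/json'});
headers.append('X-Token', 'a');
headers.append('x-token', 'b');

console.log(headers.get('accept'));   // 'application/json'
console.log(headers.get('X-TOKEN'));  // 'a, b'
console.log(headers.raw());           // all values for each header name, as arrays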

414
node_modules/node-fetch/src/index.js generated vendored
View file

@ -1,414 +0,0 @@
/**
* Index.js
*
* a request API compatible with window.fetch
*
* All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.
*/
import http from 'node:http';
import https from 'node:https';
import zlib from 'node:zlib';
import Stream, {PassThrough, pipeline as pump} from 'node:stream';
import {Buffer} from 'node:buffer';
import dataUriToBuffer from 'data-uri-to-buffer';
import {writeToStream, clone} from './body.js';
import Response from './response.js';
import Headers, {fromRawHeaders} from './headers.js';
import Request, {getNodeRequestOptions} from './request.js';
import {FetchError} from './errors/fetch-error.js';
import {AbortError} from './errors/abort-error.js';
import {isRedirect} from './utils/is-redirect.js';
import {FormData} from 'formdata-polyfill/esm.min.js';
import {isDomainOrSubdomain} from './utils/is.js';
import {parseReferrerPolicyFromHeader} from './utils/referrer.js';
import {
Blob,
File,
fileFromSync,
fileFrom,
blobFromSync,
blobFrom
} from 'fetch-blob/from.js';
export {FormData, Headers, Request, Response, FetchError, AbortError, isRedirect};
export {Blob, File, fileFromSync, fileFrom, blobFromSync, blobFrom};
const supportedSchemas = new Set(['data:', 'http:', 'https:']);
/**
* Fetch function
*
* @param {string | URL | import('./request').default} url - Absolute url or Request instance
* @param {*} [options_] - Fetch options
* @return {Promise<import('./response').default>}
*/
export default async function fetch(url, options_) {
return new Promise((resolve, reject) => {
// Build request object
const request = new Request(url, options_);
const {parsedURL, options} = getNodeRequestOptions(request);
if (!supportedSchemas.has(parsedURL.protocol)) {
throw new TypeError(`node-fetch cannot load ${url}. URL scheme "${parsedURL.protocol.replace(/:$/, '')}" is not supported.`);
}
if (parsedURL.protocol === 'data:') {
const data = dataUriToBuffer(request.url);
const response = new Response(data, {headers: {'Content-Type': data.typeFull}});
resolve(response);
return;
}
// Wrap http.request into fetch
const send = (parsedURL.protocol === 'https:' ? https : http).request;
const {signal} = request;
let response = null;
const abort = () => {
const error = new AbortError('The operation was aborted.');
reject(error);
if (request.body && request.body instanceof Stream.Readable) {
request.body.destroy(error);
}
if (!response || !response.body) {
return;
}
response.body.emit('error', error);
};
if (signal && signal.aborted) {
abort();
return;
}
const abortAndFinalize = () => {
abort();
finalize();
};
// Send request
const request_ = send(parsedURL.toString(), options);
if (signal) {
signal.addEventListener('abort', abortAndFinalize);
}
const finalize = () => {
request_.abort();
if (signal) {
signal.removeEventListener('abort', abortAndFinalize);
}
};
request_.on('error', error => {
reject(new FetchError(`request to ${request.url} failed, reason: ${error.message}`, 'system', error));
finalize();
});
fixResponseChunkedTransferBadEnding(request_, error => {
if (response && response.body) {
response.body.destroy(error);
}
});
/* c8 ignore next 18 */
if (process.version < 'v14') {
// Before Node.js 14, pipeline() does not fully support async iterators and does not always
// properly handle when the socket close/end events are out of order.
request_.on('socket', s => {
let endedWithEventsCount;
s.prependListener('end', () => {
endedWithEventsCount = s._eventsCount;
});
s.prependListener('close', hadError => {
// if end happened before close but the socket didn't emit an error, do it now
if (response && endedWithEventsCount < s._eventsCount && !hadError) {
const error = new Error('Premature close');
error.code = 'ERR_STREAM_PREMATURE_CLOSE';
response.body.emit('error', error);
}
});
});
}
request_.on('response', response_ => {
request_.setTimeout(0);
const headers = fromRawHeaders(response_.rawHeaders);
// HTTP fetch step 5
if (isRedirect(response_.statusCode)) {
// HTTP fetch step 5.2
const location = headers.get('Location');
// HTTP fetch step 5.3
let locationURL = null;
try {
locationURL = location === null ? null : new URL(location, request.url);
} catch {
// error here can only be invalid URL in Location: header
// do not throw when options.redirect == manual
// let the user extract the erroneous redirect URL
if (request.redirect !== 'manual') {
reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));
finalize();
return;
}
}
// HTTP fetch step 5.5
switch (request.redirect) {
case 'error':
reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
finalize();
return;
case 'manual':
// Nothing to do
break;
case 'follow': {
// HTTP-redirect fetch step 2
if (locationURL === null) {
break;
}
// HTTP-redirect fetch step 5
if (request.counter >= request.follow) {
reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
finalize();
return;
}
// HTTP-redirect fetch step 6 (counter increment)
// Create a new Request object.
const requestOptions = {
headers: new Headers(request.headers),
follow: request.follow,
counter: request.counter + 1,
agent: request.agent,
compress: request.compress,
method: request.method,
body: clone(request),
signal: request.signal,
size: request.size,
referrer: request.referrer,
referrerPolicy: request.referrerPolicy
};
// when forwarding sensitive headers like "Authorization",
// "WWW-Authenticate", and "Cookie" to untrusted targets,
// headers will be ignored when following a redirect to a domain
// that is not a subdomain match or exact match of the initial domain.
// For example, a redirect from "foo.com" to either "foo.com" or "sub.foo.com"
// will forward the sensitive headers, but a redirect to "bar.com" will not.
if (!isDomainOrSubdomain(request.url, locationURL)) {
for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
requestOptions.headers.delete(name);
}
}
// HTTP-redirect fetch step 9
if (response_.statusCode !== 303 && request.body && options_.body instanceof Stream.Readable) {
reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
finalize();
return;
}
// HTTP-redirect fetch step 11
if (response_.statusCode === 303 || ((response_.statusCode === 301 || response_.statusCode === 302) && request.method === 'POST')) {
requestOptions.method = 'GET';
requestOptions.body = undefined;
requestOptions.headers.delete('content-length');
}
// HTTP-redirect fetch step 14
const responseReferrerPolicy = parseReferrerPolicyFromHeader(headers);
if (responseReferrerPolicy) {
requestOptions.referrerPolicy = responseReferrerPolicy;
}
// HTTP-redirect fetch step 15
resolve(fetch(new Request(locationURL, requestOptions)));
finalize();
return;
}
default:
return reject(new TypeError(`Redirect option '${request.redirect}' is not a valid value of RequestRedirect`));
}
}
// Prepare response
if (signal) {
response_.once('end', () => {
signal.removeEventListener('abort', abortAndFinalize);
});
}
let body = pump(response_, new PassThrough(), error => {
if (error) {
reject(error);
}
});
// see https://github.com/nodejs/node/pull/29376
/* c8 ignore next 3 */
if (process.version < 'v12.10') {
response_.on('aborted', abortAndFinalize);
}
const responseOptions = {
url: request.url,
status: response_.statusCode,
statusText: response_.statusMessage,
headers,
size: request.size,
counter: request.counter,
highWaterMark: request.highWaterMark
};
// HTTP-network fetch step 12.1.1.3
const codings = headers.get('Content-Encoding');
// HTTP-network fetch step 12.1.1.4: handle content codings
// in following scenarios we ignore compression support
// 1. compression support is disabled
// 2. HEAD request
// 3. no Content-Encoding header
// 4. no content response (204)
// 5. content not modified response (304)
if (!request.compress || request.method === 'HEAD' || codings === null || response_.statusCode === 204 || response_.statusCode === 304) {
response = new Response(body, responseOptions);
resolve(response);
return;
}
// For Node v6+
// Be less strict when decoding compressed responses, since sometimes
// servers send slightly invalid responses that are still accepted
// by common browsers.
// Always using Z_SYNC_FLUSH is what cURL does.
const zlibOptions = {
flush: zlib.Z_SYNC_FLUSH,
finishFlush: zlib.Z_SYNC_FLUSH
};
// For gzip
if (codings === 'gzip' || codings === 'x-gzip') {
body = pump(body, zlib.createGunzip(zlibOptions), error => {
if (error) {
reject(error);
}
});
response = new Response(body, responseOptions);
resolve(response);
return;
}
// For deflate
if (codings === 'deflate' || codings === 'x-deflate') {
// Handle the infamous raw deflate response from old servers
// a hack for old IIS and Apache servers
const raw = pump(response_, new PassThrough(), error => {
if (error) {
reject(error);
}
});
raw.once('data', chunk => {
// See http://stackoverflow.com/questions/37519828
if ((chunk[0] & 0x0F) === 0x08) {
body = pump(body, zlib.createInflate(), error => {
if (error) {
reject(error);
}
});
} else {
body = pump(body, zlib.createInflateRaw(), error => {
if (error) {
reject(error);
}
});
}
response = new Response(body, responseOptions);
resolve(response);
});
raw.once('end', () => {
// Some old IIS servers return zero-length OK deflate responses, so
// 'data' is never emitted. See https://github.com/node-fetch/node-fetch/pull/903
if (!response) {
response = new Response(body, responseOptions);
resolve(response);
}
});
return;
}
// For br
if (codings === 'br') {
body = pump(body, zlib.createBrotliDecompress(), error => {
if (error) {
reject(error);
}
});
response = new Response(body, responseOptions);
resolve(response);
return;
}
// Otherwise, use response as-is
response = new Response(body, responseOptions);
resolve(response);
});
// eslint-disable-next-line promise/prefer-await-to-then
writeToStream(request_, request).catch(reject);
});
}
function fixResponseChunkedTransferBadEnding(request, errorCallback) {
const LAST_CHUNK = Buffer.from('0\r\n\r\n');
let isChunkedTransfer = false;
let properLastChunkReceived = false;
let previousChunk;
request.on('response', response => {
const {headers} = response;
isChunkedTransfer = headers['transfer-encoding'] === 'chunked' && !headers['content-length'];
});
request.on('socket', socket => {
const onSocketClose = () => {
if (isChunkedTransfer && !properLastChunkReceived) {
const error = new Error('Premature close');
error.code = 'ERR_STREAM_PREMATURE_CLOSE';
errorCallback(error);
}
};
const onData = buf => {
properLastChunkReceived = Buffer.compare(buf.slice(-5), LAST_CHUNK) === 0;
// Sometimes final 0-length chunk and end of message code are in separate packets
if (!properLastChunkReceived && previousChunk) {
properLastChunkReceived = (
Buffer.compare(previousChunk.slice(-3), LAST_CHUNK.slice(0, 3)) === 0 &&
Buffer.compare(buf.slice(-2), LAST_CHUNK.slice(3)) === 0
);
}
previousChunk = buf;
};
socket.prependListener('close', onSocketClose);
socket.on('data', onData);
request.on('close', () => {
socket.removeListener('close', onSocketClose);
socket.removeListener('data', onData);
});
});
}
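The options consumed by fetch() above (method, headers, body, redirect, follow, compress, signal) can be combined with an AbortController for timeouts. A sketch against a placeholder echo endpoint; AbortController is a global on recent Node versions, older ones would need a polyfill:
const fetch = require('node-fetch');

async function postWithTimeout(url, payload, ms = 5000) {
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), ms);
  try {
    const res = await fetch(url, {
      method: 'POST',
      headers: {'Content-Type': 'application/json'},
      body: JSON.stringify(payload),
      redirect: 'follow',            // 'manual' and 'error' are the other modes
      compress: true,                // node-fetch extension: transparently decode compressed bodies
      signal: controller.signal      // aborting rejects the promise with an AbortError
    });
    if (!res.ok) {
      throw new Error(`HTTP ${res.status} ${res.statusText}`);
    }
    return await res.json();
  } finally {
    clearTimeout(timer);
  }
}

postWithTimeout('https://example.com/echo', {hello: 'world'}).then(console.log, console.error);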

317
node_modules/node-fetch/src/request.js generated vendored
View file

@ -1,317 +0,0 @@
/**
* Request.js
*
* Request class contains server only options
*
* All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.
*/
import {format as formatUrl} from 'node:url';
import {deprecate} from 'node:util';
import Headers from './headers.js';
import Body, {clone, extractContentType, getTotalBytes} from './body.js';
import {isAbortSignal} from './utils/is.js';
import {getSearch} from './utils/get-search.js';
import {
validateReferrerPolicy, determineRequestsReferrer, DEFAULT_REFERRER_POLICY
} from './utils/referrer.js';
const INTERNALS = Symbol('Request internals');
/**
* Check if `obj` is an instance of Request.
*
* @param {*} object
* @return {boolean}
*/
const isRequest = object => {
return (
typeof object === 'object' &&
typeof object[INTERNALS] === 'object'
);
};
const doBadDataWarn = deprecate(() => {},
'.data is not a valid RequestInit property, use .body instead',
'https://github.com/node-fetch/node-fetch/issues/1000 (request)');
/**
* Request class
*
* Ref: https://fetch.spec.whatwg.org/#request-class
*
* @param Mixed input Url or Request instance
* @param Object init Custom options
* @return Void
*/
export default class Request extends Body {
constructor(input, init = {}) {
let parsedURL;
// Normalize input and force URL to be encoded as UTF-8 (https://github.com/node-fetch/node-fetch/issues/245)
if (isRequest(input)) {
parsedURL = new URL(input.url);
} else {
parsedURL = new URL(input);
input = {};
}
if (parsedURL.username !== '' || parsedURL.password !== '') {
throw new TypeError(`${parsedURL} is an url with embedded credentials.`);
}
let method = init.method || input.method || 'GET';
if (/^(delete|get|head|options|post|put)$/i.test(method)) {
method = method.toUpperCase();
}
if ('data' in init) {
doBadDataWarn();
}
// eslint-disable-next-line no-eq-null, eqeqeq
if ((init.body != null || (isRequest(input) && input.body !== null)) &&
(method === 'GET' || method === 'HEAD')) {
throw new TypeError('Request with GET/HEAD method cannot have body');
}
const inputBody = init.body ?
init.body :
(isRequest(input) && input.body !== null ?
clone(input) :
null);
super(inputBody, {
size: init.size || input.size || 0
});
const headers = new Headers(init.headers || input.headers || {});
if (inputBody !== null && !headers.has('Content-Type')) {
const contentType = extractContentType(inputBody, this);
if (contentType) {
headers.set('Content-Type', contentType);
}
}
let signal = isRequest(input) ?
input.signal :
null;
if ('signal' in init) {
signal = init.signal;
}
// eslint-disable-next-line no-eq-null, eqeqeq
if (signal != null && !isAbortSignal(signal)) {
throw new TypeError('Expected signal to be an instanceof AbortSignal or EventTarget');
}
// §5.4, Request constructor steps, step 15.1
// eslint-disable-next-line no-eq-null, eqeqeq
let referrer = init.referrer == null ? input.referrer : init.referrer;
if (referrer === '') {
// §5.4, Request constructor steps, step 15.2
referrer = 'no-referrer';
} else if (referrer) {
// §5.4, Request constructor steps, step 15.3.1, 15.3.2
const parsedReferrer = new URL(referrer);
// §5.4, Request constructor steps, step 15.3.3, 15.3.4
referrer = /^about:(\/\/)?client$/.test(parsedReferrer) ? 'client' : parsedReferrer;
} else {
referrer = undefined;
}
this[INTERNALS] = {
method,
redirect: init.redirect || input.redirect || 'follow',
headers,
parsedURL,
signal,
referrer
};
// Node-fetch-only options
this.follow = init.follow === undefined ? (input.follow === undefined ? 20 : input.follow) : init.follow;
this.compress = init.compress === undefined ? (input.compress === undefined ? true : input.compress) : init.compress;
this.counter = init.counter || input.counter || 0;
this.agent = init.agent || input.agent;
this.highWaterMark = init.highWaterMark || input.highWaterMark || 16384;
this.insecureHTTPParser = init.insecureHTTPParser || input.insecureHTTPParser || false;
// §5.4, Request constructor steps, step 16.
// Default is empty string per https://fetch.spec.whatwg.org/#concept-request-referrer-policy
this.referrerPolicy = init.referrerPolicy || input.referrerPolicy || '';
}
/** @returns {string} */
get method() {
return this[INTERNALS].method;
}
/** @returns {string} */
get url() {
return formatUrl(this[INTERNALS].parsedURL);
}
/** @returns {Headers} */
get headers() {
return this[INTERNALS].headers;
}
get redirect() {
return this[INTERNALS].redirect;
}
/** @returns {AbortSignal} */
get signal() {
return this[INTERNALS].signal;
}
// https://fetch.spec.whatwg.org/#dom-request-referrer
get referrer() {
if (this[INTERNALS].referrer === 'no-referrer') {
return '';
}
if (this[INTERNALS].referrer === 'client') {
return 'about:client';
}
if (this[INTERNALS].referrer) {
return this[INTERNALS].referrer.toString();
}
return undefined;
}
get referrerPolicy() {
return this[INTERNALS].referrerPolicy;
}
set referrerPolicy(referrerPolicy) {
this[INTERNALS].referrerPolicy = validateReferrerPolicy(referrerPolicy);
}
/**
* Clone this request
*
* @return Request
*/
clone() {
return new Request(this);
}
get [Symbol.toStringTag]() {
return 'Request';
}
}
Object.defineProperties(Request.prototype, {
method: {enumerable: true},
url: {enumerable: true},
headers: {enumerable: true},
redirect: {enumerable: true},
clone: {enumerable: true},
signal: {enumerable: true},
referrer: {enumerable: true},
referrerPolicy: {enumerable: true}
});
/**
* Convert a Request to Node.js http request options.
*
* @param {Request} request - A Request instance
* @return The options object to be passed to http.request
*/
export const getNodeRequestOptions = request => {
const {parsedURL} = request[INTERNALS];
const headers = new Headers(request[INTERNALS].headers);
// Fetch step 1.3
if (!headers.has('Accept')) {
headers.set('Accept', '*/*');
}
// HTTP-network-or-cache fetch steps 2.4-2.7
let contentLengthValue = null;
if (request.body === null && /^(post|put)$/i.test(request.method)) {
contentLengthValue = '0';
}
if (request.body !== null) {
const totalBytes = getTotalBytes(request);
// Set Content-Length if totalBytes is a number (that is not NaN)
if (typeof totalBytes === 'number' && !Number.isNaN(totalBytes)) {
contentLengthValue = String(totalBytes);
}
}
if (contentLengthValue) {
headers.set('Content-Length', contentLengthValue);
}
// 4.1. Main fetch, step 2.6
// > If request's referrer policy is the empty string, then set request's referrer policy to the
// > default referrer policy.
if (request.referrerPolicy === '') {
request.referrerPolicy = DEFAULT_REFERRER_POLICY;
}
// 4.1. Main fetch, step 2.7
// > If request's referrer is not "no-referrer", set request's referrer to the result of invoking
// > determine request's referrer.
if (request.referrer && request.referrer !== 'no-referrer') {
request[INTERNALS].referrer = determineRequestsReferrer(request);
} else {
request[INTERNALS].referrer = 'no-referrer';
}
// 4.5. HTTP-network-or-cache fetch, step 6.9
// > If httpRequest's referrer is a URL, then append `Referer`/httpRequest's referrer, serialized
// > and isomorphic encoded, to httpRequest's header list.
if (request[INTERNALS].referrer instanceof URL) {
headers.set('Referer', request.referrer);
}
// HTTP-network-or-cache fetch step 2.11
if (!headers.has('User-Agent')) {
headers.set('User-Agent', 'node-fetch');
}
// HTTP-network-or-cache fetch step 2.15
if (request.compress && !headers.has('Accept-Encoding')) {
headers.set('Accept-Encoding', 'gzip, deflate, br');
}
let {agent} = request;
if (typeof agent === 'function') {
agent = agent(parsedURL);
}
if (!headers.has('Connection') && !agent) {
headers.set('Connection', 'close');
}
// HTTP-network fetch step 4.2
// chunked encoding is handled by Node.js
const search = getSearch(parsedURL);
// Pass the full URL directly to request(), but overwrite the following
// options:
const options = {
// Overwrite search to retain trailing ? (issue #776)
path: parsedURL.pathname + search,
// The following options are not expressed in the URL
method: request.method,
headers: headers[Symbol.for('nodejs.util.inspect.custom')](),
insecureHTTPParser: request.insecureHTTPParser,
agent
};
return {
/** @type {URL} */
parsedURL,
options
};
};
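A Request instance can be prepared up front, cloned, and passed to fetch() in place of a URL; node-fetch-only options such as follow and compress ride along with it. A small sketch (placeholder URL):
const fetch = require('node-fetch');
const {Request} = fetch;

const req = new Request('https://example.com/anything', {
  method: 'POST',
  headers: {'Content-Type': 'application/json'},
  body: JSON.stringify({ping: true})
});

// clone() must be called before the body is consumed; a buffered body clones safely,
// so the copy can be reused later (e.g. for a retry).
const retryCopy = req.clone();

fetch(req)
  .then(res => console.log(res.status, retryCopy.method, retryCopy.url))
  .catch(console.error);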

141
node_modules/node-fetch/src/response.js generated vendored
View file

@ -1,141 +0,0 @@
/**
* Response.js
*
* Response class provides content decoding
*/
import Headers from './headers.js';
import Body, {clone, extractContentType} from './body.js';
import {isRedirect} from './utils/is-redirect.js';
const INTERNALS = Symbol('Response internals');
/**
* Response class
*
* Ref: https://fetch.spec.whatwg.org/#response-class
*
* @param Stream body Readable stream
* @param Object opts Response options
* @return Void
*/
export default class Response extends Body {
constructor(body = null, options = {}) {
super(body, options);
// eslint-disable-next-line no-eq-null, eqeqeq, no-negated-condition
const status = options.status != null ? options.status : 200;
const headers = new Headers(options.headers);
if (body !== null && !headers.has('Content-Type')) {
const contentType = extractContentType(body, this);
if (contentType) {
headers.append('Content-Type', contentType);
}
}
this[INTERNALS] = {
type: 'default',
url: options.url,
status,
statusText: options.statusText || '',
headers,
counter: options.counter,
highWaterMark: options.highWaterMark
};
}
get type() {
return this[INTERNALS].type;
}
get url() {
return this[INTERNALS].url || '';
}
get status() {
return this[INTERNALS].status;
}
/**
* Convenience property representing if the request ended normally
*/
get ok() {
return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300;
}
get redirected() {
return this[INTERNALS].counter > 0;
}
get statusText() {
return this[INTERNALS].statusText;
}
get headers() {
return this[INTERNALS].headers;
}
get highWaterMark() {
return this[INTERNALS].highWaterMark;
}
/**
* Clone this response
*
* @return Response
*/
clone() {
return new Response(clone(this, this.highWaterMark), {
type: this.type,
url: this.url,
status: this.status,
statusText: this.statusText,
headers: this.headers,
ok: this.ok,
redirected: this.redirected,
size: this.size,
highWaterMark: this.highWaterMark
});
}
/**
* @param {string} url The URL that the new response is to originate from.
* @param {number} status An optional status code for the response (e.g., 302.)
* @returns {Response} A Response object.
*/
static redirect(url, status = 302) {
if (!isRedirect(status)) {
throw new RangeError('Failed to execute "redirect" on "response": Invalid status code');
}
return new Response(null, {
headers: {
location: new URL(url).toString()
},
status
});
}
static error() {
const response = new Response(null, {status: 0, statusText: ''});
response[INTERNALS].type = 'error';
return response;
}
get [Symbol.toStringTag]() {
return 'Response';
}
}
Object.defineProperties(Response.prototype, {
type: {enumerable: true},
url: {enumerable: true},
status: {enumerable: true},
ok: {enumerable: true},
redirected: {enumerable: true},
statusText: {enumerable: true},
headers: {enumerable: true},
clone: {enumerable: true}
});
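On the consuming side, ok summarises 2xx statuses, redirected reports whether any redirect was followed for the final response, and url is the final URL. A sketch against a placeholder endpoint assumed to issue one redirect:
const fetch = require('node-fetch');

fetch('https://example.com/redirect-once')
  .then(res => {
    console.log(res.ok);          // true for a final 200-299 status
    console.log(res.redirected);  // true when at least one redirect was followed
    console.log(res.url);         // the URL the final response came from
    return res.text();
  })
  .then(body => console.log(body.length))
  .catch(console.error);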

9
node_modules/node-fetch/src/utils/get-search.js generated vendored
View file

@ -1,9 +0,0 @@
export const getSearch = parsedURL => {
if (parsedURL.search) {
return parsedURL.search;
}
const lastOffset = parsedURL.href.length - 1;
const hash = parsedURL.hash || (parsedURL.href[lastOffset] === '#' ? '#' : '');
return parsedURL.href[lastOffset - hash.length] === '?' ? '?' : '';
};

11
node_modules/node-fetch/src/utils/is-redirect.js generated vendored
View file

@ -1,11 +0,0 @@
const redirectStatus = new Set([301, 302, 303, 307, 308]);
/**
* Redirect code matching
*
* @param {number} code - Status code
* @return {boolean}
*/
export const isRedirect = code => {
return redirectStatus.has(code);
};

73
node_modules/node-fetch/src/utils/is.js generated vendored
View file

@ -1,73 +0,0 @@
/**
* Is.js
*
* Object type checks.
*/
const NAME = Symbol.toStringTag;
/**
* Check if `obj` is a URLSearchParams object
* ref: https://github.com/node-fetch/node-fetch/issues/296#issuecomment-307598143
* @param {*} object - Object to check for
* @return {boolean}
*/
export const isURLSearchParameters = object => {
return (
typeof object === 'object' &&
typeof object.append === 'function' &&
typeof object.delete === 'function' &&
typeof object.get === 'function' &&
typeof object.getAll === 'function' &&
typeof object.has === 'function' &&
typeof object.set === 'function' &&
typeof object.sort === 'function' &&
object[NAME] === 'URLSearchParams'
);
};
/**
* Check if `object` is a W3C `Blob` object (which `File` inherits from)
* @param {*} object - Object to check for
* @return {boolean}
*/
export const isBlob = object => {
return (
object &&
typeof object === 'object' &&
typeof object.arrayBuffer === 'function' &&
typeof object.type === 'string' &&
typeof object.stream === 'function' &&
typeof object.constructor === 'function' &&
/^(Blob|File)$/.test(object[NAME])
);
};
/**
* Check if `obj` is an instance of AbortSignal.
* @param {*} object - Object to check for
* @return {boolean}
*/
export const isAbortSignal = object => {
return (
typeof object === 'object' && (
object[NAME] === 'AbortSignal' ||
object[NAME] === 'EventTarget'
)
);
};
/**
* isDomainOrSubdomain reports whether sub is a subdomain (or exact match) of
* the parent domain.
*
* Both domains must already be in canonical form.
* @param {string|URL} original
* @param {string|URL} destination
*/
export const isDomainOrSubdomain = (destination, original) => {
const orig = new URL(original).hostname;
const dest = new URL(destination).hostname;
return orig === dest || orig.endsWith(`.${dest}`);
};

432
node_modules/node-fetch/src/utils/multipart-parser.js generated vendored
View file

@ -1,432 +0,0 @@
import {File} from 'fetch-blob/from.js';
import {FormData} from 'formdata-polyfill/esm.min.js';
let s = 0;
const S = {
START_BOUNDARY: s++,
HEADER_FIELD_START: s++,
HEADER_FIELD: s++,
HEADER_VALUE_START: s++,
HEADER_VALUE: s++,
HEADER_VALUE_ALMOST_DONE: s++,
HEADERS_ALMOST_DONE: s++,
PART_DATA_START: s++,
PART_DATA: s++,
END: s++
};
let f = 1;
const F = {
PART_BOUNDARY: f,
LAST_BOUNDARY: f *= 2
};
const LF = 10;
const CR = 13;
const SPACE = 32;
const HYPHEN = 45;
const COLON = 58;
const A = 97;
const Z = 122;
const lower = c => c | 0x20;
const noop = () => {};
class MultipartParser {
/**
* @param {string} boundary
*/
constructor(boundary) {
this.index = 0;
this.flags = 0;
this.onHeaderEnd = noop;
this.onHeaderField = noop;
this.onHeadersEnd = noop;
this.onHeaderValue = noop;
this.onPartBegin = noop;
this.onPartData = noop;
this.onPartEnd = noop;
this.boundaryChars = {};
boundary = '\r\n--' + boundary;
const ui8a = new Uint8Array(boundary.length);
for (let i = 0; i < boundary.length; i++) {
ui8a[i] = boundary.charCodeAt(i);
this.boundaryChars[ui8a[i]] = true;
}
this.boundary = ui8a;
this.lookbehind = new Uint8Array(this.boundary.length + 8);
this.state = S.START_BOUNDARY;
}
/**
* @param {Uint8Array} data
*/
write(data) {
let i = 0;
const length_ = data.length;
let previousIndex = this.index;
let {lookbehind, boundary, boundaryChars, index, state, flags} = this;
const boundaryLength = this.boundary.length;
const boundaryEnd = boundaryLength - 1;
const bufferLength = data.length;
let c;
let cl;
const mark = name => {
this[name + 'Mark'] = i;
};
const clear = name => {
delete this[name + 'Mark'];
};
const callback = (callbackSymbol, start, end, ui8a) => {
if (start === undefined || start !== end) {
this[callbackSymbol](ui8a && ui8a.subarray(start, end));
}
};
const dataCallback = (name, clear) => {
const markSymbol = name + 'Mark';
if (!(markSymbol in this)) {
return;
}
if (clear) {
callback(name, this[markSymbol], i, data);
delete this[markSymbol];
} else {
callback(name, this[markSymbol], data.length, data);
this[markSymbol] = 0;
}
};
for (i = 0; i < length_; i++) {
c = data[i];
switch (state) {
case S.START_BOUNDARY:
if (index === boundary.length - 2) {
if (c === HYPHEN) {
flags |= F.LAST_BOUNDARY;
} else if (c !== CR) {
return;
}
index++;
break;
} else if (index - 1 === boundary.length - 2) {
if (flags & F.LAST_BOUNDARY && c === HYPHEN) {
state = S.END;
flags = 0;
} else if (!(flags & F.LAST_BOUNDARY) && c === LF) {
index = 0;
callback('onPartBegin');
state = S.HEADER_FIELD_START;
} else {
return;
}
break;
}
if (c !== boundary[index + 2]) {
index = -2;
}
if (c === boundary[index + 2]) {
index++;
}
break;
case S.HEADER_FIELD_START:
state = S.HEADER_FIELD;
mark('onHeaderField');
index = 0;
// falls through
case S.HEADER_FIELD:
if (c === CR) {
clear('onHeaderField');
state = S.HEADERS_ALMOST_DONE;
break;
}
index++;
if (c === HYPHEN) {
break;
}
if (c === COLON) {
if (index === 1) {
// empty header field
return;
}
dataCallback('onHeaderField', true);
state = S.HEADER_VALUE_START;
break;
}
cl = lower(c);
if (cl < A || cl > Z) {
return;
}
break;
case S.HEADER_VALUE_START:
if (c === SPACE) {
break;
}
mark('onHeaderValue');
state = S.HEADER_VALUE;
// falls through
case S.HEADER_VALUE:
if (c === CR) {
dataCallback('onHeaderValue', true);
callback('onHeaderEnd');
state = S.HEADER_VALUE_ALMOST_DONE;
}
break;
case S.HEADER_VALUE_ALMOST_DONE:
if (c !== LF) {
return;
}
state = S.HEADER_FIELD_START;
break;
case S.HEADERS_ALMOST_DONE:
if (c !== LF) {
return;
}
callback('onHeadersEnd');
state = S.PART_DATA_START;
break;
case S.PART_DATA_START:
state = S.PART_DATA;
mark('onPartData');
// falls through
case S.PART_DATA:
previousIndex = index;
if (index === 0) {
// Boyer-Moore derived algorithm to safely skip non-boundary data
i += boundaryEnd;
while (i < bufferLength && !(data[i] in boundaryChars)) {
i += boundaryLength;
}
i -= boundaryEnd;
c = data[i];
}
if (index < boundary.length) {
if (boundary[index] === c) {
if (index === 0) {
dataCallback('onPartData', true);
}
index++;
} else {
index = 0;
}
} else if (index === boundary.length) {
index++;
if (c === CR) {
// CR = part boundary
flags |= F.PART_BOUNDARY;
} else if (c === HYPHEN) {
// HYPHEN = end boundary
flags |= F.LAST_BOUNDARY;
} else {
index = 0;
}
} else if (index - 1 === boundary.length) {
if (flags & F.PART_BOUNDARY) {
index = 0;
if (c === LF) {
// unset the PART_BOUNDARY flag
flags &= ~F.PART_BOUNDARY;
callback('onPartEnd');
callback('onPartBegin');
state = S.HEADER_FIELD_START;
break;
}
} else if (flags & F.LAST_BOUNDARY) {
if (c === HYPHEN) {
callback('onPartEnd');
state = S.END;
flags = 0;
} else {
index = 0;
}
} else {
index = 0;
}
}
if (index > 0) {
// when matching a possible boundary, keep a lookbehind reference
// in case it turns out to be a false lead
lookbehind[index - 1] = c;
} else if (previousIndex > 0) {
// if our boundary turned out to be rubbish, the captured lookbehind
// belongs to partData
const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength);
callback('onPartData', 0, previousIndex, _lookbehind);
previousIndex = 0;
mark('onPartData');
// reconsider the current character even though it interrupted the sequence
// it could be the beginning of a new sequence
i--;
}
break;
case S.END:
break;
default:
throw new Error(`Unexpected state entered: ${state}`);
}
}
dataCallback('onHeaderField');
dataCallback('onHeaderValue');
dataCallback('onPartData');
// Update properties for the next call
this.index = index;
this.state = state;
this.flags = flags;
}
end() {
if ((this.state === S.HEADER_FIELD_START && this.index === 0) ||
(this.state === S.PART_DATA && this.index === this.boundary.length)) {
this.onPartEnd();
} else if (this.state !== S.END) {
throw new Error('MultipartParser.end(): stream ended unexpectedly');
}
}
}
function _fileName(headerValue) {
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
const m = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);
if (!m) {
return;
}
const match = m[2] || m[3] || '';
let filename = match.slice(match.lastIndexOf('\\') + 1);
filename = filename.replace(/%22/g, '"');
filename = filename.replace(/&#(\d{4});/g, (m, code) => {
return String.fromCharCode(code);
});
return filename;
}
export async function toFormData(Body, ct) {
if (!/multipart/i.test(ct)) {
throw new TypeError('Failed to fetch');
}
const m = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i);
if (!m) {
throw new TypeError('no or bad content-type header, no multipart boundary');
}
const parser = new MultipartParser(m[1] || m[2]);
let headerField;
let headerValue;
let entryValue;
let entryName;
let contentType;
let filename;
const entryChunks = [];
const formData = new FormData();
const onPartData = ui8a => {
entryValue += decoder.decode(ui8a, {stream: true});
};
const appendToFile = ui8a => {
entryChunks.push(ui8a);
};
const appendFileToFormData = () => {
const file = new File(entryChunks, filename, {type: contentType});
formData.append(entryName, file);
};
const appendEntryToFormData = () => {
formData.append(entryName, entryValue);
};
const decoder = new TextDecoder('utf-8');
decoder.decode();
parser.onPartBegin = function () {
parser.onPartData = onPartData;
parser.onPartEnd = appendEntryToFormData;
headerField = '';
headerValue = '';
entryValue = '';
entryName = '';
contentType = '';
filename = null;
entryChunks.length = 0;
};
parser.onHeaderField = function (ui8a) {
headerField += decoder.decode(ui8a, {stream: true});
};
parser.onHeaderValue = function (ui8a) {
headerValue += decoder.decode(ui8a, {stream: true});
};
parser.onHeaderEnd = function () {
headerValue += decoder.decode();
headerField = headerField.toLowerCase();
if (headerField === 'content-disposition') {
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
const m = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);
if (m) {
entryName = m[2] || m[3] || '';
}
filename = _fileName(headerValue);
if (filename) {
parser.onPartData = appendToFile;
parser.onPartEnd = appendFileToFormData;
}
} else if (headerField === 'content-type') {
contentType = headerValue;
}
headerValue = '';
headerField = '';
};
for await (const chunk of Body) {
parser.write(chunk);
}
parser.end();
return formData;
}
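This parser backs response.formData() in the 3.x sources being removed here (the 2.6.7 build this commit switches to has no formData()). Since 3.x is ESM-only, a CommonJS caller would have to load it with a dynamic import(); the endpoint below is a placeholder assumed to answer with multipart/form-data:
async function readForm(url) {
  const {default: fetch} = await import('node-fetch'); // node-fetch@3.x only
  const res = await fetch(url);
  const form = await res.formData();                    // drives the MultipartParser above
  for (const [name, value] of form) {
    console.log(name, typeof value === 'string' ? value : `file: ${value.name}`);
  }
}

readForm('https://example.com/multipart-endpoint').catch(console.error);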

340
node_modules/node-fetch/src/utils/referrer.js generated vendored
View file

@ -1,340 +0,0 @@
import {isIP} from 'node:net';
/**
* @external URL
* @see {@link https://developer.mozilla.org/en-US/docs/Web/API/URL|URL}
*/
/**
* @module utils/referrer
* @private
*/
/**
* @see {@link https://w3c.github.io/webappsec-referrer-policy/#strip-url|Referrer Policy §8.4. Strip url for use as a referrer}
* @param {string} URL
* @param {boolean} [originOnly=false]
*/
export function stripURLForUseAsAReferrer(url, originOnly = false) {
// 1. If url is null, return no referrer.
if (url == null) { // eslint-disable-line no-eq-null, eqeqeq
return 'no-referrer';
}
url = new URL(url);
// 2. If url's scheme is a local scheme, then return no referrer.
if (/^(about|blob|data):$/.test(url.protocol)) {
return 'no-referrer';
}
// 3. Set url's username to the empty string.
url.username = '';
// 4. Set url's password to null.
// Note: `null` appears to be a mistake as this actually results in the password being `"null"`.
url.password = '';
// 5. Set url's fragment to null.
// Note: `null` appears to be a mistake as this actually results in the fragment being `"#null"`.
url.hash = '';
// 6. If the origin-only flag is true, then:
if (originOnly) {
// 6.1. Set url's path to null.
// Note: `null` appears to be a mistake as this actually results in the path being `"/null"`.
url.pathname = '';
// 6.2. Set url's query to null.
// Note: `null` appears to be a mistake as this actually results in the query being `"?null"`.
url.search = '';
}
// 7. Return url.
return url;
}
/**
* @see {@link https://w3c.github.io/webappsec-referrer-policy/#enumdef-referrerpolicy|enum ReferrerPolicy}
*/
export const ReferrerPolicy = new Set([
'',
'no-referrer',
'no-referrer-when-downgrade',
'same-origin',
'origin',
'strict-origin',
'origin-when-cross-origin',
'strict-origin-when-cross-origin',
'unsafe-url'
]);
/**
* @see {@link https://w3c.github.io/webappsec-referrer-policy/#default-referrer-policy|default referrer policy}
*/
export const DEFAULT_REFERRER_POLICY = 'strict-origin-when-cross-origin';
/**
* @see {@link https://w3c.github.io/webappsec-referrer-policy/#referrer-policies|Referrer Policy §3. Referrer Policies}
* @param {string} referrerPolicy
* @returns {string} referrerPolicy
*/
export function validateReferrerPolicy(referrerPolicy) {
if (!ReferrerPolicy.has(referrerPolicy)) {
throw new TypeError(`Invalid referrerPolicy: ${referrerPolicy}`);
}
return referrerPolicy;
}
/**
* @see {@link https://w3c.github.io/webappsec-secure-contexts/#is-origin-trustworthy|Referrer Policy §3.2. Is origin potentially trustworthy?}
* @param {external:URL} url
* @returns `true`: "Potentially Trustworthy", `false`: "Not Trustworthy"
*/
export function isOriginPotentiallyTrustworthy(url) {
// 1. If origin is an opaque origin, return "Not Trustworthy".
// Not applicable
// 2. Assert: origin is a tuple origin.
// Not for implementations
// 3. If origin's scheme is either "https" or "wss", return "Potentially Trustworthy".
if (/^(http|ws)s:$/.test(url.protocol)) {
return true;
}
// 4. If origin's host component matches one of the CIDR notations 127.0.0.0/8 or ::1/128 [RFC4632], return "Potentially Trustworthy".
const hostIp = url.host.replace(/(^\[)|(]$)/g, '');
const hostIPVersion = isIP(hostIp);
if (hostIPVersion === 4 && /^127\./.test(hostIp)) {
return true;
}
if (hostIPVersion === 6 && /^(((0+:){7})|(::(0+:){0,6}))0*1$/.test(hostIp)) {
return true;
}
// 5. If origin's host component is "localhost" or falls within ".localhost", and the user agent conforms to the name resolution rules in [let-localhost-be-localhost], return "Potentially Trustworthy".
// We are returning FALSE here because we cannot ensure conformance to
// let-localhost-be-localhost (https://tools.ietf.org/html/draft-west-let-localhost-be-localhost)
if (/^(.+\.)*localhost$/.test(url.host)) {
return false;
}
// 6. If origin's scheme component is file, return "Potentially Trustworthy".
if (url.protocol === 'file:') {
return true;
}
// 7. If origin's scheme component is one which the user agent considers to be authenticated, return "Potentially Trustworthy".
// Not supported
// 8. If origin has been configured as a trustworthy origin, return "Potentially Trustworthy".
// Not supported
// 9. Return "Not Trustworthy".
return false;
}
/**
* @see {@link https://w3c.github.io/webappsec-secure-contexts/#is-url-trustworthy|Referrer Policy §3.3. Is url potentially trustworthy?}
* @param {external:URL} url
* @returns `true`: "Potentially Trustworthy", `false`: "Not Trustworthy"
*/
export function isUrlPotentiallyTrustworthy(url) {
// 1. If url is "about:blank" or "about:srcdoc", return "Potentially Trustworthy".
if (/^about:(blank|srcdoc)$/.test(url)) {
return true;
}
// 2. If url's scheme is "data", return "Potentially Trustworthy".
if (url.protocol === 'data:') {
return true;
}
// Note: The origin of blob: and filesystem: URLs is the origin of the context in which they were
// created. Therefore, blobs created in a trustworthy origin will themselves be potentially
// trustworthy.
if (/^(blob|filesystem):$/.test(url.protocol)) {
return true;
}
// 3. Return the result of executing §3.2 Is origin potentially trustworthy? on url's origin.
return isOriginPotentiallyTrustworthy(url);
}
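// Illustrative examples (not part of the upstream file), assuming WHATWG URL inputs:
//   isUrlPotentiallyTrustworthy(new URL('https://example.com/'))  -> true  (https scheme)
//   isUrlPotentiallyTrustworthy(new URL('data:text/plain,hi'))    -> true  (data scheme)
//   isUrlPotentiallyTrustworthy(new URL('http://127.0.0.1/'))     -> true  (loopback IPv4)
//   isUrlPotentiallyTrustworthy(new URL('http://localhost/'))     -> false (see the note at step 5)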
/**
* Modifies the referrerURL to enforce any extra security policy considerations.
* @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}, step 7
* @callback module:utils/referrer~referrerURLCallback
* @param {external:URL} referrerURL
* @returns {external:URL} modified referrerURL
*/
/**
* Modifies the referrerOrigin to enforce any extra security policy considerations.
* @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}, step 7
* @callback module:utils/referrer~referrerOriginCallback
* @param {external:URL} referrerOrigin
* @returns {external:URL} modified referrerOrigin
*/
/**
* @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}
* @param {Request} request
* @param {object} o
* @param {module:utils/referrer~referrerURLCallback} o.referrerURLCallback
* @param {module:utils/referrer~referrerOriginCallback} o.referrerOriginCallback
* @returns {external:URL} Request's referrer
*/
export function determineRequestsReferrer(request, {referrerURLCallback, referrerOriginCallback} = {}) {
// There are 2 notes in the specification about invalid pre-conditions. We return null, here, for
// these cases:
// > Note: If request's referrer is "no-referrer", Fetch will not call into this algorithm.
// > Note: If request's referrer policy is the empty string, Fetch will not call into this
// > algorithm.
if (request.referrer === 'no-referrer' || request.referrerPolicy === '') {
return null;
}
// 1. Let policy be request's associated referrer policy.
const policy = request.referrerPolicy;
// 2. Let environment be request's client.
// not applicable to node.js
// 3. Switch on request's referrer:
if (request.referrer === 'about:client') {
return 'no-referrer';
}
// "a URL": Let referrerSource be request's referrer.
const referrerSource = request.referrer;
// 4. Let request's referrerURL be the result of stripping referrerSource for use as a referrer.
let referrerURL = stripURLForUseAsAReferrer(referrerSource);
// 5. Let referrerOrigin be the result of stripping referrerSource for use as a referrer, with the
// origin-only flag set to true.
let referrerOrigin = stripURLForUseAsAReferrer(referrerSource, true);
// 6. If the result of serializing referrerURL is a string whose length is greater than 4096, set
// referrerURL to referrerOrigin.
if (referrerURL.toString().length > 4096) {
referrerURL = referrerOrigin;
}
// 7. The user agent MAY alter referrerURL or referrerOrigin at this point to enforce arbitrary
// policy considerations in the interests of minimizing data leakage. For example, the user
// agent could strip the URL down to an origin, modify its host, replace it with an empty
// string, etc.
if (referrerURLCallback) {
referrerURL = referrerURLCallback(referrerURL);
}
if (referrerOriginCallback) {
referrerOrigin = referrerOriginCallback(referrerOrigin);
}
// 8. Execute the statements corresponding to the value of policy:
const currentURL = new URL(request.url);
switch (policy) {
case 'no-referrer':
return 'no-referrer';
case 'origin':
return referrerOrigin;
case 'unsafe-url':
return referrerURL;
case 'strict-origin':
// 1. If referrerURL is a potentially trustworthy URL and request's current URL is not a
// potentially trustworthy URL, then return no referrer.
if (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {
return 'no-referrer';
}
// 2. Return referrerOrigin.
return referrerOrigin.toString();
case 'strict-origin-when-cross-origin':
// 1. If the origin of referrerURL and the origin of request's current URL are the same, then
// return referrerURL.
if (referrerURL.origin === currentURL.origin) {
return referrerURL;
}
// 2. If referrerURL is a potentially trustworthy URL and request's current URL is not a
// potentially trustworthy URL, then return no referrer.
if (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {
return 'no-referrer';
}
// 3. Return referrerOrigin.
return referrerOrigin;
case 'same-origin':
// 1. If the origin of referrerURL and the origin of request's current URL are the same, then
// return referrerURL.
if (referrerURL.origin === currentURL.origin) {
return referrerURL;
}
// 2. Return no referrer.
return 'no-referrer';
case 'origin-when-cross-origin':
// 1. If the origin of referrerURL and the origin of request's current URL are the same, then
// return referrerURL.
if (referrerURL.origin === currentURL.origin) {
return referrerURL;
}
// Return referrerOrigin.
return referrerOrigin;
case 'no-referrer-when-downgrade':
// 1. If referrerURL is a potentially trustworthy URL and request's current URL is not a
// potentially trustworthy URL, then return no referrer.
if (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {
return 'no-referrer';
}
// 2. Return referrerURL.
return referrerURL;
default:
throw new TypeError(`Invalid referrerPolicy: ${policy}`);
}
}
/**
* @see {@link https://w3c.github.io/webappsec-referrer-policy/#parse-referrer-policy-from-header|Referrer Policy §8.1. Parse a referrer policy from a Referrer-Policy header}
* @param {Headers} headers Response headers
* @returns {string} policy
*/
export function parseReferrerPolicyFromHeader(headers) {
// 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy`
// and responses header list.
const policyTokens = (headers.get('referrer-policy') || '').split(/[,\s]+/);
// 2. Let policy be the empty string.
let policy = '';
// 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty
// string, then set policy to token.
// Note: This algorithm loops over multiple policy values to allow deployment of new policy
// values with fallbacks for older user agents, as described in § 11.1 Unknown Policy Values.
for (const token of policyTokens) {
if (token && ReferrerPolicy.has(token)) {
policy = token;
}
}
// 4. Return policy.
return policy;
}
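Taken together, the helpers above implement the Fetch referrer computation. Below is a minimal usage sketch (not part of the vendored file); the import path and the plain-object stand-ins for Request and Headers are assumptions made for illustration, a Map being enough because only `.get()` is used.

```js
// Hypothetical usage sketch; the module path below is an assumption.
import {
  determineRequestsReferrer,
  parseReferrerPolicyFromHeader
} from 'node-fetch/src/utils/referrer.js';

// Unknown tokens are skipped, so the last recognized policy wins; this is the
// "fallback" deployment pattern mentioned in the parsing comments above.
const headers = new Map([
  ['referrer-policy', 'new-shiny-policy, strict-origin-when-cross-origin']
]);
console.log(parseReferrerPolicyFromHeader(headers));
// -> 'strict-origin-when-cross-origin'

// Cross-origin request between two https (potentially trustworthy) URLs:
// the policy keeps only the referrer's origin.
const request = {
  url: 'https://api.example.net/data',
  referrer: 'https://example.com/page?id=1',
  referrerPolicy: 'strict-origin-when-cross-origin'
};
console.log(String(determineRequestsReferrer(request)));
// expected: 'https://example.com/'
```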

4
node_modules/tr46/.npmignore generated vendored Normal file
View file

@ -0,0 +1,4 @@
scripts/
test/
!lib/mapping_table.json

193
node_modules/tr46/index.js generated vendored Normal file
View file

@ -0,0 +1,193 @@
"use strict";
var punycode = require("punycode");
var mappingTable = require("./lib/mappingTable.json");
var PROCESSING_OPTIONS = {
TRANSITIONAL: 0,
NONTRANSITIONAL: 1
};
function normalize(str) { // work around a V8 bug: normalize each NUL-separated chunk separately
return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000');
}
function findStatus(val) {
var start = 0;
var end = mappingTable.length - 1;
while (start <= end) {
var mid = Math.floor((start + end) / 2);
var target = mappingTable[mid];
if (target[0][0] <= val && target[0][1] >= val) {
return target;
} else if (target[0][0] > val) {
end = mid - 1;
} else {
start = mid + 1;
}
}
return null;
}
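// Illustrative note (not part of the upstream file): each mappingTable row is
// [[rangeStart, rangeEnd], status, mapping?]; e.g. findStatus(0x00DF /* ß */) returns
// a row whose status is "deviation", which is why transitional processing maps ß to "ss".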
var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g;
function countSymbols(string) {
return string
// replace every surrogate pair with a BMP symbol
.replace(regexAstralSymbols, '_')
// then get the length
.length;
}
function mapChars(domain_name, useSTD3, processing_option) {
var hasError = false;
var processed = "";
var len = countSymbols(domain_name);
for (var i = 0; i < len; ++i) {
var codePoint = domain_name.codePointAt(i);
var status = findStatus(codePoint);
switch (status[1]) {
case "disallowed":
hasError = true;
processed += String.fromCodePoint(codePoint);
break;
case "ignored":
break;
case "mapped":
processed += String.fromCodePoint.apply(String, status[2]);
break;
case "deviation":
if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) {
processed += String.fromCodePoint.apply(String, status[2]);
} else {
processed += String.fromCodePoint(codePoint);
}
break;
case "valid":
processed += String.fromCodePoint(codePoint);
break;
case "disallowed_STD3_mapped":
if (useSTD3) {
hasError = true;
processed += String.fromCodePoint(codePoint);
} else {
processed += String.fromCodePoint.apply(String, status[2]);
}
break;
case "disallowed_STD3_valid":
if (useSTD3) {
hasError = true;
}
processed += String.fromCodePoint(codePoint);
break;
}
}
return {
string: processed,
error: hasError
};
}
var combiningMarksRegex = /[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/;
function validateLabel(label, processing_option) {
if (label.substr(0, 4) === "xn--") {
label = punycode.toUnicode(label);
processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;
}
var error = false;
if (normalize(label) !== label ||
(label[2] === "-" && label[3] === "-") || // hyphens in the 3rd and 4th positions
label[0] === "-" || label[label.length - 1] === "-" ||
label.indexOf(".") !== -1 ||
label.search(combiningMarksRegex) === 0) {
error = true;
}
var len = countSymbols(label);
for (var i = 0; i < len; ++i) {
var status = findStatus(label.codePointAt(i));
if ((processing_option === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") ||
(processing_option === PROCESSING_OPTIONS.NONTRANSITIONAL &&
status[1] !== "valid" && status[1] !== "deviation")) {
error = true;
break;
}
}
return {
label: label,
error: error
};
}
function processing(domain_name, useSTD3, processing_option) {
var result = mapChars(domain_name, useSTD3, processing_option);
result.string = normalize(result.string);
var labels = result.string.split(".");
for (var i = 0; i < labels.length; ++i) {
try {
var validation = validateLabel(labels[i], processing_option);
labels[i] = validation.label;
result.error = result.error || validation.error;
} catch(e) {
result.error = true;
}
}
return {
string: labels.join("."),
error: result.error
};
}
module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) {
var result = processing(domain_name, useSTD3, processing_option);
var labels = result.string.split(".");
labels = labels.map(function(l) {
try {
return punycode.toASCII(l);
} catch(e) {
result.error = true;
return l;
}
});
if (verifyDnsLength) {
var total = labels.slice(0, labels.length - 1).join(".").length;
if (total > 253 || total === 0) {
result.error = true;
}
for (var i=0; i < labels.length; ++i) {
if (labels[i].length > 63 || labels[i].length === 0) {
result.error = true;
break;
}
}
}
if (result.error) return null;
return labels.join(".");
};
module.exports.toUnicode = function(domain_name, useSTD3) {
var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL);
return {
domain: result.string,
error: result.error
};
};
module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS;
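For orientation, a small usage sketch of the exports above (illustrative only, not part of the package; the expected outputs assume the mapping data shipped in lib/mappingTable.json):

```js
// Hypothetical usage sketch of the vendored tr46 module.
var tr46 = require("tr46");

// Transitional processing maps the deviation character "ß" to "ss";
// the last argument enables the DNS length checks (verifyDnsLength).
console.log(tr46.toASCII("straße.example", false,
    tr46.PROCESSING_OPTIONS.TRANSITIONAL, true));
// expected: "strasse.example"

// Nontransitional processing keeps "ß", so the label is punycode-encoded.
console.log(tr46.toASCII("straße.example", false,
    tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, true));
// expected: "xn--strae-oqa.example"

// toUnicode decodes ACE labels back to Unicode and reports mapping errors.
console.log(tr46.toUnicode("xn--strae-oqa.example", false));
// expected: { domain: "straße.example", error: false }
```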

1
node_modules/tr46/lib/mappingTable.json generated vendored Normal file

File diff suppressed because one or more lines are too long

31
node_modules/tr46/package.json generated vendored Normal file
View file

@ -0,0 +1,31 @@
{
"name": "tr46",
"version": "0.0.3",
"description": "An implementation of the Unicode TR46 spec",
"main": "index.js",
"scripts": {
"test": "mocha",
"pretest": "node scripts/getLatestUnicodeTests.js",
"prepublish": "node scripts/generateMappingTable.js"
},
"repository": {
"type": "git",
"url": "git+https://github.com/Sebmaster/tr46.js.git"
},
"keywords": [
"unicode",
"tr46",
"url",
"whatwg"
],
"author": "Sebastian Mayr <npm@smayr.name>",
"license": "MIT",
"bugs": {
"url": "https://github.com/Sebmaster/tr46.js/issues"
},
"homepage": "https://github.com/Sebmaster/tr46.js#readme",
"devDependencies": {
"mocha": "^2.2.5",
"request": "^2.57.0"
}
}

View file

@ -1,22 +0,0 @@
The MIT License (MIT)
Copyright (c) 2020 Mattias Buelens
Copyright (c) 2016 Diwank Singh Tomer
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View file

@ -1,114 +0,0 @@
# web-streams-polyfill
Web Streams, based on the WHATWG spec reference implementation.
[![build status](https://api.travis-ci.com/MattiasBuelens/web-streams-polyfill.svg?branch=master)](https://travis-ci.com/MattiasBuelens/web-streams-polyfill)
[![npm version](https://img.shields.io/npm/v/web-streams-polyfill.svg)](https://www.npmjs.com/package/web-streams-polyfill)
[![license](https://img.shields.io/npm/l/web-streams-polyfill.svg)](https://github.com/MattiasBuelens/web-streams-polyfill/blob/master/LICENSE)
[![Join the chat at https://gitter.im/web-streams-polyfill/Lobby](https://badges.gitter.im/web-streams-polyfill/Lobby.svg)](https://gitter.im/web-streams-polyfill/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
## Links
- [Official spec][spec]
- [Reference implementation][ref-impl]
## Usage
This library comes in multiple variants:
* `web-streams-polyfill`: a polyfill that replaces the native stream implementations.
Recommended for use in web apps supporting older browsers through a `<script>` tag.
* `web-streams-polyfill/es6`: a polyfill targeting ES2015+ environments.
Recommended for use in web apps supporting modern browsers through a `<script>` tag.
* `web-streams-polyfill/es2018`: a polyfill targeting ES2018+ environments.
* `web-streams-polyfill/ponyfill`: a [ponyfill] that provides
the stream implementations without replacing any globals.
Recommended for use in legacy Node applications, or in web libraries supporting older browsers.
* `web-streams-polyfill/ponyfill/es6`: a ponyfill targeting ES2015+ environments.
Recommended for use in Node 6+ applications, or in web libraries supporting modern browsers.
* `web-streams-polyfill/ponyfill/es2018`: a ponyfill targeting ES2018+ environments.
Recommended for use in Node 10+ applications.
Each variant also includes TypeScript type definitions, compatible with the DOM type definitions for streams included in TypeScript.
Usage as a polyfill:
```html
<!-- option 1: hosted by unpkg CDN -->
<script src="https://unpkg.com/web-streams-polyfill/dist/polyfill.min.js"></script>
<!-- option 2: self hosted -->
<script src="/path/to/web-streams-polyfill/dist/polyfill.min.js"></script>
<script>
var readable = new ReadableStream();
</script>
```
Usage as a Node module:
```js
var streams = require("web-streams-polyfill/ponyfill");
var readable = new streams.ReadableStream();
```
Usage as a ES2015 module:
```js
import { ReadableStream } from "web-streams-polyfill/ponyfill";
const readable = new ReadableStream();
```
### Compatibility
The `polyfill` and `ponyfill` variants work in any ES5-compatible environment that has a global `Promise`.
If you need to support older browsers or Node versions that do not have a native `Promise` implementation
(check the [support table][promise-support]), you must first include a `Promise` polyfill
(e.g. [promise-polyfill][promise-polyfill]).
The `polyfill/es6` and `ponyfill/es6` variants work in any ES2015-compatible environment.
The `polyfill/es2018` and `ponyfill/es2018` variants work in any ES2018-compatible environment.
[Async iterable support for `ReadableStream`][rs-asynciterator] is available in all variants, but requires an ES2018-compatible environment or a polyfill for `Symbol.asyncIterator`.
[`WritableStreamDefaultController.signal`][ws-controller-signal] is available in all variants, but requires a global `AbortController` constructor. If necessary, consider using a polyfill such as [abortcontroller-polyfill].
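For example, the ponyfill's `ReadableStream` can be consumed with `for await...of` in an ES2018 environment (a hypothetical snippet, not taken from the package's own docs):

```js
import { ReadableStream } from "web-streams-polyfill/ponyfill/es2018";

const readable = new ReadableStream({
  start(controller) {
    controller.enqueue("hello");
    controller.enqueue("world");
    controller.close();
  }
});

(async () => {
  // Requires Symbol.asyncIterator (native in ES2018, or via a polyfill).
  for await (const chunk of readable) {
    console.log(chunk);
  }
})();
```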
### Compliance
The polyfill implements [version `4b6b93c` (25 Oct 2021)][spec-snapshot] of the streams specification.
The polyfill is tested against the same [web platform tests][wpt] that are used by browsers to test their native implementations.
The polyfill aims to pass all tests, although it allows some exceptions for practical reasons:
* The `es2018` variant passes all of the tests, except for the ["bad buffers and views" tests for readable byte streams][wpt-bad-buffers].
These tests require the implementation to synchronously transfer the contents of an `ArrayBuffer`, which is not yet possible from JavaScript (although there is a [proposal][proposal-arraybuffer-transfer] to make it possible).
The reference implementation "cheats" on these tests [by making a copy instead][ref-impl-transferarraybuffer], but that is unacceptable for the polyfill's performance ([#3][issue-3]).
* The `es6` variant passes the same tests as the `es2018` variant, except for the [test for the prototype of `ReadableStream`'s async iterator][wpt-async-iterator-prototype].
Retrieving the correct `%AsyncIteratorPrototype%` requires using an async generator (`async function* () {}`), which is invalid syntax before ES2018.
Instead, the polyfill [creates its own version][stub-async-iterator-prototype] which is functionally equivalent to the real prototype.
* The `es5` variant passes the same tests as the `es6` variant, except for various tests about specific characteristics of the constructors, properties and methods.
These test failures do not affect the run-time behavior of the polyfill.
For example:
* The `name` property of down-leveled constructors is incorrect.
* The `length` property of down-leveled constructors and methods with optional arguments is incorrect.
* Not all properties and methods are correctly marked as non-enumerable.
* Down-leveled class methods are not correctly marked as non-constructable.
The type definitions are compatible with the built-in stream types of TypeScript 3.3.
### Contributors
Thanks to these people for their work on [the original polyfill][creatorrr-polyfill]:
- Diwank Singh Tomer ([creatorrr](https://github.com/creatorrr))
- Anders Riutta ([ariutta](https://github.com/ariutta))
[spec]: https://streams.spec.whatwg.org
[ref-impl]: https://github.com/whatwg/streams
[ponyfill]: https://github.com/sindresorhus/ponyfill
[promise-support]: https://kangax.github.io/compat-table/es6/#test-Promise
[promise-polyfill]: https://www.npmjs.com/package/promise-polyfill
[rs-asynciterator]: https://streams.spec.whatwg.org/#rs-asynciterator
[ws-controller-signal]: https://streams.spec.whatwg.org/#ws-default-controller-signal
[abortcontroller-polyfill]: https://www.npmjs.com/package/abortcontroller-polyfill
[spec-snapshot]: https://streams.spec.whatwg.org/commit-snapshots/4b6b93c69e531e2fe45a6ed4cb1484a7ba4eb8bb/
[wpt]: https://github.com/web-platform-tests/wpt/tree/96ca25f0f7526282c0d47e6bf6a7edd439da1968/streams
[wpt-bad-buffers]: https://github.com/web-platform-tests/wpt/blob/96ca25f0f7526282c0d47e6bf6a7edd439da1968/streams/readable-byte-streams/bad-buffers-and-views.any.js
[proposal-arraybuffer-transfer]: https://github.com/domenic/proposal-arraybuffer-transfer
[ref-impl-transferarraybuffer]: https://github.com/whatwg/streams/blob/4b6b93c69e531e2fe45a6ed4cb1484a7ba4eb8bb/reference-implementation/lib/abstract-ops/ecmascript.js#L16
[issue-3]: https://github.com/MattiasBuelens/web-streams-polyfill/issues/3
[wpt-async-iterator-prototype]: https://github.com/web-platform-tests/wpt/blob/96ca25f0f7526282c0d47e6bf6a7edd439da1968/streams/readable-streams/async-iterator.any.js#L24
[stub-async-iterator-prototype]: https://github.com/MattiasBuelens/web-streams-polyfill/blob/v2.0.0/src/target/es5/stub/async-iterator-prototype.ts
[creatorrr-polyfill]: https://github.com/creatorrr/web-streams-polyfill

File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff