path: root/g4f/Provider/npm/node_modules/@fastify/busboy
author    Heiner Lohaus <heiner@lohaus.eu>    2023-10-28 07:21:00 +0200
committer Heiner Lohaus <heiner@lohaus.eu>    2023-10-28 07:21:00 +0200
commit    dc04ca93060443a3ce6263a476f4dafc66afc6b3 (patch)
tree      b9c645cbe897af198ea0551509f901a249af35f2 /g4f/Provider/npm/node_modules/@fastify/busboy
parent    Update config supports_message_history (diff)
Diffstat (limited to 'g4f/Provider/npm/node_modules/@fastify/busboy')
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/LICENSE                        |  19
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/README.md                      | 271
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/LICENSE             |  19
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js        | 207
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js | 100
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js   |  13
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts      | 164
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js      | 228
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/lib/main.d.ts                  | 196
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/lib/main.js                    |  85
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/lib/types/multipart.js         | 306
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/lib/types/urlencoded.js        | 190
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/Decoder.js           |  54
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/basename.js          |  14
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/decodeText.js        |  26
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/getLimit.js          |  16
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/parseParams.js       |  87
-rw-r--r--  g4f/Provider/npm/node_modules/@fastify/busboy/package.json                   |  89
18 files changed, 2084 insertions, 0 deletions
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/LICENSE b/g4f/Provider/npm/node_modules/@fastify/busboy/LICENSE
new file mode 100644
index 00000000..290762e9
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/LICENSE
@@ -0,0 +1,19 @@
+Copyright Brian White. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
\ No newline at end of file
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/README.md b/g4f/Provider/npm/node_modules/@fastify/busboy/README.md
new file mode 100644
index 00000000..c74e618b
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/README.md
@@ -0,0 +1,271 @@
+# busboy
+
+<div align="center">
+
+[![Build Status](https://github.com/fastify/busboy/workflows/ci/badge.svg)](https://github.com/fastify/busboy/actions)
+[![Coverage Status](https://coveralls.io/repos/fastify/busboy/badge.svg?branch=master)](https://coveralls.io/r/fastify/busboy?branch=master)
+[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/)
+[![Security Responsible Disclosure](https://img.shields.io/badge/Security-Responsible%20Disclosure-yellow.svg)](https://github.com/nodejs/security-wg/blob/HEAD/processes/responsible_disclosure_template.md)
+
+</div>
+
+<div align="center">
+
+[![NPM version](https://img.shields.io/npm/v/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy)
+[![NPM downloads](https://img.shields.io/npm/dm/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy)
+
+</div>
+
+Description
+===========
+
+A Node.js module for parsing incoming HTML form data.
+
+This is an officially supported fork by the [fastify](https://github.com/fastify/) organization of the amazing library [originally created](https://github.com/mscdex/busboy) by Brian White,
+aimed at addressing long-standing issues with it.
+
+Benchmark (Mean time for 500 Kb payload, 2000 cycles, 1000 cycle warmup):
+
+| Library | Version | Mean time in nanoseconds (less is better) |
+|-----------------------|---------|-------------------------------------------|
+| busboy | 0.3.1 | `340114` |
+| @fastify/busboy | 1.0.0 | `270984` |
+
+[Changelog](https://github.com/fastify/busboy/blob/master/CHANGELOG.md) since busboy 0.3.1.
+
+Requirements
+============
+
+* [Node.js](http://nodejs.org/) 10+
+
+
+Install
+=======
+
+ npm i @fastify/busboy
+
+
+Examples
+========
+
+* Parsing (multipart) with default options:
+
+```javascript
+const http = require('node:http');
+const { inspect } = require('node:util');
+const Busboy = require('@fastify/busboy');
+
+http.createServer((req, res) => {
+ if (req.method === 'POST') {
+ const busboy = new Busboy({ headers: req.headers });
+ busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
+ console.log(`File [${fieldname}]: filename: ${filename}, encoding: ${encoding}, mimetype: ${mimetype}`);
+ file.on('data', data => {
+ console.log(`File [${fieldname}] got ${data.length} bytes`);
+ });
+ file.on('end', () => {
+ console.log(`File [${fieldname}] Finished`);
+ });
+ });
+ busboy.on('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => {
+ console.log(`Field [${fieldname}]: value: ${inspect(val)}`);
+ });
+ busboy.on('finish', () => {
+ console.log('Done parsing form!');
+ res.writeHead(303, { Connection: 'close', Location: '/' });
+ res.end();
+ });
+ req.pipe(busboy);
+ } else if (req.method === 'GET') {
+ res.writeHead(200, { Connection: 'close' });
+ res.end(`<html><head></head><body>
+ <form method="POST" enctype="multipart/form-data">
+ <input type="text" name="textfield"><br>
+ <input type="file" name="filefield"><br>
+ <input type="submit">
+ </form>
+ </body></html>`);
+ }
+}).listen(8000, () => {
+ console.log('Listening for requests');
+});
+
+// Example output, using http://nodejs.org/images/ryan-speaker.jpg as the file:
+//
+// Listening for requests
+// File [filefield]: filename: ryan-speaker.jpg, encoding: binary
+// File [filefield] got 11971 bytes
+// Field [textfield]: value: 'testing! :-)'
+// File [filefield] Finished
+// Done parsing form!
+```
+
+* Save all incoming files to disk:
+
+```javascript
+const http = require('node:http');
+const path = require('node:path');
+const os = require('node:os');
+const fs = require('node:fs');
+
+const Busboy = require('@fastify/busboy');
+
+http.createServer(function(req, res) {
+ if (req.method === 'POST') {
+ const busboy = new Busboy({ headers: req.headers });
+ busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
+ var saveTo = path.join(os.tmpdir(), path.basename(fieldname));
+ file.pipe(fs.createWriteStream(saveTo));
+ });
+ busboy.on('finish', function() {
+ res.writeHead(200, { 'Connection': 'close' });
+ res.end("That's all folks!");
+ });
+ return req.pipe(busboy);
+ }
+ res.writeHead(404);
+ res.end();
+}).listen(8000, function() {
+ console.log('Listening for requests');
+});
+```
+
+* Parsing (urlencoded) with default options:
+
+```javascript
+const http = require('node:http');
+const { inspect } = require('node:util');
+
+const Busboy = require('@fastify/busboy');
+
+http.createServer(function(req, res) {
+ if (req.method === 'POST') {
+ const busboy = new Busboy({ headers: req.headers });
+ busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
+ console.log('File [' + fieldname + ']: filename: ' + filename);
+ file.on('data', function(data) {
+ console.log('File [' + fieldname + '] got ' + data.length + ' bytes');
+ });
+ file.on('end', function() {
+ console.log('File [' + fieldname + '] Finished');
+ });
+ });
+ busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated) {
+ console.log('Field [' + fieldname + ']: value: ' + inspect(val));
+ });
+ busboy.on('finish', function() {
+ console.log('Done parsing form!');
+ res.writeHead(303, { Connection: 'close', Location: '/' });
+ res.end();
+ });
+ req.pipe(busboy);
+ } else if (req.method === 'GET') {
+ res.writeHead(200, { Connection: 'close' });
+ res.end('<html><head></head><body>\
+ <form method="POST">\
+ <input type="text" name="textfield"><br />\
+ <select name="selectfield">\
+ <option value="1">1</option>\
+ <option value="10">10</option>\
+ <option value="100">100</option>\
+ <option value="9001">9001</option>\
+ </select><br />\
+ <input type="checkbox" name="checkfield">Node.js rules!<br />\
+ <input type="submit">\
+ </form>\
+ </body></html>');
+ }
+}).listen(8000, function() {
+ console.log('Listening for requests');
+});
+
+// Example output:
+//
+// Listening for requests
+// Field [textfield]: value: 'testing! :-)'
+// Field [selectfield]: value: '9001'
+// Field [checkfield]: value: 'on'
+// Done parsing form!
+```
+
+
+API
+===
+
+_Busboy_ is a _Writable_ stream.
+
+Busboy (special) events
+-----------------------
+
+* **file**(< _string_ >fieldname, < _ReadableStream_ >stream, < _string_ >filename, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new file form field found. `transferEncoding` contains the 'Content-Transfer-Encoding' value for the file stream. `mimeType` contains the 'Content-Type' value for the file stream.
+ * Note: if you listen for this event, you must always consume the `stream`, whether or not you care about the file contents (e.g. simply call `stream.resume();` to discard them), otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any** incoming files, you can simply not listen for the 'file' event at all and all files will be automatically and safely discarded (discarded files still count towards the `files` and `parts` limits).
+ * If a configured file size limit was reached, `stream` will both have a boolean property `truncated` (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens.
+ * The property `bytesRead` informs about the number of bytes that have been read so far.
+
+* **field**(< _string_ >fieldname, < _string_ >value, < _boolean_ >fieldnameTruncated, < _boolean_ >valueTruncated, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new non-file field found.
+
+* **partsLimit**() - Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted.
+
+* **filesLimit**() - Emitted when specified `files` limit has been reached. No more 'file' events will be emitted.
+
+* **fieldsLimit**() - Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted.
+
+
+Busboy methods
+--------------
+
+* **(constructor)**(< _object_ >config) - Creates and returns a new Busboy instance.
+
+ * The constructor takes the following valid `config` settings:
+
+ * **headers** - _object_ - These are the HTTP headers of the incoming request, which are used by individual parsers.
+
+ * **autoDestroy** - _boolean_ - Whether this stream should automatically call .destroy() on itself after ending. (Default: false).
+
+ * **highWaterMark** - _integer_ - highWaterMark to use for this Busboy instance (Default: WritableStream default).
+
+ * **fileHwm** - _integer_ - highWaterMark to use for file streams (Default: ReadableStream default).
+
+ * **defCharset** - _string_ - Default character set to use when one isn't defined (Default: 'utf8').
+
+ * **preservePath** - _boolean_ - If paths in the multipart 'filename' field shall be preserved. (Default: false).
+
+ * **isPartAFile** - __function__ - Use this function to override the default file detection functionality. It takes the following parameters:
+
+ * fieldName - __string__ The name of the field.
+
+ * contentType - __string__ The content-type of the part, e.g. `text/plain`, `image/jpeg`, `application/octet-stream`
+
+ * fileName - __string__ The name of a file supplied by the part.
+
+ (Default: `(fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)`)
+
+ * **limits** - _object_ - Various limits on incoming data. Valid properties are:
+
+ * **fieldNameSize** - _integer_ - Max field name size (in bytes) (Default: 100 bytes).
+
+ * **fieldSize** - _integer_ - Max field value size (in bytes) (Default: 1 MiB, which is 1024 x 1024 bytes).
+
+ * **fields** - _integer_ - Max number of non-file fields (Default: Infinity).
+
+ * **fileSize** - _integer_ - For multipart forms, the max file size (in bytes) (Default: Infinity).
+
+ * **files** - _integer_ - For multipart forms, the max number of file fields (Default: Infinity).
+
+ * **parts** - _integer_ - For multipart forms, the max number of parts (fields + files) (Default: Infinity).
+
+ * **headerPairs** - _integer_ - For multipart forms, the max number of header key=>value pairs to parse (Default: 2000).
+
+ * **headerSize** - _integer_ - For multipart forms, the max size of a multipart header (Default: 81920).
+
+ * The constructor can throw errors:
+
+ * **Busboy expected an options-Object.** - Busboy expected an Object as its first parameter.
+
+ * **Busboy expected an options-Object with headers-attribute.** - The first parameter lacks a headers attribute.
+
+ * **Limit $limit is not a valid number** - Busboy expected the given limit to be a number. Busboy throws this error instead of silently falling back to its defaults, which could hide a potential security issue. A common source of this error is passing environment variables directly without converting them to numbers.
+
+ * **Unsupported Content-Type.** - The `Content-Type` isn't one Busboy can parse.
+
+ * **Missing Content-Type-header.** - The provided headers don't include `Content-Type` at all.
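The API notes above describe the per-limit events, the `truncated` flag and the `bytesRead` counter on file streams. A minimal sketch of wiring them together, assuming the published `@fastify/busboy` specifier and an arbitrary port:

```javascript
const http = require('node:http');
const Busboy = require('@fastify/busboy');

http.createServer((req, res) => {
  if (req.method !== 'POST') { res.writeHead(404); return res.end(); }
  const busboy = new Busboy({
    headers: req.headers,
    limits: { fileSize: 1024 * 1024, files: 5 } // 1 MiB per file, at most 5 file fields
  });
  busboy.on('file', (fieldname, file, filename) => {
    // 'limit' fires as soon as the configured fileSize is exceeded
    file.on('limit', () => console.log(`File [${filename}] hit the fileSize limit`));
    file.on('end', () => {
      // `truncated` is best checked once the stream has ended
      console.log(`File [${filename}]: ${file.bytesRead} bytes read, truncated: ${file.truncated}`);
    });
    file.resume(); // always consume the stream, even when discarding the contents
  });
  busboy.on('filesLimit', () => console.log('files limit reached, further file parts are discarded'));
  busboy.on('finish', () => { res.writeHead(200, { Connection: 'close' }); res.end('ok'); });
  req.pipe(busboy);
}).listen(8000);
```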
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/LICENSE b/g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/LICENSE
new file mode 100644
index 00000000..290762e9
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/LICENSE
@@ -0,0 +1,19 @@
+Copyright Brian White. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
\ No newline at end of file
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js b/g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js
new file mode 100644
index 00000000..79da160c
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js
@@ -0,0 +1,207 @@
+'use strict'
+
+const WritableStream = require('node:stream').Writable
+const inherits = require('node:util').inherits
+
+const StreamSearch = require('../../streamsearch/sbmh')
+
+const PartStream = require('./PartStream')
+const HeaderParser = require('./HeaderParser')
+
+const DASH = 45
+const B_ONEDASH = Buffer.from('-')
+const B_CRLF = Buffer.from('\r\n')
+const EMPTY_FN = function () {}
+
+function Dicer (cfg) {
+ if (!(this instanceof Dicer)) { return new Dicer(cfg) }
+ WritableStream.call(this, cfg)
+
+ if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') }
+
+ if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined }
+
+ this._headerFirst = cfg.headerFirst
+
+ this._dashes = 0
+ this._parts = 0
+ this._finished = false
+ this._realFinish = false
+ this._isPreamble = true
+ this._justMatched = false
+ this._firstWrite = true
+ this._inHeader = true
+ this._part = undefined
+ this._cb = undefined
+ this._ignoreData = false
+ this._partOpts = { highWaterMark: cfg.partHwm }
+ this._pause = false
+
+ const self = this
+ this._hparser = new HeaderParser(cfg)
+ this._hparser.on('header', function (header) {
+ self._inHeader = false
+ self._part.emit('header', header)
+ })
+}
+inherits(Dicer, WritableStream)
+
+Dicer.prototype.emit = function (ev) {
+ if (ev === 'finish' && !this._realFinish) {
+ if (!this._finished) {
+ const self = this
+ process.nextTick(function () {
+ self.emit('error', new Error('Unexpected end of multipart data'))
+ if (self._part && !self._ignoreData) {
+ const type = (self._isPreamble ? 'Preamble' : 'Part')
+ self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data'))
+ self._part.push(null)
+ process.nextTick(function () {
+ self._realFinish = true
+ self.emit('finish')
+ self._realFinish = false
+ })
+ return
+ }
+ self._realFinish = true
+ self.emit('finish')
+ self._realFinish = false
+ })
+ }
+ } else { WritableStream.prototype.emit.apply(this, arguments) }
+}
+
+Dicer.prototype._write = function (data, encoding, cb) {
+ // ignore unexpected data (e.g. extra trailer data after finished)
+ if (!this._hparser && !this._bparser) { return cb() }
+
+ if (this._headerFirst && this._isPreamble) {
+ if (!this._part) {
+ this._part = new PartStream(this._partOpts)
+ if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() }
+ }
+ const r = this._hparser.push(data)
+ if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() }
+ }
+
+ // allows for "easier" testing
+ if (this._firstWrite) {
+ this._bparser.push(B_CRLF)
+ this._firstWrite = false
+ }
+
+ this._bparser.push(data)
+
+ if (this._pause) { this._cb = cb } else { cb() }
+}
+
+Dicer.prototype.reset = function () {
+ this._part = undefined
+ this._bparser = undefined
+ this._hparser = undefined
+}
+
+Dicer.prototype.setBoundary = function (boundary) {
+ const self = this
+ this._bparser = new StreamSearch('\r\n--' + boundary)
+ this._bparser.on('info', function (isMatch, data, start, end) {
+ self._oninfo(isMatch, data, start, end)
+ })
+}
+
+Dicer.prototype._ignore = function () {
+ if (this._part && !this._ignoreData) {
+ this._ignoreData = true
+ this._part.on('error', EMPTY_FN)
+ // we must perform some kind of read on the stream even though we are
+ // ignoring the data, otherwise node's Readable stream will not emit 'end'
+ // after pushing null to the stream
+ this._part.resume()
+ }
+}
+
+Dicer.prototype._oninfo = function (isMatch, data, start, end) {
+ let buf; const self = this; let i = 0; let r; let shouldWriteMore = true
+
+ if (!this._part && this._justMatched && data) {
+ while (this._dashes < 2 && (start + i) < end) {
+ if (data[start + i] === DASH) {
+ ++i
+ ++this._dashes
+ } else {
+ if (this._dashes) { buf = B_ONEDASH }
+ this._dashes = 0
+ break
+ }
+ }
+ if (this._dashes === 2) {
+ if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) }
+ this.reset()
+ this._finished = true
+ // no more parts will be added
+ if (self._parts === 0) {
+ self._realFinish = true
+ self.emit('finish')
+ self._realFinish = false
+ }
+ }
+ if (this._dashes) { return }
+ }
+ if (this._justMatched) { this._justMatched = false }
+ if (!this._part) {
+ this._part = new PartStream(this._partOpts)
+ this._part._read = function (n) {
+ self._unpause()
+ }
+ if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() }
+ if (!this._isPreamble) { this._inHeader = true }
+ }
+ if (data && start < end && !this._ignoreData) {
+ if (this._isPreamble || !this._inHeader) {
+ if (buf) { shouldWriteMore = this._part.push(buf) }
+ shouldWriteMore = this._part.push(data.slice(start, end))
+ if (!shouldWriteMore) { this._pause = true }
+ } else if (!this._isPreamble && this._inHeader) {
+ if (buf) { this._hparser.push(buf) }
+ r = this._hparser.push(data.slice(start, end))
+ if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) }
+ }
+ }
+ if (isMatch) {
+ this._hparser.reset()
+ if (this._isPreamble) { this._isPreamble = false } else {
+ if (start !== end) {
+ ++this._parts
+ this._part.on('end', function () {
+ if (--self._parts === 0) {
+ if (self._finished) {
+ self._realFinish = true
+ self.emit('finish')
+ self._realFinish = false
+ } else {
+ self._unpause()
+ }
+ }
+ })
+ }
+ }
+ this._part.push(null)
+ this._part = undefined
+ this._ignoreData = false
+ this._justMatched = true
+ this._dashes = 0
+ }
+}
+
+Dicer.prototype._unpause = function () {
+ if (!this._pause) { return }
+
+ this._pause = false
+ if (this._cb) {
+ const cb = this._cb
+ this._cb = undefined
+ cb()
+ }
+}
+
+module.exports = Dicer
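Dicer only splits a multipart body into parts and raw headers; interpreting `Content-Disposition` and friends is left to the caller (busboy's multipart parser). A small sketch of driving it directly, with an assumed require path into this vendored copy and a hard-coded boundary:

```javascript
const Dicer = require('./deps/dicer/lib/Dicer'); // path relative to the package root, an assumption

const d = new Dicer({ boundary: 'AaB03x' });
d.on('part', (part) => {
  part.on('header', (header) => console.log('part header:', header)); // values are arrays of strings
  part.on('data', (data) => console.log(`part data: ${data.length} bytes`));
  part.on('end', () => console.log('part end'));
});
d.on('finish', () => console.log('done'));

d.end(Buffer.from(
  '--AaB03x\r\n' +
  'Content-Disposition: form-data; name="field1"\r\n' +
  '\r\n' +
  'hello\r\n' +
  '--AaB03x--\r\n'
));
```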
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js b/g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js
new file mode 100644
index 00000000..65f667b5
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js
@@ -0,0 +1,100 @@
+'use strict'
+
+const EventEmitter = require('node:events').EventEmitter
+const inherits = require('node:util').inherits
+const getLimit = require('../../../lib/utils/getLimit')
+
+const StreamSearch = require('../../streamsearch/sbmh')
+
+const B_DCRLF = Buffer.from('\r\n\r\n')
+const RE_CRLF = /\r\n/g
+const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex
+
+function HeaderParser (cfg) {
+ EventEmitter.call(this)
+
+ cfg = cfg || {}
+ const self = this
+ this.nread = 0
+ this.maxed = false
+ this.npairs = 0
+ this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000)
+ this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024)
+ this.buffer = ''
+ this.header = {}
+ this.finished = false
+ this.ss = new StreamSearch(B_DCRLF)
+ this.ss.on('info', function (isMatch, data, start, end) {
+ if (data && !self.maxed) {
+ if (self.nread + end - start >= self.maxHeaderSize) {
+ end = self.maxHeaderSize - self.nread + start
+ self.nread = self.maxHeaderSize
+ self.maxed = true
+ } else { self.nread += (end - start) }
+
+ self.buffer += data.toString('binary', start, end)
+ }
+ if (isMatch) { self._finish() }
+ })
+}
+inherits(HeaderParser, EventEmitter)
+
+HeaderParser.prototype.push = function (data) {
+ const r = this.ss.push(data)
+ if (this.finished) { return r }
+}
+
+HeaderParser.prototype.reset = function () {
+ this.finished = false
+ this.buffer = ''
+ this.header = {}
+ this.ss.reset()
+}
+
+HeaderParser.prototype._finish = function () {
+ if (this.buffer) { this._parseHeader() }
+ this.ss.matches = this.ss.maxMatches
+ const header = this.header
+ this.header = {}
+ this.buffer = ''
+ this.finished = true
+ this.nread = this.npairs = 0
+ this.maxed = false
+ this.emit('header', header)
+}
+
+HeaderParser.prototype._parseHeader = function () {
+ if (this.npairs === this.maxHeaderPairs) { return }
+
+ const lines = this.buffer.split(RE_CRLF)
+ const len = lines.length
+ let m, h
+
+ for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
+ if (lines[i].length === 0) { continue }
+ if (lines[i][0] === '\t' || lines[i][0] === ' ') {
+ // folded header content
+ // RFC2822 says to just remove the CRLF and not the whitespace following
+ // it, so we follow the RFC and include the leading whitespace ...
+ if (h) {
+ this.header[h][this.header[h].length - 1] += lines[i]
+ continue
+ }
+ }
+
+ const posColon = lines[i].indexOf(':')
+ if (
+ posColon === -1 ||
+ posColon === 0
+ ) {
+ return
+ }
+ m = RE_HDR.exec(lines[i])
+ h = m[1].toLowerCase()
+ this.header[h] = this.header[h] || []
+ this.header[h].push((m[2] || ''))
+ if (++this.npairs === this.maxHeaderPairs) { break }
+ }
+}
+
+module.exports = HeaderParser
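HeaderParser buffers everything up to the blank line that terminates a header block (`\r\n\r\n`), splits it on CRLF, and emits a single 'header' object whose values are arrays of strings, honouring `maxHeaderPairs` and `maxHeaderSize`. A quick sketch with an assumed require path:

```javascript
const HeaderParser = require('./deps/dicer/lib/HeaderParser'); // path assumed

const hp = new HeaderParser({ maxHeaderPairs: 32 });
hp.on('header', (header) => {
  // e.g. { 'content-type': [ 'text/plain; charset=utf-8' ], 'x-custom': [ 'a' ] }
  console.log(header);
});
hp.push(Buffer.from('Content-Type: text/plain; charset=utf-8\r\nX-Custom: a\r\n\r\n'));
```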
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js b/g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js
new file mode 100644
index 00000000..c91da1c4
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js
@@ -0,0 +1,13 @@
+'use strict'
+
+const inherits = require('node:util').inherits
+const ReadableStream = require('node:stream').Readable
+
+function PartStream (opts) {
+ ReadableStream.call(this, opts)
+}
+inherits(PartStream, ReadableStream)
+
+PartStream.prototype._read = function (n) {}
+
+module.exports = PartStream
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts b/g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts
new file mode 100644
index 00000000..3c5b8962
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts
@@ -0,0 +1,164 @@
+// Type definitions for dicer 0.2
+// Project: https://github.com/mscdex/dicer
+// Definitions by: BendingBender <https://github.com/BendingBender>
+// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
+// TypeScript Version: 2.2
+/// <reference types="node" />
+
+import stream = require("stream");
+
+// tslint:disable:unified-signatures
+
+/**
+ * A very fast streaming multipart parser for node.js.
+ * Dicer is a WritableStream
+ *
+ * Dicer (special) events:
+ * - on('finish', ()) - Emitted when all parts have been parsed and the Dicer instance has been ended.
+ * - on('part', (stream: PartStream)) - Emitted when a new part has been found.
+ * - on('preamble', (stream: PartStream)) - Emitted for preamble if you should happen to need it (can usually be ignored).
+ * - on('trailer', (data: Buffer)) - Emitted when trailing data was found after the terminating boundary (as with the preamble, this can usually be ignored too).
+ */
+export class Dicer extends stream.Writable {
+ /**
+ * Creates and returns a new Dicer instance with the following valid config settings:
+ *
+ * @param config The configuration to use
+ */
+ constructor(config: Dicer.Config);
+ /**
+ * Sets the boundary to use for parsing and performs some initialization needed for parsing.
+ * You should only need to use this if you set headerFirst to true in the constructor and are parsing the boundary from the preamble header.
+ *
+ * @param boundary The boundary to use
+ */
+ setBoundary(boundary: string): void;
+ addListener(event: "finish", listener: () => void): this;
+ addListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
+ addListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
+ addListener(event: "trailer", listener: (data: Buffer) => void): this;
+ addListener(event: "close", listener: () => void): this;
+ addListener(event: "drain", listener: () => void): this;
+ addListener(event: "error", listener: (err: Error) => void): this;
+ addListener(event: "pipe", listener: (src: stream.Readable) => void): this;
+ addListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
+ addListener(event: string, listener: (...args: any[]) => void): this;
+ on(event: "finish", listener: () => void): this;
+ on(event: "part", listener: (stream: Dicer.PartStream) => void): this;
+ on(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
+ on(event: "trailer", listener: (data: Buffer) => void): this;
+ on(event: "close", listener: () => void): this;
+ on(event: "drain", listener: () => void): this;
+ on(event: "error", listener: (err: Error) => void): this;
+ on(event: "pipe", listener: (src: stream.Readable) => void): this;
+ on(event: "unpipe", listener: (src: stream.Readable) => void): this;
+ on(event: string, listener: (...args: any[]) => void): this;
+ once(event: "finish", listener: () => void): this;
+ once(event: "part", listener: (stream: Dicer.PartStream) => void): this;
+ once(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
+ once(event: "trailer", listener: (data: Buffer) => void): this;
+ once(event: "close", listener: () => void): this;
+ once(event: "drain", listener: () => void): this;
+ once(event: "error", listener: (err: Error) => void): this;
+ once(event: "pipe", listener: (src: stream.Readable) => void): this;
+ once(event: "unpipe", listener: (src: stream.Readable) => void): this;
+ once(event: string, listener: (...args: any[]) => void): this;
+ prependListener(event: "finish", listener: () => void): this;
+ prependListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
+ prependListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
+ prependListener(event: "trailer", listener: (data: Buffer) => void): this;
+ prependListener(event: "close", listener: () => void): this;
+ prependListener(event: "drain", listener: () => void): this;
+ prependListener(event: "error", listener: (err: Error) => void): this;
+ prependListener(event: "pipe", listener: (src: stream.Readable) => void): this;
+ prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
+ prependListener(event: string, listener: (...args: any[]) => void): this;
+ prependOnceListener(event: "finish", listener: () => void): this;
+ prependOnceListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
+ prependOnceListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
+ prependOnceListener(event: "trailer", listener: (data: Buffer) => void): this;
+ prependOnceListener(event: "close", listener: () => void): this;
+ prependOnceListener(event: "drain", listener: () => void): this;
+ prependOnceListener(event: "error", listener: (err: Error) => void): this;
+ prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this;
+ prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
+ prependOnceListener(event: string, listener: (...args: any[]) => void): this;
+ removeListener(event: "finish", listener: () => void): this;
+ removeListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
+ removeListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
+ removeListener(event: "trailer", listener: (data: Buffer) => void): this;
+ removeListener(event: "close", listener: () => void): this;
+ removeListener(event: "drain", listener: () => void): this;
+ removeListener(event: "error", listener: (err: Error) => void): this;
+ removeListener(event: "pipe", listener: (src: stream.Readable) => void): this;
+ removeListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
+ removeListener(event: string, listener: (...args: any[]) => void): this;
+}
+
+declare namespace Dicer {
+ interface Config {
+ /**
+ * This is the boundary used to detect the beginning of a new part.
+ */
+ boundary?: string | undefined;
+ /**
+ * If true, preamble header parsing will be performed first.
+ */
+ headerFirst?: boolean | undefined;
+ /**
+ * The maximum number of header key=>value pairs to parse Default: 2000 (same as node's http).
+ */
+ maxHeaderPairs?: number | undefined;
+ }
+
+ /**
+ * PartStream is a _ReadableStream_
+ *
+ * PartStream (special) events:
+ * - on('header', (header: object)) - An object containing the header for this particular part. Each property value is an array of one or more string values.
+ */
+ interface PartStream extends stream.Readable {
+ addListener(event: "header", listener: (header: object) => void): this;
+ addListener(event: "close", listener: () => void): this;
+ addListener(event: "data", listener: (chunk: Buffer | string) => void): this;
+ addListener(event: "end", listener: () => void): this;
+ addListener(event: "readable", listener: () => void): this;
+ addListener(event: "error", listener: (err: Error) => void): this;
+ addListener(event: string, listener: (...args: any[]) => void): this;
+ on(event: "header", listener: (header: object) => void): this;
+ on(event: "close", listener: () => void): this;
+ on(event: "data", listener: (chunk: Buffer | string) => void): this;
+ on(event: "end", listener: () => void): this;
+ on(event: "readable", listener: () => void): this;
+ on(event: "error", listener: (err: Error) => void): this;
+ on(event: string, listener: (...args: any[]) => void): this;
+ once(event: "header", listener: (header: object) => void): this;
+ once(event: "close", listener: () => void): this;
+ once(event: "data", listener: (chunk: Buffer | string) => void): this;
+ once(event: "end", listener: () => void): this;
+ once(event: "readable", listener: () => void): this;
+ once(event: "error", listener: (err: Error) => void): this;
+ once(event: string, listener: (...args: any[]) => void): this;
+ prependListener(event: "header", listener: (header: object) => void): this;
+ prependListener(event: "close", listener: () => void): this;
+ prependListener(event: "data", listener: (chunk: Buffer | string) => void): this;
+ prependListener(event: "end", listener: () => void): this;
+ prependListener(event: "readable", listener: () => void): this;
+ prependListener(event: "error", listener: (err: Error) => void): this;
+ prependListener(event: string, listener: (...args: any[]) => void): this;
+ prependOnceListener(event: "header", listener: (header: object) => void): this;
+ prependOnceListener(event: "close", listener: () => void): this;
+ prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this;
+ prependOnceListener(event: "end", listener: () => void): this;
+ prependOnceListener(event: "readable", listener: () => void): this;
+ prependOnceListener(event: "error", listener: (err: Error) => void): this;
+ prependOnceListener(event: string, listener: (...args: any[]) => void): this;
+ removeListener(event: "header", listener: (header: object) => void): this;
+ removeListener(event: "close", listener: () => void): this;
+ removeListener(event: "data", listener: (chunk: Buffer | string) => void): this;
+ removeListener(event: "end", listener: () => void): this;
+ removeListener(event: "readable", listener: () => void): this;
+ removeListener(event: "error", listener: (err: Error) => void): this;
+ removeListener(event: string, listener: (...args: any[]) => void): this;
+ }
+}
\ No newline at end of file
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js b/g4f/Provider/npm/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js
new file mode 100644
index 00000000..b90c0e86
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js
@@ -0,0 +1,228 @@
+'use strict'
+
+/**
+ * Copyright Brian White. All rights reserved.
+ *
+ * @see https://github.com/mscdex/streamsearch
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+ * IN THE SOFTWARE.
+ *
+ * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation
+ * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool
+ */
+const EventEmitter = require('node:events').EventEmitter
+const inherits = require('node:util').inherits
+
+function SBMH (needle) {
+ if (typeof needle === 'string') {
+ needle = Buffer.from(needle)
+ }
+
+ if (!Buffer.isBuffer(needle)) {
+ throw new TypeError('The needle has to be a String or a Buffer.')
+ }
+
+ const needleLength = needle.length
+
+ if (needleLength === 0) {
+ throw new Error('The needle cannot be an empty String/Buffer.')
+ }
+
+ if (needleLength > 256) {
+ throw new Error('The needle cannot have a length bigger than 256.')
+ }
+
+ this.maxMatches = Infinity
+ this.matches = 0
+
+ this._occ = new Array(256)
+ .fill(needleLength) // Initialize occurrence table.
+ this._lookbehind_size = 0
+ this._needle = needle
+ this._bufpos = 0
+
+ this._lookbehind = Buffer.alloc(needleLength)
+
+ // Populate occurrence table with analysis of the needle,
+ // ignoring last letter.
+ for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var
+ this._occ[needle[i]] = needleLength - 1 - i
+ }
+}
+inherits(SBMH, EventEmitter)
+
+SBMH.prototype.reset = function () {
+ this._lookbehind_size = 0
+ this.matches = 0
+ this._bufpos = 0
+}
+
+SBMH.prototype.push = function (chunk, pos) {
+ if (!Buffer.isBuffer(chunk)) {
+ chunk = Buffer.from(chunk, 'binary')
+ }
+ const chlen = chunk.length
+ this._bufpos = pos || 0
+ let r
+ while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) }
+ return r
+}
+
+SBMH.prototype._sbmh_feed = function (data) {
+ const len = data.length
+ const needle = this._needle
+ const needleLength = needle.length
+ const lastNeedleChar = needle[needleLength - 1]
+
+ // Positive: points to a position in `data`
+ // pos == 3 points to data[3]
+ // Negative: points to a position in the lookbehind buffer
+ // pos == -2 points to lookbehind[lookbehind_size - 2]
+ let pos = -this._lookbehind_size
+ let ch
+
+ if (pos < 0) {
+ // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool
+ // search with character lookup code that considers both the
+ // lookbehind buffer and the current round's haystack data.
+ //
+ // Loop until
+ // there is a match.
+ // or until
+ // we've moved past the position that requires the
+ // lookbehind buffer. In this case we switch to the
+ // optimized loop.
+ // or until
+ // the character to look at lies outside the haystack.
+ while (pos < 0 && pos <= len - needleLength) {
+ ch = this._sbmh_lookup_char(data, pos + needleLength - 1)
+
+ if (
+ ch === lastNeedleChar &&
+ this._sbmh_memcmp(data, pos, needleLength - 1)
+ ) {
+ this._lookbehind_size = 0
+ ++this.matches
+ this.emit('info', true)
+
+ return (this._bufpos = pos + needleLength)
+ }
+ pos += this._occ[ch]
+ }
+
+ // No match.
+
+ if (pos < 0) {
+ // There's too few data for Boyer-Moore-Horspool to run,
+ // so let's use a different algorithm to skip as much as
+ // we can.
+ // Forward pos until
+ // the trailing part of lookbehind + data
+ // looks like the beginning of the needle
+ // or until
+ // pos == 0
+ while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos }
+ }
+
+ if (pos >= 0) {
+ // Discard lookbehind buffer.
+ this.emit('info', false, this._lookbehind, 0, this._lookbehind_size)
+ this._lookbehind_size = 0
+ } else {
+ // Cut off part of the lookbehind buffer that has
+ // been processed and append the entire haystack
+ // into it.
+ const bytesToCutOff = this._lookbehind_size + pos
+ if (bytesToCutOff > 0) {
+ // The cut off data is guaranteed not to contain the needle.
+ this.emit('info', false, this._lookbehind, 0, bytesToCutOff)
+ }
+
+ this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff,
+ this._lookbehind_size - bytesToCutOff)
+ this._lookbehind_size -= bytesToCutOff
+
+ data.copy(this._lookbehind, this._lookbehind_size)
+ this._lookbehind_size += len
+
+ this._bufpos = len
+ return len
+ }
+ }
+
+ pos += (pos >= 0) * this._bufpos
+
+ // Lookbehind buffer is now empty. We only need to check if the
+ // needle is in the haystack.
+ if (data.indexOf(needle, pos) !== -1) {
+ pos = data.indexOf(needle, pos)
+ ++this.matches
+ if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) }
+
+ return (this._bufpos = pos + needleLength)
+ } else {
+ pos = len - needleLength
+ }
+
+ // There was no match. If there's trailing haystack data that we cannot
+ // match yet using the Boyer-Moore-Horspool algorithm (because the trailing
+ // data is less than the needle size) then match using a modified
+ // algorithm that starts matching from the beginning instead of the end.
+ // Whatever trailing data is left after running this algorithm is added to
+ // the lookbehind buffer.
+ while (
+ pos < len &&
+ (
+ data[pos] !== needle[0] ||
+ (
+ (Buffer.compare(
+ data.subarray(pos, pos + len - pos),
+ needle.subarray(0, len - pos)
+ ) !== 0)
+ )
+ )
+ ) {
+ ++pos
+ }
+ if (pos < len) {
+ data.copy(this._lookbehind, 0, pos, pos + (len - pos))
+ this._lookbehind_size = len - pos
+ }
+
+ // Everything until pos is guaranteed not to contain needle data.
+ if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) }
+
+ this._bufpos = len
+ return len
+}
+
+SBMH.prototype._sbmh_lookup_char = function (data, pos) {
+ return (pos < 0)
+ ? this._lookbehind[this._lookbehind_size + pos]
+ : data[pos]
+}
+
+SBMH.prototype._sbmh_memcmp = function (data, pos, len) {
+ for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
+ if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false }
+ }
+ return true
+}
+
+module.exports = SBMH
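The matcher reports everything through 'info' events: data that cannot contain the needle is flushed with `isMatch === false`, and each hit arrives with `isMatch === true` together with whatever data preceded it. A short sketch with an assumed require path:

```javascript
const StreamSearch = require('./deps/streamsearch/sbmh'); // path assumed

const ss = new StreamSearch('\r\n');
ss.on('info', (isMatch, data, start, end) => {
  if (data) { console.log('data:', data.toString('latin1', start, end)); }
  if (isMatch) { console.log('needle matched'); }
});
ss.push(Buffer.from('line one\r\nline two\r\n'));
// -> data: line one | needle matched | data: line two | needle matched
```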
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/lib/main.d.ts b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/main.d.ts
new file mode 100644
index 00000000..91b64483
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/main.d.ts
@@ -0,0 +1,196 @@
+// Definitions by: Jacob Baskin <https://github.com/jacobbaskin>
+// BendingBender <https://github.com/BendingBender>
+// Igor Savin <https://github.com/kibertoad>
+
+/// <reference types="node" />
+
+import * as http from 'http';
+import { Readable, Writable } from 'stream';
+export { Dicer } from "../deps/dicer/lib/dicer";
+
+export const Busboy: BusboyConstructor;
+export default Busboy;
+
+export interface BusboyConfig {
+ /**
+ * These are the HTTP headers of the incoming request, which are used by individual parsers.
+ */
+ headers: BusboyHeaders;
+ /**
+ * `highWaterMark` to use for this Busboy instance.
+ * @default WritableStream default.
+ */
+ highWaterMark?: number | undefined;
+ /**
+ * highWaterMark to use for file streams.
+ * @default ReadableStream default.
+ */
+ fileHwm?: number | undefined;
+ /**
+ * Default character set to use when one isn't defined.
+ * @default 'utf8'
+ */
+ defCharset?: string | undefined;
+ /**
+ * Detect if a Part is a file.
+ *
+ * By default a file is detected if contentType
+ * is application/octet-stream or fileName is not
+ * undefined.
+ *
+ * Modify this to handle e.g. Blobs.
+ */
+ isPartAFile?: (fieldName: string | undefined, contentType: string | undefined, fileName: string | undefined) => boolean;
+ /**
+ * If paths in the multipart 'filename' field shall be preserved.
+ * @default false
+ */
+ preservePath?: boolean | undefined;
+ /**
+ * Various limits on incoming data.
+ */
+ limits?:
+ | {
+ /**
+ * Max field name size (in bytes)
+ * @default 100 bytes
+ */
+ fieldNameSize?: number | undefined;
+ /**
+ * Max field value size (in bytes)
+ * @default 1MB
+ */
+ fieldSize?: number | undefined;
+ /**
+ * Max number of non-file fields
+ * @default Infinity
+ */
+ fields?: number | undefined;
+ /**
+ * For multipart forms, the max file size (in bytes)
+ * @default Infinity
+ */
+ fileSize?: number | undefined;
+ /**
+ * For multipart forms, the max number of file fields
+ * @default Infinity
+ */
+ files?: number | undefined;
+ /**
+ * For multipart forms, the max number of parts (fields + files)
+ * @default Infinity
+ */
+ parts?: number | undefined;
+ /**
+ * For multipart forms, the max number of header key=>value pairs to parse
+ * @default 2000
+ */
+ headerPairs?: number | undefined;
+
+ /**
+ * For multipart forms, the max size of a header part
+ * @default 81920
+ */
+ headerSize?: number | undefined;
+ }
+ | undefined;
+}
+
+export type BusboyHeaders = { 'content-type': string } & http.IncomingHttpHeaders;
+
+export interface BusboyFileStream extends
+ Readable {
+
+ truncated: boolean;
+
+ /**
+ * The number of bytes that have been read so far.
+ */
+ bytesRead: number;
+}
+
+export interface Busboy extends Writable {
+ addListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
+
+ addListener(event: string | symbol, listener: (...args: any[]) => void): this;
+
+ on<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
+
+ on(event: string | symbol, listener: (...args: any[]) => void): this;
+
+ once<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
+
+ once(event: string | symbol, listener: (...args: any[]) => void): this;
+
+ removeListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
+
+ removeListener(event: string | symbol, listener: (...args: any[]) => void): this;
+
+ off<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
+
+ off(event: string | symbol, listener: (...args: any[]) => void): this;
+
+ prependListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
+
+ prependListener(event: string | symbol, listener: (...args: any[]) => void): this;
+
+ prependOnceListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
+
+ prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
+}
+
+export interface BusboyEvents {
+ /**
+ * Emitted for each new file form field found.
+ *
+ * * Note: if you listen for this event, you must always consume the `stream`, whether or not you care about
+ * the file contents (e.g. simply call `stream.resume();` to discard them), otherwise the 'finish' event will
+ * never fire on the Busboy instance. However, if you don't care about **any** incoming files, you can simply
+ * not listen for the 'file' event at all and all files will be automatically and safely discarded (discarded
+ * files still count towards the `files` and `parts` limits).
+ * * If a configured file size limit was reached, `stream` will both have a boolean property `truncated`
+ * (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens.
+ *
+ * @param listener.transferEncoding Contains the 'Content-Transfer-Encoding' value for the file stream.
+ * @param listener.mimeType Contains the 'Content-Type' value for the file stream.
+ */
+ file: (
+ fieldname: string,
+ stream: BusboyFileStream,
+ filename: string,
+ transferEncoding: string,
+ mimeType: string,
+ ) => void;
+ /**
+ * Emitted for each new non-file field found.
+ */
+ field: (
+ fieldname: string,
+ value: string,
+ fieldnameTruncated: boolean,
+ valueTruncated: boolean,
+ transferEncoding: string,
+ mimeType: string,
+ ) => void;
+ finish: () => void;
+ /**
+ * Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted.
+ */
+ partsLimit: () => void;
+ /**
+ * Emitted when specified `files` limit has been reached. No more 'file' events will be emitted.
+ */
+ filesLimit: () => void;
+ /**
+ * Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted.
+ */
+ fieldsLimit: () => void;
+ error: (error: unknown) => void;
+}
+
+export interface BusboyConstructor {
+ (options: BusboyConfig): Busboy;
+
+ new(options: BusboyConfig): Busboy;
+}
+
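The `isPartAFile` option documented above decides whether a part is delivered through the 'file' or the 'field' event. A sketch of overriding it so that only parts carrying a filename count as files (the package specifier, boundary and payload are assumptions for illustration):

```javascript
const Busboy = require('@fastify/busboy');

const busboy = new Busboy({
  headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' },
  // the default also treats application/octet-stream parts without a filename as files
  isPartAFile: (fieldName, contentType, fileName) => fileName !== undefined
});
busboy.on('file', (name, file, filename) => { file.resume(); console.log('file:', name, filename); });
busboy.on('field', (name, value) => console.log('field:', name, value));
busboy.on('finish', () => console.log('done'));

busboy.end(Buffer.from(
  '--AaB03x\r\n' +
  'Content-Disposition: form-data; name="blob1"\r\n' +
  'Content-Type: application/octet-stream\r\n' +
  '\r\n' +
  'raw bytes\r\n' +
  '--AaB03x--\r\n'
)); // with the override this part arrives as a 'field', not a 'file'
```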
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/lib/main.js b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/main.js
new file mode 100644
index 00000000..8794bebf
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/main.js
@@ -0,0 +1,85 @@
+'use strict'
+
+const WritableStream = require('node:stream').Writable
+const { inherits } = require('node:util')
+const Dicer = require('../deps/dicer/lib/Dicer')
+
+const MultipartParser = require('./types/multipart')
+const UrlencodedParser = require('./types/urlencoded')
+const parseParams = require('./utils/parseParams')
+
+function Busboy (opts) {
+ if (!(this instanceof Busboy)) { return new Busboy(opts) }
+
+ if (typeof opts !== 'object') {
+ throw new TypeError('Busboy expected an options-Object.')
+ }
+ if (typeof opts.headers !== 'object') {
+ throw new TypeError('Busboy expected an options-Object with headers-attribute.')
+ }
+ if (typeof opts.headers['content-type'] !== 'string') {
+ throw new TypeError('Missing Content-Type-header.')
+ }
+
+ const {
+ headers,
+ ...streamOptions
+ } = opts
+
+ this.opts = {
+ autoDestroy: false,
+ ...streamOptions
+ }
+ WritableStream.call(this, this.opts)
+
+ this._done = false
+ this._parser = this.getParserByHeaders(headers)
+ this._finished = false
+}
+inherits(Busboy, WritableStream)
+
+Busboy.prototype.emit = function (ev) {
+ if (ev === 'finish') {
+ if (!this._done) {
+ this._parser?.end()
+ return
+ } else if (this._finished) {
+ return
+ }
+ this._finished = true
+ }
+ WritableStream.prototype.emit.apply(this, arguments)
+}
+
+Busboy.prototype.getParserByHeaders = function (headers) {
+ const parsed = parseParams(headers['content-type'])
+
+ const cfg = {
+ defCharset: this.opts.defCharset,
+ fileHwm: this.opts.fileHwm,
+ headers,
+ highWaterMark: this.opts.highWaterMark,
+ isPartAFile: this.opts.isPartAFile,
+ limits: this.opts.limits,
+ parsedConType: parsed,
+ preservePath: this.opts.preservePath
+ }
+
+ if (MultipartParser.detect.test(parsed[0])) {
+ return new MultipartParser(this, cfg)
+ }
+ if (UrlencodedParser.detect.test(parsed[0])) {
+ return new UrlencodedParser(this, cfg)
+ }
+ throw new Error('Unsupported Content-Type.')
+}
+
+Busboy.prototype._write = function (chunk, encoding, cb) {
+ this._parser.write(chunk, cb)
+}
+
+module.exports = Busboy
+module.exports.default = Busboy
+module.exports.Busboy = Busboy
+
+module.exports.Dicer = Dicer
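The guards at the top of the constructor and in `getParserByHeaders` produce the errors listed in the README. A small sketch triggering each one (package specifier assumed):

```javascript
const Busboy = require('@fastify/busboy');

try { new Busboy(); } catch (err) {
  console.error(err.message); // Busboy expected an options-Object.
}
try { new Busboy({ headers: {} }); } catch (err) {
  console.error(err.message); // Missing Content-Type-header.
}
try { new Busboy({ headers: { 'content-type': 'text/plain' } }); } catch (err) {
  console.error(err.message); // Unsupported Content-Type.
}
```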
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/lib/types/multipart.js b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/types/multipart.js
new file mode 100644
index 00000000..ad242db2
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/types/multipart.js
@@ -0,0 +1,306 @@
+'use strict'
+
+// TODO:
+// * support 1 nested multipart level
+// (see second multipart example here:
+// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data)
+// * support limits.fieldNameSize
+// -- this will require modifications to utils.parseParams
+
+const { Readable } = require('node:stream')
+const { inherits } = require('node:util')
+
+const Dicer = require('../../deps/dicer/lib/Dicer')
+
+const parseParams = require('../utils/parseParams')
+const decodeText = require('../utils/decodeText')
+const basename = require('../utils/basename')
+const getLimit = require('../utils/getLimit')
+
+const RE_BOUNDARY = /^boundary$/i
+const RE_FIELD = /^form-data$/i
+const RE_CHARSET = /^charset$/i
+const RE_FILENAME = /^filename$/i
+const RE_NAME = /^name$/i
+
+Multipart.detect = /^multipart\/form-data/i
+function Multipart (boy, cfg) {
+ let i
+ let len
+ const self = this
+ let boundary
+ const limits = cfg.limits
+ const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined))
+ const parsedConType = cfg.parsedConType || []
+ const defCharset = cfg.defCharset || 'utf8'
+ const preservePath = cfg.preservePath
+ const fileOpts = { highWaterMark: cfg.fileHwm }
+
+ for (i = 0, len = parsedConType.length; i < len; ++i) {
+ if (Array.isArray(parsedConType[i]) &&
+ RE_BOUNDARY.test(parsedConType[i][0])) {
+ boundary = parsedConType[i][1]
+ break
+ }
+ }
+
+ function checkFinished () {
+ if (nends === 0 && finished && !boy._done) {
+ finished = false
+ self.end()
+ }
+ }
+
+ if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') }
+
+ const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
+ const fileSizeLimit = getLimit(limits, 'fileSize', Infinity)
+ const filesLimit = getLimit(limits, 'files', Infinity)
+ const fieldsLimit = getLimit(limits, 'fields', Infinity)
+ const partsLimit = getLimit(limits, 'parts', Infinity)
+ const headerPairsLimit = getLimit(limits, 'headerPairs', 2000)
+ const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024)
+
+ let nfiles = 0
+ let nfields = 0
+ let nends = 0
+ let curFile
+ let curField
+ let finished = false
+
+ this._needDrain = false
+ this._pause = false
+ this._cb = undefined
+ this._nparts = 0
+ this._boy = boy
+
+ const parserCfg = {
+ boundary,
+ maxHeaderPairs: headerPairsLimit,
+ maxHeaderSize: headerSizeLimit,
+ partHwm: fileOpts.highWaterMark,
+ highWaterMark: cfg.highWaterMark
+ }
+
+ this.parser = new Dicer(parserCfg)
+ this.parser.on('drain', function () {
+ self._needDrain = false
+ if (self._cb && !self._pause) {
+ const cb = self._cb
+ self._cb = undefined
+ cb()
+ }
+ }).on('part', function onPart (part) {
+ if (++self._nparts > partsLimit) {
+ self.parser.removeListener('part', onPart)
+ self.parser.on('part', skipPart)
+ boy.hitPartsLimit = true
+ boy.emit('partsLimit')
+ return skipPart(part)
+ }
+
+ // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let
+ // us emit 'end' early since we know the part has ended if we are already
+ // seeing the next part
+ if (curField) {
+ const field = curField
+ field.emit('end')
+ field.removeAllListeners('end')
+ }
+
+ part.on('header', function (header) {
+ let contype
+ let fieldname
+ let parsed
+ let charset
+ let encoding
+ let filename
+ let nsize = 0
+
+ if (header['content-type']) {
+ parsed = parseParams(header['content-type'][0])
+ if (parsed[0]) {
+ contype = parsed[0].toLowerCase()
+ for (i = 0, len = parsed.length; i < len; ++i) {
+ if (RE_CHARSET.test(parsed[i][0])) {
+ charset = parsed[i][1].toLowerCase()
+ break
+ }
+ }
+ }
+ }
+
+ if (contype === undefined) { contype = 'text/plain' }
+ if (charset === undefined) { charset = defCharset }
+
+ if (header['content-disposition']) {
+ parsed = parseParams(header['content-disposition'][0])
+ if (!RE_FIELD.test(parsed[0])) { return skipPart(part) }
+ for (i = 0, len = parsed.length; i < len; ++i) {
+ if (RE_NAME.test(parsed[i][0])) {
+ fieldname = parsed[i][1]
+ } else if (RE_FILENAME.test(parsed[i][0])) {
+ filename = parsed[i][1]
+ if (!preservePath) { filename = basename(filename) }
+ }
+ }
+ } else { return skipPart(part) }
+
+ if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' }
+
+ let onData,
+ onEnd
+
+ if (isPartAFile(fieldname, contype, filename)) {
+ // file/binary field
+ if (nfiles === filesLimit) {
+ if (!boy.hitFilesLimit) {
+ boy.hitFilesLimit = true
+ boy.emit('filesLimit')
+ }
+ return skipPart(part)
+ }
+
+ ++nfiles
+
+ if (!boy._events.file) {
+ self.parser._ignore()
+ return
+ }
+
+ ++nends
+ const file = new FileStream(fileOpts)
+ curFile = file
+ file.on('end', function () {
+ --nends
+ self._pause = false
+ checkFinished()
+ if (self._cb && !self._needDrain) {
+ const cb = self._cb
+ self._cb = undefined
+ cb()
+ }
+ })
+ file._read = function (n) {
+ if (!self._pause) { return }
+ self._pause = false
+ if (self._cb && !self._needDrain) {
+ const cb = self._cb
+ self._cb = undefined
+ cb()
+ }
+ }
+ boy.emit('file', fieldname, file, filename, encoding, contype)
+
+ onData = function (data) {
+ if ((nsize += data.length) > fileSizeLimit) {
+ const extralen = fileSizeLimit - nsize + data.length
+ if (extralen > 0) { file.push(data.slice(0, extralen)) }
+ file.truncated = true
+ file.bytesRead = fileSizeLimit
+ part.removeAllListeners('data')
+ file.emit('limit')
+ return
+ } else if (!file.push(data)) { self._pause = true }
+
+ file.bytesRead = nsize
+ }
+
+ onEnd = function () {
+ curFile = undefined
+ file.push(null)
+ }
+ } else {
+ // non-file field
+ if (nfields === fieldsLimit) {
+ if (!boy.hitFieldsLimit) {
+ boy.hitFieldsLimit = true
+ boy.emit('fieldsLimit')
+ }
+ return skipPart(part)
+ }
+
+ ++nfields
+ ++nends
+ let buffer = ''
+ let truncated = false
+ curField = part
+
+ onData = function (data) {
+ if ((nsize += data.length) > fieldSizeLimit) {
+ const extralen = (fieldSizeLimit - (nsize - data.length))
+ buffer += data.toString('binary', 0, extralen)
+ truncated = true
+ part.removeAllListeners('data')
+ } else { buffer += data.toString('binary') }
+ }
+
+ onEnd = function () {
+ curField = undefined
+ if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) }
+ boy.emit('field', fieldname, buffer, false, truncated, encoding, contype)
+ --nends
+ checkFinished()
+ }
+ }
+
+ /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become
+ broken. Streams2/streams3 is a huge black box of confusion, but
+ somehow overriding the sync state seems to fix things again (and still
+ seems to work for previous node versions).
+ */
+ part._readableState.sync = false
+
+ part.on('data', onData)
+ part.on('end', onEnd)
+ }).on('error', function (err) {
+ if (curFile) { curFile.emit('error', err) }
+ })
+ }).on('error', function (err) {
+ boy.emit('error', err)
+ }).on('finish', function () {
+ finished = true
+ checkFinished()
+ })
+}
+
+Multipart.prototype.write = function (chunk, cb) {
+ const r = this.parser.write(chunk)
+ if (r && !this._pause) {
+ cb()
+ } else {
+ this._needDrain = !r
+ this._cb = cb
+ }
+}
+
+Multipart.prototype.end = function () {
+ const self = this
+
+ if (self.parser.writable) {
+ self.parser.end()
+ } else if (!self._boy._done) {
+ process.nextTick(function () {
+ self._boy._done = true
+ self._boy.emit('finish')
+ })
+ }
+}
+
+function skipPart (part) {
+ part.resume()
+}
+
+function FileStream (opts) {
+ Readable.call(this, opts)
+
+ this.bytesRead = 0
+
+ this.truncated = false
+}
+
+inherits(FileStream, Readable)
+
+FileStream.prototype._read = function (n) {}
+
+module.exports = Multipart
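A minimal usage sketch of the multipart parser above, driven through the package's public interface. The require path, the export shape, and the HTTP wiring are assumptions (lib/main.js is not part of this hunk); the 'file'/'field'/'finish' event signatures and the fileSize limit behaviour follow the code above.

// Assumption: the package's main entry exports the Busboy constructor directly.
const Busboy = require('@fastify/busboy')
const http = require('http')

http.createServer((req, res) => {
  const busboy = new Busboy({
    headers: req.headers, // must carry a multipart/form-data content-type with a boundary
    limits: { fileSize: 5 * 1024 * 1024 } // read via getLimit(limits, 'fileSize', Infinity)
  })
  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    file.resume() // `file` is the FileStream above; it must be drained or the parser pauses
    file.on('limit', () => console.log(`${filename} was truncated at the fileSize limit`))
  })
  busboy.on('field', (fieldname, value, fieldnameTruncated, valueTruncated) => {
    console.log(`field ${fieldname}: ${value}`)
  })
  busboy.on('finish', () => res.end('ok'))
  req.pipe(busboy)
}).listen(8000)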
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/lib/types/urlencoded.js b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/types/urlencoded.js
new file mode 100644
index 00000000..6f5f7846
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/types/urlencoded.js
@@ -0,0 +1,190 @@
+'use strict'
+
+const Decoder = require('../utils/Decoder')
+const decodeText = require('../utils/decodeText')
+const getLimit = require('../utils/getLimit')
+
+const RE_CHARSET = /^charset$/i
+
+UrlEncoded.detect = /^application\/x-www-form-urlencoded/i
+function UrlEncoded (boy, cfg) {
+ const limits = cfg.limits
+ const parsedConType = cfg.parsedConType
+ this.boy = boy
+
+ this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
+ this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100)
+ this.fieldsLimit = getLimit(limits, 'fields', Infinity)
+
+ let charset
+ for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var
+ if (Array.isArray(parsedConType[i]) &&
+ RE_CHARSET.test(parsedConType[i][0])) {
+ charset = parsedConType[i][1].toLowerCase()
+ break
+ }
+ }
+
+ if (charset === undefined) { charset = cfg.defCharset || 'utf8' }
+
+ this.decoder = new Decoder()
+ this.charset = charset
+ this._fields = 0
+ this._state = 'key'
+ this._checkingBytes = true
+ this._bytesKey = 0
+ this._bytesVal = 0
+ this._key = ''
+ this._val = ''
+ this._keyTrunc = false
+ this._valTrunc = false
+ this._hitLimit = false
+}
+
+UrlEncoded.prototype.write = function (data, cb) {
+ if (this._fields === this.fieldsLimit) {
+ if (!this.boy.hitFieldsLimit) {
+ this.boy.hitFieldsLimit = true
+ this.boy.emit('fieldsLimit')
+ }
+ return cb()
+ }
+
+ let idxeq; let idxamp; let i; let p = 0; const len = data.length
+
+ while (p < len) {
+ if (this._state === 'key') {
+ idxeq = idxamp = undefined
+ for (i = p; i < len; ++i) {
+ if (!this._checkingBytes) { ++p }
+ if (data[i] === 0x3D/* = */) {
+ idxeq = i
+ break
+ } else if (data[i] === 0x26/* & */) {
+ idxamp = i
+ break
+ }
+ if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) {
+ this._hitLimit = true
+ break
+ } else if (this._checkingBytes) { ++this._bytesKey }
+ }
+
+ if (idxeq !== undefined) {
+ // key with assignment
+ if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) }
+ this._state = 'val'
+
+ this._hitLimit = false
+ this._checkingBytes = true
+ this._val = ''
+ this._bytesVal = 0
+ this._valTrunc = false
+ this.decoder.reset()
+
+ p = idxeq + 1
+ } else if (idxamp !== undefined) {
+ // key with no assignment
+ ++this._fields
+ let key; const keyTrunc = this._keyTrunc
+ if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key }
+
+ this._hitLimit = false
+ this._checkingBytes = true
+ this._key = ''
+ this._bytesKey = 0
+ this._keyTrunc = false
+ this.decoder.reset()
+
+ if (key.length) {
+ this.boy.emit('field', decodeText(key, 'binary', this.charset),
+ '',
+ keyTrunc,
+ false)
+ }
+
+ p = idxamp + 1
+ if (this._fields === this.fieldsLimit) { return cb() }
+ } else if (this._hitLimit) {
+ // we may not have hit the actual limit if there are encoded bytes...
+ if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) }
+ p = i
+ if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) {
+ // yep, we actually did hit the limit
+ this._checkingBytes = false
+ this._keyTrunc = true
+ }
+ } else {
+ if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) }
+ p = len
+ }
+ } else {
+ idxamp = undefined
+ for (i = p; i < len; ++i) {
+ if (!this._checkingBytes) { ++p }
+ if (data[i] === 0x26/* & */) {
+ idxamp = i
+ break
+ }
+ if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) {
+ this._hitLimit = true
+ break
+ } else if (this._checkingBytes) { ++this._bytesVal }
+ }
+
+ if (idxamp !== undefined) {
+ ++this._fields
+ if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) }
+ this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
+ decodeText(this._val, 'binary', this.charset),
+ this._keyTrunc,
+ this._valTrunc)
+ this._state = 'key'
+
+ this._hitLimit = false
+ this._checkingBytes = true
+ this._key = ''
+ this._bytesKey = 0
+ this._keyTrunc = false
+ this.decoder.reset()
+
+ p = idxamp + 1
+ if (this._fields === this.fieldsLimit) { return cb() }
+ } else if (this._hitLimit) {
+ // we may not have hit the actual limit if there are encoded bytes...
+ if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) }
+ p = i
+ if ((this._val === '' && this.fieldSizeLimit === 0) ||
+ (this._bytesVal = this._val.length) === this.fieldSizeLimit) {
+ // yep, we actually did hit the limit
+ this._checkingBytes = false
+ this._valTrunc = true
+ }
+ } else {
+ if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) }
+ p = len
+ }
+ }
+ }
+ cb()
+}
+
+UrlEncoded.prototype.end = function () {
+ if (this.boy._done) { return }
+
+ if (this._state === 'key' && this._key.length > 0) {
+ this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
+ '',
+ this._keyTrunc,
+ false)
+ } else if (this._state === 'val') {
+ this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
+ decodeText(this._val, 'binary', this.charset),
+ this._keyTrunc,
+ this._valTrunc)
+ }
+ this.boy._done = true
+ this.boy.emit('finish')
+}
+
+module.exports = UrlEncoded
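In the same spirit, a sketch of the 'field' events the urlencoded parser emits for a small body; the export shape is again an assumption, while the '+' and %XX handling follows Decoder and decodeText further down.

// Assumption: the top-level export is the Busboy constructor; the urlencoded type
// is selected by UrlEncoded.detect matching the request's content-type.
const Busboy = require('@fastify/busboy')

const busboy = new Busboy({
  headers: { 'content-type': 'application/x-www-form-urlencoded; charset=utf-8' }
})
busboy.on('field', (name, value, nameTruncated, valueTruncated) => {
  console.log(name, '=', value)
})
busboy.on('finish', () => console.log('done'))

// '+' becomes a space and %XX escapes are percent-decoded, then re-decoded as UTF-8:
busboy.end('greeting=hello+world&who=J%C3%BCrgen')
// -> greeting = hello world
// -> who = Jürgen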
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/Decoder.js b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/Decoder.js
new file mode 100644
index 00000000..7917678c
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/Decoder.js
@@ -0,0 +1,54 @@
+'use strict'
+
+const RE_PLUS = /\+/g
+
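+// Lookup table: 1 marks the ASCII codes of valid hexadecimal digits (0-9, A-F, a-f)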
+const HEX = [
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
+ 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+]
+
+function Decoder () {
+ this.buffer = undefined
+}
+Decoder.prototype.write = function (str) {
+ // Replace '+' with ' ' before decoding
+ str = str.replace(RE_PLUS, ' ')
+ let res = ''
+ let i = 0; let p = 0; const len = str.length
+ for (; i < len; ++i) {
+ if (this.buffer !== undefined) {
+ if (!HEX[str.charCodeAt(i)]) {
+ res += '%' + this.buffer
+ this.buffer = undefined
+ --i // retry character
+ } else {
+ this.buffer += str[i]
+ ++p
+ if (this.buffer.length === 2) {
+ res += String.fromCharCode(parseInt(this.buffer, 16))
+ this.buffer = undefined
+ }
+ }
+ } else if (str[i] === '%') {
+ if (i > p) {
+ res += str.substring(p, i)
+ p = i
+ }
+ this.buffer = ''
+ ++p
+ }
+ }
+ if (p < len && this.buffer === undefined) { res += str.substring(p) }
+ return res
+}
+Decoder.prototype.reset = function () {
+ this.buffer = undefined
+}
+
+module.exports = Decoder
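A few illustrative calls against the decoder above: '+' is rewritten to a space, %XX escapes are decoded, and an escape split across write() calls is buffered until its second hex digit arrives. Requiring the file directly is for illustration only; it is an internal helper, not public API.

const Decoder = require('./lib/utils/Decoder') // path assumed, relative to the package root

const d = new Decoder()
d.write('a+b%21')  // -> 'a b!'
d.write('foo%2')   // -> 'foo'  ('%2' is held in this.buffer)
d.write('0bar')    // -> ' bar' (completes %20, a space)
d.reset()          // discards any partial escape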
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/basename.js b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/basename.js
new file mode 100644
index 00000000..db588199
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/basename.js
@@ -0,0 +1,14 @@
+'use strict'
+
+module.exports = function basename (path) {
+ if (typeof path !== 'string') { return '' }
+ for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var
+ switch (path.charCodeAt(i)) {
+ case 0x2F: // '/'
+ case 0x5C: // '\'
+ path = path.slice(i + 1)
+ return (path === '..' || path === '.' ? '' : path)
+ }
+ }
+ return (path === '..' || path === '.' ? '' : path)
+}
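The loop above scans backwards to the last '/' or '\' and also discards '.' and '..', so a hostile filename cannot carry directory components into the 'file' event. A few illustrative calls (requiring the internal helper directly, for illustration only):

const basename = require('./lib/utils/basename') // path assumed, relative to the package root

basename('C:\\uploads\\photo.jpg') // -> 'photo.jpg'
basename('../../etc/passwd')       // -> 'passwd'
basename('notes.txt')              // -> 'notes.txt'
basename('..')                     // -> ''  (dot segments are dropped)
basename(42)                       // -> ''  (non-strings are ignored)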
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/decodeText.js b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/decodeText.js
new file mode 100644
index 00000000..ee376062
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/decodeText.js
@@ -0,0 +1,26 @@
+'use strict'
+
+// Node always ships a UTF-8 TextDecoder, so cache and reuse a single instance
+const utf8Decoder = new TextDecoder('utf-8')
+const textDecoders = new Map([
+ ['utf-8', utf8Decoder],
+ ['utf8', utf8Decoder]
+])
+
+function decodeText (text, textEncoding, destEncoding) {
+ if (text) {
+ if (textDecoders.has(destEncoding)) {
+ try {
+ return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding))
+ } catch (e) { }
+ } else {
+ try {
+ textDecoders.set(destEncoding, new TextDecoder(destEncoding))
+ return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding))
+ } catch (e) { }
+ }
+ }
+ return text
+}
+
+module.exports = decodeText
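decodeText re-encodes the latin1 ('binary') intermediate string into bytes and runs it through a cached TextDecoder for the requested charset, quietly returning the input unchanged when the charset is unknown. For example (internal helper, required directly only for illustration):

const decodeText = require('./lib/utils/decodeText') // path assumed, relative to the package root

decodeText('J\xC3\xBCrgen', 'binary', 'utf8')    // -> 'Jürgen'
decodeText('h\x00i\x00', 'binary', 'utf-16le')   // -> 'hi'
decodeText('hello', 'binary', 'bogus-charset')   // -> 'hello' (TextDecoder construction throws and is swallowed)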
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/getLimit.js b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/getLimit.js
new file mode 100644
index 00000000..cb64fd67
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/getLimit.js
@@ -0,0 +1,16 @@
+'use strict'
+
+module.exports = function getLimit (limits, name, defaultLimit) {
+ if (
+ !limits ||
+ limits[name] === undefined ||
+ limits[name] === null
+ ) { return defaultLimit }
+
+ if (
+ typeof limits[name] !== 'number' ||
+ isNaN(limits[name])
+ ) { throw new TypeError('Limit ' + name + ' is not a valid number') }
+
+ return limits[name]
+}
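getLimit is defensive option lookup: a missing or null limit falls back to the default, and a non-numeric limit is rejected loudly instead of silently meaning "no limit". For example:

const getLimit = require('./lib/utils/getLimit') // path assumed, relative to the package root

getLimit({ fileSize: 1024 }, 'fileSize', Infinity) // -> 1024
getLimit(undefined, 'files', Infinity)             // -> Infinity
getLimit({ files: null }, 'files', Infinity)       // -> Infinity
getLimit({ files: 'ten' }, 'files', Infinity)      // throws TypeError: Limit files is not a valid number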
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/parseParams.js b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/parseParams.js
new file mode 100644
index 00000000..f9214180
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/lib/utils/parseParams.js
@@ -0,0 +1,87 @@
+'use strict'
+
+const decodeText = require('./decodeText')
+
+const RE_ENCODED = /%([a-fA-F0-9]{2})/g
+
+function encodedReplacer (match, byte) {
+ return String.fromCharCode(parseInt(byte, 16))
+}
+
+function parseParams (str) {
+ const res = []
+ let state = 'key'
+ let charset = ''
+ let inquote = false
+ let escaping = false
+ let p = 0
+ let tmp = ''
+
+ for (var i = 0, len = str.length; i < len; ++i) { // eslint-disable-line no-var
+ const char = str[i]
+ if (char === '\\' && inquote) {
+ if (escaping) { escaping = false } else {
+ escaping = true
+ continue
+ }
+ } else if (char === '"') {
+ if (!escaping) {
+ if (inquote) {
+ inquote = false
+ state = 'key'
+ } else { inquote = true }
+ continue
+ } else { escaping = false }
+ } else {
+ if (escaping && inquote) { tmp += '\\' }
+ escaping = false
+ if ((state === 'charset' || state === 'lang') && char === "'") {
+ if (state === 'charset') {
+ state = 'lang'
+ charset = tmp.substring(1)
+ } else { state = 'value' }
+ tmp = ''
+ continue
+ } else if (state === 'key' &&
+ (char === '*' || char === '=') &&
+ res.length) {
+ if (char === '*') { state = 'charset' } else { state = 'value' }
+ res[p] = [tmp, undefined]
+ tmp = ''
+ continue
+ } else if (!inquote && char === ';') {
+ state = 'key'
+ if (charset) {
+ if (tmp.length) {
+ tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
+ 'binary',
+ charset)
+ }
+ charset = ''
+ } else if (tmp.length) {
+ tmp = decodeText(tmp, 'binary', 'utf8')
+ }
+ if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp }
+ tmp = ''
+ ++p
+ continue
+ } else if (!inquote && (char === ' ' || char === '\t')) { continue }
+ }
+ tmp += char
+ }
+ if (charset && tmp.length) {
+ tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
+ 'binary',
+ charset)
+ } else if (tmp) {
+ tmp = decodeText(tmp, 'binary', 'utf8')
+ }
+
+ if (res[p] === undefined) {
+ if (tmp) { res[p] = tmp }
+ } else { res[p][1] = tmp }
+
+ return res
+}
+
+module.exports = parseParams
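parseParams turns a header value into the shape the multipart and urlencoded types expect: element 0 is the bare media type and every following element is a [key, value] pair, with quoting and RFC 2231 charset prefixes handled along the way. A worked example (internal helper, required directly only for illustration):

const parseParams = require('./lib/utils/parseParams') // path assumed, relative to the package root

parseParams('multipart/form-data; boundary="----WebKitFormBoundaryX"; charset=utf-8')
// -> [ 'multipart/form-data',
//      [ 'boundary', '----WebKitFormBoundaryX' ],
//      [ 'charset', 'utf-8' ] ]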
diff --git a/g4f/Provider/npm/node_modules/@fastify/busboy/package.json b/g4f/Provider/npm/node_modules/@fastify/busboy/package.json
new file mode 100644
index 00000000..3288ee06
--- /dev/null
+++ b/g4f/Provider/npm/node_modules/@fastify/busboy/package.json
@@ -0,0 +1,89 @@
+{
+ "name": "@fastify/busboy",
+ "version": "2.0.0",
+ "private": false,
+ "author": "Brian White <mscdex@mscdex.net>",
+ "contributors": [
+ {
+ "name": "Igor Savin",
+ "email": "kibertoad@gmail.com",
+ "url": "https://github.com/kibertoad"
+ },
+ {
+ "name": "Aras Abbasi",
+ "email": "aras.abbasi@gmail.com",
+ "url": "https://github.com/uzlopak"
+ }
+ ],
+ "description": "A streaming parser for HTML form data for node.js",
+ "main": "lib/main",
+ "types": "lib/main.d.ts",
+ "scripts": {
+ "bench:busboy": "cd benchmarks && npm install && npm run benchmark-fastify",
+ "bench:dicer": "node bench/dicer/dicer-bench-multipart-parser.js",
+ "coveralls": "nyc report --reporter=lcov",
+ "lint": "npm run lint:standard",
+ "lint:everything": "npm run lint && npm run test:types",
+ "lint:fix": "standard --fix",
+ "lint:standard": "standard --verbose | snazzy",
+ "test:mocha": "mocha test",
+ "test:types": "tsd",
+ "test:coverage": "nyc npm run test",
+ "test": "npm run test:mocha"
+ },
+ "engines": {
+ "node": ">=14"
+ },
+ "devDependencies": {
+ "@types/node": "^20.1.0",
+ "busboy": "^1.0.0",
+ "chai": "^4.3.6",
+ "eslint": "^8.23.0",
+ "eslint-config-standard": "^17.0.0",
+ "eslint-plugin-n": "^16.0.0",
+ "mocha": "^10.0.0",
+ "nyc": "^15.1.0",
+ "photofinish": "^1.8.0",
+ "snazzy": "^9.0.0",
+ "standard": "^17.0.0",
+ "tsd": "^0.29.0",
+ "typescript": "^5.0.2"
+ },
+ "keywords": [
+ "uploads",
+ "forms",
+ "multipart",
+ "form-data"
+ ],
+ "license": "MIT",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/fastify/busboy.git"
+ },
+ "tsd": {
+ "directory": "test/types",
+ "compilerOptions": {
+ "esModuleInterop": false,
+ "module": "commonjs",
+ "target": "ES2017"
+ }
+ },
+ "standard": {
+ "globals": [
+ "describe",
+ "it"
+ ],
+ "ignore": [
+ "bench"
+ ]
+ },
+ "files": [
+ "README.md",
+ "LICENSE",
+ "lib/*",
+ "deps/encoding/*",
+ "deps/dicer/lib",
+ "deps/streamsearch/",
+ "deps/dicer/LICENSE"
+ ]
+}