installed dependencies

commit b6f72d059d
2025-06-20 20:52:03 +02:00
610 changed files with 68519 additions and 0 deletions

backend/node_modules/body-parser/lib/read.js generated vendored Normal file

@@ -0,0 +1,210 @@
/*!
* body-parser
* Copyright(c) 2014-2015 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* Module dependencies.
* @private
*/
var createError = require('http-errors')
var getBody = require('raw-body')
var iconv = require('iconv-lite')
var onFinished = require('on-finished')
var zlib = require('node:zlib')
/**
* Module exports.
*/
module.exports = read
/**
* Read a request into a buffer and parse.
*
* @param {object} req
* @param {object} res
* @param {function} next
* @param {function} parse
* @param {function} debug
* @param {object} options
* @private
*/
function read (req, res, next, parse, debug, options) {
var length
var opts = options
var stream
// read options
var encoding = opts.encoding !== null
? opts.encoding
: null
var verify = opts.verify
try {
// get the content stream
stream = contentstream(req, debug, opts.inflate)
length = stream.length
stream.length = undefined
} catch (err) {
return next(err)
}
// set raw-body options
opts.length = length
opts.encoding = verify
? null
: encoding
// assert charset is supported
if (opts.encoding === null && encoding !== null && !iconv.encodingExists(encoding)) {
return next(createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', {
charset: encoding.toLowerCase(),
type: 'charset.unsupported'
}))
}
// read body
debug('read body')
getBody(stream, opts, function (error, body) {
if (error) {
var _error
if (error.type === 'encoding.unsupported') {
// echo back charset
_error = createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', {
charset: encoding.toLowerCase(),
type: 'charset.unsupported'
})
} else {
// set status code on error
_error = createError(400, error)
}
// unpipe from stream and destroy
if (stream !== req) {
req.unpipe()
stream.destroy()
}
// read off entire request
dump(req, function onfinished () {
next(createError(400, _error))
})
return
}
// verify
if (verify) {
try {
debug('verify body')
verify(req, res, body, encoding)
} catch (err) {
next(createError(403, err, {
body: body,
type: err.type || 'entity.verify.failed'
}))
return
}
}
// parse
var str = body
try {
debug('parse body')
str = typeof body !== 'string' && encoding !== null
? iconv.decode(body, encoding)
: body
req.body = parse(str, encoding)
} catch (err) {
next(createError(400, err, {
body: str,
type: err.type || 'entity.parse.failed'
}))
return
}
next()
})
}
/**
* Get the content stream of the request.
*
* @param {object} req
* @param {function} debug
* @param {boolean} [inflate=true]
* @return {object}
* @api private
*/
function contentstream (req, debug, inflate) {
var encoding = (req.headers['content-encoding'] || 'identity').toLowerCase()
var length = req.headers['content-length']
debug('content-encoding "%s"', encoding)
if (inflate === false && encoding !== 'identity') {
throw createError(415, 'content encoding unsupported', {
encoding: encoding,
type: 'encoding.unsupported'
})
}
if (encoding === 'identity') {
req.length = length
return req
}
var stream = createDecompressionStream(encoding, debug)
req.pipe(stream)
return stream
}
/**
* Create a decompression stream for the given encoding.
* @param {string} encoding
* @param {function} debug
* @return {object}
* @api private
*/
function createDecompressionStream (encoding, debug) {
switch (encoding) {
case 'deflate':
debug('inflate body')
return zlib.createInflate()
case 'gzip':
debug('gunzip body')
return zlib.createGunzip()
case 'br':
debug('brotli decompress body')
return zlib.createBrotliDecompress()
default:
throw createError(415, 'unsupported content encoding "' + encoding + '"', {
encoding: encoding,
type: 'encoding.unsupported'
})
}
}
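createDecompressionStream() is what backs the inflate option: compressed request bodies are piped through the matching zlib stream before raw-body ever buffers them. A client-side sketch using only Node core modules (host, port and path are placeholders) that sends a gzip-encoded body this path would transparently gunzip:

// Sketch: send a gzip-encoded JSON body; the server-side gunzip happens in
// createDecompressionStream() above. Host, port and path are placeholders.
const http = require('http')
const zlib = require('zlib')

const payload = zlib.gzipSync(JSON.stringify({ hello: 'world' }))

const req = http.request({
  host: 'localhost',
  port: 3000,
  path: '/items',
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'Content-Encoding': 'gzip',        // matched to zlib.createGunzip() above
    'Content-Length': payload.length
  }
}, res => res.resume())

req.end(payload)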
/**
* Dump the contents of a request.
*
* @param {object} req
* @param {function} callback
* @api private
*/
function dump (req, callback) {
if (onFinished.isFinished(req)) {
callback(null)
} else {
onFinished(req, callback)
req.resume()
}
}
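Taken together, read() is the one pipeline every parser in this package funnels through: pick the content stream (decompressing if inflate allows it), buffer it with raw-body, run the optional verify hook, decode via iconv-lite, then hand the result to the parser-specific parse callback. It is a private helper, so the following is purely an illustrative sketch: a hypothetical newline-splitting parser wired through the same contract, with every name and option value invented for the example.

// Hypothetical parser built on the private read() helper above; not a public
// API and not part of body-parser. Names and values are illustrative only.
var debug = require('debug')('body-parser:lines')   // made-up debug namespace
var typeis = require('type-is')
var read = require('./read')                         // the file shown above

function lines () {
  // split the decoded body into its non-empty lines
  function parse (body) {
    return body.split(/\r?\n/).filter(Boolean)
  }

  return function linesParser (req, res, next) {
    if (!typeis.hasBody(req) || !typeis(req, 'text/plain')) return next()

    read(req, res, next, parse, debug, {
      encoding: 'utf-8',   // decoded before parse() is called
      inflate: true,       // allow gzip/deflate/br request bodies
      limit: 100 * 1024,   // raw-body size cap in bytes
      verify: false
    })
  }
}

module.exports = lines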

backend/node_modules/body-parser/lib/types/json.js generated vendored Normal file

@@ -0,0 +1,206 @@
/*!
* body-parser
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2014-2015 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* Module dependencies.
* @private
*/
var createError = require('http-errors')
var debug = require('debug')('body-parser:json')
var isFinished = require('on-finished').isFinished
var read = require('../read')
var typeis = require('type-is')
var { getCharset, normalizeOptions } = require('../utils')
/**
* Module exports.
*/
module.exports = json
/**
* RegExp to match the first non-space in a string.
*
* Allowed whitespace is defined in RFC 7159:
*
* ws = *(
* %x20 / ; Space
* %x09 / ; Horizontal tab
* %x0A / ; Line feed or New line
* %x0D ) ; Carriage return
*/
var FIRST_CHAR_REGEXP = /^[\x20\x09\x0a\x0d]*([^\x20\x09\x0a\x0d])/ // eslint-disable-line no-control-regex
var JSON_SYNTAX_CHAR = '#'
var JSON_SYNTAX_REGEXP = /#+/g
/**
* Create a middleware to parse JSON bodies.
*
* @param {object} [options]
* @return {function}
* @public
*/
function json (options) {
var { inflate, limit, verify, shouldParse } = normalizeOptions(options, 'application/json')
var reviver = options?.reviver
var strict = options?.strict !== false
function parse (body) {
if (body.length === 0) {
// special-case empty json body, as it's a common client-side mistake
// TODO: maybe make this configurable or part of "strict" option
return {}
}
if (strict) {
var first = firstchar(body)
if (first !== '{' && first !== '[') {
debug('strict violation')
throw createStrictSyntaxError(body, first)
}
}
try {
debug('parse json')
return JSON.parse(body, reviver)
} catch (e) {
throw normalizeJsonSyntaxError(e, {
message: e.message,
stack: e.stack
})
}
}
return function jsonParser (req, res, next) {
if (isFinished(req)) {
debug('body already parsed')
next()
return
}
if (!('body' in req)) {
req.body = undefined
}
// skip requests without bodies
if (!typeis.hasBody(req)) {
debug('skip empty body')
next()
return
}
debug('content-type %j', req.headers['content-type'])
// determine if request should be parsed
if (!shouldParse(req)) {
debug('skip parsing')
next()
return
}
// assert charset per RFC 7159 sec 8.1
var charset = getCharset(req) || 'utf-8'
if (charset.slice(0, 4) !== 'utf-') {
debug('invalid charset')
next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', {
charset: charset,
type: 'charset.unsupported'
}))
return
}
// read
read(req, res, next, parse, debug, {
encoding: charset,
inflate,
limit,
verify
})
}
}
/**
* Create strict violation syntax error matching native error.
*
* @param {string} str
* @param {string} char
* @return {Error}
* @private
*/
function createStrictSyntaxError (str, char) {
var index = str.indexOf(char)
var partial = ''
if (index !== -1) {
partial = str.substring(0, index) + JSON_SYNTAX_CHAR
for (var i = index + 1; i < str.length; i++) {
partial += JSON_SYNTAX_CHAR
}
}
try {
JSON.parse(partial); /* istanbul ignore next */ throw new SyntaxError('strict violation')
} catch (e) {
return normalizeJsonSyntaxError(e, {
message: e.message.replace(JSON_SYNTAX_REGEXP, function (placeholder) {
return str.substring(index, index + placeholder.length)
}),
stack: e.stack
})
}
}
/**
* Get the first non-whitespace character in a string.
*
* @param {string} str
 * @return {string|undefined}
* @private
*/
function firstchar (str) {
var match = FIRST_CHAR_REGEXP.exec(str)
return match
? match[1]
: undefined
}
/**
* Normalize a SyntaxError for JSON.parse.
*
* @param {SyntaxError} error
* @param {object} obj
* @return {SyntaxError}
*/
function normalizeJsonSyntaxError (error, obj) {
var keys = Object.getOwnPropertyNames(error)
for (var i = 0; i < keys.length; i++) {
var key = keys[i]
if (key !== 'stack' && key !== 'message') {
delete error[key]
}
}
// replace stack before message for Node.js 0.10 and below
error.stack = obj.stack.replace(error.message, obj.message)
error.message = obj.message
return error
}
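json() is the factory applications actually mount. A typical setup, assuming Express and the package's public entry point, with illustrative option values:

// Sketch: mounting the JSON parser in an Express app (values illustrative).
const express = require('express')
const bodyParser = require('body-parser')

const app = express()

app.use(bodyParser.json({
  limit: '1mb',              // passed through normalizeOptions -> bytes.parse
  strict: true,              // body must start with '{' or '[' (the default)
  type: 'application/json'   // checked with type-is before read() runs
}))

app.post('/items', (req, res) => {
  res.json({ received: req.body })
})

// Parse failures arrive as http-errors with a machine-readable type.
app.use((err, req, res, next) => {
  if (err.type === 'entity.parse.failed') {
    return res.status(400).json({ error: 'invalid JSON body' })
  }
  next(err)
})

app.listen(3000)

With strict left on, a body such as "hello" (valid JSON, but neither an object nor an array) is routed through createStrictSyntaxError() and surfaces as a 400 whose type is entity.parse.failed.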

backend/node_modules/body-parser/lib/types/raw.js generated vendored Normal file

@@ -0,0 +1,75 @@
/*!
* body-parser
* Copyright(c) 2014-2015 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* Module dependencies.
*/
var debug = require('debug')('body-parser:raw')
var isFinished = require('on-finished').isFinished
var read = require('../read')
var typeis = require('type-is')
var { normalizeOptions } = require('../utils')
/**
* Module exports.
*/
module.exports = raw
/**
* Create a middleware to parse raw bodies.
*
* @param {object} [options]
* @return {function}
* @api public
*/
function raw (options) {
var { inflate, limit, verify, shouldParse } = normalizeOptions(options, 'application/octet-stream')
function parse (buf) {
return buf
}
return function rawParser (req, res, next) {
if (isFinished(req)) {
debug('body already parsed')
next()
return
}
if (!('body' in req)) {
req.body = undefined
}
// skip requests without bodies
if (!typeis.hasBody(req)) {
debug('skip empty body')
next()
return
}
debug('content-type %j', req.headers['content-type'])
// determine if request should be parsed
if (!shouldParse(req)) {
debug('skip parsing')
next()
return
}
// read
read(req, res, next, parse, debug, {
encoding: null,
inflate,
limit,
verify
})
}
}
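raw() passes the body through untouched as a Buffer, which makes it a natural base for webhook endpoints where the raw bytes must be signature-checked before anything else reads them. A sketch of that pattern, assuming Express; the header name, secret handling and HMAC scheme are placeholders rather than anything body-parser prescribes, and real code would prefer crypto.timingSafeEqual over a plain string comparison.

// Sketch: raw() plus the shared verify hook for webhook signature checks.
// Header name, secret and scheme are placeholders, not part of body-parser.
const crypto = require('crypto')
const express = require('express')
const bodyParser = require('body-parser')

const app = express()
const WEBHOOK_SECRET = process.env.WEBHOOK_SECRET || 'change-me'

app.post('/webhook',
  bodyParser.raw({
    type: 'application/octet-stream',   // the default from normalizeOptions
    limit: '256kb',
    // verify runs inside read() before parse(); throwing here yields a 403
    verify: (req, res, buf) => {
      const expected = crypto
        .createHmac('sha256', WEBHOOK_SECRET)
        .update(buf)
        .digest('hex')
      if (req.headers['x-signature'] !== expected) {
        throw new Error('signature mismatch')
      }
    }
  }),
  (req, res) => {
    // req.body is the untouched Buffer returned by parse(buf)
    res.json({ bytes: req.body.length })
  }
)

app.listen(3000)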

backend/node_modules/body-parser/lib/types/text.js generated vendored Normal file

@@ -0,0 +1,80 @@
/*!
* body-parser
* Copyright(c) 2014-2015 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* Module dependencies.
*/
var debug = require('debug')('body-parser:text')
var isFinished = require('on-finished').isFinished
var read = require('../read')
var typeis = require('type-is')
var { getCharset, normalizeOptions } = require('../utils')
/**
* Module exports.
*/
module.exports = text
/**
* Create a middleware to parse text bodies.
*
* @param {object} [options]
* @return {function}
* @api public
*/
function text (options) {
var { inflate, limit, verify, shouldParse } = normalizeOptions(options, 'text/plain')
var defaultCharset = options?.defaultCharset || 'utf-8'
function parse (buf) {
return buf
}
return function textParser (req, res, next) {
if (isFinished(req)) {
debug('body already parsed')
next()
return
}
if (!('body' in req)) {
req.body = undefined
}
// skip requests without bodies
if (!typeis.hasBody(req)) {
debug('skip empty body')
next()
return
}
debug('content-type %j', req.headers['content-type'])
// determine if request should be parsed
if (!shouldParse(req)) {
debug('skip parsing')
next()
return
}
// get charset
var charset = getCharset(req) || defaultCharset
// read
read(req, res, next, parse, debug, {
encoding: charset,
inflate,
limit,
verify
})
}
}
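text() decodes the body into a string, taking the charset from the Content-Type header and falling back to defaultCharset. A minimal mounting sketch, assuming Express, with illustrative option values:

// Sketch: mounting the text parser (values illustrative).
const express = require('express')
const bodyParser = require('body-parser')

const app = express()

app.use(bodyParser.text({
  type: 'text/plain',        // the default media type from normalizeOptions
  defaultCharset: 'utf-8',   // used when Content-Type carries no charset
  limit: '50kb'
}))

app.post('/notes', (req, res) => {
  // req.body is a string decoded inside read()
  res.json({ length: req.body.length })
})

app.listen(3000)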

backend/node_modules/body-parser/lib/types/urlencoded.js generated vendored Normal file

@@ -0,0 +1,177 @@
/*!
* body-parser
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2014-2015 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* Module dependencies.
* @private
*/
var createError = require('http-errors')
var debug = require('debug')('body-parser:urlencoded')
var isFinished = require('on-finished').isFinished
var read = require('../read')
var typeis = require('type-is')
var qs = require('qs')
var { getCharset, normalizeOptions } = require('../utils')
/**
* Module exports.
*/
module.exports = urlencoded
/**
* Create a middleware to parse urlencoded bodies.
*
* @param {object} [options]
* @return {function}
* @public
*/
function urlencoded (options) {
var { inflate, limit, verify, shouldParse } = normalizeOptions(options, 'application/x-www-form-urlencoded')
var defaultCharset = options?.defaultCharset || 'utf-8'
if (defaultCharset !== 'utf-8' && defaultCharset !== 'iso-8859-1') {
throw new TypeError('option defaultCharset must be either utf-8 or iso-8859-1')
}
// create the appropriate query parser
var queryparse = createQueryParser(options)
function parse (body, encoding) {
return body.length
? queryparse(body, encoding)
: {}
}
return function urlencodedParser (req, res, next) {
if (isFinished(req)) {
debug('body already parsed')
next()
return
}
if (!('body' in req)) {
req.body = undefined
}
// skip requests without bodies
if (!typeis.hasBody(req)) {
debug('skip empty body')
next()
return
}
debug('content-type %j', req.headers['content-type'])
// determine if request should be parsed
if (!shouldParse(req)) {
debug('skip parsing')
next()
return
}
// assert charset
var charset = getCharset(req) || defaultCharset
if (charset !== 'utf-8' && charset !== 'iso-8859-1') {
debug('invalid charset')
next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', {
charset: charset,
type: 'charset.unsupported'
}))
return
}
// read
read(req, res, next, parse, debug, {
encoding: charset,
inflate,
limit,
verify
})
}
}
/**
* Get the extended query parser.
*
* @param {object} options
*/
function createQueryParser (options) {
var extended = Boolean(options?.extended)
var parameterLimit = options?.parameterLimit !== undefined
? options?.parameterLimit
: 1000
var charsetSentinel = options?.charsetSentinel
var interpretNumericEntities = options?.interpretNumericEntities
var depth = extended ? (options?.depth !== undefined ? options?.depth : 32) : 0
if (isNaN(parameterLimit) || parameterLimit < 1) {
throw new TypeError('option parameterLimit must be a positive number')
}
if (isNaN(depth) || depth < 0) {
throw new TypeError('option depth must be a zero or a positive number')
}
if (isFinite(parameterLimit)) {
parameterLimit = parameterLimit | 0
}
return function queryparse (body, encoding) {
var paramCount = parameterCount(body, parameterLimit)
if (paramCount === undefined) {
debug('too many parameters')
throw createError(413, 'too many parameters', {
type: 'parameters.too.many'
})
}
var arrayLimit = extended ? Math.max(100, paramCount) : 0
debug('parse ' + (extended ? 'extended ' : '') + 'urlencoding')
try {
return qs.parse(body, {
allowPrototypes: true,
arrayLimit: arrayLimit,
depth: depth,
charsetSentinel: charsetSentinel,
interpretNumericEntities: interpretNumericEntities,
charset: encoding,
parameterLimit: parameterLimit,
strictDepth: true
})
} catch (err) {
if (err instanceof RangeError) {
throw createError(400, 'The input exceeded the depth', {
type: 'querystring.parse.rangeError'
})
} else {
throw err
}
}
}
}
/**
* Count the number of parameters, stopping once limit reached
*
* @param {string} body
* @param {number} limit
* @api private
*/
function parameterCount (body, limit) {
var len = body.split('&').length
return len > limit ? undefined : len - 1
}
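urlencoded() always delegates to qs, with extended deciding how much structure qs may rebuild (nested keys and arrays versus a flat key/value map). A usage sketch, assuming Express, with illustrative option values:

// Sketch: mounting the urlencoded parser (values illustrative).
const express = require('express')
const bodyParser = require('body-parser')

const app = express()

app.use(bodyParser.urlencoded({
  extended: true,        // qs with nested keys; depth 32, arrayLimit >= 100
  parameterLimit: 1000,  // more '&'-separated pairs triggers a 413
  limit: '100kb'
}))

app.post('/form', (req, res) => {
  // With extended: true, "user[name]=Ada&user[tags][]=math" parses into
  // { user: { name: 'Ada', tags: ['math'] } }.
  res.json(req.body)
})

app.listen(3000)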

backend/node_modules/body-parser/lib/utils.js generated vendored Normal file

@@ -0,0 +1,83 @@
'use strict'
/**
* Module dependencies.
*/
var bytes = require('bytes')
var contentType = require('content-type')
var typeis = require('type-is')
/**
* Module exports.
*/
module.exports = {
getCharset,
normalizeOptions
}
/**
* Get the charset of a request.
*
* @param {object} req
* @api private
*/
function getCharset (req) {
try {
return (contentType.parse(req).parameters.charset || '').toLowerCase()
} catch {
return undefined
}
}
/**
* Get the simple type checker.
*
* @param {string | string[]} type
* @return {function}
*/
function typeChecker (type) {
return function checkType (req) {
return Boolean(typeis(req, type))
}
}
/**
* Normalizes the common options for all parsers.
*
* @param {object} options options to normalize
* @param {string | string[] | function} defaultType default content type(s) or a function to determine it
* @returns {object}
*/
function normalizeOptions (options, defaultType) {
if (!defaultType) {
// Parsers must define a default content type
throw new TypeError('defaultType must be provided')
}
var inflate = options?.inflate !== false
var limit = typeof options?.limit !== 'number'
? bytes.parse(options?.limit || '100kb')
: options?.limit
var type = options?.type || defaultType
var verify = options?.verify || false
if (verify !== false && typeof verify !== 'function') {
throw new TypeError('option verify must be function')
}
// create the appropriate type checking function
var shouldParse = typeof type !== 'function'
? typeChecker(type)
: type
return {
inflate,
limit,
verify,
shouldParse
}
}
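normalizeOptions() is the shared option-massaging step behind every factory above, and getCharset() the shared Content-Type inspection. A short sketch of what they produce; the require path reflects this package's internal layout, not a public entry point, and assumes the snippet runs from inside the vendored body-parser directory.

// Sketch: internal helpers, shown for illustration only.
const { getCharset, normalizeOptions } = require('./lib/utils')

const opts = normalizeOptions({ limit: '1mb', type: 'application/json' }, 'application/json')
// opts.inflate      -> true        (only an explicit inflate: false disables it)
// opts.limit        -> 1048576     (bytes.parse('1mb'))
// opts.verify       -> false
// opts.shouldParse  -> a type-is based checker for 'application/json'

// getCharset() lower-cases the charset parameter of the Content-Type header:
// a request sent as "application/json; charset=ISO-8859-1" yields 'iso-8859-1',
// while a missing or unparsable header yields undefined.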