diff --git a/backend/Backend/node_modules/.bin/mime b/backend/Backend/node_modules/.bin/mime new file mode 100644 index 000000000..7751de3cb --- /dev/null +++ b/backend/Backend/node_modules/.bin/mime @@ -0,0 +1,16 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) + if command -v cygpath > /dev/null 2>&1; then + basedir=`cygpath -w "$basedir"` + fi + ;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../mime/cli.js" "$@" +else + exec node "$basedir/../mime/cli.js" "$@" +fi diff --git a/backend/Backend/node_modules/.bin/mime.cmd b/backend/Backend/node_modules/.bin/mime.cmd new file mode 100644 index 000000000..54491f12e --- /dev/null +++ b/backend/Backend/node_modules/.bin/mime.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\mime\cli.js" %* diff --git a/backend/Backend/node_modules/.bin/mime.ps1 b/backend/Backend/node_modules/.bin/mime.ps1 new file mode 100644 index 000000000..2222f40bc --- /dev/null +++ b/backend/Backend/node_modules/.bin/mime.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../mime/cli.js" $args + } else { + & "$basedir/node$exe" "$basedir/../mime/cli.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../mime/cli.js" $args + } else { + & "node$exe" "$basedir/../mime/cli.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/backend/Backend/node_modules/.bin/mkdirp b/backend/Backend/node_modules/.bin/mkdirp new file mode 100644 index 000000000..1ab9c81af --- /dev/null +++ b/backend/Backend/node_modules/.bin/mkdirp @@ -0,0 +1,16 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) + if command -v cygpath > /dev/null 2>&1; then + basedir=`cygpath -w "$basedir"` + fi + ;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../mkdirp/bin/cmd.js" "$@" +else + exec node "$basedir/../mkdirp/bin/cmd.js" "$@" +fi diff --git a/backend/Backend/node_modules/.bin/mkdirp.cmd b/backend/Backend/node_modules/.bin/mkdirp.cmd new file mode 100644 index 000000000..a865dd9f3 --- /dev/null +++ b/backend/Backend/node_modules/.bin/mkdirp.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\mkdirp\bin\cmd.js" %* diff --git a/backend/Backend/node_modules/.bin/mkdirp.ps1 b/backend/Backend/node_modules/.bin/mkdirp.ps1 new file mode 100644 index 000000000..911e85466 --- /dev/null +++ b/backend/Backend/node_modules/.bin/mkdirp.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path 
$MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../mkdirp/bin/cmd.js" $args + } else { + & "$basedir/node$exe" "$basedir/../mkdirp/bin/cmd.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../mkdirp/bin/cmd.js" $args + } else { + & "node$exe" "$basedir/../mkdirp/bin/cmd.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/backend/Backend/node_modules/.bin/nodemon b/backend/Backend/node_modules/.bin/nodemon new file mode 100644 index 000000000..c477a1898 --- /dev/null +++ b/backend/Backend/node_modules/.bin/nodemon @@ -0,0 +1,16 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) + if command -v cygpath > /dev/null 2>&1; then + basedir=`cygpath -w "$basedir"` + fi + ;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../nodemon/bin/nodemon.js" "$@" +else + exec node "$basedir/../nodemon/bin/nodemon.js" "$@" +fi diff --git a/backend/Backend/node_modules/.bin/nodemon.cmd b/backend/Backend/node_modules/.bin/nodemon.cmd new file mode 100644 index 000000000..55acf8a4e --- /dev/null +++ b/backend/Backend/node_modules/.bin/nodemon.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\nodemon\bin\nodemon.js" %* diff --git a/backend/Backend/node_modules/.bin/nodemon.ps1 b/backend/Backend/node_modules/.bin/nodemon.ps1 new file mode 100644 index 000000000..d4e3f5d40 --- /dev/null +++ b/backend/Backend/node_modules/.bin/nodemon.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../nodemon/bin/nodemon.js" $args + } else { + & "$basedir/node$exe" "$basedir/../nodemon/bin/nodemon.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../nodemon/bin/nodemon.js" $args + } else { + & "node$exe" "$basedir/../nodemon/bin/nodemon.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/backend/Backend/node_modules/.bin/nodetouch b/backend/Backend/node_modules/.bin/nodetouch new file mode 100644 index 000000000..3e146b413 --- /dev/null +++ b/backend/Backend/node_modules/.bin/nodetouch @@ -0,0 +1,16 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) + if command -v cygpath > /dev/null 2>&1; then + basedir=`cygpath -w "$basedir"` + fi + ;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../touch/bin/nodetouch.js" "$@" +else + exec node 
"$basedir/../touch/bin/nodetouch.js" "$@" +fi diff --git a/backend/Backend/node_modules/.bin/nodetouch.cmd b/backend/Backend/node_modules/.bin/nodetouch.cmd new file mode 100644 index 000000000..8298b9183 --- /dev/null +++ b/backend/Backend/node_modules/.bin/nodetouch.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\touch\bin\nodetouch.js" %* diff --git a/backend/Backend/node_modules/.bin/nodetouch.ps1 b/backend/Backend/node_modules/.bin/nodetouch.ps1 new file mode 100644 index 000000000..5f68b4cb3 --- /dev/null +++ b/backend/Backend/node_modules/.bin/nodetouch.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../touch/bin/nodetouch.js" $args + } else { + & "$basedir/node$exe" "$basedir/../touch/bin/nodetouch.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../touch/bin/nodetouch.js" $args + } else { + & "node$exe" "$basedir/../touch/bin/nodetouch.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/backend/Backend/node_modules/.bin/semver b/backend/Backend/node_modules/.bin/semver new file mode 100644 index 000000000..97c53279f --- /dev/null +++ b/backend/Backend/node_modules/.bin/semver @@ -0,0 +1,16 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) + if command -v cygpath > /dev/null 2>&1; then + basedir=`cygpath -w "$basedir"` + fi + ;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../semver/bin/semver.js" "$@" +else + exec node "$basedir/../semver/bin/semver.js" "$@" +fi diff --git a/backend/Backend/node_modules/.bin/semver.cmd b/backend/Backend/node_modules/.bin/semver.cmd new file mode 100644 index 000000000..9913fa9d0 --- /dev/null +++ b/backend/Backend/node_modules/.bin/semver.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\semver\bin\semver.js" %* diff --git a/backend/Backend/node_modules/.bin/semver.ps1 b/backend/Backend/node_modules/.bin/semver.ps1 new file mode 100644 index 000000000..314717ad4 --- /dev/null +++ b/backend/Backend/node_modules/.bin/semver.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args + } else { + & "$basedir/node$exe" 
"$basedir/../semver/bin/semver.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../semver/bin/semver.js" $args + } else { + & "node$exe" "$basedir/../semver/bin/semver.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/backend/Backend/node_modules/.package-lock.json b/backend/Backend/node_modules/.package-lock.json new file mode 100644 index 000000000..caeb93982 --- /dev/null +++ b/backend/Backend/node_modules/.package-lock.json @@ -0,0 +1,1384 @@ +{ + "name": "backend", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "license": "MIT", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/append-field": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/append-field/-/append-field-1.0.0.tgz", + "integrity": "sha512-klpgFSWLW1ZEs8svjfb7g4qWY0YS5imI82dTg+QahUvJ8YqAY0P10Uk8tTyh9ZGuYEZEMaeJYCF5BFuX552hsw==", + "license": "MIT" + }, + "node_modules/array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==", + "license": "MIT" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "license": "MIT" + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/body-parser": { + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/body-parser/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + 
"license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/body-parser/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "license": "MIT" + }, + "node_modules/busboy": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz", + "integrity": "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==", + "dependencies": { + "streamsearch": "^1.1.0" + }, + "engines": { + "node": ">=10.16.0" + } + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.1.tgz", + "integrity": "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.3.tgz", + "integrity": "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": 
"https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "license": "MIT" + }, + "node_modules/concat-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", + "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", + "engines": [ + "node >= 0.8" + ], + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + } + }, + "node_modules/content-disposition": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==", + "license": "MIT" + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "license": "MIT" + }, + "node_modules/cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "license": "MIT", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "license": "MIT", + "engines": { + "node": ">= 0.8", 
+ "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express": { + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", + "license": "MIT", + "dependencies": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.3", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.7.1", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.3.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.3", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.12", + "proxy-addr": "~2.0.7", + "qs": "6.13.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.19.0", + "serve-static": "1.16.2", + 
"setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/express/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/finalhandler": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/finalhandler/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/finalhandler/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.7.tgz", + "integrity": "sha512-VW6Pxhsrk0KAOqs3WEd0klDiF/+V7gQOpAvY1jVU/LHmaD/kQO4523aiJuikX/QAKYiW6x8Jh+RJej1almdtCA==", + "license": "MIT", + "dependencies": { + 
"call-bind-apply-helpers": "^1.0.1", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "function-bind": "^1.1.2", + "get-proto": "^1.0.0", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ignore-by-default": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz", + "integrity": 
"sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==", + "license": "ISC" + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "license": "MIT", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "license": "MIT" + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/merge-descriptors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "license": "MIT", + 
"engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "license": "MIT", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "license": "MIT", + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/multer": { + "version": "1.4.5-lts.1", + "resolved": "https://registry.npmjs.org/multer/-/multer-1.4.5-lts.1.tgz", + "integrity": "sha512-ywPWvcDMeH+z9gQq5qYHCCy+ethsk4goepZ45GLD63fOu0YcNecQxi64nDs3qluZB+murG3/D4dJ7+dGctcCQQ==", + "license": "MIT", + "dependencies": { + "append-field": "^1.0.0", + "busboy": "^1.0.0", + "concat-stream": "^1.5.2", + "mkdirp": "^0.5.4", + "object-assign": "^4.1.1", + "type-is": "^1.6.4", + "xtend": "^4.0.0" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/nodemailer": { + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.10.0.tgz", + "integrity": "sha512-SQ3wZCExjeSatLE/HBaXS5vqUOQk6GtBdIIKxiFdmm01mOQZX/POJkO3SUX1wDiYcwUOJwT23scFSC9fY2H8IA==", + "license": "MIT-0", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/nodemon": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.1.9.tgz", + 
"integrity": "sha512-hdr1oIb2p6ZSxu3PB2JWWYS7ZQ0qvaZsc3hK8DR8f02kRzc8rjYmxAIvdz+aYC+8F2IjNaB7HMcSDg8nQpJxyg==", + "license": "MIT", + "dependencies": { + "chokidar": "^3.5.2", + "debug": "^4", + "ignore-by-default": "^1.0.1", + "minimatch": "^3.1.2", + "pstree.remy": "^1.1.8", + "semver": "^7.5.3", + "simple-update-notifier": "^2.0.0", + "supports-color": "^5.5.0", + "touch": "^3.1.0", + "undefsafe": "^2.0.5" + }, + "bin": { + "nodemon": "bin/nodemon.js" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/nodemon" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.3.tgz", + "integrity": "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-to-regexp": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==", + "license": "MIT" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "license": "MIT" + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "license": "MIT", + "dependencies": { + "forwarded": 
"0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/pstree.remy": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz", + "integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==", + "license": "MIT" + }, + "node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/readable-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "license": "MIT" + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + }, + "node_modules/semver": { + "version": "7.6.3", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/send": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/send/node_modules/debug/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/serve-static": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", + "license": "MIT", + "dependencies": { + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.19.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + 
}, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/simple-update-notifier": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz", + "integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==", + "license": "MIT", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/streamsearch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz", + "integrity": "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "license": "MIT" + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "license": "MIT", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/touch": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/touch/-/touch-3.1.1.tgz", + "integrity": "sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==", + "license": "ISC", + "bin": { + "nodetouch": "bin/nodetouch.js" + } + }, + "node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "license": "MIT", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==", + "license": "MIT" + }, + "node_modules/undefsafe": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz", + "integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==", + "license": "MIT" + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "license": "MIT" + }, + "node_modules/utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", + "license": "MIT", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "license": "MIT", + "engines": { + "node": ">=0.4" + } + } + } +} diff --git a/backend/Backend/node_modules/accepts/HISTORY.md b/backend/Backend/node_modules/accepts/HISTORY.md new file mode 100644 index 000000000..cb5990c7c --- /dev/null +++ b/backend/Backend/node_modules/accepts/HISTORY.md @@ -0,0 +1,243 @@ +1.3.8 / 2022-02-02 +================== + + * deps: mime-types@~2.1.34 + - deps: mime-db@~1.51.0 + * deps: negotiator@0.6.3 + +1.3.7 / 2019-04-29 +================== + + * deps: negotiator@0.6.2 + - Fix sorting charset, encoding, and language with extra parameters + +1.3.6 / 2019-04-28 +================== + + * deps: mime-types@~2.1.24 + - deps: mime-db@~1.40.0 + 
+1.3.5 / 2018-02-28 +================== + + * deps: mime-types@~2.1.18 + - deps: mime-db@~1.33.0 + +1.3.4 / 2017-08-22 +================== + + * deps: mime-types@~2.1.16 + - deps: mime-db@~1.29.0 + +1.3.3 / 2016-05-02 +================== + + * deps: mime-types@~2.1.11 + - deps: mime-db@~1.23.0 + * deps: negotiator@0.6.1 + - perf: improve `Accept` parsing speed + - perf: improve `Accept-Charset` parsing speed + - perf: improve `Accept-Encoding` parsing speed + - perf: improve `Accept-Language` parsing speed + +1.3.2 / 2016-03-08 +================== + + * deps: mime-types@~2.1.10 + - Fix extension of `application/dash+xml` + - Update primary extension for `audio/mp4` + - deps: mime-db@~1.22.0 + +1.3.1 / 2016-01-19 +================== + + * deps: mime-types@~2.1.9 + - deps: mime-db@~1.21.0 + +1.3.0 / 2015-09-29 +================== + + * deps: mime-types@~2.1.7 + - deps: mime-db@~1.19.0 + * deps: negotiator@0.6.0 + - Fix including type extensions in parameters in `Accept` parsing + - Fix parsing `Accept` parameters with quoted equals + - Fix parsing `Accept` parameters with quoted semicolons + - Lazy-load modules from main entry point + - perf: delay type concatenation until needed + - perf: enable strict mode + - perf: hoist regular expressions + - perf: remove closures getting spec properties + - perf: remove a closure from media type parsing + - perf: remove property delete from media type parsing + +1.2.13 / 2015-09-06 +=================== + + * deps: mime-types@~2.1.6 + - deps: mime-db@~1.18.0 + +1.2.12 / 2015-07-30 +=================== + + * deps: mime-types@~2.1.4 + - deps: mime-db@~1.16.0 + +1.2.11 / 2015-07-16 +=================== + + * deps: mime-types@~2.1.3 + - deps: mime-db@~1.15.0 + +1.2.10 / 2015-07-01 +=================== + + * deps: mime-types@~2.1.2 + - deps: mime-db@~1.14.0 + +1.2.9 / 2015-06-08 +================== + + * deps: mime-types@~2.1.1 + - perf: fix deopt during mapping + +1.2.8 / 2015-06-07 +================== + + * deps: mime-types@~2.1.0 + - deps: mime-db@~1.13.0 + * perf: avoid argument reassignment & argument slice + * perf: avoid negotiator recursive construction + * perf: enable strict mode + * perf: remove unnecessary bitwise operator + +1.2.7 / 2015-05-10 +================== + + * deps: negotiator@0.5.3 + - Fix media type parameter matching to be case-insensitive + +1.2.6 / 2015-05-07 +================== + + * deps: mime-types@~2.0.11 + - deps: mime-db@~1.9.1 + * deps: negotiator@0.5.2 + - Fix comparing media types with quoted values + - Fix splitting media types with quoted commas + +1.2.5 / 2015-03-13 +================== + + * deps: mime-types@~2.0.10 + - deps: mime-db@~1.8.0 + +1.2.4 / 2015-02-14 +================== + + * Support Node.js 0.6 + * deps: mime-types@~2.0.9 + - deps: mime-db@~1.7.0 + * deps: negotiator@0.5.1 + - Fix preference sorting to be stable for long acceptable lists + +1.2.3 / 2015-01-31 +================== + + * deps: mime-types@~2.0.8 + - deps: mime-db@~1.6.0 + +1.2.2 / 2014-12-30 +================== + + * deps: mime-types@~2.0.7 + - deps: mime-db@~1.5.0 + +1.2.1 / 2014-12-30 +================== + + * deps: mime-types@~2.0.5 + - deps: mime-db@~1.3.1 + +1.2.0 / 2014-12-19 +================== + + * deps: negotiator@0.5.0 + - Fix list return order when large accepted list + - Fix missing identity encoding when q=0 exists + - Remove dynamic building of Negotiator class + +1.1.4 / 2014-12-10 +================== + + * deps: mime-types@~2.0.4 + - deps: mime-db@~1.3.0 + +1.1.3 / 2014-11-09 +================== + + * deps: mime-types@~2.0.3 + 
- deps: mime-db@~1.2.0 + +1.1.2 / 2014-10-14 +================== + + * deps: negotiator@0.4.9 + - Fix error when media type has invalid parameter + +1.1.1 / 2014-09-28 +================== + + * deps: mime-types@~2.0.2 + - deps: mime-db@~1.1.0 + * deps: negotiator@0.4.8 + - Fix all negotiations to be case-insensitive + - Stable sort preferences of same quality according to client order + +1.1.0 / 2014-09-02 +================== + + * update `mime-types` + +1.0.7 / 2014-07-04 +================== + + * Fix wrong type returned from `type` when match after unknown extension + +1.0.6 / 2014-06-24 +================== + + * deps: negotiator@0.4.7 + +1.0.5 / 2014-06-20 +================== + + * fix crash when unknown extension given + +1.0.4 / 2014-06-19 +================== + + * use `mime-types` + +1.0.3 / 2014-06-11 +================== + + * deps: negotiator@0.4.6 + - Order by specificity when quality is the same + +1.0.2 / 2014-05-29 +================== + + * Fix interpretation when header not in request + * deps: pin negotiator@0.4.5 + +1.0.1 / 2014-01-18 +================== + + * Identity encoding isn't always acceptable + * deps: negotiator@~0.4.0 + +1.0.0 / 2013-12-27 +================== + + * Genesis diff --git a/backend/Backend/node_modules/accepts/LICENSE b/backend/Backend/node_modules/accepts/LICENSE new file mode 100644 index 000000000..06166077b --- /dev/null +++ b/backend/Backend/node_modules/accepts/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/backend/Backend/node_modules/accepts/README.md b/backend/Backend/node_modules/accepts/README.md new file mode 100644 index 000000000..82680c530 --- /dev/null +++ b/backend/Backend/node_modules/accepts/README.md @@ -0,0 +1,140 @@ +# accepts + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][github-actions-ci-image]][github-actions-ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Higher level content negotiation based on [negotiator](https://www.npmjs.com/package/negotiator). +Extracted from [koa](https://www.npmjs.com/package/koa) for general use. + +In addition to negotiator, it allows: + +- Allows types as an array or arguments list, ie `(['text/html', 'application/json'])` + as well as `('text/html', 'application/json')`. +- Allows type shorthands such as `json`. 
+- Returns `false` when no types match +- Treats non-existent headers as `*` + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install accepts +``` + +## API + +```js +var accepts = require('accepts') +``` + +### accepts(req) + +Create a new `Accepts` object for the given `req`. + +#### .charset(charsets) + +Return the first accepted charset. If nothing in `charsets` is accepted, +then `false` is returned. + +#### .charsets() + +Return the charsets that the request accepts, in the order of the client's +preference (most preferred first). + +#### .encoding(encodings) + +Return the first accepted encoding. If nothing in `encodings` is accepted, +then `false` is returned. + +#### .encodings() + +Return the encodings that the request accepts, in the order of the client's +preference (most preferred first). + +#### .language(languages) + +Return the first accepted language. If nothing in `languages` is accepted, +then `false` is returned. + +#### .languages() + +Return the languages that the request accepts, in the order of the client's +preference (most preferred first). + +#### .type(types) + +Return the first accepted type (and it is returned as the same text as what +appears in the `types` array). If nothing in `types` is accepted, then `false` +is returned. + +The `types` array can contain full MIME types or file extensions. Any value +that is not a full MIME type is passed to `require('mime-types').lookup`. + +#### .types() + +Return the types that the request accepts, in the order of the client's +preference (most preferred first). + +## Examples + +### Simple type negotiation + +This simple example shows how to use `accepts` to return a differently typed +response body based on what the client wants to accept. The server lists its +preferences in order and will get back the best match between the client and +server.
+ +```js +var accepts = require('accepts') +var http = require('http') + +function app (req, res) { + var accept = accepts(req) + + // the order of this list is significant; should be server preferred order + switch (accept.type(['json', 'html'])) { + case 'json': + res.setHeader('Content-Type', 'application/json') + res.write('{"hello":"world!"}') + break + case 'html': + res.setHeader('Content-Type', 'text/html') + res.write('hello, world!') + break + default: + // the fallback is text/plain, so no need to specify it above + res.setHeader('Content-Type', 'text/plain') + res.write('hello, world!') + break + } + + res.end() +} + +http.createServer(app).listen(3000) +``` + +You can test this out with the cURL program: +```sh +curl -I -H'Accept: text/html' http://localhost:3000/ +``` + +## License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/accepts/master +[coveralls-url]: https://coveralls.io/r/jshttp/accepts?branch=master +[github-actions-ci-image]: https://badgen.net/github/checks/jshttp/accepts/master?label=ci +[github-actions-ci-url]: https://github.com/jshttp/accepts/actions/workflows/ci.yml +[node-version-image]: https://badgen.net/npm/node/accepts +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/accepts +[npm-url]: https://npmjs.org/package/accepts +[npm-version-image]: https://badgen.net/npm/v/accepts diff --git a/backend/Backend/node_modules/accepts/index.js b/backend/Backend/node_modules/accepts/index.js new file mode 100644 index 000000000..e9b2f63fb --- /dev/null +++ b/backend/Backend/node_modules/accepts/index.js @@ -0,0 +1,238 @@ +/*! + * accepts + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var Negotiator = require('negotiator') +var mime = require('mime-types') + +/** + * Module exports. + * @public + */ + +module.exports = Accepts + +/** + * Create a new Accepts object for the given req. + * + * @param {object} req + * @public + */ + +function Accepts (req) { + if (!(this instanceof Accepts)) { + return new Accepts(req) + } + + this.headers = req.headers + this.negotiator = new Negotiator(req) +} + +/** + * Check if the given `type(s)` is acceptable, returning + * the best match when true, otherwise `undefined`, in which + * case you should respond with 406 "Not Acceptable". + * + * The `type` value may be a single mime type string + * such as "application/json", the extension name + * such as "json" or an array `["json", "html", "text/plain"]`. When a list + * or array is given the _best_ match, if any is returned. + * + * Examples: + * + * // Accept: text/html + * this.types('html'); + * // => "html" + * + * // Accept: text/*, application/json + * this.types('html'); + * // => "html" + * this.types('text/html'); + * // => "text/html" + * this.types('json', 'text'); + * // => "json" + * this.types('application/json'); + * // => "application/json" + * + * // Accept: text/*, application/json + * this.types('image/png'); + * this.types('png'); + * // => undefined + * + * // Accept: text/*;q=.5, application/json + * this.types(['html', 'json']); + * this.types('html', 'json'); + * // => "json" + * + * @param {String|Array} types... 
+ * @return {String|Array|Boolean} + * @public + */ + +Accepts.prototype.type = +Accepts.prototype.types = function (types_) { + var types = types_ + + // support flattened arguments + if (types && !Array.isArray(types)) { + types = new Array(arguments.length) + for (var i = 0; i < types.length; i++) { + types[i] = arguments[i] + } + } + + // no types, return all requested types + if (!types || types.length === 0) { + return this.negotiator.mediaTypes() + } + + // no accept header, return first given type + if (!this.headers.accept) { + return types[0] + } + + var mimes = types.map(extToMime) + var accepts = this.negotiator.mediaTypes(mimes.filter(validMime)) + var first = accepts[0] + + return first + ? types[mimes.indexOf(first)] + : false +} + +/** + * Return accepted encodings or best fit based on `encodings`. + * + * Given `Accept-Encoding: gzip, deflate` + * an array sorted by quality is returned: + * + * ['gzip', 'deflate'] + * + * @param {String|Array} encodings... + * @return {String|Array} + * @public + */ + +Accepts.prototype.encoding = +Accepts.prototype.encodings = function (encodings_) { + var encodings = encodings_ + + // support flattened arguments + if (encodings && !Array.isArray(encodings)) { + encodings = new Array(arguments.length) + for (var i = 0; i < encodings.length; i++) { + encodings[i] = arguments[i] + } + } + + // no encodings, return all requested encodings + if (!encodings || encodings.length === 0) { + return this.negotiator.encodings() + } + + return this.negotiator.encodings(encodings)[0] || false +} + +/** + * Return accepted charsets or best fit based on `charsets`. + * + * Given `Accept-Charset: utf-8, iso-8859-1;q=0.2, utf-7;q=0.5` + * an array sorted by quality is returned: + * + * ['utf-8', 'utf-7', 'iso-8859-1'] + * + * @param {String|Array} charsets... + * @return {String|Array} + * @public + */ + +Accepts.prototype.charset = +Accepts.prototype.charsets = function (charsets_) { + var charsets = charsets_ + + // support flattened arguments + if (charsets && !Array.isArray(charsets)) { + charsets = new Array(arguments.length) + for (var i = 0; i < charsets.length; i++) { + charsets[i] = arguments[i] + } + } + + // no charsets, return all requested charsets + if (!charsets || charsets.length === 0) { + return this.negotiator.charsets() + } + + return this.negotiator.charsets(charsets)[0] || false +} + +/** + * Return accepted languages or best fit based on `langs`. + * + * Given `Accept-Language: en;q=0.8, es, pt` + * an array sorted by quality is returned: + * + * ['es', 'pt', 'en'] + * + * @param {String|Array} langs... + * @return {Array|String} + * @public + */ + +Accepts.prototype.lang = +Accepts.prototype.langs = +Accepts.prototype.language = +Accepts.prototype.languages = function (languages_) { + var languages = languages_ + + // support flattened arguments + if (languages && !Array.isArray(languages)) { + languages = new Array(arguments.length) + for (var i = 0; i < languages.length; i++) { + languages[i] = arguments[i] + } + } + + // no languages, return all requested languages + if (!languages || languages.length === 0) { + return this.negotiator.languages() + } + + return this.negotiator.languages(languages)[0] || false +} + +/** + * Convert extnames to mime. + * + * @param {String} type + * @return {String} + * @private + */ + +function extToMime (type) { + return type.indexOf('/') === -1 + ? mime.lookup(type) + : type +} + +/** + * Check if mime is valid. 
+ * + * @param {String} type + * @return {String} + * @private + */ + +function validMime (type) { + return typeof type === 'string' +} diff --git a/backend/Backend/node_modules/accepts/package.json b/backend/Backend/node_modules/accepts/package.json new file mode 100644 index 000000000..0f2d15da9 --- /dev/null +++ b/backend/Backend/node_modules/accepts/package.json @@ -0,0 +1,47 @@ +{ + "name": "accepts", + "description": "Higher-level content negotiation", + "version": "1.3.8", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "repository": "jshttp/accepts", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "devDependencies": { + "deep-equal": "1.0.1", + "eslint": "7.32.0", + "eslint-config-standard": "14.1.1", + "eslint-plugin-import": "2.25.4", + "eslint-plugin-markdown": "2.2.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "4.3.1", + "eslint-plugin-standard": "4.1.0", + "mocha": "9.2.0", + "nyc": "15.1.0" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --reporter spec --check-leaks --bail test/", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + }, + "keywords": [ + "content", + "negotiation", + "accept", + "accepts" + ] +} diff --git a/backend/Backend/node_modules/anymatch/LICENSE b/backend/Backend/node_modules/anymatch/LICENSE new file mode 100644 index 000000000..491766ca7 --- /dev/null +++ b/backend/Backend/node_modules/anymatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2019 Elan Shanker, Paul Miller (https://paulmillr.com) + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/backend/Backend/node_modules/anymatch/README.md b/backend/Backend/node_modules/anymatch/README.md new file mode 100644 index 000000000..1dd67f534 --- /dev/null +++ b/backend/Backend/node_modules/anymatch/README.md @@ -0,0 +1,87 @@ +anymatch [![Build Status](https://travis-ci.org/micromatch/anymatch.svg?branch=master)](https://travis-ci.org/micromatch/anymatch) [![Coverage Status](https://img.shields.io/coveralls/micromatch/anymatch.svg?branch=master)](https://coveralls.io/r/micromatch/anymatch?branch=master) +====== +Javascript module to match a string against a regular expression, glob, string, +or function that takes the string as an argument and returns a truthy or falsy +value. The matcher can also be an array of any or all of these. Useful for +allowing a very flexible user-defined config to define things like file paths. + +__Note: This module has Bash-parity, please be aware that Windows-style backslashes are not supported as separators. 
See https://github.com/micromatch/micromatch#backslashes for more information.__ + + +Usage +----- +```sh +npm install anymatch +``` + +#### anymatch(matchers, testString, [returnIndex], [options]) +* __matchers__: (_Array|String|RegExp|Function_) +String to be directly matched, string with glob patterns, regular expression +test, function that takes the testString as an argument and returns a truthy +value if it should be matched, or an array of any number and mix of these types. +* __testString__: (_String|Array_) The string to test against the matchers. If +passed as an array, the first element of the array will be used as the +`testString` for non-function matchers, while the entire array will be applied +as the arguments for function matchers. +* __options__: (_Object_ [optional]) Any of the [picomatch](https://github.com/micromatch/picomatch#options) options. + * __returnIndex__: (_Boolean [optional]_) If true, return the array index of +the first matcher that the testString matched, or -1 if no match, instead of a +boolean result. + +```js +const anymatch = require('anymatch'); + +const matchers = [ 'path/to/file.js', 'path/anyjs/**/*.js', /foo.js$/, string => string.includes('bar') && string.length > 10 ]; + +anymatch(matchers, 'path/to/file.js'); // true +anymatch(matchers, 'path/anyjs/baz.js'); // true +anymatch(matchers, 'path/to/foo.js'); // true +anymatch(matchers, 'path/to/bar.js'); // true +anymatch(matchers, 'bar.js'); // false + +// returnIndex = true +anymatch(matchers, 'foo.js', {returnIndex: true}); // 2 +anymatch(matchers, 'path/anyjs/foo.js', {returnIndex: true}); // 1 + +// using globs to match directories and their children +anymatch('node_modules', 'node_modules'); // true +anymatch('node_modules', 'node_modules/somelib/index.js'); // false +anymatch('node_modules/**', 'node_modules/somelib/index.js'); // true +anymatch('node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // false +anymatch('**/node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // true + +const matcher = anymatch(matchers); +['foo.js', 'bar.js'].filter(matcher); // [ 'foo.js' ] +``` + +#### anymatch(matchers) +You can also pass in only your matcher(s) to get a curried function that has +already been bound to the provided matching criteria. This can be used as an +`Array#filter` callback. + +```js +var matcher = anymatch(matchers); + +matcher('path/to/file.js'); // true +matcher('path/anyjs/baz.js', true); // 1 + +['foo.js', 'bar.js'].filter(matcher); // ['foo.js'] +``` + +Changelog +---------- +[See release notes page on GitHub](https://github.com/micromatch/anymatch/releases) + +- **v3.0:** Removed `startIndex` and `endIndex` arguments. Node 8.x-only. +- **v2.0:** [micromatch](https://github.com/jonschlinkert/micromatch) moves away from minimatch-parity and inline with Bash. This includes handling backslashes differently (see https://github.com/micromatch/micromatch#backslashes for more information). +- **v1.2:** anymatch uses [micromatch](https://github.com/jonschlinkert/micromatch) +for glob pattern matching. Issues with glob pattern matching should be +reported directly to the [micromatch issue tracker](https://github.com/jonschlinkert/micromatch/issues).
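The bundled `index.js` added further down in this diff also treats string matchers prefixed with `!` as negated globs: if any negated glob matches the test string, the result is `false` (or `-1` when `returnIndex` is used) regardless of the other matchers. That behaviour is not spelled out in the readme above, so here is a minimal sketch of it, using hypothetical paths and assuming standard picomatch glob semantics:

```js
const anymatch = require('anymatch');

// '!**/*.spec.js' is split out as a negated glob; the other entry is
// compiled as an ordinary positive matcher.
const matchers = ['src/**/*.js', '!**/*.spec.js'];

anymatch(matchers, 'src/app.js');                             // true
anymatch(matchers, 'src/app.spec.js');                        // false (negated glob wins)
anymatch(matchers, 'src/app.spec.js', { returnIndex: true }); // -1
```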
+ +License +------- +[ISC](https://raw.github.com/micromatch/anymatch/master/LICENSE) diff --git a/backend/Backend/node_modules/anymatch/index.d.ts b/backend/Backend/node_modules/anymatch/index.d.ts new file mode 100644 index 000000000..3ef7eaadd --- /dev/null +++ b/backend/Backend/node_modules/anymatch/index.d.ts @@ -0,0 +1,20 @@ +type AnymatchFn = (testString: string) => boolean; +type AnymatchPattern = string|RegExp|AnymatchFn; +type AnymatchMatcher = AnymatchPattern|AnymatchPattern[] +type AnymatchTester = { + (testString: string|any[], returnIndex: true): number; + (testString: string|any[]): boolean; +} + +type PicomatchOptions = {dot: boolean}; + +declare const anymatch: { + (matchers: AnymatchMatcher): AnymatchTester; + (matchers: AnymatchMatcher, testString: null, returnIndex: true | PicomatchOptions): AnymatchTester; + (matchers: AnymatchMatcher, testString: string|any[], returnIndex: true | PicomatchOptions): number; + (matchers: AnymatchMatcher, testString: string|any[]): boolean; +} + +export {AnymatchMatcher as Matcher} +export {AnymatchTester as Tester} +export default anymatch diff --git a/backend/Backend/node_modules/anymatch/index.js b/backend/Backend/node_modules/anymatch/index.js new file mode 100644 index 000000000..8eb73e9c9 --- /dev/null +++ b/backend/Backend/node_modules/anymatch/index.js @@ -0,0 +1,104 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { value: true }); + +const picomatch = require('picomatch'); +const normalizePath = require('normalize-path'); + +/** + * @typedef {(testString: string) => boolean} AnymatchFn + * @typedef {string|RegExp|AnymatchFn} AnymatchPattern + * @typedef {AnymatchPattern|AnymatchPattern[]} AnymatchMatcher + */ +const BANG = '!'; +const DEFAULT_OPTIONS = {returnIndex: false}; +const arrify = (item) => Array.isArray(item) ? item : [item]; + +/** + * @param {AnymatchPattern} matcher + * @param {object} options + * @returns {AnymatchFn} + */ +const createPattern = (matcher, options) => { + if (typeof matcher === 'function') { + return matcher; + } + if (typeof matcher === 'string') { + const glob = picomatch(matcher, options); + return (string) => matcher === string || glob(string); + } + if (matcher instanceof RegExp) { + return (string) => matcher.test(string); + } + return (string) => false; +}; + +/** + * @param {Array} patterns + * @param {Array} negPatterns + * @param {String|Array} args + * @param {Boolean} returnIndex + * @returns {boolean|number} + */ +const matchPatterns = (patterns, negPatterns, args, returnIndex) => { + const isList = Array.isArray(args); + const _path = isList ? args[0] : args; + if (!isList && typeof _path !== 'string') { + throw new TypeError('anymatch: second argument must be a string: got ' + + Object.prototype.toString.call(_path)) + } + const path = normalizePath(_path, false); + + for (let index = 0; index < negPatterns.length; index++) { + const nglob = negPatterns[index]; + if (nglob(path)) { + return returnIndex ? -1 : false; + } + } + + const applied = isList && [path].concat(args.slice(1)); + for (let index = 0; index < patterns.length; index++) { + const pattern = patterns[index]; + if (isList ? pattern(...applied) : pattern(path)) { + return returnIndex ? index : true; + } + } + + return returnIndex ? 
-1 : false; +}; + +/** + * @param {AnymatchMatcher} matchers + * @param {Array|string} testString + * @param {object} options + * @returns {boolean|number|Function} + */ +const anymatch = (matchers, testString, options = DEFAULT_OPTIONS) => { + if (matchers == null) { + throw new TypeError('anymatch: specify first argument'); + } + const opts = typeof options === 'boolean' ? {returnIndex: options} : options; + const returnIndex = opts.returnIndex || false; + + // Early cache for matchers. + const mtchers = arrify(matchers); + const negatedGlobs = mtchers + .filter(item => typeof item === 'string' && item.charAt(0) === BANG) + .map(item => item.slice(1)) + .map(item => picomatch(item, opts)); + const patterns = mtchers + .filter(item => typeof item !== 'string' || (typeof item === 'string' && item.charAt(0) !== BANG)) + .map(matcher => createPattern(matcher, opts)); + + if (testString == null) { + return (testString, ri = false) => { + const returnIndex = typeof ri === 'boolean' ? ri : false; + return matchPatterns(patterns, negatedGlobs, testString, returnIndex); + } + } + + return matchPatterns(patterns, negatedGlobs, testString, returnIndex); +}; + +anymatch.default = anymatch; +module.exports = anymatch; diff --git a/backend/Backend/node_modules/anymatch/package.json b/backend/Backend/node_modules/anymatch/package.json new file mode 100644 index 000000000..2cb2307e4 --- /dev/null +++ b/backend/Backend/node_modules/anymatch/package.json @@ -0,0 +1,48 @@ +{ + "name": "anymatch", + "version": "3.1.3", + "description": "Matches strings against configurable strings, globs, regular expressions, and/or functions", + "files": [ + "index.js", + "index.d.ts" + ], + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "author": { + "name": "Elan Shanker", + "url": "https://github.com/es128" + }, + "license": "ISC", + "homepage": "https://github.com/micromatch/anymatch", + "repository": { + "type": "git", + "url": "https://github.com/micromatch/anymatch" + }, + "keywords": [ + "match", + "any", + "string", + "file", + "fs", + "list", + "glob", + "regex", + "regexp", + "regular", + "expression", + "function" + ], + "scripts": { + "test": "nyc mocha", + "mocha": "mocha" + }, + "devDependencies": { + "mocha": "^6.1.3", + "nyc": "^14.0.0" + }, + "engines": { + "node": ">= 8" + } +} diff --git a/backend/Backend/node_modules/append-field/.npmignore b/backend/Backend/node_modules/append-field/.npmignore new file mode 100644 index 000000000..c2658d7d1 --- /dev/null +++ b/backend/Backend/node_modules/append-field/.npmignore @@ -0,0 +1 @@ +node_modules/ diff --git a/backend/Backend/node_modules/append-field/LICENSE b/backend/Backend/node_modules/append-field/LICENSE new file mode 100644 index 000000000..14b1f891b --- /dev/null +++ b/backend/Backend/node_modules/append-field/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Linus Unnebäck + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/backend/Backend/node_modules/append-field/README.md b/backend/Backend/node_modules/append-field/README.md new file mode 100644 index 000000000..62b901b7e --- /dev/null +++ b/backend/Backend/node_modules/append-field/README.md @@ -0,0 +1,44 @@ +# `append-field` + +A [W3C HTML JSON forms spec](http://www.w3.org/TR/html-json-forms/) compliant +field appender (for lack of a better name). Useful for people implementing +`application/x-www-form-urlencoded` and `multipart/form-data` parsers. + +It works best on objects created with `Object.create(null)`. Otherwise it might +conflict with variables from the prototype (e.g. `hasOwnProperty`). + +## Installation + +```sh +npm install --save append-field +``` + +## Usage + +```javascript +var appendField = require('append-field') +var obj = Object.create(null) + +appendField(obj, 'pets[0][species]', 'Dahut') +appendField(obj, 'pets[0][name]', 'Hypatia') +appendField(obj, 'pets[1][species]', 'Felis Stultus') +appendField(obj, 'pets[1][name]', 'Billie') + +console.log(obj) +``` + +```text +{ pets: + [ { species: 'Dahut', name: 'Hypatia' }, + { species: 'Felis Stultus', name: 'Billie' } ] } +``` + +## API + +### `appendField(store, key, value)` + +Adds the field named `key` with the value `value` to the object `store`. + +## License + +MIT diff --git a/backend/Backend/node_modules/append-field/index.js b/backend/Backend/node_modules/append-field/index.js new file mode 100644 index 000000000..fc5acc8bc --- /dev/null +++ b/backend/Backend/node_modules/append-field/index.js @@ -0,0 +1,12 @@ +var parsePath = require('./lib/parse-path') +var setValue = require('./lib/set-value') + +function appendField (store, key, value) { + var steps = parsePath(key) + + steps.reduce(function (context, step) { + return setValue(context, step, context[step.key], value) + }, store) +} + +module.exports = appendField diff --git a/backend/Backend/node_modules/append-field/lib/parse-path.js b/backend/Backend/node_modules/append-field/lib/parse-path.js new file mode 100644 index 000000000..31d617966 --- /dev/null +++ b/backend/Backend/node_modules/append-field/lib/parse-path.js @@ -0,0 +1,53 @@ +var reFirstKey = /^[^\[]*/ +var reDigitPath = /^\[(\d+)\]/ +var reNormalPath = /^\[([^\]]+)\]/ + +function parsePath (key) { + function failure () { + return [{ type: 'object', key: key, last: true }] + } + + var firstKey = reFirstKey.exec(key)[0] + if (!firstKey) return failure() + + var len = key.length + var pos = firstKey.length + var tail = { type: 'object', key: firstKey } + var steps = [tail] + + while (pos < len) { + var m + + if (key[pos] === '[' && key[pos + 1] === ']') { + pos += 2 + tail.append = true + if (pos !== len) return failure() + continue + } + + m = reDigitPath.exec(key.substring(pos)) + if (m !== null) { + pos += m[0].length + tail.nextType = 'array' + tail = { type: 'array', key: parseInt(m[1], 10) } + steps.push(tail) + continue + } + + m = reNormalPath.exec(key.substring(pos)) + if (m !== null) { + pos += m[0].length + tail.nextType = 'object' + tail = { type: 'object', key: m[1] } + 
steps.push(tail) + continue + } + + return failure() + } + + tail.last = true + return steps +} + +module.exports = parsePath diff --git a/backend/Backend/node_modules/append-field/lib/set-value.js b/backend/Backend/node_modules/append-field/lib/set-value.js new file mode 100644 index 000000000..c15e87377 --- /dev/null +++ b/backend/Backend/node_modules/append-field/lib/set-value.js @@ -0,0 +1,64 @@ +function valueType (value) { + if (value === undefined) return 'undefined' + if (Array.isArray(value)) return 'array' + if (typeof value === 'object') return 'object' + return 'scalar' +} + +function setLastValue (context, step, currentValue, entryValue) { + switch (valueType(currentValue)) { + case 'undefined': + if (step.append) { + context[step.key] = [entryValue] + } else { + context[step.key] = entryValue + } + break + case 'array': + context[step.key].push(entryValue) + break + case 'object': + return setLastValue(currentValue, { type: 'object', key: '', last: true }, currentValue[''], entryValue) + case 'scalar': + context[step.key] = [context[step.key], entryValue] + break + } + + return context +} + +function setValue (context, step, currentValue, entryValue) { + if (step.last) return setLastValue(context, step, currentValue, entryValue) + + var obj + switch (valueType(currentValue)) { + case 'undefined': + if (step.nextType === 'array') { + context[step.key] = [] + } else { + context[step.key] = Object.create(null) + } + return context[step.key] + case 'object': + return context[step.key] + case 'array': + if (step.nextType === 'array') { + return currentValue + } + + obj = Object.create(null) + context[step.key] = obj + currentValue.forEach(function (item, i) { + if (item !== undefined) obj['' + i] = item + }) + + return obj + case 'scalar': + obj = Object.create(null) + obj[''] = currentValue + context[step.key] = obj + return obj + } +} + +module.exports = setValue diff --git a/backend/Backend/node_modules/append-field/package.json b/backend/Backend/node_modules/append-field/package.json new file mode 100644 index 000000000..8d6e71643 --- /dev/null +++ b/backend/Backend/node_modules/append-field/package.json @@ -0,0 +1,19 @@ +{ + "name": "append-field", + "version": "1.0.0", + "license": "MIT", + "author": "Linus Unnebäck ", + "main": "index.js", + "devDependencies": { + "mocha": "^2.2.4", + "standard": "^6.0.5", + "testdata-w3c-json-form": "^0.2.0" + }, + "scripts": { + "test": "standard && mocha" + }, + "repository": { + "type": "git", + "url": "http://github.com/LinusU/node-append-field.git" + } +} diff --git a/backend/Backend/node_modules/append-field/test/forms.js b/backend/Backend/node_modules/append-field/test/forms.js new file mode 100644 index 000000000..dd6fbc988 --- /dev/null +++ b/backend/Backend/node_modules/append-field/test/forms.js @@ -0,0 +1,19 @@ +/* eslint-env mocha */ + +var assert = require('assert') +var appendField = require('../') +var testData = require('testdata-w3c-json-form') + +describe('Append Field', function () { + for (var test of testData) { + it('handles ' + test.name, function () { + var store = Object.create(null) + + for (var field of test.fields) { + appendField(store, field.key, field.value) + } + + assert.deepEqual(store, test.expected) + }) + } +}) diff --git a/backend/Backend/node_modules/array-flatten/LICENSE b/backend/Backend/node_modules/array-flatten/LICENSE new file mode 100644 index 000000000..983fbe8ae --- /dev/null +++ b/backend/Backend/node_modules/array-flatten/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 
2014 Blake Embrey (hello@blakeembrey.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/backend/Backend/node_modules/array-flatten/README.md b/backend/Backend/node_modules/array-flatten/README.md new file mode 100644 index 000000000..91fa5b637 --- /dev/null +++ b/backend/Backend/node_modules/array-flatten/README.md @@ -0,0 +1,43 @@ +# Array Flatten + +[![NPM version][npm-image]][npm-url] +[![NPM downloads][downloads-image]][downloads-url] +[![Build status][travis-image]][travis-url] +[![Test coverage][coveralls-image]][coveralls-url] + +> Flatten an array of nested arrays into a single flat array. Accepts an optional depth. + +## Installation + +``` +npm install array-flatten --save +``` + +## Usage + +```javascript +var flatten = require('array-flatten') + +flatten([1, [2, [3, [4, [5], 6], 7], 8], 9]) +//=> [1, 2, 3, 4, 5, 6, 7, 8, 9] + +flatten([1, [2, [3, [4, [5], 6], 7], 8], 9], 2) +//=> [1, 2, 3, [4, [5], 6], 7, 8, 9] + +(function () { + flatten(arguments) //=> [1, 2, 3] +})(1, [2, 3]) +``` + +## License + +MIT + +[npm-image]: https://img.shields.io/npm/v/array-flatten.svg?style=flat +[npm-url]: https://npmjs.org/package/array-flatten +[downloads-image]: https://img.shields.io/npm/dm/array-flatten.svg?style=flat +[downloads-url]: https://npmjs.org/package/array-flatten +[travis-image]: https://img.shields.io/travis/blakeembrey/array-flatten.svg?style=flat +[travis-url]: https://travis-ci.org/blakeembrey/array-flatten +[coveralls-image]: https://img.shields.io/coveralls/blakeembrey/array-flatten.svg?style=flat +[coveralls-url]: https://coveralls.io/r/blakeembrey/array-flatten?branch=master diff --git a/backend/Backend/node_modules/array-flatten/array-flatten.js b/backend/Backend/node_modules/array-flatten/array-flatten.js new file mode 100644 index 000000000..089117b32 --- /dev/null +++ b/backend/Backend/node_modules/array-flatten/array-flatten.js @@ -0,0 +1,64 @@ +'use strict' + +/** + * Expose `arrayFlatten`. + */ +module.exports = arrayFlatten + +/** + * Recursive flatten function with depth. + * + * @param {Array} array + * @param {Array} result + * @param {Number} depth + * @return {Array} + */ +function flattenWithDepth (array, result, depth) { + for (var i = 0; i < array.length; i++) { + var value = array[i] + + if (depth > 0 && Array.isArray(value)) { + flattenWithDepth(value, result, depth - 1) + } else { + result.push(value) + } + } + + return result +} + +/** + * Recursive flatten function. Omitting depth is slightly faster. 
+ * + * @param {Array} array + * @param {Array} result + * @return {Array} + */ +function flattenForever (array, result) { + for (var i = 0; i < array.length; i++) { + var value = array[i] + + if (Array.isArray(value)) { + flattenForever(value, result) + } else { + result.push(value) + } + } + + return result +} + +/** + * Flatten an array, with the ability to define a depth. + * + * @param {Array} array + * @param {Number} depth + * @return {Array} + */ +function arrayFlatten (array, depth) { + if (depth == null) { + return flattenForever(array, []) + } + + return flattenWithDepth(array, [], depth) +} diff --git a/backend/Backend/node_modules/array-flatten/package.json b/backend/Backend/node_modules/array-flatten/package.json new file mode 100644 index 000000000..1a24e2a1a --- /dev/null +++ b/backend/Backend/node_modules/array-flatten/package.json @@ -0,0 +1,39 @@ +{ + "name": "array-flatten", + "version": "1.1.1", + "description": "Flatten an array of nested arrays into a single flat array", + "main": "array-flatten.js", + "files": [ + "array-flatten.js", + "LICENSE" + ], + "scripts": { + "test": "istanbul cover _mocha -- -R spec" + }, + "repository": { + "type": "git", + "url": "git://github.com/blakeembrey/array-flatten.git" + }, + "keywords": [ + "array", + "flatten", + "arguments", + "depth" + ], + "author": { + "name": "Blake Embrey", + "email": "hello@blakeembrey.com", + "url": "http://blakeembrey.me" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/blakeembrey/array-flatten/issues" + }, + "homepage": "https://github.com/blakeembrey/array-flatten", + "devDependencies": { + "istanbul": "^0.3.13", + "mocha": "^2.2.4", + "pre-commit": "^1.0.7", + "standard": "^3.7.3" + } +} diff --git a/backend/Backend/node_modules/balanced-match/.github/FUNDING.yml b/backend/Backend/node_modules/balanced-match/.github/FUNDING.yml new file mode 100644 index 000000000..cea8b16e9 --- /dev/null +++ b/backend/Backend/node_modules/balanced-match/.github/FUNDING.yml @@ -0,0 +1,2 @@ +tidelift: "npm/balanced-match" +patreon: juliangruber diff --git a/backend/Backend/node_modules/balanced-match/LICENSE.md b/backend/Backend/node_modules/balanced-match/LICENSE.md new file mode 100644 index 000000000..2cdc8e414 --- /dev/null +++ b/backend/Backend/node_modules/balanced-match/LICENSE.md @@ -0,0 +1,21 @@ +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/backend/Backend/node_modules/balanced-match/README.md b/backend/Backend/node_modules/balanced-match/README.md new file mode 100644 index 000000000..d2a48b6b4 --- /dev/null +++ b/backend/Backend/node_modules/balanced-match/README.md @@ -0,0 +1,97 @@ +# balanced-match + +Match balanced string pairs, like `{` and `}` or `<b>` and `</b>`. Supports regular expressions as well! + +[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match) +[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match) + +[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match) + +## Example + +Get the first matching pair of braces: + +```js +var balanced = require('balanced-match'); + +console.log(balanced('{', '}', 'pre{in{nested}}post')); +console.log(balanced('{', '}', 'pre{first}between{second}post')); +console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post')); +``` + +The matches are: + +```bash +$ node example.js +{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' } +{ start: 3, + end: 9, + pre: 'pre', + body: 'first', + post: 'between{second}post' } +{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' } +``` + +## API + +### var m = balanced(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +object with those keys: + +* **start** the index of the first match of `a` +* **end** the index of the matching `b` +* **pre** the preamble, `a` and `b` not included +* **body** the match, `a` and `b` not included +* **post** the postscript, `a` and `b` not included + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`. + +### var r = balanced.range(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +array with indexes: `[ <begin>, <end> ]`. + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install balanced-match +``` + +## Security contact information + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software.
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/backend/Backend/node_modules/balanced-match/index.js b/backend/Backend/node_modules/balanced-match/index.js new file mode 100644 index 000000000..c67a64608 --- /dev/null +++ b/backend/Backend/node_modules/balanced-match/index.js @@ -0,0 +1,62 @@ +'use strict'; +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; + } + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} diff --git a/backend/Backend/node_modules/balanced-match/package.json b/backend/Backend/node_modules/balanced-match/package.json new file mode 100644 index 000000000..ce6073e04 --- /dev/null +++ b/backend/Backend/node_modules/balanced-match/package.json @@ -0,0 +1,48 @@ +{ + "name": "balanced-match", + "description": "Match balanced character pairs, like \"{\" and \"}\"", + "version": "1.0.2", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/balanced-match.git" + }, + "homepage": "https://github.com/juliangruber/balanced-match", + "main": "index.js", + "scripts": { + "test": "tape test/test.js", + "bench": "matcha test/bench.js" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [ + "match", + "regexp", + "test", + "balanced", + "parse" + ], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/backend/Backend/node_modules/binary-extensions/binary-extensions.json b/backend/Backend/node_modules/binary-extensions/binary-extensions.json new file mode 100644 index 000000000..ac08048e4 --- /dev/null +++ b/backend/Backend/node_modules/binary-extensions/binary-extensions.json @@ -0,0 +1,263 @@ +[ + "3dm", + "3ds", + "3g2", + "3gp", + "7z", + "a", + "aac", + "adp", + "afdesign", + "afphoto", + "afpub", + "ai", + 
"aif", + "aiff", + "alz", + "ape", + "apk", + "appimage", + "ar", + "arj", + "asf", + "au", + "avi", + "bak", + "baml", + "bh", + "bin", + "bk", + "bmp", + "btif", + "bz2", + "bzip2", + "cab", + "caf", + "cgm", + "class", + "cmx", + "cpio", + "cr2", + "cur", + "dat", + "dcm", + "deb", + "dex", + "djvu", + "dll", + "dmg", + "dng", + "doc", + "docm", + "docx", + "dot", + "dotm", + "dra", + "DS_Store", + "dsk", + "dts", + "dtshd", + "dvb", + "dwg", + "dxf", + "ecelp4800", + "ecelp7470", + "ecelp9600", + "egg", + "eol", + "eot", + "epub", + "exe", + "f4v", + "fbs", + "fh", + "fla", + "flac", + "flatpak", + "fli", + "flv", + "fpx", + "fst", + "fvt", + "g3", + "gh", + "gif", + "graffle", + "gz", + "gzip", + "h261", + "h263", + "h264", + "icns", + "ico", + "ief", + "img", + "ipa", + "iso", + "jar", + "jpeg", + "jpg", + "jpgv", + "jpm", + "jxr", + "key", + "ktx", + "lha", + "lib", + "lvp", + "lz", + "lzh", + "lzma", + "lzo", + "m3u", + "m4a", + "m4v", + "mar", + "mdi", + "mht", + "mid", + "midi", + "mj2", + "mka", + "mkv", + "mmr", + "mng", + "mobi", + "mov", + "movie", + "mp3", + "mp4", + "mp4a", + "mpeg", + "mpg", + "mpga", + "mxu", + "nef", + "npx", + "numbers", + "nupkg", + "o", + "odp", + "ods", + "odt", + "oga", + "ogg", + "ogv", + "otf", + "ott", + "pages", + "pbm", + "pcx", + "pdb", + "pdf", + "pea", + "pgm", + "pic", + "png", + "pnm", + "pot", + "potm", + "potx", + "ppa", + "ppam", + "ppm", + "pps", + "ppsm", + "ppsx", + "ppt", + "pptm", + "pptx", + "psd", + "pya", + "pyc", + "pyo", + "pyv", + "qt", + "rar", + "ras", + "raw", + "resources", + "rgb", + "rip", + "rlc", + "rmf", + "rmvb", + "rpm", + "rtf", + "rz", + "s3m", + "s7z", + "scpt", + "sgi", + "shar", + "snap", + "sil", + "sketch", + "slk", + "smv", + "snk", + "so", + "stl", + "suo", + "sub", + "swf", + "tar", + "tbz", + "tbz2", + "tga", + "tgz", + "thmx", + "tif", + "tiff", + "tlz", + "ttc", + "ttf", + "txz", + "udf", + "uvh", + "uvi", + "uvm", + "uvp", + "uvs", + "uvu", + "viv", + "vob", + "war", + "wav", + "wax", + "wbmp", + "wdp", + "weba", + "webm", + "webp", + "whl", + "wim", + "wm", + "wma", + "wmv", + "wmx", + "woff", + "woff2", + "wrm", + "wvx", + "xbm", + "xif", + "xla", + "xlam", + "xls", + "xlsb", + "xlsm", + "xlsx", + "xlt", + "xltm", + "xltx", + "xm", + "xmind", + "xpi", + "xpm", + "xwd", + "xz", + "z", + "zip", + "zipx" +] diff --git a/backend/Backend/node_modules/binary-extensions/binary-extensions.json.d.ts b/backend/Backend/node_modules/binary-extensions/binary-extensions.json.d.ts new file mode 100644 index 000000000..94a248c2b --- /dev/null +++ b/backend/Backend/node_modules/binary-extensions/binary-extensions.json.d.ts @@ -0,0 +1,3 @@ +declare const binaryExtensionsJson: readonly string[]; + +export = binaryExtensionsJson; diff --git a/backend/Backend/node_modules/binary-extensions/index.d.ts b/backend/Backend/node_modules/binary-extensions/index.d.ts new file mode 100644 index 000000000..f469ac5fb --- /dev/null +++ b/backend/Backend/node_modules/binary-extensions/index.d.ts @@ -0,0 +1,14 @@ +/** +List of binary file extensions. 
+ +@example +``` +import binaryExtensions = require('binary-extensions'); + +console.log(binaryExtensions); +//=> ['3ds', '3g2', …] +``` +*/ +declare const binaryExtensions: readonly string[]; + +export = binaryExtensions; diff --git a/backend/Backend/node_modules/binary-extensions/index.js b/backend/Backend/node_modules/binary-extensions/index.js new file mode 100644 index 000000000..d46e46886 --- /dev/null +++ b/backend/Backend/node_modules/binary-extensions/index.js @@ -0,0 +1 @@ +module.exports = require('./binary-extensions.json'); diff --git a/backend/Backend/node_modules/binary-extensions/license b/backend/Backend/node_modules/binary-extensions/license new file mode 100644 index 000000000..5493a1a6e --- /dev/null +++ b/backend/Backend/node_modules/binary-extensions/license @@ -0,0 +1,10 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) +Copyright (c) Paul Miller (https://paulmillr.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/backend/Backend/node_modules/binary-extensions/package.json b/backend/Backend/node_modules/binary-extensions/package.json new file mode 100644 index 000000000..4710c339a --- /dev/null +++ b/backend/Backend/node_modules/binary-extensions/package.json @@ -0,0 +1,40 @@ +{ + "name": "binary-extensions", + "version": "2.3.0", + "description": "List of binary file extensions", + "license": "MIT", + "repository": "sindresorhus/binary-extensions", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "sideEffects": false, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts", + "binary-extensions.json", + "binary-extensions.json.d.ts" + ], + "keywords": [ + "binary", + "extensions", + "extension", + "file", + "json", + "list", + "array" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/backend/Backend/node_modules/binary-extensions/readme.md b/backend/Backend/node_modules/binary-extensions/readme.md new file mode 100644 index 000000000..88519b3a6 --- /dev/null +++ b/backend/Backend/node_modules/binary-extensions/readme.md @@ -0,0 +1,25 @@ +# binary-extensions + +> List of binary file extensions + +The list is just a [JSON file](binary-extensions.json) and can be used anywhere. 
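A small sketch of how the list is typically consumed (not part of the upstream readme; `isBinary` is a hypothetical helper name, and the related `is-binary-path` package mentioned below does essentially this):

```js
const path = require('path');
const binaryExtensions = require('binary-extensions');

// Hypothetical helper: true when the file's lower-cased extension is in the list
function isBinary(filePath) {
  const extension = path.extname(filePath).slice(1).toLowerCase();
  return binaryExtensions.includes(extension);
}

isBinary('photo.PNG'); //=> true
isBinary('notes.txt'); //=> false
```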
+ +## Install + +```sh +npm install binary-extensions +``` + +## Usage + +```js +const binaryExtensions = require('binary-extensions'); + +console.log(binaryExtensions); +//=> ['3ds', '3g2', …] +``` + +## Related + +- [is-binary-path](https://github.com/sindresorhus/is-binary-path) - Check if a filepath is a binary file +- [text-extensions](https://github.com/sindresorhus/text-extensions) - List of text file extensions diff --git a/backend/Backend/node_modules/body-parser/HISTORY.md b/backend/Backend/node_modules/body-parser/HISTORY.md new file mode 100644 index 000000000..81d23e064 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/HISTORY.md @@ -0,0 +1,672 @@ +1.20.3 / 2024-09-10 +=================== + + * deps: qs@6.13.0 + * add `depth` option to customize the depth level in the parser + * IMPORTANT: The default `depth` level for parsing URL-encoded data is now `32` (previously was `Infinity`) + +1.20.2 / 2023-02-21 +=================== + + * Fix strict json error message on Node.js 19+ + * deps: content-type@~1.0.5 + - perf: skip value escaping when unnecessary + * deps: raw-body@2.5.2 + +1.20.1 / 2022-10-06 +=================== + + * deps: qs@6.11.0 + * perf: remove unnecessary object clone + +1.20.0 / 2022-04-02 +=================== + + * Fix error message for json parse whitespace in `strict` + * Fix internal error when inflated body exceeds limit + * Prevent loss of async hooks context + * Prevent hanging when request already read + * deps: depd@2.0.0 + - Replace internal `eval` usage with `Function` constructor + - Use instance methods on `process` to check for listeners + * deps: http-errors@2.0.0 + - deps: depd@2.0.0 + - deps: statuses@2.0.1 + * deps: on-finished@2.4.1 + * deps: qs@6.10.3 + * deps: raw-body@2.5.1 + - deps: http-errors@2.0.0 + +1.19.2 / 2022-02-15 +=================== + + * deps: bytes@3.1.2 + * deps: qs@6.9.7 + * Fix handling of `__proto__` keys + * deps: raw-body@2.4.3 + - deps: bytes@3.1.2 + +1.19.1 / 2021-12-10 +=================== + + * deps: bytes@3.1.1 + * deps: http-errors@1.8.1 + - deps: inherits@2.0.4 + - deps: toidentifier@1.0.1 + - deps: setprototypeof@1.2.0 + * deps: qs@6.9.6 + * deps: raw-body@2.4.2 + - deps: bytes@3.1.1 + - deps: http-errors@1.8.1 + * deps: safe-buffer@5.2.1 + * deps: type-is@~1.6.18 + +1.19.0 / 2019-04-25 +=================== + + * deps: bytes@3.1.0 + - Add petabyte (`pb`) support + * deps: http-errors@1.7.2 + - Set constructor name when possible + - deps: setprototypeof@1.1.1 + - deps: statuses@'>= 1.5.0 < 2' + * deps: iconv-lite@0.4.24 + - Added encoding MIK + * deps: qs@6.7.0 + - Fix parsing array brackets after index + * deps: raw-body@2.4.0 + - deps: bytes@3.1.0 + - deps: http-errors@1.7.2 + - deps: iconv-lite@0.4.24 + * deps: type-is@~1.6.17 + - deps: mime-types@~2.1.24 + - perf: prevent internal `throw` on invalid type + +1.18.3 / 2018-05-14 +=================== + + * Fix stack trace for strict json parse error + * deps: depd@~1.1.2 + - perf: remove argument reassignment + * deps: http-errors@~1.6.3 + - deps: depd@~1.1.2 + - deps: setprototypeof@1.1.0 + - deps: statuses@'>= 1.3.1 < 2' + * deps: iconv-lite@0.4.23 + - Fix loading encoding with year appended + - Fix deprecation warnings on Node.js 10+ + * deps: qs@6.5.2 + * deps: raw-body@2.3.3 + - deps: http-errors@1.6.3 + - deps: iconv-lite@0.4.23 + * deps: type-is@~1.6.16 + - deps: mime-types@~2.1.18 + +1.18.2 / 2017-09-22 +=================== + + * deps: debug@2.6.9 + * perf: remove argument reassignment + +1.18.1 / 2017-09-12 +=================== + + * deps: 
content-type@~1.0.4 + - perf: remove argument reassignment + - perf: skip parameter parsing when no parameters + * deps: iconv-lite@0.4.19 + - Fix ISO-8859-1 regression + - Update Windows-1255 + * deps: qs@6.5.1 + - Fix parsing & compacting very deep objects + * deps: raw-body@2.3.2 + - deps: iconv-lite@0.4.19 + +1.18.0 / 2017-09-08 +=================== + + * Fix JSON strict violation error to match native parse error + * Include the `body` property on verify errors + * Include the `type` property on all generated errors + * Use `http-errors` to set status code on errors + * deps: bytes@3.0.0 + * deps: debug@2.6.8 + * deps: depd@~1.1.1 + - Remove unnecessary `Buffer` loading + * deps: http-errors@~1.6.2 + - deps: depd@1.1.1 + * deps: iconv-lite@0.4.18 + - Add support for React Native + - Add a warning if not loaded as utf-8 + - Fix CESU-8 decoding in Node.js 8 + - Improve speed of ISO-8859-1 encoding + * deps: qs@6.5.0 + * deps: raw-body@2.3.1 + - Use `http-errors` for standard emitted errors + - deps: bytes@3.0.0 + - deps: iconv-lite@0.4.18 + - perf: skip buffer decoding on overage chunk + * perf: prevent internal `throw` when missing charset + +1.17.2 / 2017-05-17 +=================== + + * deps: debug@2.6.7 + - Fix `DEBUG_MAX_ARRAY_LENGTH` + - deps: ms@2.0.0 + * deps: type-is@~1.6.15 + - deps: mime-types@~2.1.15 + +1.17.1 / 2017-03-06 +=================== + + * deps: qs@6.4.0 + - Fix regression parsing keys starting with `[` + +1.17.0 / 2017-03-01 +=================== + + * deps: http-errors@~1.6.1 + - Make `message` property enumerable for `HttpError`s + - deps: setprototypeof@1.0.3 + * deps: qs@6.3.1 + - Fix compacting nested arrays + +1.16.1 / 2017-02-10 +=================== + + * deps: debug@2.6.1 + - Fix deprecation messages in WebStorm and other editors + - Undeprecate `DEBUG_FD` set to `1` or `2` + +1.16.0 / 2017-01-17 +=================== + + * deps: debug@2.6.0 + - Allow colors in workers + - Deprecated `DEBUG_FD` environment variable + - Fix error when running under React Native + - Use same color for same namespace + - deps: ms@0.7.2 + * deps: http-errors@~1.5.1 + - deps: inherits@2.0.3 + - deps: setprototypeof@1.0.2 + - deps: statuses@'>= 1.3.1 < 2' + * deps: iconv-lite@0.4.15 + - Added encoding MS-31J + - Added encoding MS-932 + - Added encoding MS-936 + - Added encoding MS-949 + - Added encoding MS-950 + - Fix GBK/GB18030 handling of Euro character + * deps: qs@6.2.1 + - Fix array parsing from skipping empty values + * deps: raw-body@~2.2.0 + - deps: iconv-lite@0.4.15 + * deps: type-is@~1.6.14 + - deps: mime-types@~2.1.13 + +1.15.2 / 2016-06-19 +=================== + + * deps: bytes@2.4.0 + * deps: content-type@~1.0.2 + - perf: enable strict mode + * deps: http-errors@~1.5.0 + - Use `setprototypeof` module to replace `__proto__` setting + - deps: statuses@'>= 1.3.0 < 2' + - perf: enable strict mode + * deps: qs@6.2.0 + * deps: raw-body@~2.1.7 + - deps: bytes@2.4.0 + - perf: remove double-cleanup on happy path + * deps: type-is@~1.6.13 + - deps: mime-types@~2.1.11 + +1.15.1 / 2016-05-05 +=================== + + * deps: bytes@2.3.0 + - Drop partial bytes on all parsed units + - Fix parsing byte string that looks like hex + * deps: raw-body@~2.1.6 + - deps: bytes@2.3.0 + * deps: type-is@~1.6.12 + - deps: mime-types@~2.1.10 + +1.15.0 / 2016-02-10 +=================== + + * deps: http-errors@~1.4.0 + - Add `HttpError` export, for `err instanceof createError.HttpError` + - deps: inherits@2.0.1 + - deps: statuses@'>= 1.2.1 < 2' + * deps: qs@6.1.0 + * deps: type-is@~1.6.11 + - 
deps: mime-types@~2.1.9 + +1.14.2 / 2015-12-16 +=================== + + * deps: bytes@2.2.0 + * deps: iconv-lite@0.4.13 + * deps: qs@5.2.0 + * deps: raw-body@~2.1.5 + - deps: bytes@2.2.0 + - deps: iconv-lite@0.4.13 + * deps: type-is@~1.6.10 + - deps: mime-types@~2.1.8 + +1.14.1 / 2015-09-27 +=================== + + * Fix issue where invalid charset results in 400 when `verify` used + * deps: iconv-lite@0.4.12 + - Fix CESU-8 decoding in Node.js 4.x + * deps: raw-body@~2.1.4 + - Fix masking critical errors from `iconv-lite` + - deps: iconv-lite@0.4.12 + * deps: type-is@~1.6.9 + - deps: mime-types@~2.1.7 + +1.14.0 / 2015-09-16 +=================== + + * Fix JSON strict parse error to match syntax errors + * Provide static `require` analysis in `urlencoded` parser + * deps: depd@~1.1.0 + - Support web browser loading + * deps: qs@5.1.0 + * deps: raw-body@~2.1.3 + - Fix sync callback when attaching data listener causes sync read + * deps: type-is@~1.6.8 + - Fix type error when given invalid type to match against + - deps: mime-types@~2.1.6 + +1.13.3 / 2015-07-31 +=================== + + * deps: type-is@~1.6.6 + - deps: mime-types@~2.1.4 + +1.13.2 / 2015-07-05 +=================== + + * deps: iconv-lite@0.4.11 + * deps: qs@4.0.0 + - Fix dropping parameters like `hasOwnProperty` + - Fix user-visible incompatibilities from 3.1.0 + - Fix various parsing edge cases + * deps: raw-body@~2.1.2 + - Fix error stack traces to skip `makeError` + - deps: iconv-lite@0.4.11 + * deps: type-is@~1.6.4 + - deps: mime-types@~2.1.2 + - perf: enable strict mode + - perf: remove argument reassignment + +1.13.1 / 2015-06-16 +=================== + + * deps: qs@2.4.2 + - Downgraded from 3.1.0 because of user-visible incompatibilities + +1.13.0 / 2015-06-14 +=================== + + * Add `statusCode` property on `Error`s, in addition to `status` + * Change `type` default to `application/json` for JSON parser + * Change `type` default to `application/x-www-form-urlencoded` for urlencoded parser + * Provide static `require` analysis + * Use the `http-errors` module to generate errors + * deps: bytes@2.1.0 + - Slight optimizations + * deps: iconv-lite@0.4.10 + - The encoding UTF-16 without BOM now defaults to UTF-16LE when detection fails + - Leading BOM is now removed when decoding + * deps: on-finished@~2.3.0 + - Add defined behavior for HTTP `CONNECT` requests + - Add defined behavior for HTTP `Upgrade` requests + - deps: ee-first@1.1.1 + * deps: qs@3.1.0 + - Fix dropping parameters like `hasOwnProperty` + - Fix various parsing edge cases + - Parsed object now has `null` prototype + * deps: raw-body@~2.1.1 + - Use `unpipe` module for unpiping requests + - deps: iconv-lite@0.4.10 + * deps: type-is@~1.6.3 + - deps: mime-types@~2.1.1 + - perf: reduce try block size + - perf: remove bitwise operations + * perf: enable strict mode + * perf: remove argument reassignment + * perf: remove delete call + +1.12.4 / 2015-05-10 +=================== + + * deps: debug@~2.2.0 + * deps: qs@2.4.2 + - Fix allowing parameters like `constructor` + * deps: on-finished@~2.2.1 + * deps: raw-body@~2.0.1 + - Fix a false-positive when unpiping in Node.js 0.8 + - deps: bytes@2.0.1 + * deps: type-is@~1.6.2 + - deps: mime-types@~2.0.11 + +1.12.3 / 2015-04-15 +=================== + + * Slight efficiency improvement when not debugging + * deps: depd@~1.0.1 + * deps: iconv-lite@0.4.8 + - Add encoding alias UNICODE-1-1-UTF-7 + * deps: raw-body@1.3.4 + - Fix hanging callback if request aborts during read + - deps: iconv-lite@0.4.8 + +1.12.2 / 2015-03-16 
+=================== + + * deps: qs@2.4.1 + - Fix error when parameter `hasOwnProperty` is present + +1.12.1 / 2015-03-15 +=================== + + * deps: debug@~2.1.3 + - Fix high intensity foreground color for bold + - deps: ms@0.7.0 + * deps: type-is@~1.6.1 + - deps: mime-types@~2.0.10 + +1.12.0 / 2015-02-13 +=================== + + * add `debug` messages + * accept a function for the `type` option + * use `content-type` to parse `Content-Type` headers + * deps: iconv-lite@0.4.7 + - Gracefully support enumerables on `Object.prototype` + * deps: raw-body@1.3.3 + - deps: iconv-lite@0.4.7 + * deps: type-is@~1.6.0 + - fix argument reassignment + - fix false-positives in `hasBody` `Transfer-Encoding` check + - support wildcard for both type and subtype (`*/*`) + - deps: mime-types@~2.0.9 + +1.11.0 / 2015-01-30 +=================== + + * make internal `extended: true` depth limit infinity + * deps: type-is@~1.5.6 + - deps: mime-types@~2.0.8 + +1.10.2 / 2015-01-20 +=================== + + * deps: iconv-lite@0.4.6 + - Fix rare aliases of single-byte encodings + * deps: raw-body@1.3.2 + - deps: iconv-lite@0.4.6 + +1.10.1 / 2015-01-01 +=================== + + * deps: on-finished@~2.2.0 + * deps: type-is@~1.5.5 + - deps: mime-types@~2.0.7 + +1.10.0 / 2014-12-02 +=================== + + * make internal `extended: true` array limit dynamic + +1.9.3 / 2014-11-21 +================== + + * deps: iconv-lite@0.4.5 + - Fix Windows-31J and X-SJIS encoding support + * deps: qs@2.3.3 + - Fix `arrayLimit` behavior + * deps: raw-body@1.3.1 + - deps: iconv-lite@0.4.5 + * deps: type-is@~1.5.3 + - deps: mime-types@~2.0.3 + +1.9.2 / 2014-10-27 +================== + + * deps: qs@2.3.2 + - Fix parsing of mixed objects and values + +1.9.1 / 2014-10-22 +================== + + * deps: on-finished@~2.1.1 + - Fix handling of pipelined requests + * deps: qs@2.3.0 + - Fix parsing of mixed implicit and explicit arrays + * deps: type-is@~1.5.2 + - deps: mime-types@~2.0.2 + +1.9.0 / 2014-09-24 +================== + + * include the charset in "unsupported charset" error message + * include the encoding in "unsupported content encoding" error message + * deps: depd@~1.0.0 + +1.8.4 / 2014-09-23 +================== + + * fix content encoding to be case-insensitive + +1.8.3 / 2014-09-19 +================== + + * deps: qs@2.2.4 + - Fix issue with object keys starting with numbers truncated + +1.8.2 / 2014-09-15 +================== + + * deps: depd@0.4.5 + +1.8.1 / 2014-09-07 +================== + + * deps: media-typer@0.3.0 + * deps: type-is@~1.5.1 + +1.8.0 / 2014-09-05 +================== + + * make empty-body-handling consistent between chunked requests + - empty `json` produces `{}` + - empty `raw` produces `new Buffer(0)` + - empty `text` produces `''` + - empty `urlencoded` produces `{}` + * deps: qs@2.2.3 + - Fix issue where first empty value in array is discarded + * deps: type-is@~1.5.0 + - fix `hasbody` to be true for `content-length: 0` + +1.7.0 / 2014-09-01 +================== + + * add `parameterLimit` option to `urlencoded` parser + * change `urlencoded` extended array limit to 100 + * respond with 413 when over `parameterLimit` in `urlencoded` + +1.6.7 / 2014-08-29 +================== + + * deps: qs@2.2.2 + - Remove unnecessary cloning + +1.6.6 / 2014-08-27 +================== + + * deps: qs@2.2.0 + - Array parsing fix + - Performance improvements + +1.6.5 / 2014-08-16 +================== + + * deps: on-finished@2.1.0 + +1.6.4 / 2014-08-14 +================== + + * deps: qs@1.2.2 + +1.6.3 / 2014-08-10 
+================== + + * deps: qs@1.2.1 + +1.6.2 / 2014-08-07 +================== + + * deps: qs@1.2.0 + - Fix parsing array of objects + +1.6.1 / 2014-08-06 +================== + + * deps: qs@1.1.0 + - Accept urlencoded square brackets + - Accept empty values in implicit array notation + +1.6.0 / 2014-08-05 +================== + + * deps: qs@1.0.2 + - Complete rewrite + - Limits array length to 20 + - Limits object depth to 5 + - Limits parameters to 1,000 + +1.5.2 / 2014-07-27 +================== + + * deps: depd@0.4.4 + - Work-around v8 generating empty stack traces + +1.5.1 / 2014-07-26 +================== + + * deps: depd@0.4.3 + - Fix exception when global `Error.stackTraceLimit` is too low + +1.5.0 / 2014-07-20 +================== + + * deps: depd@0.4.2 + - Add `TRACE_DEPRECATION` environment variable + - Remove non-standard grey color from color output + - Support `--no-deprecation` argument + - Support `--trace-deprecation` argument + * deps: iconv-lite@0.4.4 + - Added encoding UTF-7 + * deps: raw-body@1.3.0 + - deps: iconv-lite@0.4.4 + - Added encoding UTF-7 + - Fix `Cannot switch to old mode now` error on Node.js 0.10+ + * deps: type-is@~1.3.2 + +1.4.3 / 2014-06-19 +================== + + * deps: type-is@1.3.1 + - fix global variable leak + +1.4.2 / 2014-06-19 +================== + + * deps: type-is@1.3.0 + - improve type parsing + +1.4.1 / 2014-06-19 +================== + + * fix urlencoded extended deprecation message + +1.4.0 / 2014-06-19 +================== + + * add `text` parser + * add `raw` parser + * check accepted charset in content-type (accepts utf-8) + * check accepted encoding in content-encoding (accepts identity) + * deprecate `bodyParser()` middleware; use `.json()` and `.urlencoded()` as needed + * deprecate `urlencoded()` without provided `extended` option + * lazy-load urlencoded parsers + * parsers split into files for reduced mem usage + * support gzip and deflate bodies + - set `inflate: false` to turn off + * deps: raw-body@1.2.2 + - Support all encodings from `iconv-lite` + +1.3.1 / 2014-06-11 +================== + + * deps: type-is@1.2.1 + - Switch dependency from mime to mime-types@1.0.0 + +1.3.0 / 2014-05-31 +================== + + * add `extended` option to urlencoded parser + +1.2.2 / 2014-05-27 +================== + + * deps: raw-body@1.1.6 + - assert stream encoding on node.js 0.8 + - assert stream encoding on node.js < 0.10.6 + - deps: bytes@1 + +1.2.1 / 2014-05-26 +================== + + * invoke `next(err)` after request fully read + - prevents hung responses and socket hang ups + +1.2.0 / 2014-05-11 +================== + + * add `verify` option + * deps: type-is@1.2.0 + - support suffix matching + +1.1.2 / 2014-05-11 +================== + + * improve json parser speed + +1.1.1 / 2014-05-11 +================== + + * fix repeated limit parsing with every request + +1.1.0 / 2014-05-10 +================== + + * add `type` option + * deps: pin for safety and consistency + +1.0.2 / 2014-04-14 +================== + + * use `type-is` module + +1.0.1 / 2014-03-20 +================== + + * lower default limits to 100kb diff --git a/backend/Backend/node_modules/body-parser/LICENSE b/backend/Backend/node_modules/body-parser/LICENSE new file mode 100644 index 000000000..386b7b694 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this 
software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/backend/Backend/node_modules/body-parser/README.md b/backend/Backend/node_modules/body-parser/README.md new file mode 100644 index 000000000..f6661b7d3 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/README.md @@ -0,0 +1,476 @@ +# body-parser + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Build Status][ci-image]][ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] +[![OpenSSF Scorecard Badge][ossf-scorecard-badge]][ossf-scorecard-visualizer] + +Node.js body parsing middleware. + +Parse incoming request bodies in a middleware before your handlers, available +under the `req.body` property. + +**Note** As `req.body`'s shape is based on user-controlled input, all +properties and values in this object are untrusted and should be validated +before trusting. For example, `req.body.foo.toString()` may fail in multiple +ways, for example the `foo` property may not be there or may not be a string, +and `toString` may not be a function and instead a string or other user input. + +[Learn about the anatomy of an HTTP transaction in Node.js](https://nodejs.org/en/docs/guides/anatomy-of-an-http-transaction/). + +_This does not handle multipart bodies_, due to their complex and typically +large nature. For multipart bodies, you may be interested in the following +modules: + + * [busboy](https://www.npmjs.org/package/busboy#readme) and + [connect-busboy](https://www.npmjs.org/package/connect-busboy#readme) + * [multiparty](https://www.npmjs.org/package/multiparty#readme) and + [connect-multiparty](https://www.npmjs.org/package/connect-multiparty#readme) + * [formidable](https://www.npmjs.org/package/formidable#readme) + * [multer](https://www.npmjs.org/package/multer#readme) + +This module provides the following parsers: + + * [JSON body parser](#bodyparserjsonoptions) + * [Raw body parser](#bodyparserrawoptions) + * [Text body parser](#bodyparsertextoptions) + * [URL-encoded form body parser](#bodyparserurlencodedoptions) + +Other body parsers you might be interested in: + +- [body](https://www.npmjs.org/package/body#readme) +- [co-body](https://www.npmjs.org/package/co-body#readme) + +## Installation + +```sh +$ npm install body-parser +``` + +## API + +```js +var bodyParser = require('body-parser') +``` + +The `bodyParser` object exposes various factories to create middlewares. 
All +middlewares will populate the `req.body` property with the parsed body when +the `Content-Type` request header matches the `type` option, or an empty +object (`{}`) if there was no body to parse, the `Content-Type` was not matched, +or an error occurred. + +The various errors returned by this module are described in the +[errors section](#errors). + +### bodyParser.json([options]) + +Returns middleware that only parses `json` and only looks at requests where +the `Content-Type` header matches the `type` option. This parser accepts any +Unicode encoding of the body and supports automatic inflation of `gzip` and +`deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). + +#### Options + +The `json` function takes an optional `options` object that may contain any of +the following keys: + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### reviver + +The `reviver` option is passed directly to `JSON.parse` as the second +argument. You can find more information on this argument +[in the MDN documentation about JSON.parse](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#Example.3A_Using_the_reviver_parameter). + +##### strict + +When set to `true`, will only accept arrays and objects; when `false` will +accept anything `JSON.parse` accepts. Defaults to `true`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not a +function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `json`), a mime type (like `application/json`), or +a mime type with a wildcard (like `*/*` or `*/json`). If a function, the `type` +option is called as `fn(req)` and the request is parsed if it returns a truthy +value. Defaults to `application/json`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +### bodyParser.raw([options]) + +Returns middleware that parses all bodies as a `Buffer` and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser supports automatic inflation of `gzip` and `deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). This will be a `Buffer` object +of the body. + +#### Options + +The `raw` function takes an optional `options` object that may contain any of +the following keys: + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. 
If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. +If not a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this +can be an extension name (like `bin`), a mime type (like +`application/octet-stream`), or a mime type with a wildcard (like `*/*` or +`application/*`). If a function, the `type` option is called as `fn(req)` +and the request is parsed if it returns a truthy value. Defaults to +`application/octet-stream`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +### bodyParser.text([options]) + +Returns middleware that parses all bodies as a string and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser supports automatic inflation of `gzip` and `deflate` encodings. + +A new `body` string containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). This will be a string of the +body. + +#### Options + +The `text` function takes an optional `options` object that may contain any of +the following keys: + +##### defaultCharset + +Specify the default character set for the text content if the charset is not +specified in the `Content-Type` header of the request. Defaults to `utf-8`. + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not +a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `txt`), a mime type (like `text/plain`), or a mime +type with a wildcard (like `*/*` or `text/*`). If a function, the `type` +option is called as `fn(req)` and the request is parsed if it returns a +truthy value. Defaults to `text/plain`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +### bodyParser.urlencoded([options]) + +Returns middleware that only parses `urlencoded` bodies and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser accepts only UTF-8 encoding of the body and supports automatic +inflation of `gzip` and `deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). 
This object will contain +key-value pairs, where the value can be a string or array (when `extended` is +`false`), or any type (when `extended` is `true`). + +#### Options + +The `urlencoded` function takes an optional `options` object that may contain +any of the following keys: + +##### extended + +The `extended` option allows you to choose between parsing the URL-encoded data +with the `querystring` library (when `false`) or the `qs` library (when +`true`). The "extended" syntax allows for rich objects and arrays to be +encoded into the URL-encoded format, allowing for a JSON-like experience +with URL-encoded data. For more information, please +[see the qs library](https://www.npmjs.org/package/qs#readme). + +Defaults to `true`, but using the default has been deprecated. Please +research the difference between `qs` and `querystring` and choose the +appropriate setting. + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### parameterLimit + +The `parameterLimit` option controls the maximum number of parameters that +are allowed in the URL-encoded data. If a request contains more parameters +than this value, a 413 will be returned to the client. Defaults to `1000`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not +a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `urlencoded`), a mime type (like +`application/x-www-form-urlencoded`), or a mime type with a wildcard (like +`*/x-www-form-urlencoded`). If a function, the `type` option is called as +`fn(req)` and the request is parsed if it returns a truthy value. Defaults +to `application/x-www-form-urlencoded`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +##### depth + +The `depth` option is used to configure the maximum depth of the `qs` library when `extended` is `true`. This allows you to limit the number of keys that are parsed and can be useful to prevent certain types of abuse. Defaults to `32`. It is recommended to keep this value as low as possible. + +## Errors + +The middlewares provided by this module create errors using the +[`http-errors` module](https://www.npmjs.com/package/http-errors). The errors +will typically have a `status`/`statusCode` property that contains the suggested +HTTP response code, an `expose` property to determine if the `message` property +should be displayed to the client, a `type` property to determine the type of +error without matching against the `message`, and a `body` property containing +the read body, if available. + +The following are the common errors created, though any error can come through +for various reasons. + +### content encoding unsupported + +This error will occur when the request had a `Content-Encoding` header that +contained an encoding but the "inflation" option was set to `false`.
The +`status` property is set to `415`, the `type` property is set to +`'encoding.unsupported'`, and the `encoding` property will be set to the +encoding that is unsupported. + +### entity parse failed + +This error will occur when the request contained an entity that could not be +parsed by the middleware. The `status` property is set to `400`, the `type` +property is set to `'entity.parse.failed'`, and the `body` property is set to +the entity value that failed parsing. + +### entity verify failed + +This error will occur when the request contained an entity that +failed verification by the defined `verify` option. The `status` property is +set to `403`, the `type` property is set to `'entity.verify.failed'`, and the +`body` property is set to the entity value that failed verification. + +### request aborted + +This error will occur when the request is aborted by the client before reading +the body has finished. The `received` property will be set to the number of +bytes received before the request was aborted and the `expected` property is +set to the number of expected bytes. The `status` property is set to `400` +and the `type` property is set to `'request.aborted'`. + +### request entity too large + +This error will occur when the request body's size is larger than the "limit" +option. The `limit` property will be set to the byte limit and the `length` +property will be set to the request body's length. The `status` property is +set to `413` and the `type` property is set to `'entity.too.large'`. + +### request size did not match content length + +This error will occur when the request's length did not match the length from +the `Content-Length` header. This typically occurs when the request is malformed, +often because the `Content-Length` header was calculated based on characters +instead of bytes. The `status` property is set to `400` and the `type` property +is set to `'request.size.invalid'`. + +### stream encoding should not be set + +This error will occur when something called the `req.setEncoding` method prior +to this middleware. This module operates directly on bytes only and you cannot +call `req.setEncoding` when using this module. The `status` property is set to +`500` and the `type` property is set to `'stream.encoding.set'`. + +### stream is not readable + +This error will occur when the request is no longer readable when this middleware +attempts to read it. This typically means something other than a middleware from +this module read the request body already and the middleware was also configured to +read the same request. The `status` property is set to `500` and the `type` +property is set to `'stream.not.readable'`. + +### too many parameters + +This error will occur when the content of the request exceeds the configured +`parameterLimit` for the `urlencoded` parser. The `status` property is set to +`413` and the `type` property is set to `'parameters.too.many'`. + +### unsupported charset "BOGUS" + +This error will occur when the request had a charset parameter in the +`Content-Type` header, but the `iconv-lite` module does not support it OR the +parser does not support it. The charset is contained in the message as well +as in the `charset` property. The `status` property is set to `415`, the +`type` property is set to `'charset.unsupported'`, and the `charset` property +is set to the charset that is unsupported.
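For illustration only (this handler is a sketch, not part of this module's API), an Express error handler can branch on the `status` and `type` properties described in this section:

```js
var express = require('express')
var bodyParser = require('body-parser')

var app = express()
app.use(bodyParser.json())

// sketch of an error handler that inspects the documented error properties
app.use(function (err, req, res, next) {
  if (err.type === 'entity.parse.failed' || err.type === 'entity.too.large') {
    // malformed or oversized body; err.status is 400 or 413 respectively
    return res.status(err.status).send('could not process request body')
  }
  if (err.type === 'charset.unsupported' || err.type === 'encoding.unsupported') {
    return res.status(415).send('unsupported charset or content encoding')
  }
  // anything else falls through to the default error handler
  next(err)
})
```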
+ +### unsupported content encoding "bogus" + +This error will occur when the request had a `Content-Encoding` header that +contained an unsupported encoding. The encoding is contained in the message +as well as in the `encoding` property. The `status` property is set to `415`, +the `type` property is set to `'encoding.unsupported'`, and the `encoding` +property is set to the encoding that is unsupported. + +### The input exceeded the depth + +This error occurs when using `bodyParser.urlencoded` with the `extended` property set to `true` and the input exceeds the configured `depth` option. The `status` property is set to `400`. It is recommended to review the `depth` option and evaluate if it requires a higher value. When the `depth` option is set to `32` (default value), the error will not be thrown. + +## Examples + +### Express/Connect top-level generic + +This example demonstrates adding a generic JSON and URL-encoded parser as a +top-level middleware, which will parse the bodies of all incoming requests. +This is the simplest setup. + +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// parse application/x-www-form-urlencoded +app.use(bodyParser.urlencoded({ extended: false })) + +// parse application/json +app.use(bodyParser.json()) + +app.use(function (req, res) { + res.setHeader('Content-Type', 'text/plain') + res.write('you posted:\n') + res.end(JSON.stringify(req.body, null, 2)) +}) +``` + +### Express route-specific + +This example demonstrates adding body parsers specifically to the routes that +need them. In general, this is the most recommended way to use body-parser with +Express. + +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// create application/json parser +var jsonParser = bodyParser.json() + +// create application/x-www-form-urlencoded parser +var urlencodedParser = bodyParser.urlencoded({ extended: false }) + +// POST /login gets urlencoded bodies +app.post('/login', urlencodedParser, function (req, res) { + res.send('welcome, ' + req.body.username) +}) + +// POST /api/users gets JSON bodies +app.post('/api/users', jsonParser, function (req, res) { + // create user in req.body +}) +``` + +### Change accepted type for parsers + +All the parsers accept a `type` option which allows you to change the +`Content-Type` that the middleware will parse. 
+ +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// parse various different custom JSON types as JSON +app.use(bodyParser.json({ type: 'application/*+json' })) + +// parse some custom thing into a Buffer +app.use(bodyParser.raw({ type: 'application/vnd.custom-type' })) + +// parse an HTML body into a string +app.use(bodyParser.text({ type: 'text/html' })) +``` + +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/expressjs/body-parser/master?label=ci +[ci-url]: https://github.com/expressjs/body-parser/actions/workflows/ci.yml +[coveralls-image]: https://badgen.net/coveralls/c/github/expressjs/body-parser/master +[coveralls-url]: https://coveralls.io/r/expressjs/body-parser?branch=master +[node-version-image]: https://badgen.net/npm/node/body-parser +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/body-parser +[npm-url]: https://npmjs.org/package/body-parser +[npm-version-image]: https://badgen.net/npm/v/body-parser +[ossf-scorecard-badge]: https://api.scorecard.dev/projects/github.com/expressjs/body-parser/badge +[ossf-scorecard-visualizer]: https://ossf.github.io/scorecard-visualizer/#/projects/github.com/expressjs/body-parser \ No newline at end of file diff --git a/backend/Backend/node_modules/body-parser/SECURITY.md b/backend/Backend/node_modules/body-parser/SECURITY.md new file mode 100644 index 000000000..9694d4296 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/SECURITY.md @@ -0,0 +1,25 @@ +# Security Policies and Procedures + +## Reporting a Bug + +The Express team and community take all security bugs seriously. Thank you +for improving the security of Express. We appreciate your efforts and +responsible disclosure and will make every effort to acknowledge your +contributions. + +Report security bugs by emailing the current owner(s) of `body-parser`. This +information can be found in the npm registry using the command +`npm owner ls body-parser`. +If unsure or unable to get the information from the above, open an issue +in the [project issue tracker](https://github.com/expressjs/body-parser/issues) +asking for the current contact information. + +To ensure the timely response to your report, please ensure that the entirety +of the report is contained within the email body and not solely behind a web +link or an attachment. + +At least one owner will acknowledge your email within 48 hours, and will send a +more detailed response within 48 hours indicating the next steps in handling +your report. After the initial reply to your report, the owners will +endeavor to keep you informed of the progress towards a fix and full +announcement, and may ask for additional information or guidance. diff --git a/backend/Backend/node_modules/body-parser/index.js b/backend/Backend/node_modules/body-parser/index.js new file mode 100644 index 000000000..bb24d739d --- /dev/null +++ b/backend/Backend/node_modules/body-parser/index.js @@ -0,0 +1,156 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var deprecate = require('depd')('body-parser') + +/** + * Cache of loaded parsers. + * @private + */ + +var parsers = Object.create(null) + +/** + * @typedef Parsers + * @type {function} + * @property {function} json + * @property {function} raw + * @property {function} text + * @property {function} urlencoded + */ + +/** + * Module exports. 
+ * @type {Parsers} + */ + +exports = module.exports = deprecate.function(bodyParser, + 'bodyParser: use individual json/urlencoded middlewares') + +/** + * JSON parser. + * @public + */ + +Object.defineProperty(exports, 'json', { + configurable: true, + enumerable: true, + get: createParserGetter('json') +}) + +/** + * Raw parser. + * @public + */ + +Object.defineProperty(exports, 'raw', { + configurable: true, + enumerable: true, + get: createParserGetter('raw') +}) + +/** + * Text parser. + * @public + */ + +Object.defineProperty(exports, 'text', { + configurable: true, + enumerable: true, + get: createParserGetter('text') +}) + +/** + * URL-encoded parser. + * @public + */ + +Object.defineProperty(exports, 'urlencoded', { + configurable: true, + enumerable: true, + get: createParserGetter('urlencoded') +}) + +/** + * Create a middleware to parse json and urlencoded bodies. + * + * @param {object} [options] + * @return {function} + * @deprecated + * @public + */ + +function bodyParser (options) { + // use default type for parsers + var opts = Object.create(options || null, { + type: { + configurable: true, + enumerable: true, + value: undefined, + writable: true + } + }) + + var _urlencoded = exports.urlencoded(opts) + var _json = exports.json(opts) + + return function bodyParser (req, res, next) { + _json(req, res, function (err) { + if (err) return next(err) + _urlencoded(req, res, next) + }) + } +} + +/** + * Create a getter for loading a parser. + * @private + */ + +function createParserGetter (name) { + return function get () { + return loadParser(name) + } +} + +/** + * Load a parser module. + * @private + */ + +function loadParser (parserName) { + var parser = parsers[parserName] + + if (parser !== undefined) { + return parser + } + + // this uses a switch for static require analysis + switch (parserName) { + case 'json': + parser = require('./lib/types/json') + break + case 'raw': + parser = require('./lib/types/raw') + break + case 'text': + parser = require('./lib/types/text') + break + case 'urlencoded': + parser = require('./lib/types/urlencoded') + break + } + + // store to prevent invoking require() + return (parsers[parserName] = parser) +} diff --git a/backend/Backend/node_modules/body-parser/lib/read.js b/backend/Backend/node_modules/body-parser/lib/read.js new file mode 100644 index 000000000..fce6283f5 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/lib/read.js @@ -0,0 +1,205 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var createError = require('http-errors') +var destroy = require('destroy') +var getBody = require('raw-body') +var iconv = require('iconv-lite') +var onFinished = require('on-finished') +var unpipe = require('unpipe') +var zlib = require('zlib') + +/** + * Module exports. + */ + +module.exports = read + +/** + * Read a request into a buffer and parse. + * + * @param {object} req + * @param {object} res + * @param {function} next + * @param {function} parse + * @param {function} debug + * @param {object} options + * @private + */ + +function read (req, res, next, parse, debug, options) { + var length + var opts = options + var stream + + // flag as parsed + req._body = true + + // read options + var encoding = opts.encoding !== null + ? 
opts.encoding + : null + var verify = opts.verify + + try { + // get the content stream + stream = contentstream(req, debug, opts.inflate) + length = stream.length + stream.length = undefined + } catch (err) { + return next(err) + } + + // set raw-body options + opts.length = length + opts.encoding = verify + ? null + : encoding + + // assert charset is supported + if (opts.encoding === null && encoding !== null && !iconv.encodingExists(encoding)) { + return next(createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', { + charset: encoding.toLowerCase(), + type: 'charset.unsupported' + })) + } + + // read body + debug('read body') + getBody(stream, opts, function (error, body) { + if (error) { + var _error + + if (error.type === 'encoding.unsupported') { + // echo back charset + _error = createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', { + charset: encoding.toLowerCase(), + type: 'charset.unsupported' + }) + } else { + // set status code on error + _error = createError(400, error) + } + + // unpipe from stream and destroy + if (stream !== req) { + unpipe(req) + destroy(stream, true) + } + + // read off entire request + dump(req, function onfinished () { + next(createError(400, _error)) + }) + return + } + + // verify + if (verify) { + try { + debug('verify body') + verify(req, res, body, encoding) + } catch (err) { + next(createError(403, err, { + body: body, + type: err.type || 'entity.verify.failed' + })) + return + } + } + + // parse + var str = body + try { + debug('parse body') + str = typeof body !== 'string' && encoding !== null + ? iconv.decode(body, encoding) + : body + req.body = parse(str) + } catch (err) { + next(createError(400, err, { + body: str, + type: err.type || 'entity.parse.failed' + })) + return + } + + next() + }) +} + +/** + * Get the content stream of the request. + * + * @param {object} req + * @param {function} debug + * @param {boolean} [inflate=true] + * @return {object} + * @api private + */ + +function contentstream (req, debug, inflate) { + var encoding = (req.headers['content-encoding'] || 'identity').toLowerCase() + var length = req.headers['content-length'] + var stream + + debug('content-encoding "%s"', encoding) + + if (inflate === false && encoding !== 'identity') { + throw createError(415, 'content encoding unsupported', { + encoding: encoding, + type: 'encoding.unsupported' + }) + } + + switch (encoding) { + case 'deflate': + stream = zlib.createInflate() + debug('inflate body') + req.pipe(stream) + break + case 'gzip': + stream = zlib.createGunzip() + debug('gunzip body') + req.pipe(stream) + break + case 'identity': + stream = req + stream.length = length + break + default: + throw createError(415, 'unsupported content encoding "' + encoding + '"', { + encoding: encoding, + type: 'encoding.unsupported' + }) + } + + return stream +} + +/** + * Dump the contents of a request. + * + * @param {object} req + * @param {function} callback + * @api private + */ + +function dump (req, callback) { + if (onFinished.isFinished(req)) { + callback(null) + } else { + onFinished(req, callback) + req.resume() + } +} diff --git a/backend/Backend/node_modules/body-parser/lib/types/json.js b/backend/Backend/node_modules/body-parser/lib/types/json.js new file mode 100644 index 000000000..59f3f7e28 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/lib/types/json.js @@ -0,0 +1,247 @@ +/*! 
+ * body-parser + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var createError = require('http-errors') +var debug = require('debug')('body-parser:json') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = json + +/** + * RegExp to match the first non-space in a string. + * + * Allowed whitespace is defined in RFC 7159: + * + * ws = *( + * %x20 / ; Space + * %x09 / ; Horizontal tab + * %x0A / ; Line feed or New line + * %x0D ) ; Carriage return + */ + +var FIRST_CHAR_REGEXP = /^[\x20\x09\x0a\x0d]*([^\x20\x09\x0a\x0d])/ // eslint-disable-line no-control-regex + +var JSON_SYNTAX_CHAR = '#' +var JSON_SYNTAX_REGEXP = /#+/g + +/** + * Create a middleware to parse JSON bodies. + * + * @param {object} [options] + * @return {function} + * @public + */ + +function json (options) { + var opts = options || {} + + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var inflate = opts.inflate !== false + var reviver = opts.reviver + var strict = opts.strict !== false + var type = opts.type || 'application/json' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? typeChecker(type) + : type + + function parse (body) { + if (body.length === 0) { + // special-case empty json body, as it's a common client-side mistake + // TODO: maybe make this configurable or part of "strict" option + return {} + } + + if (strict) { + var first = firstchar(body) + + if (first !== '{' && first !== '[') { + debug('strict violation') + throw createStrictSyntaxError(body, first) + } + } + + try { + debug('parse json') + return JSON.parse(body, reviver) + } catch (e) { + throw normalizeJsonSyntaxError(e, { + message: e.message, + stack: e.stack + }) + } + } + + return function jsonParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // assert charset per RFC 7159 sec 8.1 + var charset = getCharset(req) || 'utf-8' + if (charset.slice(0, 4) !== 'utf-') { + debug('invalid charset') + next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', { + charset: charset, + type: 'charset.unsupported' + })) + return + } + + // read + read(req, res, next, parse, debug, { + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Create strict violation syntax error matching native error. 
+ * + * @param {string} str + * @param {string} char + * @return {Error} + * @private + */ + +function createStrictSyntaxError (str, char) { + var index = str.indexOf(char) + var partial = '' + + if (index !== -1) { + partial = str.substring(0, index) + JSON_SYNTAX_CHAR + + for (var i = index + 1; i < str.length; i++) { + partial += JSON_SYNTAX_CHAR + } + } + + try { + JSON.parse(partial); /* istanbul ignore next */ throw new SyntaxError('strict violation') + } catch (e) { + return normalizeJsonSyntaxError(e, { + message: e.message.replace(JSON_SYNTAX_REGEXP, function (placeholder) { + return str.substring(index, index + placeholder.length) + }), + stack: e.stack + }) + } +} + +/** + * Get the first non-whitespace character in a string. + * + * @param {string} str + * @return {function} + * @private + */ + +function firstchar (str) { + var match = FIRST_CHAR_REGEXP.exec(str) + + return match + ? match[1] + : undefined +} + +/** + * Get the charset of a request. + * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Normalize a SyntaxError for JSON.parse. + * + * @param {SyntaxError} error + * @param {object} obj + * @return {SyntaxError} + */ + +function normalizeJsonSyntaxError (error, obj) { + var keys = Object.getOwnPropertyNames(error) + + for (var i = 0; i < keys.length; i++) { + var key = keys[i] + if (key !== 'stack' && key !== 'message') { + delete error[key] + } + } + + // replace stack before message for Node.js 0.10 and below + error.stack = obj.stack.replace(error.message, obj.message) + error.message = obj.message + + return error +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/backend/Backend/node_modules/body-parser/lib/types/raw.js b/backend/Backend/node_modules/body-parser/lib/types/raw.js new file mode 100644 index 000000000..f5d1b6747 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/lib/types/raw.js @@ -0,0 +1,101 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + */ + +var bytes = require('bytes') +var debug = require('debug')('body-parser:raw') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = raw + +/** + * Create a middleware to parse raw bodies. + * + * @param {object} [options] + * @return {function} + * @api public + */ + +function raw (options) { + var opts = options || {} + + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'application/octet-stream' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? 
typeChecker(type) + : type + + function parse (buf) { + return buf + } + + return function rawParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // read + read(req, res, next, parse, debug, { + encoding: null, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/backend/Backend/node_modules/body-parser/lib/types/text.js b/backend/Backend/node_modules/body-parser/lib/types/text.js new file mode 100644 index 000000000..083a00908 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/lib/types/text.js @@ -0,0 +1,121 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var debug = require('debug')('body-parser:text') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = text + +/** + * Create a middleware to parse text bodies. + * + * @param {object} [options] + * @return {function} + * @api public + */ + +function text (options) { + var opts = options || {} + + var defaultCharset = opts.defaultCharset || 'utf-8' + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'text/plain' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? typeChecker(type) + : type + + function parse (buf) { + return buf + } + + return function textParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // get charset + var charset = getCharset(req) || defaultCharset + + // read + read(req, res, next, parse, debug, { + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Get the charset of a request. + * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Get the simple type checker. 
+ * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/backend/Backend/node_modules/body-parser/lib/types/urlencoded.js b/backend/Backend/node_modules/body-parser/lib/types/urlencoded.js new file mode 100644 index 000000000..2bd4485f5 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/lib/types/urlencoded.js @@ -0,0 +1,307 @@ +/*! + * body-parser + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var createError = require('http-errors') +var debug = require('debug')('body-parser:urlencoded') +var deprecate = require('depd')('body-parser') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = urlencoded + +/** + * Cache of parser modules. + */ + +var parsers = Object.create(null) + +/** + * Create a middleware to parse urlencoded bodies. + * + * @param {object} [options] + * @return {function} + * @public + */ + +function urlencoded (options) { + var opts = options || {} + + // notice because option default will flip in next major + if (opts.extended === undefined) { + deprecate('undefined extended: provide extended option') + } + + var extended = opts.extended !== false + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'application/x-www-form-urlencoded' + var verify = opts.verify || false + var depth = typeof opts.depth !== 'number' + ? Number(opts.depth || 32) + : opts.depth + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate query parser + var queryparse = extended + ? extendedparser(opts) + : simpleparser(opts) + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? typeChecker(type) + : type + + function parse (body) { + return body.length + ? queryparse(body) + : {} + } + + return function urlencodedParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // assert charset + var charset = getCharset(req) || 'utf-8' + if (charset !== 'utf-8') { + debug('invalid charset') + next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', { + charset: charset, + type: 'charset.unsupported' + })) + return + } + + // read + read(req, res, next, parse, debug, { + debug: debug, + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify, + depth: depth + }) + } +} + +/** + * Get the extended query parser. + * + * @param {object} options + */ + +function extendedparser (options) { + var parameterLimit = options.parameterLimit !== undefined + ? options.parameterLimit + : 1000 + + var depth = typeof options.depth !== 'number' + ? 
Number(options.depth || 32) + : options.depth + var parse = parser('qs') + + if (isNaN(parameterLimit) || parameterLimit < 1) { + throw new TypeError('option parameterLimit must be a positive number') + } + + if (isNaN(depth) || depth < 0) { + throw new TypeError('option depth must be a zero or a positive number') + } + + if (isFinite(parameterLimit)) { + parameterLimit = parameterLimit | 0 + } + + return function queryparse (body) { + var paramCount = parameterCount(body, parameterLimit) + + if (paramCount === undefined) { + debug('too many parameters') + throw createError(413, 'too many parameters', { + type: 'parameters.too.many' + }) + } + + var arrayLimit = Math.max(100, paramCount) + + debug('parse extended urlencoding') + try { + return parse(body, { + allowPrototypes: true, + arrayLimit: arrayLimit, + depth: depth, + strictDepth: true, + parameterLimit: parameterLimit + }) + } catch (err) { + if (err instanceof RangeError) { + throw createError(400, 'The input exceeded the depth', { + type: 'querystring.parse.rangeError' + }) + } else { + throw err + } + } + } +} + +/** + * Get the charset of a request. + * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Count the number of parameters, stopping once limit reached + * + * @param {string} body + * @param {number} limit + * @api private + */ + +function parameterCount (body, limit) { + var count = 0 + var index = 0 + + while ((index = body.indexOf('&', index)) !== -1) { + count++ + index++ + + if (count === limit) { + return undefined + } + } + + return count +} + +/** + * Get parser for module name dynamically. + * + * @param {string} name + * @return {function} + * @api private + */ + +function parser (name) { + var mod = parsers[name] + + if (mod !== undefined) { + return mod.parse + } + + // this uses a switch for static require analysis + switch (name) { + case 'qs': + mod = require('qs') + break + case 'querystring': + mod = require('querystring') + break + } + + // store to prevent invoking require() + parsers[name] = mod + + return mod.parse +} + +/** + * Get the simple query parser. + * + * @param {object} options + */ + +function simpleparser (options) { + var parameterLimit = options.parameterLimit !== undefined + ? options.parameterLimit + : 1000 + var parse = parser('querystring') + + if (isNaN(parameterLimit) || parameterLimit < 1) { + throw new TypeError('option parameterLimit must be a positive number') + } + + if (isFinite(parameterLimit)) { + parameterLimit = parameterLimit | 0 + } + + return function queryparse (body) { + var paramCount = parameterCount(body, parameterLimit) + + if (paramCount === undefined) { + debug('too many parameters') + throw createError(413, 'too many parameters', { + type: 'parameters.too.many' + }) + } + + debug('parse urlencoding') + return parse(body, undefined, undefined, { maxKeys: parameterLimit }) + } +} + +/** + * Get the simple type checker. 
+ * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/backend/Backend/node_modules/body-parser/node_modules/debug/.coveralls.yml b/backend/Backend/node_modules/body-parser/node_modules/debug/.coveralls.yml new file mode 100644 index 000000000..20a706858 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/debug/.coveralls.yml @@ -0,0 +1 @@ +repo_token: SIAeZjKYlHK74rbcFvNHMUzjRiMpflxve diff --git a/backend/Backend/node_modules/body-parser/node_modules/debug/.eslintrc b/backend/Backend/node_modules/body-parser/node_modules/debug/.eslintrc new file mode 100644 index 000000000..8a37ae2c2 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/debug/.eslintrc @@ -0,0 +1,11 @@ +{ + "env": { + "browser": true, + "node": true + }, + "rules": { + "no-console": 0, + "no-empty": [1, { "allowEmptyCatch": true }] + }, + "extends": "eslint:recommended" +} diff --git a/backend/Backend/node_modules/body-parser/node_modules/debug/.npmignore b/backend/Backend/node_modules/body-parser/node_modules/debug/.npmignore new file mode 100644 index 000000000..5f60eecc8 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/debug/.npmignore @@ -0,0 +1,9 @@ +support +test +examples +example +*.sock +dist +yarn.lock +coverage +bower.json diff --git a/backend/Backend/node_modules/body-parser/node_modules/debug/.travis.yml b/backend/Backend/node_modules/body-parser/node_modules/debug/.travis.yml new file mode 100644 index 000000000..6c6090c3b --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/debug/.travis.yml @@ -0,0 +1,14 @@ + +language: node_js +node_js: + - "6" + - "5" + - "4" + +install: + - make node_modules + +script: + - make lint + - make test + - make coveralls diff --git a/backend/Backend/node_modules/body-parser/node_modules/debug/CHANGELOG.md b/backend/Backend/node_modules/body-parser/node_modules/debug/CHANGELOG.md new file mode 100644 index 000000000..eadaa1895 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/debug/CHANGELOG.md @@ -0,0 +1,362 @@ + +2.6.9 / 2017-09-22 +================== + + * remove ReDoS regexp in %o formatter (#504) + +2.6.8 / 2017-05-18 +================== + + * Fix: Check for undefined on browser globals (#462, @marbemac) + +2.6.7 / 2017-05-16 +================== + + * Fix: Update ms to 2.0.0 to fix regular expression denial of service vulnerability (#458, @hubdotcom) + * Fix: Inline extend function in node implementation (#452, @dougwilson) + * Docs: Fix typo (#455, @msasad) + +2.6.5 / 2017-04-27 +================== + + * Fix: null reference check on window.documentElement.style.WebkitAppearance (#447, @thebigredgeek) + * Misc: clean up browser reference checks (#447, @thebigredgeek) + * Misc: add npm-debug.log to .gitignore (@thebigredgeek) + + +2.6.4 / 2017-04-20 +================== + + * Fix: bug that would occure if process.env.DEBUG is a non-string value. (#444, @LucianBuzzo) + * Chore: ignore bower.json in npm installations. 
(#437, @joaovieira) + * Misc: update "ms" to v0.7.3 (@tootallnate) + +2.6.3 / 2017-03-13 +================== + + * Fix: Electron reference to `process.env.DEBUG` (#431, @paulcbetts) + * Docs: Changelog fix (@thebigredgeek) + +2.6.2 / 2017-03-10 +================== + + * Fix: DEBUG_MAX_ARRAY_LENGTH (#420, @slavaGanzin) + * Docs: Add backers and sponsors from Open Collective (#422, @piamancini) + * Docs: Add Slackin invite badge (@tootallnate) + +2.6.1 / 2017-02-10 +================== + + * Fix: Module's `export default` syntax fix for IE8 `Expected identifier` error + * Fix: Whitelist DEBUG_FD for values 1 and 2 only (#415, @pi0) + * Fix: IE8 "Expected identifier" error (#414, @vgoma) + * Fix: Namespaces would not disable once enabled (#409, @musikov) + +2.6.0 / 2016-12-28 +================== + + * Fix: added better null pointer checks for browser useColors (@thebigredgeek) + * Improvement: removed explicit `window.debug` export (#404, @tootallnate) + * Improvement: deprecated `DEBUG_FD` environment variable (#405, @tootallnate) + +2.5.2 / 2016-12-25 +================== + + * Fix: reference error on window within webworkers (#393, @KlausTrainer) + * Docs: fixed README typo (#391, @lurch) + * Docs: added notice about v3 api discussion (@thebigredgeek) + +2.5.1 / 2016-12-20 +================== + + * Fix: babel-core compatibility + +2.5.0 / 2016-12-20 +================== + + * Fix: wrong reference in bower file (@thebigredgeek) + * Fix: webworker compatibility (@thebigredgeek) + * Fix: output formatting issue (#388, @kribblo) + * Fix: babel-loader compatibility (#383, @escwald) + * Misc: removed built asset from repo and publications (@thebigredgeek) + * Misc: moved source files to /src (#378, @yamikuronue) + * Test: added karma integration and replaced babel with browserify for browser tests (#378, @yamikuronue) + * Test: coveralls integration (#378, @yamikuronue) + * Docs: simplified language in the opening paragraph (#373, @yamikuronue) + +2.4.5 / 2016-12-17 +================== + + * Fix: `navigator` undefined in Rhino (#376, @jochenberger) + * Fix: custom log function (#379, @hsiliev) + * Improvement: bit of cleanup + linting fixes (@thebigredgeek) + * Improvement: rm non-maintainted `dist/` dir (#375, @freewil) + * Docs: simplified language in the opening paragraph. (#373, @yamikuronue) + +2.4.4 / 2016-12-14 +================== + + * Fix: work around debug being loaded in preload scripts for electron (#368, @paulcbetts) + +2.4.3 / 2016-12-14 +================== + + * Fix: navigation.userAgent error for react native (#364, @escwald) + +2.4.2 / 2016-12-14 +================== + + * Fix: browser colors (#367, @tootallnate) + * Misc: travis ci integration (@thebigredgeek) + * Misc: added linting and testing boilerplate with sanity check (@thebigredgeek) + +2.4.1 / 2016-12-13 +================== + + * Fix: typo that broke the package (#356) + +2.4.0 / 2016-12-13 +================== + + * Fix: bower.json references unbuilt src entry point (#342, @justmatt) + * Fix: revert "handle regex special characters" (@tootallnate) + * Feature: configurable util.inspect()`options for NodeJS (#327, @tootallnate) + * Feature: %O`(big O) pretty-prints objects (#322, @tootallnate) + * Improvement: allow colors in workers (#335, @botverse) + * Improvement: use same color for same namespace. 
(#338, @lchenay) + +2.3.3 / 2016-11-09 +================== + + * Fix: Catch `JSON.stringify()` errors (#195, Jovan Alleyne) + * Fix: Returning `localStorage` saved values (#331, Levi Thomason) + * Improvement: Don't create an empty object when no `process` (Nathan Rajlich) + +2.3.2 / 2016-11-09 +================== + + * Fix: be super-safe in index.js as well (@TooTallNate) + * Fix: should check whether process exists (Tom Newby) + +2.3.1 / 2016-11-09 +================== + + * Fix: Added electron compatibility (#324, @paulcbetts) + * Improvement: Added performance optimizations (@tootallnate) + * Readme: Corrected PowerShell environment variable example (#252, @gimre) + * Misc: Removed yarn lock file from source control (#321, @fengmk2) + +2.3.0 / 2016-11-07 +================== + + * Fix: Consistent placement of ms diff at end of output (#215, @gorangajic) + * Fix: Escaping of regex special characters in namespace strings (#250, @zacronos) + * Fix: Fixed bug causing crash on react-native (#282, @vkarpov15) + * Feature: Enabled ES6+ compatible import via default export (#212 @bucaran) + * Feature: Added %O formatter to reflect Chrome's console.log capability (#279, @oncletom) + * Package: Update "ms" to 0.7.2 (#315, @DevSide) + * Package: removed superfluous version property from bower.json (#207 @kkirsche) + * Readme: fix USE_COLORS to DEBUG_COLORS + * Readme: Doc fixes for format string sugar (#269, @mlucool) + * Readme: Updated docs for DEBUG_FD and DEBUG_COLORS environment variables (#232, @mattlyons0) + * Readme: doc fixes for PowerShell (#271 #243, @exoticknight @unreadable) + * Readme: better docs for browser support (#224, @matthewmueller) + * Tooling: Added yarn integration for development (#317, @thebigredgeek) + * Misc: Renamed History.md to CHANGELOG.md (@thebigredgeek) + * Misc: Added license file (#226 #274, @CantemoInternal @sdaitzman) + * Misc: Updated contributors (@thebigredgeek) + +2.2.0 / 2015-05-09 +================== + + * package: update "ms" to v0.7.1 (#202, @dougwilson) + * README: add logging to file example (#193, @DanielOchoa) + * README: fixed a typo (#191, @amir-s) + * browser: expose `storage` (#190, @stephenmathieson) + * Makefile: add a `distclean` target (#189, @stephenmathieson) + +2.1.3 / 2015-03-13 +================== + + * Updated stdout/stderr example (#186) + * Updated example/stdout.js to match debug current behaviour + * Renamed example/stderr.js to stdout.js + * Update Readme.md (#184) + * replace high intensity foreground color for bold (#182, #183) + +2.1.2 / 2015-03-01 +================== + + * dist: recompile + * update "ms" to v0.7.0 + * package: update "browserify" to v9.0.3 + * component: fix "ms.js" repo location + * changed bower package name + * updated documentation about using debug in a browser + * fix: security error on safari (#167, #168, @yields) + +2.1.1 / 2014-12-29 +================== + + * browser: use `typeof` to check for `console` existence + * browser: check for `console.log` truthiness (fix IE 8/9) + * browser: add support for Chrome apps + * Readme: added Windows usage remarks + * Add `bower.json` to properly support bower install + +2.1.0 / 2014-10-15 +================== + + * node: implement `DEBUG_FD` env variable support + * package: update "browserify" to v6.1.0 + * package: add "license" field to package.json (#135, @panuhorsmalahti) + +2.0.0 / 2014-09-01 +================== + + * package: update "browserify" to v5.11.0 + * node: use stderr rather than stdout for logging (#29, @stephenmathieson) + +1.0.4 / 2014-07-15 
+================== + + * dist: recompile + * example: remove `console.info()` log usage + * example: add "Content-Type" UTF-8 header to browser example + * browser: place %c marker after the space character + * browser: reset the "content" color via `color: inherit` + * browser: add colors support for Firefox >= v31 + * debug: prefer an instance `log()` function over the global one (#119) + * Readme: update documentation about styled console logs for FF v31 (#116, @wryk) + +1.0.3 / 2014-07-09 +================== + + * Add support for multiple wildcards in namespaces (#122, @seegno) + * browser: fix lint + +1.0.2 / 2014-06-10 +================== + + * browser: update color palette (#113, @gscottolson) + * common: make console logging function configurable (#108, @timoxley) + * node: fix %o colors on old node <= 0.8.x + * Makefile: find node path using shell/which (#109, @timoxley) + +1.0.1 / 2014-06-06 +================== + + * browser: use `removeItem()` to clear localStorage + * browser, node: don't set DEBUG if namespaces is undefined (#107, @leedm777) + * package: add "contributors" section + * node: fix comment typo + * README: list authors + +1.0.0 / 2014-06-04 +================== + + * make ms diff be global, not be scope + * debug: ignore empty strings in enable() + * node: make DEBUG_COLORS able to disable coloring + * *: export the `colors` array + * npmignore: don't publish the `dist` dir + * Makefile: refactor to use browserify + * package: add "browserify" as a dev dependency + * Readme: add Web Inspector Colors section + * node: reset terminal color for the debug content + * node: map "%o" to `util.inspect()` + * browser: map "%j" to `JSON.stringify()` + * debug: add custom "formatters" + * debug: use "ms" module for humanizing the diff + * Readme: add "bash" syntax highlighting + * browser: add Firebug color support + * browser: add colors for WebKit browsers + * node: apply log to `console` + * rewrite: abstract common logic for Node & browsers + * add .jshintrc file + +0.8.1 / 2014-04-14 +================== + + * package: re-add the "component" section + +0.8.0 / 2014-03-30 +================== + + * add `enable()` method for nodejs. Closes #27 + * change from stderr to stdout + * remove unnecessary index.js file + +0.7.4 / 2013-11-13 +================== + + * remove "browserify" key from package.json (fixes something in browserify) + +0.7.3 / 2013-10-30 +================== + + * fix: catch localStorage security error when cookies are blocked (Chrome) + * add debug(err) support. Closes #46 + * add .browser prop to package.json. Closes #42 + +0.7.2 / 2013-02-06 +================== + + * fix package.json + * fix: Mobile Safari (private mode) is broken with debug + * fix: Use unicode to send escape character to shell instead of octal to work with strict mode javascript + +0.7.1 / 2013-02-05 +================== + + * add repository URL to package.json + * add DEBUG_COLORED to force colored output + * add browserify support + * fix component. Closes #24 + +0.7.0 / 2012-05-04 +================== + + * Added .component to package.json + * Added debug.component.js build + +0.6.0 / 2012-03-16 +================== + + * Added support for "-" prefix in DEBUG [Vinay Pulim] + * Added `.enabled` flag to the node version [TooTallNate] + +0.5.0 / 2012-02-02 +================== + + * Added: humanize diffs. Closes #8 + * Added `debug.disable()` to the CS variant + * Removed padding. Closes #10 + * Fixed: persist client-side variant again. 
Closes #9 + +0.4.0 / 2012-02-01 +================== + + * Added browser variant support for older browsers [TooTallNate] + * Added `debug.enable('project:*')` to browser variant [TooTallNate] + * Added padding to diff (moved it to the right) + +0.3.0 / 2012-01-26 +================== + + * Added millisecond diff when isatty, otherwise UTC string + +0.2.0 / 2012-01-22 +================== + + * Added wildcard support + +0.1.0 / 2011-12-02 +================== + + * Added: remove colors unless stderr isatty [TooTallNate] + +0.0.1 / 2010-01-03 +================== + + * Initial release diff --git a/backend/Backend/node_modules/body-parser/node_modules/debug/LICENSE b/backend/Backend/node_modules/body-parser/node_modules/debug/LICENSE new file mode 100644 index 000000000..658c933d2 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/debug/LICENSE @@ -0,0 +1,19 @@ +(The MIT License) + +Copyright (c) 2014 TJ Holowaychuk + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the 'Software'), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial +portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/backend/Backend/node_modules/body-parser/node_modules/debug/Makefile b/backend/Backend/node_modules/body-parser/node_modules/debug/Makefile new file mode 100644 index 000000000..584da8bf9 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/debug/Makefile @@ -0,0 +1,50 @@ +# get Makefile directory name: http://stackoverflow.com/a/5982798/376773 +THIS_MAKEFILE_PATH:=$(word $(words $(MAKEFILE_LIST)),$(MAKEFILE_LIST)) +THIS_DIR:=$(shell cd $(dir $(THIS_MAKEFILE_PATH));pwd) + +# BIN directory +BIN := $(THIS_DIR)/node_modules/.bin + +# Path +PATH := node_modules/.bin:$(PATH) +SHELL := /bin/bash + +# applications +NODE ?= $(shell which node) +YARN ?= $(shell which yarn) +PKG ?= $(if $(YARN),$(YARN),$(NODE) $(shell which npm)) +BROWSERIFY ?= $(NODE) $(BIN)/browserify + +.FORCE: + +install: node_modules + +node_modules: package.json + @NODE_ENV= $(PKG) install + @touch node_modules + +lint: .FORCE + eslint browser.js debug.js index.js node.js + +test-node: .FORCE + istanbul cover node_modules/mocha/bin/_mocha -- test/**.js + +test-browser: .FORCE + mkdir -p dist + + @$(BROWSERIFY) \ + --standalone debug \ + . 
> dist/debug.js + + karma start --single-run + rimraf dist + +test: .FORCE + concurrently \ + "make test-node" \ + "make test-browser" + +coveralls: + cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js + +.PHONY: all install clean distclean diff --git a/backend/Backend/node_modules/body-parser/node_modules/debug/README.md b/backend/Backend/node_modules/body-parser/node_modules/debug/README.md new file mode 100644 index 000000000..f67be6b31 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/debug/README.md @@ -0,0 +1,312 @@ +# debug +[![Build Status](https://travis-ci.org/visionmedia/debug.svg?branch=master)](https://travis-ci.org/visionmedia/debug) [![Coverage Status](https://coveralls.io/repos/github/visionmedia/debug/badge.svg?branch=master)](https://coveralls.io/github/visionmedia/debug?branch=master) [![Slack](https://visionmedia-community-slackin.now.sh/badge.svg)](https://visionmedia-community-slackin.now.sh/) [![OpenCollective](https://opencollective.com/debug/backers/badge.svg)](#backers) +[![OpenCollective](https://opencollective.com/debug/sponsors/badge.svg)](#sponsors) + + + +A tiny node.js debugging utility modelled after node core's debugging technique. + +**Discussion around the V3 API is under way [here](https://github.com/visionmedia/debug/issues/370)** + +## Installation + +```bash +$ npm install debug +``` + +## Usage + +`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole. + +Example _app.js_: + +```js +var debug = require('debug')('http') + , http = require('http') + , name = 'My App'; + +// fake app + +debug('booting %s', name); + +http.createServer(function(req, res){ + debug(req.method + ' ' + req.url); + res.end('hello\n'); +}).listen(3000, function(){ + debug('listening'); +}); + +// fake worker of some kind + +require('./worker'); +``` + +Example _worker.js_: + +```js +var debug = require('debug')('worker'); + +setInterval(function(){ + debug('doing some work'); +}, 1000); +``` + + The __DEBUG__ environment variable is then used to enable these based on space or comma-delimited names. Here are some examples: + + ![debug http and worker](http://f.cl.ly/items/18471z1H402O24072r1J/Screenshot.png) + + ![debug worker](http://f.cl.ly/items/1X413v1a3M0d3C2c1E0i/Screenshot.png) + +#### Windows note + + On Windows the environment variable is set using the `set` command. + + ```cmd + set DEBUG=*,-not_this + ``` + + Note that PowerShell uses different syntax to set environment variables. + + ```cmd + $env:DEBUG = "*,-not_this" + ``` + +Then, run the program to be debugged as usual. + +## Millisecond diff + + When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the "+NNNms" will show you how much time was spent between calls. 
+ + ![](http://f.cl.ly/items/2i3h1d3t121M2Z1A3Q0N/Screenshot.png) + + When stdout is not a TTY, `Date#toUTCString()` is used, making it more useful for logging the debug information as shown below: + + ![](http://f.cl.ly/items/112H3i0e0o0P0a2Q2r11/Screenshot.png) + +## Conventions + + If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". + +## Wildcards + + The `*` character may be used as a wildcard. Suppose for example your library has debuggers named "connect:bodyParser", "connect:compress", "connect:session", instead of listing all three with `DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do `DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`. + + You can also exclude specific debuggers by prefixing them with a "-" character. For example, `DEBUG=*,-connect:*` would include all debuggers except those starting with "connect:". + +## Environment Variables + + When running through Node.js, you can set a few environment variables that will + change the behavior of the debug logging: + +| Name | Purpose | +|-----------|-------------------------------------------------| +| `DEBUG` | Enables/disables specific debugging namespaces. | +| `DEBUG_COLORS`| Whether or not to use colors in the debug output. | +| `DEBUG_DEPTH` | Object inspection depth. | +| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. | + + + __Note:__ The environment variables beginning with `DEBUG_` end up being + converted into an Options object that gets used with `%o`/`%O` formatters. + See the Node.js documentation for + [`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options) + for the complete list. + +## Formatters + + + Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting. Below are the officially supported formatters: + +| Formatter | Representation | +|-----------|----------------| +| `%O` | Pretty-print an Object on multiple lines. | +| `%o` | Pretty-print an Object all on a single line. | +| `%s` | String. | +| `%d` | Number (both integer and float). | +| `%j` | JSON. Replaced with the string '[Circular]' if the argument contains circular references. | +| `%%` | Single percent sign ('%'). This does not consume an argument. | + +### Custom formatters + + You can add custom formatters by extending the `debug.formatters` object. For example, if you wanted to add support for rendering a Buffer as hex with `%h`, you could do something like: + +```js +const createDebug = require('debug') +createDebug.formatters.h = (v) => { + return v.toString('hex') +} + +// …elsewhere +const debug = createDebug('foo') +debug('this is hex: %h', new Buffer('hello world')) +// foo this is hex: 68656c6c6f20776f726c6421 +0ms +``` + +## Browser support + You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify), + or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest), + if you don't want to build it yourself. + + Debug's enable state is currently persisted by `localStorage`. + Consider the situation shown below where you have `worker:a` and `worker:b`, + and wish to debug both. 
You can enable this using `localStorage.debug`: + +```js +localStorage.debug = 'worker:*' +``` + +And then refresh the page. + +```js +a = debug('worker:a'); +b = debug('worker:b'); + +setInterval(function(){ + a('doing some work'); +}, 1000); + +setInterval(function(){ + b('doing some work'); +}, 1200); +``` + +#### Web Inspector Colors + + Colors are also enabled on "Web Inspectors" that understand the `%c` formatting + option. These are WebKit web inspectors, Firefox ([since version + 31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/)) + and the Firebug plugin for Firefox (any version). + + Colored output looks something like: + + ![](https://cloud.githubusercontent.com/assets/71256/3139768/b98c5fd8-e8ef-11e3-862a-f7253b6f47c6.png) + + +## Output streams + + By default `debug` will log to stderr, however this can be configured per-namespace by overriding the `log` method: + +Example _stdout.js_: + +```js +var debug = require('debug'); +var error = debug('app:error'); + +// by default stderr is used +error('goes to stderr!'); + +var log = debug('app:log'); +// set this namespace to log via console.log +log.log = console.log.bind(console); // don't forget to bind to console! +log('goes to stdout'); +error('still goes to stderr!'); + +// set all output to go via console.info +// overrides all per-namespace log settings +debug.log = console.info.bind(console); +error('now goes to stdout via console.info'); +log('still goes to stdout, but via console.info now'); +``` + + +## Authors + + - TJ Holowaychuk + - Nathan Rajlich + - Andrew Rhyne + +## Backers + +Support us with a monthly donation and help us continue our activities. [[Become a backer](https://opencollective.com/debug#backer)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## Sponsors + +Become a sponsor and get your logo on our README on Github with a link to your site. [[Become a sponsor](https://opencollective.com/debug#sponsor)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## License + +(The MIT License) + +Copyright (c) 2014-2016 TJ Holowaychuk <tj@vision-media.ca> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/backend/Backend/node_modules/body-parser/node_modules/debug/component.json b/backend/Backend/node_modules/body-parser/node_modules/debug/component.json new file mode 100644 index 000000000..9de26410f --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/debug/component.json @@ -0,0 +1,19 @@ +{ + "name": "debug", + "repo": "visionmedia/debug", + "description": "small debugging utility", + "version": "2.6.9", + "keywords": [ + "debug", + "log", + "debugger" + ], + "main": "src/browser.js", + "scripts": [ + "src/browser.js", + "src/debug.js" + ], + "dependencies": { + "rauchg/ms.js": "0.7.1" + } +} diff --git a/backend/Backend/node_modules/body-parser/node_modules/debug/karma.conf.js b/backend/Backend/node_modules/body-parser/node_modules/debug/karma.conf.js new file mode 100644 index 000000000..103a82d15 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/debug/karma.conf.js @@ -0,0 +1,70 @@ +// Karma configuration +// Generated on Fri Dec 16 2016 13:09:51 GMT+0000 (UTC) + +module.exports = function(config) { + config.set({ + + // base path that will be used to resolve all patterns (eg. files, exclude) + basePath: '', + + + // frameworks to use + // available frameworks: https://npmjs.org/browse/keyword/karma-adapter + frameworks: ['mocha', 'chai', 'sinon'], + + + // list of files / patterns to load in the browser + files: [ + 'dist/debug.js', + 'test/*spec.js' + ], + + + // list of files to exclude + exclude: [ + 'src/node.js' + ], + + + // preprocess matching files before serving them to the browser + // available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor + preprocessors: { + }, + + // test results reporter to use + // possible values: 'dots', 'progress' + // available reporters: https://npmjs.org/browse/keyword/karma-reporter + reporters: ['progress'], + + + // web server port + port: 9876, + + + // enable / disable colors in the output (reporters and logs) + colors: true, + + + // level of logging + // possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG + logLevel: config.LOG_INFO, + + + // enable / disable watching file and executing tests whenever any file changes + autoWatch: true, + + + // start these browsers + // available browser launchers: https://npmjs.org/browse/keyword/karma-launcher + browsers: ['PhantomJS'], + + + // Continuous Integration mode + // if true, Karma captures browsers, runs the tests and exits + singleRun: false, + + // Concurrency level + // how many browser should be started simultaneous + concurrency: Infinity + }) +} diff --git a/backend/Backend/node_modules/body-parser/node_modules/debug/node.js b/backend/Backend/node_modules/body-parser/node_modules/debug/node.js new file mode 100644 index 000000000..7fc36fe6d --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/debug/node.js @@ -0,0 +1 @@ +module.exports = require('./src/node'); diff --git a/backend/Backend/node_modules/body-parser/node_modules/debug/package.json b/backend/Backend/node_modules/body-parser/node_modules/debug/package.json new file mode 100644 index 000000000..dc787ba76 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/debug/package.json @@ -0,0 +1,49 @@ +{ + "name": "debug", + "version": "2.6.9", + "repository": { + "type": "git", + "url": "git://github.com/visionmedia/debug.git" + }, + "description": "small debugging utility", + "keywords": [ + "debug", + "log", + "debugger" + ], + "author": "TJ 
Holowaychuk ", + "contributors": [ + "Nathan Rajlich (http://n8.io)", + "Andrew Rhyne " + ], + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + }, + "devDependencies": { + "browserify": "9.0.3", + "chai": "^3.5.0", + "concurrently": "^3.1.0", + "coveralls": "^2.11.15", + "eslint": "^3.12.1", + "istanbul": "^0.4.5", + "karma": "^1.3.0", + "karma-chai": "^0.1.0", + "karma-mocha": "^1.3.0", + "karma-phantomjs-launcher": "^1.0.2", + "karma-sinon": "^1.0.5", + "mocha": "^3.2.0", + "mocha-lcov-reporter": "^1.2.0", + "rimraf": "^2.5.4", + "sinon": "^1.17.6", + "sinon-chai": "^2.8.0" + }, + "main": "./src/index.js", + "browser": "./src/browser.js", + "component": { + "scripts": { + "debug/index.js": "browser.js", + "debug/debug.js": "debug.js" + } + } +} diff --git a/backend/Backend/node_modules/body-parser/node_modules/debug/src/browser.js b/backend/Backend/node_modules/body-parser/node_modules/debug/src/browser.js new file mode 100644 index 000000000..710692493 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/debug/src/browser.js @@ -0,0 +1,185 @@ +/** + * This is the web browser implementation of `debug()`. + * + * Expose `debug()` as the module. + */ + +exports = module.exports = require('./debug'); +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = 'undefined' != typeof chrome + && 'undefined' != typeof chrome.storage + ? chrome.storage.local + : localstorage(); + +/** + * Colors. + */ + +exports.colors = [ + 'lightseagreen', + 'forestgreen', + 'goldenrod', + 'dodgerblue', + 'darkorchid', + 'crimson' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && window.process.type === 'renderer') { + return true; + } + + // is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +exports.formatters.j = function(v) { + try { + return JSON.stringify(v); + } catch (err) { + return '[UnexpectedJSONParseError]: ' + err.message; + } +}; + + +/** + * Colorize log arguments if enabled. 
+ * + * @api public + */ + +function formatArgs(args) { + var useColors = this.useColors; + + args[0] = (useColors ? '%c' : '') + + this.namespace + + (useColors ? ' %c' : ' ') + + args[0] + + (useColors ? '%c ' : ' ') + + '+' + exports.humanize(this.diff); + + if (!useColors) return; + + var c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit') + + // the final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + var index = 0; + var lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, function(match) { + if ('%%' === match) return; + index++; + if ('%c' === match) { + // we only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.log()` when available. + * No-op when `console.log` is not a "function". + * + * @api public + */ + +function log() { + // this hackery is required for IE8/9, where + // the `console.log` function doesn't have 'apply' + return 'object' === typeof console + && console.log + && Function.prototype.apply.call(console.log, console, arguments); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ + +function save(namespaces) { + try { + if (null == namespaces) { + exports.storage.removeItem('debug'); + } else { + exports.storage.debug = namespaces; + } + } catch(e) {} +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + var r; + try { + r = exports.storage.debug; + } catch(e) {} + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Enable namespaces listed in `localStorage.debug` initially. + */ + +exports.enable(load()); + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + return window.localStorage; + } catch (e) {} +} diff --git a/backend/Backend/node_modules/body-parser/node_modules/debug/src/debug.js b/backend/Backend/node_modules/body-parser/node_modules/debug/src/debug.js new file mode 100644 index 000000000..6a5e3fc94 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/debug/src/debug.js @@ -0,0 +1,202 @@ + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + * + * Expose `debug()` as the module. + */ + +exports = module.exports = createDebug.debug = createDebug['default'] = createDebug; +exports.coerce = coerce; +exports.disable = disable; +exports.enable = enable; +exports.enabled = enabled; +exports.humanize = require('ms'); + +/** + * The currently active debug mode names, and names to skip. + */ + +exports.names = []; +exports.skips = []; + +/** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + +exports.formatters = {}; + +/** + * Previous log timestamp. + */ + +var prevTime; + +/** + * Select a color. 
+ * @param {String} namespace + * @return {Number} + * @api private + */ + +function selectColor(namespace) { + var hash = 0, i; + + for (i in namespace) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return exports.colors[Math.abs(hash) % exports.colors.length]; +} + +/** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + +function createDebug(namespace) { + + function debug() { + // disabled? + if (!debug.enabled) return; + + var self = debug; + + // set `diff` timestamp + var curr = +new Date(); + var ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + // turn the `arguments` into a proper Array + var args = new Array(arguments.length); + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i]; + } + + args[0] = exports.coerce(args[0]); + + if ('string' !== typeof args[0]) { + // anything else let's inspect with %O + args.unshift('%O'); + } + + // apply any `formatters` transformations + var index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, function(match, format) { + // if we encounter an escaped % then don't increase the array index + if (match === '%%') return match; + index++; + var formatter = exports.formatters[format]; + if ('function' === typeof formatter) { + var val = args[index]; + match = formatter.call(self, val); + + // now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // apply env-specific formatting (colors, etc.) + exports.formatArgs.call(self, args); + + var logFn = debug.log || exports.log || console.log.bind(console); + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.enabled = exports.enabled(namespace); + debug.useColors = exports.useColors(); + debug.color = selectColor(namespace); + + // env-specific initialization logic for debug instances + if ('function' === typeof exports.init) { + exports.init(debug); + } + + return debug; +} + +/** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + +function enable(namespaces) { + exports.save(namespaces); + + exports.names = []; + exports.skips = []; + + var split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + var len = split.length; + + for (var i = 0; i < len; i++) { + if (!split[i]) continue; // ignore empty strings + namespaces = split[i].replace(/\*/g, '.*?'); + if (namespaces[0] === '-') { + exports.skips.push(new RegExp('^' + namespaces.substr(1) + '$')); + } else { + exports.names.push(new RegExp('^' + namespaces + '$')); + } + } +} + +/** + * Disable debug output. + * + * @api public + */ + +function disable() { + exports.enable(''); +} + +/** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + +function enabled(name) { + var i, len; + for (i = 0, len = exports.skips.length; i < len; i++) { + if (exports.skips[i].test(name)) { + return false; + } + } + for (i = 0, len = exports.names.length; i < len; i++) { + if (exports.names[i].test(name)) { + return true; + } + } + return false; +} + +/** + * Coerce `val`. 
+ * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + +function coerce(val) { + if (val instanceof Error) return val.stack || val.message; + return val; +} diff --git a/backend/Backend/node_modules/body-parser/node_modules/debug/src/index.js b/backend/Backend/node_modules/body-parser/node_modules/debug/src/index.js new file mode 100644 index 000000000..e12cf4d58 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/debug/src/index.js @@ -0,0 +1,10 @@ +/** + * Detect Electron renderer process, which is node, but we should + * treat as a browser. + */ + +if (typeof process !== 'undefined' && process.type === 'renderer') { + module.exports = require('./browser.js'); +} else { + module.exports = require('./node.js'); +} diff --git a/backend/Backend/node_modules/body-parser/node_modules/debug/src/inspector-log.js b/backend/Backend/node_modules/body-parser/node_modules/debug/src/inspector-log.js new file mode 100644 index 000000000..60ea6c04a --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/debug/src/inspector-log.js @@ -0,0 +1,15 @@ +module.exports = inspectorLog; + +// black hole +const nullStream = new (require('stream').Writable)(); +nullStream._write = () => {}; + +/** + * Outputs a `console.log()` to the Node.js Inspector console *only*. + */ +function inspectorLog() { + const stdout = console._stdout; + console._stdout = nullStream; + console.log.apply(console, arguments); + console._stdout = stdout; +} diff --git a/backend/Backend/node_modules/body-parser/node_modules/debug/src/node.js b/backend/Backend/node_modules/body-parser/node_modules/debug/src/node.js new file mode 100644 index 000000000..b15109c90 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/debug/src/node.js @@ -0,0 +1,248 @@ +/** + * Module dependencies. + */ + +var tty = require('tty'); +var util = require('util'); + +/** + * This is the Node.js implementation of `debug()`. + * + * Expose `debug()` as the module. + */ + +exports = module.exports = require('./debug'); +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; + +/** + * Colors. + */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(function (key) { + return /^debug_/i.test(key); +}).reduce(function (obj, key) { + // camel-case + var prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, function (_, k) { return k.toUpperCase() }); + + // coerce string value into JS value + var val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) val = true; + else if (/^(no|off|false|disabled)$/i.test(val)) val = false; + else if (val === 'null') val = null; + else val = Number(val); + + obj[prop] = val; + return obj; +}, {}); + +/** + * The file descriptor to write the `debug()` calls to. + * Set the `DEBUG_FD` env variable to override with another value. i.e.: + * + * $ DEBUG_FD=3 node script.js 3>debug.log + */ + +var fd = parseInt(process.env.DEBUG_FD, 10) || 2; + +if (1 !== fd && 2 !== fd) { + util.deprecate(function(){}, 'except for stderr(2) and stdout(1), any other usage of DEBUG_FD is deprecated. Override debug.log if you want to use a different log function (https://git.io/debug_fd)')() +} + +var stream = 1 === fd ? process.stdout : + 2 === fd ? 
process.stderr : + createWritableStdioStream(fd); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts + ? Boolean(exports.inspectOpts.colors) + : tty.isatty(fd); +} + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +exports.formatters.o = function(v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n').map(function(str) { + return str.trim() + }).join(' '); +}; + +/** + * Map %o to `util.inspect()`, allowing multiple lines if needed. + */ + +exports.formatters.O = function(v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + var name = this.namespace; + var useColors = this.useColors; + + if (useColors) { + var c = this.color; + var prefix = ' \u001b[3' + c + ';1m' + name + ' ' + '\u001b[0m'; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push('\u001b[3' + c + 'm+' + exports.humanize(this.diff) + '\u001b[0m'); + } else { + args[0] = new Date().toUTCString() + + ' ' + name + ' ' + args[0]; + } +} + +/** + * Invokes `util.format()` with the specified arguments and writes to `stream`. + */ + +function log() { + return stream.write(util.format.apply(util, arguments) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ + +function save(namespaces) { + if (null == namespaces) { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } else { + process.env.DEBUG = namespaces; + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Copied from `node/src/node.js`. + * + * XXX: It's lame that node doesn't expose this API out-of-the-box. It also + * relies on the undocumented `tty_wrap.guessHandleType()` which is also lame. + */ + +function createWritableStdioStream (fd) { + var stream; + var tty_wrap = process.binding('tty_wrap'); + + // Note stream._type is used for test-module-load-list.js + + switch (tty_wrap.guessHandleType(fd)) { + case 'TTY': + stream = new tty.WriteStream(fd); + stream._type = 'tty'; + + // Hack to have stream not keep the event loop alive. + // See https://github.com/joyent/node/issues/1726 + if (stream._handle && stream._handle.unref) { + stream._handle.unref(); + } + break; + + case 'FILE': + var fs = require('fs'); + stream = new fs.SyncWriteStream(fd, { autoClose: false }); + stream._type = 'fs'; + break; + + case 'PIPE': + case 'TCP': + var net = require('net'); + stream = new net.Socket({ + fd: fd, + readable: false, + writable: true + }); + + // FIXME Should probably have an option in net.Socket to create a + // stream from an existing fd which is writable only. But for now + // we'll just add this hack and set the `readable` member to false. + // Test: ./node test/fixtures/echo.js < /etc/passwd + stream.readable = false; + stream.read = null; + stream._type = 'pipe'; + + // FIXME Hack to have stream not keep the event loop alive. 
+ // See https://github.com/joyent/node/issues/1726 + if (stream._handle && stream._handle.unref) { + stream._handle.unref(); + } + break; + + default: + // Probably an error on in uv_guess_handle() + throw new Error('Implement me. Unknown stream file type!'); + } + + // For supporting legacy API we put the FD here. + stream.fd = fd; + + stream._isStdio = true; + + return stream; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. + */ + +function init (debug) { + debug.inspectOpts = {}; + + var keys = Object.keys(exports.inspectOpts); + for (var i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +/** + * Enable namespaces listed in `process.env.DEBUG` initially. + */ + +exports.enable(load()); diff --git a/backend/Backend/node_modules/body-parser/node_modules/ms/index.js b/backend/Backend/node_modules/body-parser/node_modules/ms/index.js new file mode 100644 index 000000000..6a522b16b --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/ms/index.js @@ -0,0 +1,152 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isNaN(val) === false) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. + * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^((?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + if (ms >= d) { + return Math.round(ms / d) + 'd'; + } + if (ms >= h) { + return Math.round(ms / h) + 'h'; + } + if (ms >= m) { + return Math.round(ms / m) + 'm'; + } + if (ms >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. 
+ * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + return plural(ms, d, 'day') || + plural(ms, h, 'hour') || + plural(ms, m, 'minute') || + plural(ms, s, 'second') || + ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, n, name) { + if (ms < n) { + return; + } + if (ms < n * 1.5) { + return Math.floor(ms / n) + ' ' + name; + } + return Math.ceil(ms / n) + ' ' + name + 's'; +} diff --git a/backend/Backend/node_modules/body-parser/node_modules/ms/license.md b/backend/Backend/node_modules/body-parser/node_modules/ms/license.md new file mode 100644 index 000000000..69b61253a --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Zeit, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/backend/Backend/node_modules/body-parser/node_modules/ms/package.json b/backend/Backend/node_modules/body-parser/node_modules/ms/package.json new file mode 100644 index 000000000..6a31c81fa --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/ms/package.json @@ -0,0 +1,37 @@ +{ + "name": "ms", + "version": "2.0.0", + "description": "Tiny milisecond conversion utility", + "repository": "zeit/ms", + "main": "./index", + "files": [ + "index.js" + ], + "scripts": { + "precommit": "lint-staged", + "lint": "eslint lib/* bin/*", + "test": "mocha tests.js" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "license": "MIT", + "devDependencies": { + "eslint": "3.19.0", + "expect.js": "0.3.1", + "husky": "0.13.3", + "lint-staged": "3.4.1", + "mocha": "3.4.1" + } +} diff --git a/backend/Backend/node_modules/body-parser/node_modules/ms/readme.md b/backend/Backend/node_modules/body-parser/node_modules/ms/readme.md new file mode 100644 index 000000000..84a9974cc --- /dev/null +++ b/backend/Backend/node_modules/body-parser/node_modules/ms/readme.md @@ -0,0 +1,51 @@ +# ms + +[![Build Status](https://travis-ci.org/zeit/ms.svg?branch=master)](https://travis-ci.org/zeit/ms) +[![Slack Channel](http://zeit-slackin.now.sh/badge.svg)](https://zeit.chat/) + +Use this package to easily convert various time formats to milliseconds. 
+ +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +``` + +### Convert from milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(ms('10 hours')) // "10h" +``` + +### Time format written-out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [node](https://nodejs.org) and in the browser. +- If a number is supplied to `ms`, a string with a unit is returned. +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`). +- If you pass a string with a number and a valid unit, the number of equivalent ms is returned. + +## Caught a bug? + +1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, node will now use your clone of ms! + +As always, you can run the tests using: `npm test` diff --git a/backend/Backend/node_modules/body-parser/package.json b/backend/Backend/node_modules/body-parser/package.json new file mode 100644 index 000000000..3c9926fc5 --- /dev/null +++ b/backend/Backend/node_modules/body-parser/package.json @@ -0,0 +1,56 @@ +{ + "name": "body-parser", + "description": "Node.js body parsing middleware", + "version": "1.20.3", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "repository": "expressjs/body-parser", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "devDependencies": { + "eslint": "8.34.0", + "eslint-config-standard": "14.1.1", + "eslint-plugin-import": "2.27.5", + "eslint-plugin-markdown": "3.0.0", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "6.1.1", + "eslint-plugin-standard": "4.1.0", + "methods": "1.1.2", + "mocha": "10.2.0", + "nyc": "15.1.0", + "safe-buffer": "5.2.1", + "supertest": "6.3.3" + }, + "files": [ + "lib/", + "LICENSE", + "HISTORY.md", + "SECURITY.md", + "index.js" + ], + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --require test/support/env --reporter spec --check-leaks --bail test/", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + } +} diff --git a/backend/Backend/node_modules/brace-expansion/LICENSE b/backend/Backend/node_modules/brace-expansion/LICENSE new file mode 100644 index 000000000..de3226673 --- /dev/null +++ b/backend/Backend/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, 
including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/backend/Backend/node_modules/brace-expansion/README.md b/backend/Backend/node_modules/brace-expansion/README.md new file mode 100644 index 000000000..6b4e0e164 --- /dev/null +++ b/backend/Backend/node_modules/brace-expansion/README.md @@ -0,0 +1,129 @@ +# brace-expansion + +[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html), +as known from sh/bash, in JavaScript. + +[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion) +[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion) +[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/) + +[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion) + +## Example + +```js +var expand = require('brace-expansion'); + +expand('file-{a,b,c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('-v{,,}') +// => ['-v', '-v', '-v'] + +expand('file{0..2}.jpg') +// => ['file0.jpg', 'file1.jpg', 'file2.jpg'] + +expand('file-{a..c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('file{2..0}.jpg') +// => ['file2.jpg', 'file1.jpg', 'file0.jpg'] + +expand('file{0..4..2}.jpg') +// => ['file0.jpg', 'file2.jpg', 'file4.jpg'] + +expand('file-{a..e..2}.jpg') +// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg'] + +expand('file{00..10..5}.jpg') +// => ['file00.jpg', 'file05.jpg', 'file10.jpg'] + +expand('{{A..C},{a..c}}') +// => ['A', 'B', 'C', 'a', 'b', 'c'] + +expand('ppp{,config,oe{,conf}}') +// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf'] +``` + +## API + +```js +var expand = require('brace-expansion'); +``` + +### var expanded = expand(str) + +Return an array of all possible and valid expansions of `str`. If none are +found, `[str]` is returned. + +Valid expansions are: + +```js +/^(.*,)+(.+)?$/ +// {a,b,...} +``` + +A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +A numeric sequence from `x` to `y` inclusive, with optional increment. +If `x` or `y` start with a leading `0`, all the numbers will be padded +to have equal length. Negative numbers and backwards iteration work too. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +An alphabetic sequence from `x` to `y` inclusive, with optional increment. +`x` and `y` must be exactly one character, and if given, `incr` must be a +number. + +For compatibility reasons, the string `${` is not eligible for brace expansion. 
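+
+The rules above are easier to see with output. Here is a small sketch covering zero-padding, backwards and negative ranges, and the `${` escape hatch (expected results follow from the description above and from `expand()` in `index.js` below):
+
+```js
+var expand = require('brace-expansion');
+
+expand('{01..03}')
+// => ['01', '02', '03'] (a leading zero pads every result)
+
+expand('{3..-1}')
+// => ['3', '2', '1', '0', '-1'] (backwards iteration with a negative end)
+
+expand('${a,b}/x')
+// => ['${a,b}/x'] (a brace preceded by `$` is not expanded)
+```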
+ +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install brace-expansion +``` + +## Contributors + +- [Julian Gruber](https://github.com/juliangruber) +- [Isaac Z. Schlueter](https://github.com/isaacs) + +## Sponsors + +This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)! + +Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)! + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/backend/Backend/node_modules/brace-expansion/index.js b/backend/Backend/node_modules/brace-expansion/index.js new file mode 100644 index 000000000..0478be81e --- /dev/null +++ b/backend/Backend/node_modules/brace-expansion/index.js @@ -0,0 +1,201 @@ +var concatMap = require('concat-map'); +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? 
parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + diff --git a/backend/Backend/node_modules/brace-expansion/package.json b/backend/Backend/node_modules/brace-expansion/package.json new file mode 100644 index 000000000..a18faa8fd --- /dev/null +++ b/backend/Backend/node_modules/brace-expansion/package.json @@ -0,0 +1,47 @@ +{ + "name": "brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "1.1.11", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "main": "index.js", + "scripts": { + "test": "tape test/*.js", + "gentest": "bash test/generate.sh", + "bench": "matcha test/perf/bench.js" + }, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/backend/Backend/node_modules/braces/LICENSE b/backend/Backend/node_modules/braces/LICENSE new file mode 100644 index 000000000..9af4a67d2 --- /dev/null +++ b/backend/Backend/node_modules/braces/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/backend/Backend/node_modules/braces/README.md b/backend/Backend/node_modules/braces/README.md new file mode 100644 index 000000000..f59dd6045 --- /dev/null +++ b/backend/Backend/node_modules/braces/README.md @@ -0,0 +1,586 @@ +# braces [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/braces.svg?style=flat)](https://www.npmjs.com/package/braces) [![NPM monthly downloads](https://img.shields.io/npm/dm/braces.svg?style=flat)](https://npmjs.org/package/braces) [![NPM total downloads](https://img.shields.io/npm/dt/braces.svg?style=flat)](https://npmjs.org/package/braces) [![Linux Build Status](https://img.shields.io/travis/micromatch/braces.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/braces) + +> Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save braces +``` + +## v3.0.0 Released!! + +See the [changelog](CHANGELOG.md) for details. + +## Why use braces? + +Brace patterns make globs more powerful by adding the ability to match specific ranges and sequences of characters. + +- **Accurate** - complete support for the [Bash 4.3 Brace Expansion](www.gnu.org/software/bash/) specification (passes all of the Bash braces tests) +- **[fast and performant](#benchmarks)** - Starts fast, runs fast and [scales well](#performance) as patterns increase in complexity. +- **Organized code base** - The parser and compiler are easy to maintain and update when edge cases crop up. +- **Well-tested** - Thousands of test assertions, and passes all of the Bash, minimatch, and [brace-expansion](https://github.com/juliangruber/brace-expansion) unit tests (as of the date this was written). +- **Safer** - You shouldn't have to worry about users defining aggressive or malicious brace patterns that can break your application. Braces takes measures to prevent malicious regex that can be used for DDoS attacks (see [catastrophic backtracking](https://www.regular-expressions.info/catastrophic.html)). +- [Supports lists](#lists) - (aka "sets") `a/{b,c}/d` => `['a/b/d', 'a/c/d']` +- [Supports sequences](#sequences) - (aka "ranges") `{01..03}` => `['01', '02', '03']` +- [Supports steps](#steps) - (aka "increments") `{2..10..2}` => `['2', '4', '6', '8', '10']` +- [Supports escaping](#escaping) - To prevent evaluation of special characters. + +## Usage + +The main export is a function that takes one or more brace `patterns` and `options`. + +```js +const braces = require('braces'); +// braces(patterns[, options]); + +console.log(braces(['{01..05}', '{a..e}'])); +//=> ['(0[1-5])', '([a-e])'] + +console.log(braces(['{01..05}', '{a..e}'], { expand: true })); +//=> ['01', '02', '03', '04', '05', 'a', 'b', 'c', 'd', 'e'] +``` + +### Brace Expansion vs. Compilation + +By default, brace patterns are compiled into strings that are optimized for creating regular expressions and matching. 
+ +**Compiled** + +```js +console.log(braces('a/{x,y,z}/b')); +//=> ['a/(x|y|z)/b'] +console.log(braces(['a/{01..20}/b', 'a/{1..5}/b'])); +//=> [ 'a/(0[1-9]|1[0-9]|20)/b', 'a/([1-5])/b' ] +``` + +**Expanded** + +Enable brace expansion by setting the `expand` option to true, or by using [braces.expand()](#expand) (returns an array similar to what you'd expect from Bash, or `echo {1..5}`, or [minimatch](https://github.com/isaacs/minimatch)): + +```js +console.log(braces('a/{x,y,z}/b', { expand: true })); +//=> ['a/x/b', 'a/y/b', 'a/z/b'] + +console.log(braces.expand('{01..10}')); +//=> ['01','02','03','04','05','06','07','08','09','10'] +``` + +### Lists + +Expand lists (like Bash "sets"): + +```js +console.log(braces('a/{foo,bar,baz}/*.js')); +//=> ['a/(foo|bar|baz)/*.js'] + +console.log(braces.expand('a/{foo,bar,baz}/*.js')); +//=> ['a/foo/*.js', 'a/bar/*.js', 'a/baz/*.js'] +``` + +### Sequences + +Expand ranges of characters (like Bash "sequences"): + +```js +console.log(braces.expand('{1..3}')); // ['1', '2', '3'] +console.log(braces.expand('a/{1..3}/b')); // ['a/1/b', 'a/2/b', 'a/3/b'] +console.log(braces('{a..c}', { expand: true })); // ['a', 'b', 'c'] +console.log(braces('foo/{a..c}', { expand: true })); // ['foo/a', 'foo/b', 'foo/c'] + +// supports zero-padded ranges +console.log(braces('a/{01..03}/b')); //=> ['a/(0[1-3])/b'] +console.log(braces('a/{001..300}/b')); //=> ['a/(0{2}[1-9]|0[1-9][0-9]|[12][0-9]{2}|300)/b'] +``` + +See [fill-range](https://github.com/jonschlinkert/fill-range) for all available range-expansion options. + +### Steppped ranges + +Steps, or increments, may be used with ranges: + +```js +console.log(braces.expand('{2..10..2}')); +//=> ['2', '4', '6', '8', '10'] + +console.log(braces('{2..10..2}')); +//=> ['(2|4|6|8|10)'] +``` + +When the [.optimize](#optimize) method is used, or [options.optimize](#optionsoptimize) is set to true, sequences are passed to [to-regex-range](https://github.com/jonschlinkert/to-regex-range) for expansion. + +### Nesting + +Brace patterns may be nested. The results of each expanded string are not sorted, and left to right order is preserved. + +**"Expanded" braces** + +```js +console.log(braces.expand('a{b,c,/{x,y}}/e')); +//=> ['ab/e', 'ac/e', 'a/x/e', 'a/y/e'] + +console.log(braces.expand('a/{x,{1..5},y}/c')); +//=> ['a/x/c', 'a/1/c', 'a/2/c', 'a/3/c', 'a/4/c', 'a/5/c', 'a/y/c'] +``` + +**"Optimized" braces** + +```js +console.log(braces('a{b,c,/{x,y}}/e')); +//=> ['a(b|c|/(x|y))/e'] + +console.log(braces('a/{x,{1..5},y}/c')); +//=> ['a/(x|([1-5])|y)/c'] +``` + +### Escaping + +**Escaping braces** + +A brace pattern will not be expanded or evaluted if _either the opening or closing brace is escaped_: + +```js +console.log(braces.expand('a\\{d,c,b}e')); +//=> ['a{d,c,b}e'] + +console.log(braces.expand('a{d,c,b\\}e')); +//=> ['a{d,c,b}e'] +``` + +**Escaping commas** + +Commas inside braces may also be escaped: + +```js +console.log(braces.expand('a{b\\,c}d')); +//=> ['a{b,c}d'] + +console.log(braces.expand('a{d\\,c,b}e')); +//=> ['ad,ce', 'abe'] +``` + +**Single items** + +Following bash conventions, a brace pattern is also not expanded when it contains a single character: + +```js +console.log(braces.expand('a{b}c')); +//=> ['a{b}c'] +``` + +## Options + +### options.maxLength + +**Type**: `Number` + +**Default**: `10,000` + +**Description**: Limit the length of the input string. Useful when the input string is generated or your application allows users to pass a string, et cetera. 
+ +```js +console.log(braces('a/{b,c}/d', { maxLength: 3 })); //=> throws an error +``` + +### options.expand + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: Generate an "expanded" brace pattern (alternatively you can use the `braces.expand()` method, which does the same thing). + +```js +console.log(braces('a/{b,c}/d', { expand: true })); +//=> [ 'a/b/d', 'a/c/d' ] +``` + +### options.nodupes + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: Remove duplicates from the returned array. + +### options.rangeLimit + +**Type**: `Number` + +**Default**: `1000` + +**Description**: To prevent malicious patterns from being passed by users, an error is thrown when `braces.expand()` is used or `options.expand` is true and the generated range will exceed the `rangeLimit`. + +You can customize `options.rangeLimit` or set it to `Inifinity` to disable this altogether. + +**Examples** + +```js +// pattern exceeds the "rangeLimit", so it's optimized automatically +console.log(braces.expand('{1..1000}')); +//=> ['([1-9]|[1-9][0-9]{1,2}|1000)'] + +// pattern does not exceed "rangeLimit", so it's NOT optimized +console.log(braces.expand('{1..100}')); +//=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '100'] +``` + +### options.transform + +**Type**: `Function` + +**Default**: `undefined` + +**Description**: Customize range expansion. + +**Example: Transforming non-numeric values** + +```js +const alpha = braces.expand('x/{a..e}/y', { + transform(value, index) { + // When non-numeric values are passed, "value" is a character code. + return 'foo/' + String.fromCharCode(value) + '-' + index; + }, +}); +console.log(alpha); +//=> [ 'x/foo/a-0/y', 'x/foo/b-1/y', 'x/foo/c-2/y', 'x/foo/d-3/y', 'x/foo/e-4/y' ] +``` + +**Example: Transforming numeric values** + +```js +const numeric = braces.expand('{1..5}', { + transform(value) { + // when numeric values are passed, "value" is a number + return 'foo/' + value * 2; + }, +}); +console.log(numeric); +//=> [ 'foo/2', 'foo/4', 'foo/6', 'foo/8', 'foo/10' ] +``` + +### options.quantifiers + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: In regular expressions, quanitifiers can be used to specify how many times a token can be repeated. For example, `a{1,3}` will match the letter `a` one to three times. + +Unfortunately, regex quantifiers happen to share the same syntax as [Bash lists](#lists) + +The `quantifiers` option tells braces to detect when [regex quantifiers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp#quantifiers) are defined in the given pattern, and not to try to expand them as lists. 
+ +**Examples** + +```js +const braces = require('braces'); +console.log(braces('a/b{1,3}/{x,y,z}')); +//=> [ 'a/b(1|3)/(x|y|z)' ] +console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true })); +//=> [ 'a/b{1,3}/(x|y|z)' ] +console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true, expand: true })); +//=> [ 'a/b{1,3}/x', 'a/b{1,3}/y', 'a/b{1,3}/z' ] +``` + +### options.keepEscaping + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: Do not strip backslashes that were used for escaping from the result. + +## What is "brace expansion"? + +Brace expansion is a type of parameter expansion that was made popular by unix shells for generating lists of strings, as well as regex-like matching when used alongside wildcards (globs). + +In addition to "expansion", braces are also used for matching. In other words: + +- [brace expansion](#brace-expansion) is for generating new lists +- [brace matching](#brace-matching) is for filtering existing lists + +
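+To make the "filtering existing lists" side concrete, here is a rough sketch that reuses the compiled output shown earlier in this readme; the anchoring and the `RegExp` wrapper are this example's own choices, not part of the braces API:
+
+```js
+const braces = require('braces');
+
+// expansion: generate the candidate list up front
+console.log(braces.expand('foo/{1..3}/bar'));
+//=> ['foo/1/bar', 'foo/2/bar', 'foo/3/bar']
+
+// matching: compile once, then filter strings you already have
+const [source] = braces('foo/{1..3}/bar'); //=> 'foo/([1-3])/bar'
+const re = new RegExp(`^${source}$`);
+console.log(['foo/2/bar', 'baz/2/qux'].filter(str => re.test(str)));
+//=> ['foo/2/bar']
+```
+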
+More about brace expansion (click to expand) + +There are two main types of brace expansion: + +1. **lists**: which are defined using comma-separated values inside curly braces: `{a,b,c}` +2. **sequences**: which are defined using a starting value and an ending value, separated by two dots: `a{1..3}b`. Optionally, a third argument may be passed to define a "step" or increment to use: `a{1..100..10}b`. These are also sometimes referred to as "ranges". + +Here are some example brace patterns to illustrate how they work: + +**Sets** + +``` +{a,b,c} => a b c +{a,b,c}{1,2} => a1 a2 b1 b2 c1 c2 +``` + +**Sequences** + +``` +{1..9} => 1 2 3 4 5 6 7 8 9 +{4..-4} => 4 3 2 1 0 -1 -2 -3 -4 +{1..20..3} => 1 4 7 10 13 16 19 +{a..j} => a b c d e f g h i j +{j..a} => j i h g f e d c b a +{a..z..3} => a d g j m p s v y +``` + +**Combination** + +Sets and sequences can be mixed together or used along with any other strings. + +``` +{a,b,c}{1..3} => a1 a2 a3 b1 b2 b3 c1 c2 c3 +foo/{a,b,c}/bar => foo/a/bar foo/b/bar foo/c/bar +``` + +The fact that braces can be "expanded" from relatively simple patterns makes them ideal for quickly generating test fixtures, file paths, and similar use cases. + +## Brace matching + +In addition to _expansion_, brace patterns are also useful for performing regular-expression-like matching. + +For example, the pattern `foo/{1..3}/bar` would match any of following strings: + +``` +foo/1/bar +foo/2/bar +foo/3/bar +``` + +But not: + +``` +baz/1/qux +baz/2/qux +baz/3/qux +``` + +Braces can also be combined with [glob patterns](https://github.com/jonschlinkert/micromatch) to perform more advanced wildcard matching. For example, the pattern `*/{1..3}/*` would match any of following strings: + +``` +foo/1/bar +foo/2/bar +foo/3/bar +baz/1/qux +baz/2/qux +baz/3/qux +``` + +## Brace matching pitfalls + +Although brace patterns offer a user-friendly way of matching ranges or sets of strings, there are also some major disadvantages and potential risks you should be aware of. + +### tldr + +**"brace bombs"** + +- brace expansion can eat up a huge amount of processing resources +- as brace patterns increase _linearly in size_, the system resources required to expand the pattern increase exponentially +- users can accidentally (or intentially) exhaust your system's resources resulting in the equivalent of a DoS attack (bonus: no programming knowledge is required!) + +For a more detailed explanation with examples, see the [geometric complexity](#geometric-complexity) section. + +### The solution + +Jump to the [performance section](#performance) to see how Braces solves this problem in comparison to other libraries. + +### Geometric complexity + +At minimum, brace patterns with sets limited to two elements have quadradic or `O(n^2)` complexity. But the complexity of the algorithm increases exponentially as the number of sets, _and elements per set_, increases, which is `O(n^c)`. 
+ +For example, the following sets demonstrate quadratic (`O(n^2)`) complexity: + +``` +{1,2}{3,4} => (2X2) => 13 14 23 24 +{1,2}{3,4}{5,6} => (2X2X2) => 135 136 145 146 235 236 245 246 +``` + +But add an element to a set, and we get a n-fold Cartesian product with `O(n^c)` complexity: + +``` +{1,2,3}{4,5,6}{7,8,9} => (3X3X3) => 147 148 149 157 158 159 167 168 169 247 248 + 249 257 258 259 267 268 269 347 348 349 357 + 358 359 367 368 369 +``` + +Now, imagine how this complexity grows given that each element is a n-tuple: + +``` +{1..100}{1..100} => (100X100) => 10,000 elements (38.4 kB) +{1..100}{1..100}{1..100} => (100X100X100) => 1,000,000 elements (5.76 MB) +``` + +Although these examples are clearly contrived, they demonstrate how brace patterns can quickly grow out of control. + +**More information** + +Interested in learning more about brace expansion? + +- [linuxjournal/bash-brace-expansion](http://www.linuxjournal.com/content/bash-brace-expansion) +- [rosettacode/Brace_expansion](https://rosettacode.org/wiki/Brace_expansion) +- [cartesian product](https://en.wikipedia.org/wiki/Cartesian_product) + +
+ +## Performance + +Braces is not only screaming fast, it's also more accurate the other brace expansion libraries. + +### Better algorithms + +Fortunately there is a solution to the ["brace bomb" problem](#brace-matching-pitfalls): _don't expand brace patterns into an array when they're used for matching_. + +Instead, convert the pattern into an optimized regular expression. This is easier said than done, and braces is the only library that does this currently. + +**The proof is in the numbers** + +Minimatch gets exponentially slower as patterns increase in complexity, braces does not. The following results were generated using `braces()` and `minimatch.braceExpand()`, respectively. + +| **Pattern** | **braces** | **[minimatch][]** | +| --------------------------- | ------------------- | ---------------------------- | +| `{1..9007199254740991}`[^1] | `298 B` (5ms 459μs) | N/A (freezes) | +| `{1..1000000000000000}` | `41 B` (1ms 15μs) | N/A (freezes) | +| `{1..100000000000000}` | `40 B` (890μs) | N/A (freezes) | +| `{1..10000000000000}` | `39 B` (2ms 49μs) | N/A (freezes) | +| `{1..1000000000000}` | `38 B` (608μs) | N/A (freezes) | +| `{1..100000000000}` | `37 B` (397μs) | N/A (freezes) | +| `{1..10000000000}` | `35 B` (983μs) | N/A (freezes) | +| `{1..1000000000}` | `34 B` (798μs) | N/A (freezes) | +| `{1..100000000}` | `33 B` (733μs) | N/A (freezes) | +| `{1..10000000}` | `32 B` (5ms 632μs) | `78.89 MB` (16s 388ms 569μs) | +| `{1..1000000}` | `31 B` (1ms 381μs) | `6.89 MB` (1s 496ms 887μs) | +| `{1..100000}` | `30 B` (950μs) | `588.89 kB` (146ms 921μs) | +| `{1..10000}` | `29 B` (1ms 114μs) | `48.89 kB` (14ms 187μs) | +| `{1..1000}` | `28 B` (760μs) | `3.89 kB` (1ms 453μs) | +| `{1..100}` | `22 B` (345μs) | `291 B` (196μs) | +| `{1..10}` | `10 B` (533μs) | `20 B` (37μs) | +| `{1..3}` | `7 B` (190μs) | `5 B` (27μs) | + +### Faster algorithms + +When you need expansion, braces is still much faster. + +_(the following results were generated using `braces.expand()` and `minimatch.braceExpand()`, respectively)_ + +| **Pattern** | **braces** | **[minimatch][]** | +| --------------- | --------------------------- | ---------------------------- | +| `{1..10000000}` | `78.89 MB` (2s 698ms 642μs) | `78.89 MB` (18s 601ms 974μs) | +| `{1..1000000}` | `6.89 MB` (458ms 576μs) | `6.89 MB` (1s 491ms 621μs) | +| `{1..100000}` | `588.89 kB` (20ms 728μs) | `588.89 kB` (156ms 919μs) | +| `{1..10000}` | `48.89 kB` (2ms 202μs) | `48.89 kB` (13ms 641μs) | +| `{1..1000}` | `3.89 kB` (1ms 796μs) | `3.89 kB` (1ms 958μs) | +| `{1..100}` | `291 B` (424μs) | `291 B` (211μs) | +| `{1..10}` | `20 B` (487μs) | `20 B` (72μs) | +| `{1..3}` | `5 B` (166μs) | `5 B` (27μs) | + +If you'd like to run these comparisons yourself, see [test/support/generate.js](test/support/generate.js). + +## Benchmarks + +### Running benchmarks + +Install dev dependencies: + +```bash +npm i -d && npm benchmark +``` + +### Latest results + +Braces is more accurate, without sacrificing performance. 
+ +```bash +● expand - range (expanded) + braces x 53,167 ops/sec ±0.12% (102 runs sampled) + minimatch x 11,378 ops/sec ±0.10% (102 runs sampled) +● expand - range (optimized for regex) + braces x 373,442 ops/sec ±0.04% (100 runs sampled) + minimatch x 3,262 ops/sec ±0.18% (100 runs sampled) +● expand - nested ranges (expanded) + braces x 33,921 ops/sec ±0.09% (99 runs sampled) + minimatch x 10,855 ops/sec ±0.28% (100 runs sampled) +● expand - nested ranges (optimized for regex) + braces x 287,479 ops/sec ±0.52% (98 runs sampled) + minimatch x 3,219 ops/sec ±0.28% (101 runs sampled) +● expand - set (expanded) + braces x 238,243 ops/sec ±0.19% (97 runs sampled) + minimatch x 538,268 ops/sec ±0.31% (96 runs sampled) +● expand - set (optimized for regex) + braces x 321,844 ops/sec ±0.10% (97 runs sampled) + minimatch x 140,600 ops/sec ±0.15% (100 runs sampled) +● expand - nested sets (expanded) + braces x 165,371 ops/sec ±0.42% (96 runs sampled) + minimatch x 337,720 ops/sec ±0.28% (100 runs sampled) +● expand - nested sets (optimized for regex) + braces x 242,948 ops/sec ±0.12% (99 runs sampled) + minimatch x 87,403 ops/sec ±0.79% (96 runs sampled) +``` + +## About + +
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Contributors + +| **Commits** | **Contributor** | +| ----------- | ------------------------------------------------------------- | +| 197 | [jonschlinkert](https://github.com/jonschlinkert) | +| 4 | [doowb](https://github.com/doowb) | +| 1 | [es128](https://github.com/es128) | +| 1 | [eush77](https://github.com/eush77) | +| 1 | [hemanth](https://github.com/hemanth) | +| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | + +### Author + +**Jon Schlinkert** + +- [GitHub Profile](https://github.com/jonschlinkert) +- [Twitter Profile](https://twitter.com/jonschlinkert) +- [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +--- + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._ diff --git a/backend/Backend/node_modules/braces/index.js b/backend/Backend/node_modules/braces/index.js new file mode 100644 index 000000000..d222c13b5 --- /dev/null +++ b/backend/Backend/node_modules/braces/index.js @@ -0,0 +1,170 @@ +'use strict'; + +const stringify = require('./lib/stringify'); +const compile = require('./lib/compile'); +const expand = require('./lib/expand'); +const parse = require('./lib/parse'); + +/** + * Expand the given pattern or create a regex-compatible string. + * + * ```js + * const braces = require('braces'); + * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)'] + * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c'] + * ``` + * @param {String} `str` + * @param {Object} `options` + * @return {String} + * @api public + */ + +const braces = (input, options = {}) => { + let output = []; + + if (Array.isArray(input)) { + for (const pattern of input) { + const result = braces.create(pattern, options); + if (Array.isArray(result)) { + output.push(...result); + } else { + output.push(result); + } + } + } else { + output = [].concat(braces.create(input, options)); + } + + if (options && options.expand === true && options.nodupes === true) { + output = [...new Set(output)]; + } + return output; +}; + +/** + * Parse the given `str` with the given `options`. + * + * ```js + * // braces.parse(pattern, [, options]); + * const ast = braces.parse('a/{b,c}/d'); + * console.log(ast); + * ``` + * @param {String} pattern Brace pattern to parse + * @param {Object} options + * @return {Object} Returns an AST + * @api public + */ + +braces.parse = (input, options = {}) => parse(input, options); + +/** + * Creates a braces string from an AST, or an AST node. + * + * ```js + * const braces = require('braces'); + * let ast = braces.parse('foo/{a,b}/bar'); + * console.log(stringify(ast.nodes[2])); //=> '{a,b}' + * ``` + * @param {String} `input` Brace pattern or AST. + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.stringify = (input, options = {}) => { + if (typeof input === 'string') { + return stringify(braces.parse(input, options), options); + } + return stringify(input, options); +}; + +/** + * Compiles a brace pattern into a regex-compatible, optimized string. + * This method is called by the main [braces](#braces) function by default. + * + * ```js + * const braces = require('braces'); + * console.log(braces.compile('a/{b,c}/d')); + * //=> ['a/(b|c)/d'] + * ``` + * @param {String} `input` Brace pattern or AST. 
+ * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.compile = (input, options = {}) => { + if (typeof input === 'string') { + input = braces.parse(input, options); + } + return compile(input, options); +}; + +/** + * Expands a brace pattern into an array. This method is called by the + * main [braces](#braces) function when `options.expand` is true. Before + * using this method it's recommended that you read the [performance notes](#performance)) + * and advantages of using [.compile](#compile) instead. + * + * ```js + * const braces = require('braces'); + * console.log(braces.expand('a/{b,c}/d')); + * //=> ['a/b/d', 'a/c/d']; + * ``` + * @param {String} `pattern` Brace pattern + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.expand = (input, options = {}) => { + if (typeof input === 'string') { + input = braces.parse(input, options); + } + + let result = expand(input, options); + + // filter out empty strings if specified + if (options.noempty === true) { + result = result.filter(Boolean); + } + + // filter out duplicates if specified + if (options.nodupes === true) { + result = [...new Set(result)]; + } + + return result; +}; + +/** + * Processes a brace pattern and returns either an expanded array + * (if `options.expand` is true), a highly optimized regex-compatible string. + * This method is called by the main [braces](#braces) function. + * + * ```js + * const braces = require('braces'); + * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}')) + * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)' + * ``` + * @param {String} `pattern` Brace pattern + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.create = (input, options = {}) => { + if (input === '' || input.length < 3) { + return [input]; + } + + return options.expand !== true + ? braces.compile(input, options) + : braces.expand(input, options); +}; + +/** + * Expose "braces" + */ + +module.exports = braces; diff --git a/backend/Backend/node_modules/braces/lib/compile.js b/backend/Backend/node_modules/braces/lib/compile.js new file mode 100644 index 000000000..dce69beb9 --- /dev/null +++ b/backend/Backend/node_modules/braces/lib/compile.js @@ -0,0 +1,60 @@ +'use strict'; + +const fill = require('fill-range'); +const utils = require('./utils'); + +const compile = (ast, options = {}) => { + const walk = (node, parent = {}) => { + const invalidBlock = utils.isInvalidBrace(parent); + const invalidNode = node.invalid === true && options.escapeInvalid === true; + const invalid = invalidBlock === true || invalidNode === true; + const prefix = options.escapeInvalid === true ? '\\' : ''; + let output = ''; + + if (node.isOpen === true) { + return prefix + node.value; + } + + if (node.isClose === true) { + console.log('node.isClose', prefix, node.value); + return prefix + node.value; + } + + if (node.type === 'open') { + return invalid ? prefix + node.value : '('; + } + + if (node.type === 'close') { + return invalid ? prefix + node.value : ')'; + } + + if (node.type === 'comma') { + return node.prev.type === 'comma' ? '' : invalid ? 
node.value : '|'; + } + + if (node.value) { + return node.value; + } + + if (node.nodes && node.ranges > 0) { + const args = utils.reduce(node.nodes); + const range = fill(...args, { ...options, wrap: false, toRegex: true, strictZeros: true }); + + if (range.length !== 0) { + return args.length > 1 && range.length > 1 ? `(${range})` : range; + } + } + + if (node.nodes) { + for (const child of node.nodes) { + output += walk(child, node); + } + } + + return output; + }; + + return walk(ast); +}; + +module.exports = compile; diff --git a/backend/Backend/node_modules/braces/lib/constants.js b/backend/Backend/node_modules/braces/lib/constants.js new file mode 100644 index 000000000..2bb3b8840 --- /dev/null +++ b/backend/Backend/node_modules/braces/lib/constants.js @@ -0,0 +1,57 @@ +'use strict'; + +module.exports = { + MAX_LENGTH: 10000, + + // Digits + CHAR_0: '0', /* 0 */ + CHAR_9: '9', /* 9 */ + + // Alphabet chars. + CHAR_UPPERCASE_A: 'A', /* A */ + CHAR_LOWERCASE_A: 'a', /* a */ + CHAR_UPPERCASE_Z: 'Z', /* Z */ + CHAR_LOWERCASE_Z: 'z', /* z */ + + CHAR_LEFT_PARENTHESES: '(', /* ( */ + CHAR_RIGHT_PARENTHESES: ')', /* ) */ + + CHAR_ASTERISK: '*', /* * */ + + // Non-alphabetic chars. + CHAR_AMPERSAND: '&', /* & */ + CHAR_AT: '@', /* @ */ + CHAR_BACKSLASH: '\\', /* \ */ + CHAR_BACKTICK: '`', /* ` */ + CHAR_CARRIAGE_RETURN: '\r', /* \r */ + CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */ + CHAR_COLON: ':', /* : */ + CHAR_COMMA: ',', /* , */ + CHAR_DOLLAR: '$', /* . */ + CHAR_DOT: '.', /* . */ + CHAR_DOUBLE_QUOTE: '"', /* " */ + CHAR_EQUAL: '=', /* = */ + CHAR_EXCLAMATION_MARK: '!', /* ! */ + CHAR_FORM_FEED: '\f', /* \f */ + CHAR_FORWARD_SLASH: '/', /* / */ + CHAR_HASH: '#', /* # */ + CHAR_HYPHEN_MINUS: '-', /* - */ + CHAR_LEFT_ANGLE_BRACKET: '<', /* < */ + CHAR_LEFT_CURLY_BRACE: '{', /* { */ + CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */ + CHAR_LINE_FEED: '\n', /* \n */ + CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */ + CHAR_PERCENT: '%', /* % */ + CHAR_PLUS: '+', /* + */ + CHAR_QUESTION_MARK: '?', /* ? */ + CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */ + CHAR_RIGHT_CURLY_BRACE: '}', /* } */ + CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */ + CHAR_SEMICOLON: ';', /* ; */ + CHAR_SINGLE_QUOTE: '\'', /* ' */ + CHAR_SPACE: ' ', /* */ + CHAR_TAB: '\t', /* \t */ + CHAR_UNDERSCORE: '_', /* _ */ + CHAR_VERTICAL_LINE: '|', /* | */ + CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */ +}; diff --git a/backend/Backend/node_modules/braces/lib/expand.js b/backend/Backend/node_modules/braces/lib/expand.js new file mode 100644 index 000000000..35b2c41d6 --- /dev/null +++ b/backend/Backend/node_modules/braces/lib/expand.js @@ -0,0 +1,113 @@ +'use strict'; + +const fill = require('fill-range'); +const stringify = require('./stringify'); +const utils = require('./utils'); + +const append = (queue = '', stash = '', enclose = false) => { + const result = []; + + queue = [].concat(queue); + stash = [].concat(stash); + + if (!stash.length) return queue; + if (!queue.length) { + return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash; + } + + for (const item of queue) { + if (Array.isArray(item)) { + for (const value of item) { + result.push(append(value, stash, enclose)); + } + } else { + for (let ele of stash) { + if (enclose === true && typeof ele === 'string') ele = `{${ele}}`; + result.push(Array.isArray(ele) ? append(item, ele, enclose) : item + ele); + } + } + } + return utils.flatten(result); +}; + +const expand = (ast, options = {}) => { + const rangeLimit = options.rangeLimit === undefined ? 
1000 : options.rangeLimit; + + const walk = (node, parent = {}) => { + node.queue = []; + + let p = parent; + let q = parent.queue; + + while (p.type !== 'brace' && p.type !== 'root' && p.parent) { + p = p.parent; + q = p.queue; + } + + if (node.invalid || node.dollar) { + q.push(append(q.pop(), stringify(node, options))); + return; + } + + if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) { + q.push(append(q.pop(), ['{}'])); + return; + } + + if (node.nodes && node.ranges > 0) { + const args = utils.reduce(node.nodes); + + if (utils.exceedsLimit(...args, options.step, rangeLimit)) { + throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.'); + } + + let range = fill(...args, options); + if (range.length === 0) { + range = stringify(node, options); + } + + q.push(append(q.pop(), range)); + node.nodes = []; + return; + } + + const enclose = utils.encloseBrace(node); + let queue = node.queue; + let block = node; + + while (block.type !== 'brace' && block.type !== 'root' && block.parent) { + block = block.parent; + queue = block.queue; + } + + for (let i = 0; i < node.nodes.length; i++) { + const child = node.nodes[i]; + + if (child.type === 'comma' && node.type === 'brace') { + if (i === 1) queue.push(''); + queue.push(''); + continue; + } + + if (child.type === 'close') { + q.push(append(q.pop(), queue, enclose)); + continue; + } + + if (child.value && child.type !== 'open') { + queue.push(append(queue.pop(), child.value)); + continue; + } + + if (child.nodes) { + walk(child, node); + } + } + + return queue; + }; + + return utils.flatten(walk(ast)); +}; + +module.exports = expand; diff --git a/backend/Backend/node_modules/braces/lib/parse.js b/backend/Backend/node_modules/braces/lib/parse.js new file mode 100644 index 000000000..3a6988e62 --- /dev/null +++ b/backend/Backend/node_modules/braces/lib/parse.js @@ -0,0 +1,331 @@ +'use strict'; + +const stringify = require('./stringify'); + +/** + * Constants + */ + +const { + MAX_LENGTH, + CHAR_BACKSLASH, /* \ */ + CHAR_BACKTICK, /* ` */ + CHAR_COMMA, /* , */ + CHAR_DOT, /* . */ + CHAR_LEFT_PARENTHESES, /* ( */ + CHAR_RIGHT_PARENTHESES, /* ) */ + CHAR_LEFT_CURLY_BRACE, /* { */ + CHAR_RIGHT_CURLY_BRACE, /* } */ + CHAR_LEFT_SQUARE_BRACKET, /* [ */ + CHAR_RIGHT_SQUARE_BRACKET, /* ] */ + CHAR_DOUBLE_QUOTE, /* " */ + CHAR_SINGLE_QUOTE, /* ' */ + CHAR_NO_BREAK_SPACE, + CHAR_ZERO_WIDTH_NOBREAK_SPACE +} = require('./constants'); + +/** + * parse + */ + +const parse = (input, options = {}) => { + if (typeof input !== 'string') { + throw new TypeError('Expected a string'); + } + + const opts = options || {}; + const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + if (input.length > max) { + throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`); + } + + const ast = { type: 'root', input, nodes: [] }; + const stack = [ast]; + let block = ast; + let prev = ast; + let brackets = 0; + const length = input.length; + let index = 0; + let depth = 0; + let value; + + /** + * Helpers + */ + + const advance = () => input[index++]; + const push = node => { + if (node.type === 'text' && prev.type === 'dot') { + prev.type = 'text'; + } + + if (prev && prev.type === 'text' && node.type === 'text') { + prev.value += node.value; + return; + } + + block.nodes.push(node); + node.parent = block; + node.prev = prev; + prev = node; + return node; + }; + + push({ type: 'bos' }); + + while (index < length) { + block = stack[stack.length - 1]; + value = advance(); + + /** + * Invalid chars + */ + + if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) { + continue; + } + + /** + * Escaped chars + */ + + if (value === CHAR_BACKSLASH) { + push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() }); + continue; + } + + /** + * Right square bracket (literal): ']' + */ + + if (value === CHAR_RIGHT_SQUARE_BRACKET) { + push({ type: 'text', value: '\\' + value }); + continue; + } + + /** + * Left square bracket: '[' + */ + + if (value === CHAR_LEFT_SQUARE_BRACKET) { + brackets++; + + let next; + + while (index < length && (next = advance())) { + value += next; + + if (next === CHAR_LEFT_SQUARE_BRACKET) { + brackets++; + continue; + } + + if (next === CHAR_BACKSLASH) { + value += advance(); + continue; + } + + if (next === CHAR_RIGHT_SQUARE_BRACKET) { + brackets--; + + if (brackets === 0) { + break; + } + } + } + + push({ type: 'text', value }); + continue; + } + + /** + * Parentheses + */ + + if (value === CHAR_LEFT_PARENTHESES) { + block = push({ type: 'paren', nodes: [] }); + stack.push(block); + push({ type: 'text', value }); + continue; + } + + if (value === CHAR_RIGHT_PARENTHESES) { + if (block.type !== 'paren') { + push({ type: 'text', value }); + continue; + } + block = stack.pop(); + push({ type: 'text', value }); + block = stack[stack.length - 1]; + continue; + } + + /** + * Quotes: '|"|` + */ + + if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) { + const open = value; + let next; + + if (options.keepQuotes !== true) { + value = ''; + } + + while (index < length && (next = advance())) { + if (next === CHAR_BACKSLASH) { + value += next + advance(); + continue; + } + + if (next === open) { + if (options.keepQuotes === true) value += next; + break; + } + + value += next; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Left curly brace: '{' + */ + + if (value === CHAR_LEFT_CURLY_BRACE) { + depth++; + + const dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true; + const brace = { + type: 'brace', + open: true, + close: false, + dollar, + depth, + commas: 0, + ranges: 0, + nodes: [] + }; + + block = push(brace); + stack.push(block); + push({ type: 'open', value }); + continue; + } + + /** + * Right curly brace: '}' + */ + + if (value === CHAR_RIGHT_CURLY_BRACE) { + if (block.type !== 'brace') { + push({ type: 'text', value }); + continue; + } + + const type = 'close'; + block = stack.pop(); + block.close = true; + + push({ type, value }); + depth--; + + block = stack[stack.length - 1]; + continue; + } + + /** + * Comma: ',' + */ + + if (value === CHAR_COMMA && depth 
> 0) { + if (block.ranges > 0) { + block.ranges = 0; + const open = block.nodes.shift(); + block.nodes = [open, { type: 'text', value: stringify(block) }]; + } + + push({ type: 'comma', value }); + block.commas++; + continue; + } + + /** + * Dot: '.' + */ + + if (value === CHAR_DOT && depth > 0 && block.commas === 0) { + const siblings = block.nodes; + + if (depth === 0 || siblings.length === 0) { + push({ type: 'text', value }); + continue; + } + + if (prev.type === 'dot') { + block.range = []; + prev.value += value; + prev.type = 'range'; + + if (block.nodes.length !== 3 && block.nodes.length !== 5) { + block.invalid = true; + block.ranges = 0; + prev.type = 'text'; + continue; + } + + block.ranges++; + block.args = []; + continue; + } + + if (prev.type === 'range') { + siblings.pop(); + + const before = siblings[siblings.length - 1]; + before.value += prev.value + value; + prev = before; + block.ranges--; + continue; + } + + push({ type: 'dot', value }); + continue; + } + + /** + * Text + */ + + push({ type: 'text', value }); + } + + // Mark imbalanced braces and brackets as invalid + do { + block = stack.pop(); + + if (block.type !== 'root') { + block.nodes.forEach(node => { + if (!node.nodes) { + if (node.type === 'open') node.isOpen = true; + if (node.type === 'close') node.isClose = true; + if (!node.nodes) node.type = 'text'; + node.invalid = true; + } + }); + + // get the location of the block on parent.nodes (block's siblings) + const parent = stack[stack.length - 1]; + const index = parent.nodes.indexOf(block); + // replace the (invalid) block with it's nodes + parent.nodes.splice(index, 1, ...block.nodes); + } + } while (stack.length > 0); + + push({ type: 'eos' }); + return ast; +}; + +module.exports = parse; diff --git a/backend/Backend/node_modules/braces/lib/stringify.js b/backend/Backend/node_modules/braces/lib/stringify.js new file mode 100644 index 000000000..8bcf872c3 --- /dev/null +++ b/backend/Backend/node_modules/braces/lib/stringify.js @@ -0,0 +1,32 @@ +'use strict'; + +const utils = require('./utils'); + +module.exports = (ast, options = {}) => { + const stringify = (node, parent = {}) => { + const invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent); + const invalidNode = node.invalid === true && options.escapeInvalid === true; + let output = ''; + + if (node.value) { + if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) { + return '\\' + node.value; + } + return node.value; + } + + if (node.value) { + return node.value; + } + + if (node.nodes) { + for (const child of node.nodes) { + output += stringify(child); + } + } + return output; + }; + + return stringify(ast); +}; + diff --git a/backend/Backend/node_modules/braces/lib/utils.js b/backend/Backend/node_modules/braces/lib/utils.js new file mode 100644 index 000000000..d19311fe0 --- /dev/null +++ b/backend/Backend/node_modules/braces/lib/utils.js @@ -0,0 +1,122 @@ +'use strict'; + +exports.isInteger = num => { + if (typeof num === 'number') { + return Number.isInteger(num); + } + if (typeof num === 'string' && num.trim() !== '') { + return Number.isInteger(Number(num)); + } + return false; +}; + +/** + * Find a node of the given type + */ + +exports.find = (node, type) => node.nodes.find(node => node.type === type); + +/** + * Find a node of the given type + */ + +exports.exceedsLimit = (min, max, step = 1, limit) => { + if (limit === false) return false; + if (!exports.isInteger(min) || !exports.isInteger(max)) return false; + return ((Number(max) - Number(min)) / Number(step)) >= 
limit; +}; + +/** + * Escape the given node with '\\' before node.value + */ + +exports.escapeNode = (block, n = 0, type) => { + const node = block.nodes[n]; + if (!node) return; + + if ((type && node.type === type) || node.type === 'open' || node.type === 'close') { + if (node.escaped !== true) { + node.value = '\\' + node.value; + node.escaped = true; + } + } +}; + +/** + * Returns true if the given brace node should be enclosed in literal braces + */ + +exports.encloseBrace = node => { + if (node.type !== 'brace') return false; + if ((node.commas >> 0 + node.ranges >> 0) === 0) { + node.invalid = true; + return true; + } + return false; +}; + +/** + * Returns true if a brace node is invalid. + */ + +exports.isInvalidBrace = block => { + if (block.type !== 'brace') return false; + if (block.invalid === true || block.dollar) return true; + if ((block.commas >> 0 + block.ranges >> 0) === 0) { + block.invalid = true; + return true; + } + if (block.open !== true || block.close !== true) { + block.invalid = true; + return true; + } + return false; +}; + +/** + * Returns true if a node is an open or close node + */ + +exports.isOpenOrClose = node => { + if (node.type === 'open' || node.type === 'close') { + return true; + } + return node.open === true || node.close === true; +}; + +/** + * Reduce an array of text nodes. + */ + +exports.reduce = nodes => nodes.reduce((acc, node) => { + if (node.type === 'text') acc.push(node.value); + if (node.type === 'range') node.type = 'text'; + return acc; +}, []); + +/** + * Flatten an array + */ + +exports.flatten = (...args) => { + const result = []; + + const flat = arr => { + for (let i = 0; i < arr.length; i++) { + const ele = arr[i]; + + if (Array.isArray(ele)) { + flat(ele); + continue; + } + + if (ele !== undefined) { + result.push(ele); + } + } + return result; + }; + + flat(args); + return result; +}; diff --git a/backend/Backend/node_modules/braces/package.json b/backend/Backend/node_modules/braces/package.json new file mode 100644 index 000000000..c3c056e46 --- /dev/null +++ b/backend/Backend/node_modules/braces/package.json @@ -0,0 +1,77 @@ +{ + "name": "braces", + "description": "Bash-like brace expansion, implemented in JavaScript. 
Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.", + "version": "3.0.3", + "homepage": "https://github.com/micromatch/braces", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Elan Shanker (https://github.com/es128)", + "Eugene Sharygin (https://github.com/eush77)", + "hemanth.hm (http://h3manth.com)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "micromatch/braces", + "bugs": { + "url": "https://github.com/micromatch/braces/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "lib" + ], + "main": "index.js", + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "mocha", + "benchmark": "node benchmark" + }, + "dependencies": { + "fill-range": "^7.1.1" + }, + "devDependencies": { + "ansi-colors": "^3.2.4", + "bash-path": "^2.0.1", + "gulp-format-md": "^2.0.0", + "mocha": "^6.1.1" + }, + "keywords": [ + "alpha", + "alphabetical", + "bash", + "brace", + "braces", + "expand", + "expansion", + "filepath", + "fill", + "fs", + "glob", + "globbing", + "letter", + "match", + "matches", + "matching", + "number", + "numerical", + "path", + "range", + "ranges", + "sh" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "lint": { + "reflinks": true + }, + "plugins": [ + "gulp-format-md" + ] + } +} diff --git a/backend/Backend/node_modules/buffer-from/LICENSE b/backend/Backend/node_modules/buffer-from/LICENSE new file mode 100644 index 000000000..e4bf1d69b --- /dev/null +++ b/backend/Backend/node_modules/buffer-from/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016, 2018 Linus Unnebäck + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/backend/Backend/node_modules/buffer-from/index.js b/backend/Backend/node_modules/buffer-from/index.js new file mode 100644 index 000000000..e1a58b5e8 --- /dev/null +++ b/backend/Backend/node_modules/buffer-from/index.js @@ -0,0 +1,72 @@ +/* eslint-disable node/no-deprecated-api */ + +var toString = Object.prototype.toString + +var isModern = ( + typeof Buffer !== 'undefined' && + typeof Buffer.alloc === 'function' && + typeof Buffer.allocUnsafe === 'function' && + typeof Buffer.from === 'function' +) + +function isArrayBuffer (input) { + return toString.call(input).slice(8, -1) === 'ArrayBuffer' +} + +function fromArrayBuffer (obj, byteOffset, length) { + byteOffset >>>= 0 + + var maxLength = obj.byteLength - byteOffset + + if (maxLength < 0) { + throw new RangeError("'offset' is out of bounds") + } + + if (length === undefined) { + length = maxLength + } else { + length >>>= 0 + + if (length > maxLength) { + throw new RangeError("'length' is out of bounds") + } + } + + return isModern + ? Buffer.from(obj.slice(byteOffset, byteOffset + length)) + : new Buffer(new Uint8Array(obj.slice(byteOffset, byteOffset + length))) +} + +function fromString (string, encoding) { + if (typeof encoding !== 'string' || encoding === '') { + encoding = 'utf8' + } + + if (!Buffer.isEncoding(encoding)) { + throw new TypeError('"encoding" must be a valid string encoding') + } + + return isModern + ? Buffer.from(string, encoding) + : new Buffer(string, encoding) +} + +function bufferFrom (value, encodingOrOffset, length) { + if (typeof value === 'number') { + throw new TypeError('"value" argument must not be a number') + } + + if (isArrayBuffer(value)) { + return fromArrayBuffer(value, encodingOrOffset, length) + } + + if (typeof value === 'string') { + return fromString(value, encodingOrOffset) + } + + return isModern + ? Buffer.from(value) + : new Buffer(value) +} + +module.exports = bufferFrom diff --git a/backend/Backend/node_modules/buffer-from/package.json b/backend/Backend/node_modules/buffer-from/package.json new file mode 100644 index 000000000..6ac5327bf --- /dev/null +++ b/backend/Backend/node_modules/buffer-from/package.json @@ -0,0 +1,19 @@ +{ + "name": "buffer-from", + "version": "1.1.2", + "license": "MIT", + "repository": "LinusU/buffer-from", + "files": [ + "index.js" + ], + "scripts": { + "test": "standard && node test" + }, + "devDependencies": { + "standard": "^12.0.1" + }, + "keywords": [ + "buffer", + "buffer from" + ] +} diff --git a/backend/Backend/node_modules/buffer-from/readme.md b/backend/Backend/node_modules/buffer-from/readme.md new file mode 100644 index 000000000..9880a558a --- /dev/null +++ b/backend/Backend/node_modules/buffer-from/readme.md @@ -0,0 +1,69 @@ +# Buffer From + +A [ponyfill](https://ponyfill.com) for `Buffer.from`, uses native implementation if available. + +## Installation + +```sh +npm install --save buffer-from +``` + +## Usage + +```js +const bufferFrom = require('buffer-from') + +console.log(bufferFrom([1, 2, 3, 4])) +//=> + +const arr = new Uint8Array([1, 2, 3, 4]) +console.log(bufferFrom(arr.buffer, 1, 2)) +//=> + +console.log(bufferFrom('test', 'utf8')) +//=> + +const buf = bufferFrom('test') +console.log(bufferFrom(buf)) +//=> +``` + +## API + +### bufferFrom(array) + +- `array` <Array> + +Allocates a new `Buffer` using an `array` of octets. 
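+A quick sketch of this overload (output shown as comments; the exact `Buffer` inspect output depends on your Node.js version):
+
+```js
+const bufferFrom = require('buffer-from')
+
+// Four octets become a four-byte Buffer
+const buf = bufferFrom([0x68, 0x69, 0x21, 0x0a])
+console.log(buf.length)     // 4
+console.log(buf.toString()) // 'hi!\n'
+```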
+ +### bufferFrom(arrayBuffer[, byteOffset[, length]]) + +- `arrayBuffer` <ArrayBuffer> The `.buffer` property of a TypedArray or ArrayBuffer +- `byteOffset` <Integer> Where to start copying from `arrayBuffer`. **Default:** `0` +- `length` <Integer> How many bytes to copy from `arrayBuffer`. **Default:** `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a TypedArray instance, the +newly created `Buffer` will share the same allocated memory as the TypedArray. + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. + +### bufferFrom(buffer) + +- `buffer` <Buffer> An existing `Buffer` to copy data from + +Copies the passed `buffer` data onto a new `Buffer` instance. + +### bufferFrom(string[, encoding]) + +- `string` <String> A string to encode. +- `encoding` <String> The encoding of `string`. **Default:** `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `string`. If +provided, the `encoding` parameter identifies the character encoding of +`string`. + +## See also + +- [buffer-alloc](https://github.com/LinusU/buffer-alloc) A ponyfill for `Buffer.alloc` +- [buffer-alloc-unsafe](https://github.com/LinusU/buffer-alloc-unsafe) A ponyfill for `Buffer.allocUnsafe` diff --git a/backend/Backend/node_modules/busboy/.eslintrc.js b/backend/Backend/node_modules/busboy/.eslintrc.js new file mode 100644 index 000000000..be9311d02 --- /dev/null +++ b/backend/Backend/node_modules/busboy/.eslintrc.js @@ -0,0 +1,5 @@ +'use strict'; + +module.exports = { + extends: '@mscdex/eslint-config', +}; diff --git a/backend/Backend/node_modules/busboy/.github/workflows/ci.yml b/backend/Backend/node_modules/busboy/.github/workflows/ci.yml new file mode 100644 index 000000000..799bae04a --- /dev/null +++ b/backend/Backend/node_modules/busboy/.github/workflows/ci.yml @@ -0,0 +1,24 @@ +name: CI + +on: + pull_request: + push: + branches: [ master ] + +jobs: + tests-linux: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + node-version: [10.16.0, 10.x, 12.x, 14.x, 16.x] + steps: + - uses: actions/checkout@v2 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + - name: Install module + run: npm install + - name: Run tests + run: npm test diff --git a/backend/Backend/node_modules/busboy/.github/workflows/lint.yml b/backend/Backend/node_modules/busboy/.github/workflows/lint.yml new file mode 100644 index 000000000..9f9e1f589 --- /dev/null +++ b/backend/Backend/node_modules/busboy/.github/workflows/lint.yml @@ -0,0 +1,23 @@ +name: lint + +on: + pull_request: + push: + branches: [ master ] + +env: + NODE_VERSION: 16.x + +jobs: + lint-js: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Use Node.js ${{ env.NODE_VERSION }} + uses: actions/setup-node@v1 + with: + node-version: ${{ env.NODE_VERSION }} + - name: Install ESLint + ESLint configs/plugins + run: npm install --only=dev + - name: Lint files + run: npm run lint diff --git a/backend/Backend/node_modules/busboy/LICENSE b/backend/Backend/node_modules/busboy/LICENSE new file mode 100644 index 000000000..290762e94 --- /dev/null +++ b/backend/Backend/node_modules/busboy/LICENSE @@ -0,0 +1,19 @@ +Copyright Brian White. All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. \ No newline at end of file diff --git a/backend/Backend/node_modules/busboy/README.md b/backend/Backend/node_modules/busboy/README.md new file mode 100644 index 000000000..654af3045 --- /dev/null +++ b/backend/Backend/node_modules/busboy/README.md @@ -0,0 +1,191 @@ +# Description + +A node.js module for parsing incoming HTML form data. + +Changes (breaking or otherwise) in v1.0.0 can be found [here](https://github.com/mscdex/busboy/issues/266). + +# Requirements + +* [node.js](http://nodejs.org/) -- v10.16.0 or newer + + +# Install + + npm install busboy + + +# Examples + +* Parsing (multipart) with default options: + +```js +const http = require('http'); + +const busboy = require('busboy'); + +http.createServer((req, res) => { + if (req.method === 'POST') { + console.log('POST request'); + const bb = busboy({ headers: req.headers }); + bb.on('file', (name, file, info) => { + const { filename, encoding, mimeType } = info; + console.log( + `File [${name}]: filename: %j, encoding: %j, mimeType: %j`, + filename, + encoding, + mimeType + ); + file.on('data', (data) => { + console.log(`File [${name}] got ${data.length} bytes`); + }).on('close', () => { + console.log(`File [${name}] done`); + }); + }); + bb.on('field', (name, val, info) => { + console.log(`Field [${name}]: value: %j`, val); + }); + bb.on('close', () => { + console.log('Done parsing form!'); + res.writeHead(303, { Connection: 'close', Location: '/' }); + res.end(); + }); + req.pipe(bb); + } else if (req.method === 'GET') { + res.writeHead(200, { Connection: 'close' }); + res.end(` + + + +
+      <html>
+        <head></head>
+        <body>
+          <form method="POST" enctype="multipart/form-data">
+            <input type="file" name="filefield"><br />
+            <input type="text" name="textfield"><br />
+            <input type="submit">
+          </form>
+        </body>
+      </html>
+ + + `); + } +}).listen(8000, () => { + console.log('Listening for requests'); +}); + +// Example output: +// +// Listening for requests +// < ... form submitted ... > +// POST request +// File [filefield]: filename: "logo.jpg", encoding: "binary", mime: "image/jpeg" +// File [filefield] got 11912 bytes +// Field [textfield]: value: "testing! :-)" +// File [filefield] done +// Done parsing form! +``` + +* Save all incoming files to disk: + +```js +const { randomFillSync } = require('crypto'); +const fs = require('fs'); +const http = require('http'); +const os = require('os'); +const path = require('path'); + +const busboy = require('busboy'); + +const random = (() => { + const buf = Buffer.alloc(16); + return () => randomFillSync(buf).toString('hex'); +})(); + +http.createServer((req, res) => { + if (req.method === 'POST') { + const bb = busboy({ headers: req.headers }); + bb.on('file', (name, file, info) => { + const saveTo = path.join(os.tmpdir(), `busboy-upload-${random()}`); + file.pipe(fs.createWriteStream(saveTo)); + }); + bb.on('close', () => { + res.writeHead(200, { 'Connection': 'close' }); + res.end(`That's all folks!`); + }); + req.pipe(bb); + return; + } + res.writeHead(404); + res.end(); +}).listen(8000, () => { + console.log('Listening for requests'); +}); +``` + + +# API + +## Exports + +`busboy` exports a single function: + +**( _function_ )**(< _object_ >config) - Creates and returns a new _Writable_ form parser stream. + +* Valid `config` properties: + + * **headers** - _object_ - These are the HTTP headers of the incoming request, which are used by individual parsers. + + * **highWaterMark** - _integer_ - highWaterMark to use for the parser stream. **Default:** node's _stream.Writable_ default. + + * **fileHwm** - _integer_ - highWaterMark to use for individual file streams. **Default:** node's _stream.Readable_ default. + + * **defCharset** - _string_ - Default character set to use when one isn't defined. **Default:** `'utf8'`. + + * **defParamCharset** - _string_ - For multipart forms, the default character set to use for values of part header parameters (e.g. filename) that are not extended parameters (that contain an explicit charset). **Default:** `'latin1'`. + + * **preservePath** - _boolean_ - If paths in filenames from file parts in a `'multipart/form-data'` request shall be preserved. **Default:** `false`. + + * **limits** - _object_ - Various limits on incoming data. Valid properties are: + + * **fieldNameSize** - _integer_ - Max field name size (in bytes). **Default:** `100`. + + * **fieldSize** - _integer_ - Max field value size (in bytes). **Default:** `1048576` (1MB). + + * **fields** - _integer_ - Max number of non-file fields. **Default:** `Infinity`. + + * **fileSize** - _integer_ - For multipart forms, the max file size (in bytes). **Default:** `Infinity`. + + * **files** - _integer_ - For multipart forms, the max number of file fields. **Default:** `Infinity`. + + * **parts** - _integer_ - For multipart forms, the max number of parts (fields + files). **Default:** `Infinity`. + + * **headerPairs** - _integer_ - For multipart forms, the max number of header key-value pairs to parse. **Default:** `2000` (same as node's http module). + +This function can throw exceptions if there is something wrong with the values in `config`. For example, if the Content-Type in `headers` is missing entirely, is not a supported type, or is missing the boundary for `'multipart/form-data'` requests. 
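+As a rough sketch of the options above (the header values here are made up for illustration), constructing a parser with explicit limits and handling a bad Content-Type might look like:
+
+```js
+const busboy = require('busboy');
+
+// Hypothetical headers; in practice they come from an http.IncomingMessage
+const headers = { 'content-type': 'multipart/form-data; boundary=----example' };
+
+let bb;
+try {
+  bb = busboy({
+    headers,
+    limits: {
+      fields: 10,                // at most 10 non-file fields
+      fileSize: 5 * 1024 * 1024  // 5 MB per file
+    }
+  });
+} catch (err) {
+  // Thrown when the Content-Type is missing, unsupported, or lacks a boundary
+  console.error('Cannot parse this request:', err.message);
+}
+```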
+ +## (Special) Parser stream events + +* **file**(< _string_ >name, < _Readable_ >stream, < _object_ >info) - Emitted for each new file found. `name` contains the form field name. `stream` is a _Readable_ stream containing the file's data. No transformations/conversions (e.g. base64 to raw binary) are done on the file's data. `info` contains the following properties: + + * `filename` - _string_ - If supplied, this contains the file's filename. **WARNING:** You should almost _never_ use this value as-is (especially if you are using `preservePath: true` in your `config`) as it could contain malicious input. You are better off generating your own (safe) filenames, or at the very least using a hash of the filename. + + * `encoding` - _string_ - The file's `'Content-Transfer-Encoding'` value. + + * `mimeType` - _string_ - The file's `'Content-Type'` value. + + **Note:** If you listen for this event, you should always consume the `stream` whether you care about its contents or not (you can simply do `stream.resume();` if you want to discard/skip the contents), otherwise the `'finish'`/`'close'` event will never fire on the busboy parser stream. + However, if you aren't accepting files, you can either simply not listen for the `'file'` event at all or set `limits.files` to `0`, and any/all files will be automatically skipped (these skipped files will still count towards any configured `limits.files` and `limits.parts` limits though). + + **Note:** If a configured `limits.fileSize` limit was reached for a file, `stream` will both have a boolean property `truncated` set to `true` (best checked at the end of the stream) and emit a `'limit'` event to notify you when this happens. + +* **field**(< _string_ >name, < _string_ >value, < _object_ >info) - Emitted for each new non-file field found. `name` contains the form field name. `value` contains the string value of the field. `info` contains the following properties: + + * `nameTruncated` - _boolean_ - Whether `name` was truncated or not (due to a configured `limits.fieldNameSize` limit) + + * `valueTruncated` - _boolean_ - Whether `value` was truncated or not (due to a configured `limits.fieldSize` limit) + + * `encoding` - _string_ - The field's `'Content-Transfer-Encoding'` value. + + * `mimeType` - _string_ - The field's `'Content-Type'` value. + +* **partsLimit**() - Emitted when the configured `limits.parts` limit has been reached. No more `'file'` or `'field'` events will be emitted. + +* **filesLimit**() - Emitted when the configured `limits.files` limit has been reached. No more `'file'` events will be emitted. + +* **fieldsLimit**() - Emitted when the configured `limits.fields` limit has been reached. No more `'field'` events will be emitted. 
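+Tying the events above together, a minimal sketch (where `req` stands in for a real incoming request stream) could look like:
+
+```js
+const busboy = require('busboy');
+
+function handleUpload(req) {
+  const bb = busboy({ headers: req.headers, limits: { fileSize: 1024 * 1024 } });
+
+  bb.on('file', (name, stream, info) => {
+    // Always consume the stream, even if the contents are being discarded
+    stream.on('data', () => {});
+    stream.on('limit', () => console.log(`File [${name}] hit the 1 MB fileSize limit`));
+    stream.on('close', () => {
+      if (stream.truncated) console.log(`File [${name}] was truncated`);
+    });
+  });
+
+  bb.on('field', (name, value, info) => {
+    console.log(`Field [${name}] = ${value}`);
+  });
+
+  bb.on('close', () => console.log('Form fully parsed'));
+
+  req.pipe(bb);
+}
+```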
diff --git a/backend/Backend/node_modules/busboy/bench/bench-multipart-fields-100mb-big.js b/backend/Backend/node_modules/busboy/bench/bench-multipart-fields-100mb-big.js new file mode 100644 index 000000000..ef15729ea --- /dev/null +++ b/backend/Backend/node_modules/busboy/bench/bench-multipart-fields-100mb-big.js @@ -0,0 +1,149 @@ +'use strict'; + +function createMultipartBuffers(boundary, sizes) { + const bufs = []; + for (let i = 0; i < sizes.length; ++i) { + const mb = sizes[i] * 1024 * 1024; + bufs.push(Buffer.from([ + `--${boundary}`, + `content-disposition: form-data; name="field${i + 1}"`, + '', + '0'.repeat(mb), + '', + ].join('\r\n'))); + } + bufs.push(Buffer.from([ + `--${boundary}--`, + '', + ].join('\r\n'))); + return bufs; +} + +const boundary = '-----------------------------168072824752491622650073'; +const buffers = createMultipartBuffers(boundary, [ + 10, + 10, + 10, + 20, + 50, +]); +const calls = { + partBegin: 0, + headerField: 0, + headerValue: 0, + headerEnd: 0, + headersEnd: 0, + partData: 0, + partEnd: 0, + end: 0, +}; + +const moduleName = process.argv[2]; +switch (moduleName) { + case 'busboy': { + const busboy = require('busboy'); + + const parser = busboy({ + limits: { + fieldSizeLimit: Infinity, + }, + headers: { + 'content-type': `multipart/form-data; boundary=${boundary}`, + }, + }); + parser.on('field', (name, val, info) => { + ++calls.partBegin; + ++calls.partData; + ++calls.partEnd; + }).on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }); + + console.time(moduleName); + for (const buf of buffers) + parser.write(buf); + break; + } + + case 'formidable': { + const { MultipartParser } = require('formidable'); + + const parser = new MultipartParser(); + parser.initWithBoundary(boundary); + parser.on('data', ({ name }) => { + ++calls[name]; + if (name === 'end') + console.timeEnd(moduleName); + }); + + console.time(moduleName); + for (const buf of buffers) + parser.write(buf); + + break; + } + + case 'multiparty': { + const { Readable } = require('stream'); + + const { Form } = require('multiparty'); + + const form = new Form({ + maxFieldsSize: Infinity, + maxFields: Infinity, + maxFilesSize: Infinity, + autoFields: false, + autoFiles: false, + }); + + const req = new Readable({ read: () => {} }); + req.headers = { + 'content-type': `multipart/form-data; boundary=${boundary}`, + }; + + function hijack(name, fn) { + const oldFn = form[name]; + form[name] = function() { + fn(); + return oldFn.apply(this, arguments); + }; + } + + hijack('onParseHeaderField', () => { + ++calls.headerField; + }); + hijack('onParseHeaderValue', () => { + ++calls.headerValue; + }); + hijack('onParsePartBegin', () => { + ++calls.partBegin; + }); + hijack('onParsePartData', () => { + ++calls.partData; + }); + hijack('onParsePartEnd', () => { + ++calls.partEnd; + }); + + form.on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }).on('part', (p) => p.resume()); + + console.time(moduleName); + form.parse(req); + for (const buf of buffers) + req.push(buf); + req.push(null); + + break; + } + + default: + if (moduleName === undefined) + console.error('Missing parser module name'); + else + console.error(`Invalid parser module name: ${moduleName}`); + process.exit(1); +} diff --git a/backend/Backend/node_modules/busboy/bench/bench-multipart-fields-100mb-small.js b/backend/Backend/node_modules/busboy/bench/bench-multipart-fields-100mb-small.js new file mode 100644 index 000000000..f32d421c7 --- /dev/null +++ 
b/backend/Backend/node_modules/busboy/bench/bench-multipart-fields-100mb-small.js @@ -0,0 +1,143 @@ +'use strict'; + +function createMultipartBuffers(boundary, sizes) { + const bufs = []; + for (let i = 0; i < sizes.length; ++i) { + const mb = sizes[i] * 1024 * 1024; + bufs.push(Buffer.from([ + `--${boundary}`, + `content-disposition: form-data; name="field${i + 1}"`, + '', + '0'.repeat(mb), + '', + ].join('\r\n'))); + } + bufs.push(Buffer.from([ + `--${boundary}--`, + '', + ].join('\r\n'))); + return bufs; +} + +const boundary = '-----------------------------168072824752491622650073'; +const buffers = createMultipartBuffers(boundary, (new Array(100)).fill(1)); +const calls = { + partBegin: 0, + headerField: 0, + headerValue: 0, + headerEnd: 0, + headersEnd: 0, + partData: 0, + partEnd: 0, + end: 0, +}; + +const moduleName = process.argv[2]; +switch (moduleName) { + case 'busboy': { + const busboy = require('busboy'); + + const parser = busboy({ + limits: { + fieldSizeLimit: Infinity, + }, + headers: { + 'content-type': `multipart/form-data; boundary=${boundary}`, + }, + }); + parser.on('field', (name, val, info) => { + ++calls.partBegin; + ++calls.partData; + ++calls.partEnd; + }).on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }); + + console.time(moduleName); + for (const buf of buffers) + parser.write(buf); + break; + } + + case 'formidable': { + const { MultipartParser } = require('formidable'); + + const parser = new MultipartParser(); + parser.initWithBoundary(boundary); + parser.on('data', ({ name }) => { + ++calls[name]; + if (name === 'end') + console.timeEnd(moduleName); + }); + + console.time(moduleName); + for (const buf of buffers) + parser.write(buf); + + break; + } + + case 'multiparty': { + const { Readable } = require('stream'); + + const { Form } = require('multiparty'); + + const form = new Form({ + maxFieldsSize: Infinity, + maxFields: Infinity, + maxFilesSize: Infinity, + autoFields: false, + autoFiles: false, + }); + + const req = new Readable({ read: () => {} }); + req.headers = { + 'content-type': `multipart/form-data; boundary=${boundary}`, + }; + + function hijack(name, fn) { + const oldFn = form[name]; + form[name] = function() { + fn(); + return oldFn.apply(this, arguments); + }; + } + + hijack('onParseHeaderField', () => { + ++calls.headerField; + }); + hijack('onParseHeaderValue', () => { + ++calls.headerValue; + }); + hijack('onParsePartBegin', () => { + ++calls.partBegin; + }); + hijack('onParsePartData', () => { + ++calls.partData; + }); + hijack('onParsePartEnd', () => { + ++calls.partEnd; + }); + + form.on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }).on('part', (p) => p.resume()); + + console.time(moduleName); + form.parse(req); + for (const buf of buffers) + req.push(buf); + req.push(null); + + break; + } + + default: + if (moduleName === undefined) + console.error('Missing parser module name'); + else + console.error(`Invalid parser module name: ${moduleName}`); + process.exit(1); +} diff --git a/backend/Backend/node_modules/busboy/bench/bench-multipart-files-100mb-big.js b/backend/Backend/node_modules/busboy/bench/bench-multipart-files-100mb-big.js new file mode 100644 index 000000000..b46bdee02 --- /dev/null +++ b/backend/Backend/node_modules/busboy/bench/bench-multipart-files-100mb-big.js @@ -0,0 +1,154 @@ +'use strict'; + +function createMultipartBuffers(boundary, sizes) { + const bufs = []; + for (let i = 0; i < sizes.length; ++i) { + const mb = sizes[i] * 1024 * 1024; + bufs.push(Buffer.from([ + 
`--${boundary}`, + `content-disposition: form-data; name="file${i + 1}"; ` + + `filename="random${i + 1}.bin"`, + 'content-type: application/octet-stream', + '', + '0'.repeat(mb), + '', + ].join('\r\n'))); + } + bufs.push(Buffer.from([ + `--${boundary}--`, + '', + ].join('\r\n'))); + return bufs; +} + +const boundary = '-----------------------------168072824752491622650073'; +const buffers = createMultipartBuffers(boundary, [ + 10, + 10, + 10, + 20, + 50, +]); +const calls = { + partBegin: 0, + headerField: 0, + headerValue: 0, + headerEnd: 0, + headersEnd: 0, + partData: 0, + partEnd: 0, + end: 0, +}; + +const moduleName = process.argv[2]; +switch (moduleName) { + case 'busboy': { + const busboy = require('busboy'); + + const parser = busboy({ + limits: { + fieldSizeLimit: Infinity, + }, + headers: { + 'content-type': `multipart/form-data; boundary=${boundary}`, + }, + }); + parser.on('file', (name, stream, info) => { + ++calls.partBegin; + stream.on('data', (chunk) => { + ++calls.partData; + }).on('end', () => { + ++calls.partEnd; + }); + }).on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }); + + console.time(moduleName); + for (const buf of buffers) + parser.write(buf); + break; + } + + case 'formidable': { + const { MultipartParser } = require('formidable'); + + const parser = new MultipartParser(); + parser.initWithBoundary(boundary); + parser.on('data', ({ name }) => { + ++calls[name]; + if (name === 'end') + console.timeEnd(moduleName); + }); + + console.time(moduleName); + for (const buf of buffers) + parser.write(buf); + + break; + } + + case 'multiparty': { + const { Readable } = require('stream'); + + const { Form } = require('multiparty'); + + const form = new Form({ + maxFieldsSize: Infinity, + maxFields: Infinity, + maxFilesSize: Infinity, + autoFields: false, + autoFiles: false, + }); + + const req = new Readable({ read: () => {} }); + req.headers = { + 'content-type': `multipart/form-data; boundary=${boundary}`, + }; + + function hijack(name, fn) { + const oldFn = form[name]; + form[name] = function() { + fn(); + return oldFn.apply(this, arguments); + }; + } + + hijack('onParseHeaderField', () => { + ++calls.headerField; + }); + hijack('onParseHeaderValue', () => { + ++calls.headerValue; + }); + hijack('onParsePartBegin', () => { + ++calls.partBegin; + }); + hijack('onParsePartData', () => { + ++calls.partData; + }); + hijack('onParsePartEnd', () => { + ++calls.partEnd; + }); + + form.on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }).on('part', (p) => p.resume()); + + console.time(moduleName); + form.parse(req); + for (const buf of buffers) + req.push(buf); + req.push(null); + + break; + } + + default: + if (moduleName === undefined) + console.error('Missing parser module name'); + else + console.error(`Invalid parser module name: ${moduleName}`); + process.exit(1); +} diff --git a/backend/Backend/node_modules/busboy/bench/bench-multipart-files-100mb-small.js b/backend/Backend/node_modules/busboy/bench/bench-multipart-files-100mb-small.js new file mode 100644 index 000000000..46b5dffb0 --- /dev/null +++ b/backend/Backend/node_modules/busboy/bench/bench-multipart-files-100mb-small.js @@ -0,0 +1,148 @@ +'use strict'; + +function createMultipartBuffers(boundary, sizes) { + const bufs = []; + for (let i = 0; i < sizes.length; ++i) { + const mb = sizes[i] * 1024 * 1024; + bufs.push(Buffer.from([ + `--${boundary}`, + `content-disposition: form-data; name="file${i + 1}"; ` + + `filename="random${i + 1}.bin"`, + 'content-type: 
application/octet-stream', + '', + '0'.repeat(mb), + '', + ].join('\r\n'))); + } + bufs.push(Buffer.from([ + `--${boundary}--`, + '', + ].join('\r\n'))); + return bufs; +} + +const boundary = '-----------------------------168072824752491622650073'; +const buffers = createMultipartBuffers(boundary, (new Array(100)).fill(1)); +const calls = { + partBegin: 0, + headerField: 0, + headerValue: 0, + headerEnd: 0, + headersEnd: 0, + partData: 0, + partEnd: 0, + end: 0, +}; + +const moduleName = process.argv[2]; +switch (moduleName) { + case 'busboy': { + const busboy = require('busboy'); + + const parser = busboy({ + limits: { + fieldSizeLimit: Infinity, + }, + headers: { + 'content-type': `multipart/form-data; boundary=${boundary}`, + }, + }); + parser.on('file', (name, stream, info) => { + ++calls.partBegin; + stream.on('data', (chunk) => { + ++calls.partData; + }).on('end', () => { + ++calls.partEnd; + }); + }).on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }); + + console.time(moduleName); + for (const buf of buffers) + parser.write(buf); + break; + } + + case 'formidable': { + const { MultipartParser } = require('formidable'); + + const parser = new MultipartParser(); + parser.initWithBoundary(boundary); + parser.on('data', ({ name }) => { + ++calls[name]; + if (name === 'end') + console.timeEnd(moduleName); + }); + + console.time(moduleName); + for (const buf of buffers) + parser.write(buf); + + break; + } + + case 'multiparty': { + const { Readable } = require('stream'); + + const { Form } = require('multiparty'); + + const form = new Form({ + maxFieldsSize: Infinity, + maxFields: Infinity, + maxFilesSize: Infinity, + autoFields: false, + autoFiles: false, + }); + + const req = new Readable({ read: () => {} }); + req.headers = { + 'content-type': `multipart/form-data; boundary=${boundary}`, + }; + + function hijack(name, fn) { + const oldFn = form[name]; + form[name] = function() { + fn(); + return oldFn.apply(this, arguments); + }; + } + + hijack('onParseHeaderField', () => { + ++calls.headerField; + }); + hijack('onParseHeaderValue', () => { + ++calls.headerValue; + }); + hijack('onParsePartBegin', () => { + ++calls.partBegin; + }); + hijack('onParsePartData', () => { + ++calls.partData; + }); + hijack('onParsePartEnd', () => { + ++calls.partEnd; + }); + + form.on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }).on('part', (p) => p.resume()); + + console.time(moduleName); + form.parse(req); + for (const buf of buffers) + req.push(buf); + req.push(null); + + break; + } + + default: + if (moduleName === undefined) + console.error('Missing parser module name'); + else + console.error(`Invalid parser module name: ${moduleName}`); + process.exit(1); +} diff --git a/backend/Backend/node_modules/busboy/bench/bench-urlencoded-fields-100pairs-small.js b/backend/Backend/node_modules/busboy/bench/bench-urlencoded-fields-100pairs-small.js new file mode 100644 index 000000000..5c337df2e --- /dev/null +++ b/backend/Backend/node_modules/busboy/bench/bench-urlencoded-fields-100pairs-small.js @@ -0,0 +1,101 @@ +'use strict'; + +const buffers = [ + Buffer.from( + (new Array(100)).fill('').map((_, i) => `key${i}=value${i}`).join('&') + ), +]; +const calls = { + field: 0, + end: 0, +}; + +let n = 3e3; + +const moduleName = process.argv[2]; +switch (moduleName) { + case 'busboy': { + const busboy = require('busboy'); + + console.time(moduleName); + (function next() { + const parser = busboy({ + limits: { + fieldSizeLimit: Infinity, + }, + headers: { + 'content-type': 
'application/x-www-form-urlencoded; charset=utf-8', + }, + }); + parser.on('field', (name, val, info) => { + ++calls.field; + }).on('close', () => { + ++calls.end; + if (--n === 0) + console.timeEnd(moduleName); + else + process.nextTick(next); + }); + + for (const buf of buffers) + parser.write(buf); + parser.end(); + })(); + break; + } + + case 'formidable': { + const QuerystringParser = + require('formidable/src/parsers/Querystring.js'); + + console.time(moduleName); + (function next() { + const parser = new QuerystringParser(); + parser.on('data', (obj) => { + ++calls.field; + }).on('end', () => { + ++calls.end; + if (--n === 0) + console.timeEnd(moduleName); + else + process.nextTick(next); + }); + + for (const buf of buffers) + parser.write(buf); + parser.end(); + })(); + break; + } + + case 'formidable-streaming': { + const QuerystringParser = + require('formidable/src/parsers/StreamingQuerystring.js'); + + console.time(moduleName); + (function next() { + const parser = new QuerystringParser(); + parser.on('data', (obj) => { + ++calls.field; + }).on('end', () => { + ++calls.end; + if (--n === 0) + console.timeEnd(moduleName); + else + process.nextTick(next); + }); + + for (const buf of buffers) + parser.write(buf); + parser.end(); + })(); + break; + } + + default: + if (moduleName === undefined) + console.error('Missing parser module name'); + else + console.error(`Invalid parser module name: ${moduleName}`); + process.exit(1); +} diff --git a/backend/Backend/node_modules/busboy/bench/bench-urlencoded-fields-900pairs-small-alt.js b/backend/Backend/node_modules/busboy/bench/bench-urlencoded-fields-900pairs-small-alt.js new file mode 100644 index 000000000..1f5645cb8 --- /dev/null +++ b/backend/Backend/node_modules/busboy/bench/bench-urlencoded-fields-900pairs-small-alt.js @@ -0,0 +1,84 @@ +'use strict'; + +const buffers = [ + Buffer.from( + (new Array(900)).fill('').map((_, i) => `key${i}=value${i}`).join('&') + ), +]; +const calls = { + field: 0, + end: 0, +}; + +const moduleName = process.argv[2]; +switch (moduleName) { + case 'busboy': { + const busboy = require('busboy'); + + console.time(moduleName); + const parser = busboy({ + limits: { + fieldSizeLimit: Infinity, + }, + headers: { + 'content-type': 'application/x-www-form-urlencoded; charset=utf-8', + }, + }); + parser.on('field', (name, val, info) => { + ++calls.field; + }).on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }); + + for (const buf of buffers) + parser.write(buf); + parser.end(); + break; + } + + case 'formidable': { + const QuerystringParser = + require('formidable/src/parsers/Querystring.js'); + + console.time(moduleName); + const parser = new QuerystringParser(); + parser.on('data', (obj) => { + ++calls.field; + }).on('end', () => { + ++calls.end; + console.timeEnd(moduleName); + }); + + for (const buf of buffers) + parser.write(buf); + parser.end(); + break; + } + + case 'formidable-streaming': { + const QuerystringParser = + require('formidable/src/parsers/StreamingQuerystring.js'); + + console.time(moduleName); + const parser = new QuerystringParser(); + parser.on('data', (obj) => { + ++calls.field; + }).on('end', () => { + ++calls.end; + console.timeEnd(moduleName); + }); + + for (const buf of buffers) + parser.write(buf); + parser.end(); + break; + } + + default: + if (moduleName === undefined) + console.error('Missing parser module name'); + else + console.error(`Invalid parser module name: ${moduleName}`); + process.exit(1); +} diff --git 
a/backend/Backend/node_modules/busboy/lib/index.js b/backend/Backend/node_modules/busboy/lib/index.js new file mode 100644 index 000000000..873272d93 --- /dev/null +++ b/backend/Backend/node_modules/busboy/lib/index.js @@ -0,0 +1,57 @@ +'use strict'; + +const { parseContentType } = require('./utils.js'); + +function getInstance(cfg) { + const headers = cfg.headers; + const conType = parseContentType(headers['content-type']); + if (!conType) + throw new Error('Malformed content type'); + + for (const type of TYPES) { + const matched = type.detect(conType); + if (!matched) + continue; + + const instanceCfg = { + limits: cfg.limits, + headers, + conType, + highWaterMark: undefined, + fileHwm: undefined, + defCharset: undefined, + defParamCharset: undefined, + preservePath: false, + }; + if (cfg.highWaterMark) + instanceCfg.highWaterMark = cfg.highWaterMark; + if (cfg.fileHwm) + instanceCfg.fileHwm = cfg.fileHwm; + instanceCfg.defCharset = cfg.defCharset; + instanceCfg.defParamCharset = cfg.defParamCharset; + instanceCfg.preservePath = cfg.preservePath; + return new type(instanceCfg); + } + + throw new Error(`Unsupported content type: ${headers['content-type']}`); +} + +// Note: types are explicitly listed here for easier bundling +// See: https://github.com/mscdex/busboy/issues/121 +const TYPES = [ + require('./types/multipart'), + require('./types/urlencoded'), +].filter(function(typemod) { return typeof typemod.detect === 'function'; }); + +module.exports = (cfg) => { + if (typeof cfg !== 'object' || cfg === null) + cfg = {}; + + if (typeof cfg.headers !== 'object' + || cfg.headers === null + || typeof cfg.headers['content-type'] !== 'string') { + throw new Error('Missing Content-Type'); + } + + return getInstance(cfg); +}; diff --git a/backend/Backend/node_modules/busboy/lib/types/multipart.js b/backend/Backend/node_modules/busboy/lib/types/multipart.js new file mode 100644 index 000000000..cc0d7bb66 --- /dev/null +++ b/backend/Backend/node_modules/busboy/lib/types/multipart.js @@ -0,0 +1,653 @@ +'use strict'; + +const { Readable, Writable } = require('stream'); + +const StreamSearch = require('streamsearch'); + +const { + basename, + convertToUTF8, + getDecoder, + parseContentType, + parseDisposition, +} = require('../utils.js'); + +const BUF_CRLF = Buffer.from('\r\n'); +const BUF_CR = Buffer.from('\r'); +const BUF_DASH = Buffer.from('-'); + +function noop() {} + +const MAX_HEADER_PAIRS = 2000; // From node +const MAX_HEADER_SIZE = 16 * 1024; // From node (its default value) + +const HPARSER_NAME = 0; +const HPARSER_PRE_OWS = 1; +const HPARSER_VALUE = 2; +class HeaderParser { + constructor(cb) { + this.header = Object.create(null); + this.pairCount = 0; + this.byteCount = 0; + this.state = HPARSER_NAME; + this.name = ''; + this.value = ''; + this.crlf = 0; + this.cb = cb; + } + + reset() { + this.header = Object.create(null); + this.pairCount = 0; + this.byteCount = 0; + this.state = HPARSER_NAME; + this.name = ''; + this.value = ''; + this.crlf = 0; + } + + push(chunk, pos, end) { + let start = pos; + while (pos < end) { + switch (this.state) { + case HPARSER_NAME: { + let done = false; + for (; pos < end; ++pos) { + if (this.byteCount === MAX_HEADER_SIZE) + return -1; + ++this.byteCount; + const code = chunk[pos]; + if (TOKEN[code] !== 1) { + if (code !== 58/* ':' */) + return -1; + this.name += chunk.latin1Slice(start, pos); + if (this.name.length === 0) + return -1; + ++pos; + done = true; + this.state = HPARSER_PRE_OWS; + break; + } + } + if (!done) { + this.name += 
chunk.latin1Slice(start, pos); + break; + } + // FALLTHROUGH + } + case HPARSER_PRE_OWS: { + // Skip optional whitespace + let done = false; + for (; pos < end; ++pos) { + if (this.byteCount === MAX_HEADER_SIZE) + return -1; + ++this.byteCount; + const code = chunk[pos]; + if (code !== 32/* ' ' */ && code !== 9/* '\t' */) { + start = pos; + done = true; + this.state = HPARSER_VALUE; + break; + } + } + if (!done) + break; + // FALLTHROUGH + } + case HPARSER_VALUE: + switch (this.crlf) { + case 0: // Nothing yet + for (; pos < end; ++pos) { + if (this.byteCount === MAX_HEADER_SIZE) + return -1; + ++this.byteCount; + const code = chunk[pos]; + if (FIELD_VCHAR[code] !== 1) { + if (code !== 13/* '\r' */) + return -1; + ++this.crlf; + break; + } + } + this.value += chunk.latin1Slice(start, pos++); + break; + case 1: // Received CR + if (this.byteCount === MAX_HEADER_SIZE) + return -1; + ++this.byteCount; + if (chunk[pos++] !== 10/* '\n' */) + return -1; + ++this.crlf; + break; + case 2: { // Received CR LF + if (this.byteCount === MAX_HEADER_SIZE) + return -1; + ++this.byteCount; + const code = chunk[pos]; + if (code === 32/* ' ' */ || code === 9/* '\t' */) { + // Folded value + start = pos; + this.crlf = 0; + } else { + if (++this.pairCount < MAX_HEADER_PAIRS) { + this.name = this.name.toLowerCase(); + if (this.header[this.name] === undefined) + this.header[this.name] = [this.value]; + else + this.header[this.name].push(this.value); + } + if (code === 13/* '\r' */) { + ++this.crlf; + ++pos; + } else { + // Assume start of next header field name + start = pos; + this.crlf = 0; + this.state = HPARSER_NAME; + this.name = ''; + this.value = ''; + } + } + break; + } + case 3: { // Received CR LF CR + if (this.byteCount === MAX_HEADER_SIZE) + return -1; + ++this.byteCount; + if (chunk[pos++] !== 10/* '\n' */) + return -1; + // End of header + const header = this.header; + this.reset(); + this.cb(header); + return pos; + } + } + break; + } + } + + return pos; + } +} + +class FileStream extends Readable { + constructor(opts, owner) { + super(opts); + this.truncated = false; + this._readcb = null; + this.once('end', () => { + // We need to make sure that we call any outstanding _writecb() that is + // associated with this file so that processing of the rest of the form + // can continue. This may not happen if the file stream ends right after + // backpressure kicks in, so we force it here. + this._read(); + if (--owner._fileEndsLeft === 0 && owner._finalcb) { + const cb = owner._finalcb; + owner._finalcb = null; + // Make sure other 'end' event handlers get a chance to be executed + // before busboy's 'finish' event is emitted + process.nextTick(cb); + } + }); + } + _read(n) { + const cb = this._readcb; + if (cb) { + this._readcb = null; + cb(); + } + } +} + +const ignoreData = { + push: (chunk, pos) => {}, + destroy: () => {}, +}; + +function callAndUnsetCb(self, err) { + const cb = self._writecb; + self._writecb = null; + if (err) + self.destroy(err); + else if (cb) + cb(); +} + +function nullDecoder(val, hint) { + return val; +} + +class Multipart extends Writable { + constructor(cfg) { + const streamOpts = { + autoDestroy: true, + emitClose: true, + highWaterMark: (typeof cfg.highWaterMark === 'number' + ? 
cfg.highWaterMark + : undefined), + }; + super(streamOpts); + + if (!cfg.conType.params || typeof cfg.conType.params.boundary !== 'string') + throw new Error('Multipart: Boundary not found'); + + const boundary = cfg.conType.params.boundary; + const paramDecoder = (typeof cfg.defParamCharset === 'string' + && cfg.defParamCharset + ? getDecoder(cfg.defParamCharset) + : nullDecoder); + const defCharset = (cfg.defCharset || 'utf8'); + const preservePath = cfg.preservePath; + const fileOpts = { + autoDestroy: true, + emitClose: true, + highWaterMark: (typeof cfg.fileHwm === 'number' + ? cfg.fileHwm + : undefined), + }; + + const limits = cfg.limits; + const fieldSizeLimit = (limits && typeof limits.fieldSize === 'number' + ? limits.fieldSize + : 1 * 1024 * 1024); + const fileSizeLimit = (limits && typeof limits.fileSize === 'number' + ? limits.fileSize + : Infinity); + const filesLimit = (limits && typeof limits.files === 'number' + ? limits.files + : Infinity); + const fieldsLimit = (limits && typeof limits.fields === 'number' + ? limits.fields + : Infinity); + const partsLimit = (limits && typeof limits.parts === 'number' + ? limits.parts + : Infinity); + + let parts = -1; // Account for initial boundary + let fields = 0; + let files = 0; + let skipPart = false; + + this._fileEndsLeft = 0; + this._fileStream = undefined; + this._complete = false; + let fileSize = 0; + + let field; + let fieldSize = 0; + let partCharset; + let partEncoding; + let partType; + let partName; + let partTruncated = false; + + let hitFilesLimit = false; + let hitFieldsLimit = false; + + this._hparser = null; + const hparser = new HeaderParser((header) => { + this._hparser = null; + skipPart = false; + + partType = 'text/plain'; + partCharset = defCharset; + partEncoding = '7bit'; + partName = undefined; + partTruncated = false; + + let filename; + if (!header['content-disposition']) { + skipPart = true; + return; + } + + const disp = parseDisposition(header['content-disposition'][0], + paramDecoder); + if (!disp || disp.type !== 'form-data') { + skipPart = true; + return; + } + + if (disp.params) { + if (disp.params.name) + partName = disp.params.name; + + if (disp.params['filename*']) + filename = disp.params['filename*']; + else if (disp.params.filename) + filename = disp.params.filename; + + if (filename !== undefined && !preservePath) + filename = basename(filename); + } + + if (header['content-type']) { + const conType = parseContentType(header['content-type'][0]); + if (conType) { + partType = `${conType.type}/${conType.subtype}`; + if (conType.params && typeof conType.params.charset === 'string') + partCharset = conType.params.charset.toLowerCase(); + } + } + + if (header['content-transfer-encoding']) + partEncoding = header['content-transfer-encoding'][0].toLowerCase(); + + if (partType === 'application/octet-stream' || filename !== undefined) { + // File + + if (files === filesLimit) { + if (!hitFilesLimit) { + hitFilesLimit = true; + this.emit('filesLimit'); + } + skipPart = true; + return; + } + ++files; + + if (this.listenerCount('file') === 0) { + skipPart = true; + return; + } + + fileSize = 0; + this._fileStream = new FileStream(fileOpts, this); + ++this._fileEndsLeft; + this.emit( + 'file', + partName, + this._fileStream, + { filename, + encoding: partEncoding, + mimeType: partType } + ); + } else { + // Non-file + + if (fields === fieldsLimit) { + if (!hitFieldsLimit) { + hitFieldsLimit = true; + this.emit('fieldsLimit'); + } + skipPart = true; + return; + } + ++fields; + + if 
(this.listenerCount('field') === 0) { + skipPart = true; + return; + } + + field = []; + fieldSize = 0; + } + }); + + let matchPostBoundary = 0; + const ssCb = (isMatch, data, start, end, isDataSafe) => { +retrydata: + while (data) { + if (this._hparser !== null) { + const ret = this._hparser.push(data, start, end); + if (ret === -1) { + this._hparser = null; + hparser.reset(); + this.emit('error', new Error('Malformed part header')); + break; + } + start = ret; + } + + if (start === end) + break; + + if (matchPostBoundary !== 0) { + if (matchPostBoundary === 1) { + switch (data[start]) { + case 45: // '-' + // Try matching '--' after boundary + matchPostBoundary = 2; + ++start; + break; + case 13: // '\r' + // Try matching CR LF before header + matchPostBoundary = 3; + ++start; + break; + default: + matchPostBoundary = 0; + } + if (start === end) + return; + } + + if (matchPostBoundary === 2) { + matchPostBoundary = 0; + if (data[start] === 45/* '-' */) { + // End of multipart data + this._complete = true; + this._bparser = ignoreData; + return; + } + // We saw something other than '-', so put the dash we consumed + // "back" + const writecb = this._writecb; + this._writecb = noop; + ssCb(false, BUF_DASH, 0, 1, false); + this._writecb = writecb; + } else if (matchPostBoundary === 3) { + matchPostBoundary = 0; + if (data[start] === 10/* '\n' */) { + ++start; + if (parts >= partsLimit) + break; + // Prepare the header parser + this._hparser = hparser; + if (start === end) + break; + // Process the remaining data as a header + continue retrydata; + } else { + // We saw something other than LF, so put the CR we consumed + // "back" + const writecb = this._writecb; + this._writecb = noop; + ssCb(false, BUF_CR, 0, 1, false); + this._writecb = writecb; + } + } + } + + if (!skipPart) { + if (this._fileStream) { + let chunk; + const actualLen = Math.min(end - start, fileSizeLimit - fileSize); + if (!isDataSafe) { + chunk = Buffer.allocUnsafe(actualLen); + data.copy(chunk, 0, start, start + actualLen); + } else { + chunk = data.slice(start, start + actualLen); + } + + fileSize += chunk.length; + if (fileSize === fileSizeLimit) { + if (chunk.length > 0) + this._fileStream.push(chunk); + this._fileStream.emit('limit'); + this._fileStream.truncated = true; + skipPart = true; + } else if (!this._fileStream.push(chunk)) { + if (this._writecb) + this._fileStream._readcb = this._writecb; + this._writecb = null; + } + } else if (field !== undefined) { + let chunk; + const actualLen = Math.min( + end - start, + fieldSizeLimit - fieldSize + ); + if (!isDataSafe) { + chunk = Buffer.allocUnsafe(actualLen); + data.copy(chunk, 0, start, start + actualLen); + } else { + chunk = data.slice(start, start + actualLen); + } + + fieldSize += actualLen; + field.push(chunk); + if (fieldSize === fieldSizeLimit) { + skipPart = true; + partTruncated = true; + } + } + } + + break; + } + + if (isMatch) { + matchPostBoundary = 1; + + if (this._fileStream) { + // End the active file stream if the previous part was a file + this._fileStream.push(null); + this._fileStream = null; + } else if (field !== undefined) { + let data; + switch (field.length) { + case 0: + data = ''; + break; + case 1: + data = convertToUTF8(field[0], partCharset, 0); + break; + default: + data = convertToUTF8( + Buffer.concat(field, fieldSize), + partCharset, + 0 + ); + } + field = undefined; + fieldSize = 0; + this.emit( + 'field', + partName, + data, + { nameTruncated: false, + valueTruncated: partTruncated, + encoding: partEncoding, + mimeType: 
partType } + ); + } + + if (++parts === partsLimit) + this.emit('partsLimit'); + } + }; + this._bparser = new StreamSearch(`\r\n--${boundary}`, ssCb); + + this._writecb = null; + this._finalcb = null; + + // Just in case there is no preamble + this.write(BUF_CRLF); + } + + static detect(conType) { + return (conType.type === 'multipart' && conType.subtype === 'form-data'); + } + + _write(chunk, enc, cb) { + this._writecb = cb; + this._bparser.push(chunk, 0); + if (this._writecb) + callAndUnsetCb(this); + } + + _destroy(err, cb) { + this._hparser = null; + this._bparser = ignoreData; + if (!err) + err = checkEndState(this); + const fileStream = this._fileStream; + if (fileStream) { + this._fileStream = null; + fileStream.destroy(err); + } + cb(err); + } + + _final(cb) { + this._bparser.destroy(); + if (!this._complete) + return cb(new Error('Unexpected end of form')); + if (this._fileEndsLeft) + this._finalcb = finalcb.bind(null, this, cb); + else + finalcb(this, cb); + } +} + +function finalcb(self, cb, err) { + if (err) + return cb(err); + err = checkEndState(self); + cb(err); +} + +function checkEndState(self) { + if (self._hparser) + return new Error('Malformed part header'); + const fileStream = self._fileStream; + if (fileStream) { + self._fileStream = null; + fileStream.destroy(new Error('Unexpected end of file')); + } + if (!self._complete) + return new Error('Unexpected end of form'); +} + +const TOKEN = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +]; + +const FIELD_VCHAR = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, +]; + +module.exports = Multipart; diff --git a/backend/Backend/node_modules/busboy/lib/types/urlencoded.js b/backend/Backend/node_modules/busboy/lib/types/urlencoded.js new file mode 100644 index 000000000..5c463a258 --- /dev/null +++ b/backend/Backend/node_modules/busboy/lib/types/urlencoded.js @@ -0,0 +1,350 @@ +'use strict'; + +const { Writable } = require('stream'); + +const { getDecoder } = require('../utils.js'); + +class URLEncoded extends Writable { + constructor(cfg) { + 
const streamOpts = { + autoDestroy: true, + emitClose: true, + highWaterMark: (typeof cfg.highWaterMark === 'number' + ? cfg.highWaterMark + : undefined), + }; + super(streamOpts); + + let charset = (cfg.defCharset || 'utf8'); + if (cfg.conType.params && typeof cfg.conType.params.charset === 'string') + charset = cfg.conType.params.charset; + + this.charset = charset; + + const limits = cfg.limits; + this.fieldSizeLimit = (limits && typeof limits.fieldSize === 'number' + ? limits.fieldSize + : 1 * 1024 * 1024); + this.fieldsLimit = (limits && typeof limits.fields === 'number' + ? limits.fields + : Infinity); + this.fieldNameSizeLimit = ( + limits && typeof limits.fieldNameSize === 'number' + ? limits.fieldNameSize + : 100 + ); + + this._inKey = true; + this._keyTrunc = false; + this._valTrunc = false; + this._bytesKey = 0; + this._bytesVal = 0; + this._fields = 0; + this._key = ''; + this._val = ''; + this._byte = -2; + this._lastPos = 0; + this._encode = 0; + this._decoder = getDecoder(charset); + } + + static detect(conType) { + return (conType.type === 'application' + && conType.subtype === 'x-www-form-urlencoded'); + } + + _write(chunk, enc, cb) { + if (this._fields >= this.fieldsLimit) + return cb(); + + let i = 0; + const len = chunk.length; + this._lastPos = 0; + + // Check if we last ended mid-percent-encoded byte + if (this._byte !== -2) { + i = readPctEnc(this, chunk, i, len); + if (i === -1) + return cb(new Error('Malformed urlencoded form')); + if (i >= len) + return cb(); + if (this._inKey) + ++this._bytesKey; + else + ++this._bytesVal; + } + +main: + while (i < len) { + if (this._inKey) { + // Parsing key + + i = skipKeyBytes(this, chunk, i, len); + + while (i < len) { + switch (chunk[i]) { + case 61: // '=' + if (this._lastPos < i) + this._key += chunk.latin1Slice(this._lastPos, i); + this._lastPos = ++i; + this._key = this._decoder(this._key, this._encode); + this._encode = 0; + this._inKey = false; + continue main; + case 38: // '&' + if (this._lastPos < i) + this._key += chunk.latin1Slice(this._lastPos, i); + this._lastPos = ++i; + this._key = this._decoder(this._key, this._encode); + this._encode = 0; + if (this._bytesKey > 0) { + this.emit( + 'field', + this._key, + '', + { nameTruncated: this._keyTrunc, + valueTruncated: false, + encoding: this.charset, + mimeType: 'text/plain' } + ); + } + this._key = ''; + this._val = ''; + this._keyTrunc = false; + this._valTrunc = false; + this._bytesKey = 0; + this._bytesVal = 0; + if (++this._fields >= this.fieldsLimit) { + this.emit('fieldsLimit'); + return cb(); + } + continue; + case 43: // '+' + if (this._lastPos < i) + this._key += chunk.latin1Slice(this._lastPos, i); + this._key += ' '; + this._lastPos = i + 1; + break; + case 37: // '%' + if (this._encode === 0) + this._encode = 1; + if (this._lastPos < i) + this._key += chunk.latin1Slice(this._lastPos, i); + this._lastPos = i + 1; + this._byte = -1; + i = readPctEnc(this, chunk, i + 1, len); + if (i === -1) + return cb(new Error('Malformed urlencoded form')); + if (i >= len) + return cb(); + ++this._bytesKey; + i = skipKeyBytes(this, chunk, i, len); + continue; + } + ++i; + ++this._bytesKey; + i = skipKeyBytes(this, chunk, i, len); + } + if (this._lastPos < i) + this._key += chunk.latin1Slice(this._lastPos, i); + } else { + // Parsing value + + i = skipValBytes(this, chunk, i, len); + + while (i < len) { + switch (chunk[i]) { + case 38: // '&' + if (this._lastPos < i) + this._val += chunk.latin1Slice(this._lastPos, i); + this._lastPos = ++i; + this._inKey = true; + 
this._val = this._decoder(this._val, this._encode); + this._encode = 0; + if (this._bytesKey > 0 || this._bytesVal > 0) { + this.emit( + 'field', + this._key, + this._val, + { nameTruncated: this._keyTrunc, + valueTruncated: this._valTrunc, + encoding: this.charset, + mimeType: 'text/plain' } + ); + } + this._key = ''; + this._val = ''; + this._keyTrunc = false; + this._valTrunc = false; + this._bytesKey = 0; + this._bytesVal = 0; + if (++this._fields >= this.fieldsLimit) { + this.emit('fieldsLimit'); + return cb(); + } + continue main; + case 43: // '+' + if (this._lastPos < i) + this._val += chunk.latin1Slice(this._lastPos, i); + this._val += ' '; + this._lastPos = i + 1; + break; + case 37: // '%' + if (this._encode === 0) + this._encode = 1; + if (this._lastPos < i) + this._val += chunk.latin1Slice(this._lastPos, i); + this._lastPos = i + 1; + this._byte = -1; + i = readPctEnc(this, chunk, i + 1, len); + if (i === -1) + return cb(new Error('Malformed urlencoded form')); + if (i >= len) + return cb(); + ++this._bytesVal; + i = skipValBytes(this, chunk, i, len); + continue; + } + ++i; + ++this._bytesVal; + i = skipValBytes(this, chunk, i, len); + } + if (this._lastPos < i) + this._val += chunk.latin1Slice(this._lastPos, i); + } + } + + cb(); + } + + _final(cb) { + if (this._byte !== -2) + return cb(new Error('Malformed urlencoded form')); + if (!this._inKey || this._bytesKey > 0 || this._bytesVal > 0) { + if (this._inKey) + this._key = this._decoder(this._key, this._encode); + else + this._val = this._decoder(this._val, this._encode); + this.emit( + 'field', + this._key, + this._val, + { nameTruncated: this._keyTrunc, + valueTruncated: this._valTrunc, + encoding: this.charset, + mimeType: 'text/plain' } + ); + } + cb(); + } +} + +function readPctEnc(self, chunk, pos, len) { + if (pos >= len) + return len; + + if (self._byte === -1) { + // We saw a '%' but no hex characters yet + const hexUpper = HEX_VALUES[chunk[pos++]]; + if (hexUpper === -1) + return -1; + + if (hexUpper >= 8) + self._encode = 2; // Indicate high bits detected + + if (pos < len) { + // Both hex characters are in this chunk + const hexLower = HEX_VALUES[chunk[pos++]]; + if (hexLower === -1) + return -1; + + if (self._inKey) + self._key += String.fromCharCode((hexUpper << 4) + hexLower); + else + self._val += String.fromCharCode((hexUpper << 4) + hexLower); + + self._byte = -2; + self._lastPos = pos; + } else { + // Only one hex character was available in this chunk + self._byte = hexUpper; + } + } else { + // We saw only one hex character so far + const hexLower = HEX_VALUES[chunk[pos++]]; + if (hexLower === -1) + return -1; + + if (self._inKey) + self._key += String.fromCharCode((self._byte << 4) + hexLower); + else + self._val += String.fromCharCode((self._byte << 4) + hexLower); + + self._byte = -2; + self._lastPos = pos; + } + + return pos; +} + +function skipKeyBytes(self, chunk, pos, len) { + // Skip bytes if we've truncated + if (self._bytesKey > self.fieldNameSizeLimit) { + if (!self._keyTrunc) { + if (self._lastPos < pos) + self._key += chunk.latin1Slice(self._lastPos, pos - 1); + } + self._keyTrunc = true; + for (; pos < len; ++pos) { + const code = chunk[pos]; + if (code === 61/* '=' */ || code === 38/* '&' */) + break; + ++self._bytesKey; + } + self._lastPos = pos; + } + + return pos; +} + +function skipValBytes(self, chunk, pos, len) { + // Skip bytes if we've truncated + if (self._bytesVal > self.fieldSizeLimit) { + if (!self._valTrunc) { + if (self._lastPos < pos) + self._val += 
chunk.latin1Slice(self._lastPos, pos - 1); + } + self._valTrunc = true; + for (; pos < len; ++pos) { + if (chunk[pos] === 38/* '&' */) + break; + ++self._bytesVal; + } + self._lastPos = pos; + } + + return pos; +} + +/* eslint-disable no-multi-spaces */ +const HEX_VALUES = [ + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1, + -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, +]; +/* eslint-enable no-multi-spaces */ + +module.exports = URLEncoded; diff --git a/backend/Backend/node_modules/busboy/lib/utils.js b/backend/Backend/node_modules/busboy/lib/utils.js new file mode 100644 index 000000000..8274f6c3a --- /dev/null +++ b/backend/Backend/node_modules/busboy/lib/utils.js @@ -0,0 +1,596 @@ +'use strict'; + +function parseContentType(str) { + if (str.length === 0) + return; + + const params = Object.create(null); + let i = 0; + + // Parse type + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (TOKEN[code] !== 1) { + if (code !== 47/* '/' */ || i === 0) + return; + break; + } + } + // Check for type without subtype + if (i === str.length) + return; + + const type = str.slice(0, i).toLowerCase(); + + // Parse subtype + const subtypeStart = ++i; + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (TOKEN[code] !== 1) { + // Make sure we have a subtype + if (i === subtypeStart) + return; + + if (parseContentTypeParams(str, i, params) === undefined) + return; + break; + } + } + // Make sure we have a subtype + if (i === subtypeStart) + return; + + const subtype = str.slice(subtypeStart, i).toLowerCase(); + + return { type, subtype, params }; +} + +function parseContentTypeParams(str, i, params) { + while (i < str.length) { + // Consume whitespace + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (code !== 32/* ' ' */ && code !== 9/* '\t' */) + break; + } + + // Ended on whitespace + if (i === str.length) + break; + + // Check for malformed parameter + if (str.charCodeAt(i++) !== 59/* ';' */) + return; + + // Consume whitespace + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (code !== 32/* ' ' */ && code !== 9/* '\t' */) + break; + } + + // Ended on whitespace (malformed) + if (i === str.length) + return; + + let name; + const nameStart = i; + // Parse parameter name + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (TOKEN[code] !== 1) { + if (code !== 61/* '=' */) + return; + break; + } + } + + // No value (malformed) + if (i === str.length) + return; + + name = str.slice(nameStart, i); + ++i; // Skip over '=' + + // No value (malformed) + if (i === str.length) + 
return; + + let value = ''; + let valueStart; + if (str.charCodeAt(i) === 34/* '"' */) { + valueStart = ++i; + let escaping = false; + // Parse quoted value + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (code === 92/* '\\' */) { + if (escaping) { + valueStart = i; + escaping = false; + } else { + value += str.slice(valueStart, i); + escaping = true; + } + continue; + } + if (code === 34/* '"' */) { + if (escaping) { + valueStart = i; + escaping = false; + continue; + } + value += str.slice(valueStart, i); + break; + } + if (escaping) { + valueStart = i - 1; + escaping = false; + } + // Invalid unescaped quoted character (malformed) + if (QDTEXT[code] !== 1) + return; + } + + // No end quote (malformed) + if (i === str.length) + return; + + ++i; // Skip over double quote + } else { + valueStart = i; + // Parse unquoted value + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (TOKEN[code] !== 1) { + // No value (malformed) + if (i === valueStart) + return; + break; + } + } + value = str.slice(valueStart, i); + } + + name = name.toLowerCase(); + if (params[name] === undefined) + params[name] = value; + } + + return params; +} + +function parseDisposition(str, defDecoder) { + if (str.length === 0) + return; + + const params = Object.create(null); + let i = 0; + + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (TOKEN[code] !== 1) { + if (parseDispositionParams(str, i, params, defDecoder) === undefined) + return; + break; + } + } + + const type = str.slice(0, i).toLowerCase(); + + return { type, params }; +} + +function parseDispositionParams(str, i, params, defDecoder) { + while (i < str.length) { + // Consume whitespace + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (code !== 32/* ' ' */ && code !== 9/* '\t' */) + break; + } + + // Ended on whitespace + if (i === str.length) + break; + + // Check for malformed parameter + if (str.charCodeAt(i++) !== 59/* ';' */) + return; + + // Consume whitespace + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (code !== 32/* ' ' */ && code !== 9/* '\t' */) + break; + } + + // Ended on whitespace (malformed) + if (i === str.length) + return; + + let name; + const nameStart = i; + // Parse parameter name + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (TOKEN[code] !== 1) { + if (code === 61/* '=' */) + break; + return; + } + } + + // No value (malformed) + if (i === str.length) + return; + + let value = ''; + let valueStart; + let charset; + //~ let lang; + name = str.slice(nameStart, i); + if (name.charCodeAt(name.length - 1) === 42/* '*' */) { + // Extended value + + const charsetStart = ++i; + // Parse charset name + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (CHARSET[code] !== 1) { + if (code !== 39/* '\'' */) + return; + break; + } + } + + // Incomplete charset (malformed) + if (i === str.length) + return; + + charset = str.slice(charsetStart, i); + ++i; // Skip over the '\'' + + //~ const langStart = ++i; + // Parse language name + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (code === 39/* '\'' */) + break; + } + + // Incomplete language (malformed) + if (i === str.length) + return; + + //~ lang = str.slice(langStart, i); + ++i; // Skip over the '\'' + + // No value (malformed) + if (i === str.length) + return; + + valueStart = i; + + let encode = 0; + // Parse value + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (EXTENDED_VALUE[code] !== 1) { + 
if (code === 37/* '%' */) { + let hexUpper; + let hexLower; + if (i + 2 < str.length + && (hexUpper = HEX_VALUES[str.charCodeAt(i + 1)]) !== -1 + && (hexLower = HEX_VALUES[str.charCodeAt(i + 2)]) !== -1) { + const byteVal = (hexUpper << 4) + hexLower; + value += str.slice(valueStart, i); + value += String.fromCharCode(byteVal); + i += 2; + valueStart = i + 1; + if (byteVal >= 128) + encode = 2; + else if (encode === 0) + encode = 1; + continue; + } + // '%' disallowed in non-percent encoded contexts (malformed) + return; + } + break; + } + } + + value += str.slice(valueStart, i); + value = convertToUTF8(value, charset, encode); + if (value === undefined) + return; + } else { + // Non-extended value + + ++i; // Skip over '=' + + // No value (malformed) + if (i === str.length) + return; + + if (str.charCodeAt(i) === 34/* '"' */) { + valueStart = ++i; + let escaping = false; + // Parse quoted value + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (code === 92/* '\\' */) { + if (escaping) { + valueStart = i; + escaping = false; + } else { + value += str.slice(valueStart, i); + escaping = true; + } + continue; + } + if (code === 34/* '"' */) { + if (escaping) { + valueStart = i; + escaping = false; + continue; + } + value += str.slice(valueStart, i); + break; + } + if (escaping) { + valueStart = i - 1; + escaping = false; + } + // Invalid unescaped quoted character (malformed) + if (QDTEXT[code] !== 1) + return; + } + + // No end quote (malformed) + if (i === str.length) + return; + + ++i; // Skip over double quote + } else { + valueStart = i; + // Parse unquoted value + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (TOKEN[code] !== 1) { + // No value (malformed) + if (i === valueStart) + return; + break; + } + } + value = str.slice(valueStart, i); + } + + value = defDecoder(value, 2); + if (value === undefined) + return; + } + + name = name.toLowerCase(); + if (params[name] === undefined) + params[name] = value; + } + + return params; +} + +function getDecoder(charset) { + let lc; + while (true) { + switch (charset) { + case 'utf-8': + case 'utf8': + return decoders.utf8; + case 'latin1': + case 'ascii': // TODO: Make these a separate, strict decoder? 
+ case 'us-ascii': + case 'iso-8859-1': + case 'iso8859-1': + case 'iso88591': + case 'iso_8859-1': + case 'windows-1252': + case 'iso_8859-1:1987': + case 'cp1252': + case 'x-cp1252': + return decoders.latin1; + case 'utf16le': + case 'utf-16le': + case 'ucs2': + case 'ucs-2': + return decoders.utf16le; + case 'base64': + return decoders.base64; + default: + if (lc === undefined) { + lc = true; + charset = charset.toLowerCase(); + continue; + } + return decoders.other.bind(charset); + } + } +} + +const decoders = { + utf8: (data, hint) => { + if (data.length === 0) + return ''; + if (typeof data === 'string') { + // If `data` never had any percent-encoded bytes or never had any that + // were outside of the ASCII range, then we can safely just return the + // input since UTF-8 is ASCII compatible + if (hint < 2) + return data; + + data = Buffer.from(data, 'latin1'); + } + return data.utf8Slice(0, data.length); + }, + + latin1: (data, hint) => { + if (data.length === 0) + return ''; + if (typeof data === 'string') + return data; + return data.latin1Slice(0, data.length); + }, + + utf16le: (data, hint) => { + if (data.length === 0) + return ''; + if (typeof data === 'string') + data = Buffer.from(data, 'latin1'); + return data.ucs2Slice(0, data.length); + }, + + base64: (data, hint) => { + if (data.length === 0) + return ''; + if (typeof data === 'string') + data = Buffer.from(data, 'latin1'); + return data.base64Slice(0, data.length); + }, + + other: (data, hint) => { + if (data.length === 0) + return ''; + if (typeof data === 'string') + data = Buffer.from(data, 'latin1'); + try { + const decoder = new TextDecoder(this); + return decoder.decode(data); + } catch {} + }, +}; + +function convertToUTF8(data, charset, hint) { + const decode = getDecoder(charset); + if (decode) + return decode(data, hint); +} + +function basename(path) { + if (typeof path !== 'string') + return ''; + for (let i = path.length - 1; i >= 0; --i) { + switch (path.charCodeAt(i)) { + case 0x2F: // '/' + case 0x5C: // '\' + path = path.slice(i + 1); + return (path === '..' || path === '.' ? '' : path); + } + } + return (path === '..' || path === '.' ? 
'' : path); +} + +const TOKEN = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +]; + +const QDTEXT = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, +]; + +const CHARSET = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +]; + +const EXTENDED_VALUE = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +]; + +/* eslint-disable no-multi-spaces */ +const HEX_VALUES = [ + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1, + -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, +]; +/* eslint-enable no-multi-spaces */ + +module.exports = { + basename, + convertToUTF8, + getDecoder, + parseContentType, + parseDisposition, +}; diff --git a/backend/Backend/node_modules/busboy/package.json b/backend/Backend/node_modules/busboy/package.json new file mode 100644 index 000000000..ac2577fe2 --- /dev/null +++ b/backend/Backend/node_modules/busboy/package.json @@ -0,0 +1,22 @@ +{ "name": "busboy", + "version": "1.6.0", + "author": "Brian White ", + "description": "A streaming parser for HTML form data for node.js", + "main": "./lib/index.js", + "dependencies": { + "streamsearch": "^1.1.0" + }, + "devDependencies": { + "@mscdex/eslint-config": "^1.1.0", + "eslint": "^7.32.0" + }, + "scripts": { + "test": "node test/test.js", + "lint": "eslint --cache --report-unused-disable-directives --ext=.js .eslintrc.js lib test bench", + "lint:fix": "npm run lint -- --fix" + }, + "engines": { "node": ">=10.16.0" }, + "keywords": [ "uploads", "forms", "multipart", "form-data" ], + "licenses": [ { "type": "MIT", "url": "http://github.com/mscdex/busboy/raw/master/LICENSE" } ], + "repository": { "type": "git", "url": "http://github.com/mscdex/busboy.git" } +} diff --git a/backend/Backend/node_modules/busboy/test/common.js b/backend/Backend/node_modules/busboy/test/common.js new file mode 100644 index 000000000..fb82ad81b --- /dev/null +++ b/backend/Backend/node_modules/busboy/test/common.js @@ -0,0 +1,109 @@ +'use strict'; + +const assert = require('assert'); +const { inspect } = require('util'); + +const mustCallChecks = []; + +function noop() {} + +function runCallChecks(exitCode) { + if (exitCode !== 0) return; + + const failed = mustCallChecks.filter((context) => { + if ('minimum' in context) { + context.messageSegment = `at least ${context.minimum}`; + return context.actual < context.minimum; + } + context.messageSegment = `exactly ${context.exact}`; + return context.actual !== context.exact; + }); + + failed.forEach((context) => { + console.error('Mismatched %s function calls. 
Expected %s, actual %d.', + context.name, + context.messageSegment, + context.actual); + console.error(context.stack.split('\n').slice(2).join('\n')); + }); + + if (failed.length) + process.exit(1); +} + +function mustCall(fn, exact) { + return _mustCallInner(fn, exact, 'exact'); +} + +function mustCallAtLeast(fn, minimum) { + return _mustCallInner(fn, minimum, 'minimum'); +} + +function _mustCallInner(fn, criteria = 1, field) { + if (process._exiting) + throw new Error('Cannot use common.mustCall*() in process exit handler'); + + if (typeof fn === 'number') { + criteria = fn; + fn = noop; + } else if (fn === undefined) { + fn = noop; + } + + if (typeof criteria !== 'number') + throw new TypeError(`Invalid ${field} value: ${criteria}`); + + const context = { + [field]: criteria, + actual: 0, + stack: inspect(new Error()), + name: fn.name || '' + }; + + // Add the exit listener only once to avoid listener leak warnings + if (mustCallChecks.length === 0) + process.on('exit', runCallChecks); + + mustCallChecks.push(context); + + function wrapped(...args) { + ++context.actual; + return fn.call(this, ...args); + } + // TODO: remove origFn? + wrapped.origFn = fn; + + return wrapped; +} + +function getCallSite(top) { + const originalStackFormatter = Error.prepareStackTrace; + Error.prepareStackTrace = (err, stack) => + `${stack[0].getFileName()}:${stack[0].getLineNumber()}`; + const err = new Error(); + Error.captureStackTrace(err, top); + // With the V8 Error API, the stack is not formatted until it is accessed + // eslint-disable-next-line no-unused-expressions + err.stack; + Error.prepareStackTrace = originalStackFormatter; + return err.stack; +} + +function mustNotCall(msg) { + const callSite = getCallSite(mustNotCall); + return function mustNotCall(...args) { + args = args.map(inspect).join(', '); + const argsInfo = (args.length > 0 + ? 
`\ncalled with arguments: ${args}` + : ''); + assert.fail( + `${msg || 'function should not have been called'} at ${callSite}` + + argsInfo); + }; +} + +module.exports = { + mustCall, + mustCallAtLeast, + mustNotCall, +}; diff --git a/backend/Backend/node_modules/busboy/test/test-types-multipart-charsets.js b/backend/Backend/node_modules/busboy/test/test-types-multipart-charsets.js new file mode 100644 index 000000000..ed9c38aeb --- /dev/null +++ b/backend/Backend/node_modules/busboy/test/test-types-multipart-charsets.js @@ -0,0 +1,94 @@ +'use strict'; + +const assert = require('assert'); +const { inspect } = require('util'); + +const { mustCall } = require(`${__dirname}/common.js`); + +const busboy = require('..'); + +const input = Buffer.from([ + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="テスト.dat"', + 'Content-Type: application/octet-stream', + '', + 'A'.repeat(1023), + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' +].join('\r\n')); +const boundary = '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k'; +const expected = [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('A'.repeat(1023)), + info: { + filename: 'テスト.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, +]; +const bb = busboy({ + defParamCharset: 'utf8', + headers: { + 'content-type': `multipart/form-data; boundary=${boundary}`, + } +}); +const results = []; + +bb.on('field', (name, val, info) => { + results.push({ type: 'field', name, val, info }); +}); + +bb.on('file', (name, stream, info) => { + const data = []; + let nb = 0; + const file = { + type: 'file', + name, + data: null, + info, + limited: false, + }; + results.push(file); + stream.on('data', (d) => { + data.push(d); + nb += d.length; + }).on('limit', () => { + file.limited = true; + }).on('close', () => { + file.data = Buffer.concat(data, nb); + assert.strictEqual(stream.truncated, file.limited); + }).once('error', (err) => { + file.err = err.message; + }); +}); + +bb.on('error', (err) => { + results.push({ error: err.message }); +}); + +bb.on('partsLimit', () => { + results.push('partsLimit'); +}); + +bb.on('filesLimit', () => { + results.push('filesLimit'); +}); + +bb.on('fieldsLimit', () => { + results.push('fieldsLimit'); +}); + +bb.on('close', mustCall(() => { + assert.deepStrictEqual( + results, + expected, + 'Results mismatch.\n' + + `Parsed: ${inspect(results)}\n` + + `Expected: ${inspect(expected)}` + ); +})); + +bb.end(input); diff --git a/backend/Backend/node_modules/busboy/test/test-types-multipart-stream-pause.js b/backend/Backend/node_modules/busboy/test/test-types-multipart-stream-pause.js new file mode 100644 index 000000000..df7268a4b --- /dev/null +++ b/backend/Backend/node_modules/busboy/test/test-types-multipart-stream-pause.js @@ -0,0 +1,102 @@ +'use strict'; + +const assert = require('assert'); +const { randomFillSync } = require('crypto'); +const { inspect } = require('util'); + +const busboy = require('..'); + +const { mustCall } = require('./common.js'); + +const BOUNDARY = 'u2KxIV5yF1y+xUspOQCCZopaVgeV6Jxihv35XQJmuTx8X3sh'; + +function formDataSection(key, value) { + return Buffer.from( + `\r\n--${BOUNDARY}` + + `\r\nContent-Disposition: form-data; name="${key}"` + + `\r\n\r\n${value}` + ); +} + +function formDataFile(key, filename, contentType) { + const buf = Buffer.allocUnsafe(100000); + return Buffer.concat([ + Buffer.from(`\r\n--${BOUNDARY}\r\n`), + 
Buffer.from(`Content-Disposition: form-data; name="${key}"` + + `; filename="${filename}"\r\n`), + Buffer.from(`Content-Type: ${contentType}\r\n\r\n`), + randomFillSync(buf) + ]); +} + +const reqChunks = [ + Buffer.concat([ + formDataFile('file', 'file.bin', 'application/octet-stream'), + formDataSection('foo', 'foo value'), + ]), + formDataSection('bar', 'bar value'), + Buffer.from(`\r\n--${BOUNDARY}--\r\n`) +]; +const bb = busboy({ + headers: { + 'content-type': `multipart/form-data; boundary=${BOUNDARY}` + } +}); +const expected = [ + { type: 'file', + name: 'file', + info: { + filename: 'file.bin', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + }, + { type: 'field', + name: 'foo', + val: 'foo value', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + { type: 'field', + name: 'bar', + val: 'bar value', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, +]; +const results = []; + +bb.on('field', (name, val, info) => { + results.push({ type: 'field', name, val, info }); +}); + +bb.on('file', (name, stream, info) => { + results.push({ type: 'file', name, info }); + // Simulate a pipe where the destination is pausing (perhaps due to waiting + // for file system write to finish) + setTimeout(() => { + stream.resume(); + }, 10); +}); + +bb.on('close', mustCall(() => { + assert.deepStrictEqual( + results, + expected, + 'Results mismatch.\n' + + `Parsed: ${inspect(results)}\n` + + `Expected: ${inspect(expected)}` + ); +})); + +for (const chunk of reqChunks) + bb.write(chunk); +bb.end(); diff --git a/backend/Backend/node_modules/busboy/test/test-types-multipart.js b/backend/Backend/node_modules/busboy/test/test-types-multipart.js new file mode 100644 index 000000000..9755642ad --- /dev/null +++ b/backend/Backend/node_modules/busboy/test/test-types-multipart.js @@ -0,0 +1,1053 @@ +'use strict'; + +const assert = require('assert'); +const { inspect } = require('util'); + +const busboy = require('..'); + +const active = new Map(); + +const tests = [ + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'super alpha file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_1"', + '', + 'super beta file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'A'.repeat(1023), + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_1"; filename="1k_b.dat"', + 'Content-Type: application/octet-stream', + '', + 'B'.repeat(1023), + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { type: 'field', + name: 'file_name_0', + val: 'super alpha file', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + { type: 'field', + name: 'file_name_1', + val: 'super beta file', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('A'.repeat(1023)), + info: { + filename: '1k_a.dat', + encoding: '7bit', + 
mimeType: 'application/octet-stream', + }, + limited: false, + }, + { type: 'file', + name: 'upload_file_1', + data: Buffer.from('B'.repeat(1023)), + info: { + filename: '1k_b.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, + ], + what: 'Fields and files' + }, + { source: [ + ['------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 'Content-Disposition: form-data; name="cont"', + '', + 'some random content', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 'Content-Disposition: form-data; name="pass"', + '', + 'some random pass', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 'Content-Disposition: form-data; name=bit', + '', + '2', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY--' + ].join('\r\n') + ], + boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY', + expected: [ + { type: 'field', + name: 'cont', + val: 'some random content', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + { type: 'field', + name: 'pass', + val: 'some random pass', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + { type: 'field', + name: 'bit', + val: '2', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + ], + what: 'Fields only' + }, + { source: [ + '' + ], + boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY', + expected: [ + { error: 'Unexpected end of form' }, + ], + what: 'No fields and no files' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'super alpha file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + fileSize: 13, + fieldSize: 5 + }, + expected: [ + { type: 'field', + name: 'file_name_0', + val: 'super', + info: { + nameTruncated: false, + valueTruncated: true, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('ABCDEFGHIJKLM'), + info: { + filename: '1k_a.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: true, + }, + ], + what: 'Fields and files (limits)' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'super alpha file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + files: 0 + }, + expected: [ + { type: 'field', + name: 'file_name_0', + val: 'super alpha file', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + 'filesLimit', + ], + what: 'Fields and files (limits: 0 files)' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + 
'', + 'super alpha file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_1"', + '', + 'super beta file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'A'.repeat(1023), + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_1"; filename="1k_b.dat"', + 'Content-Type: application/octet-stream', + '', + 'B'.repeat(1023), + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { type: 'field', + name: 'file_name_0', + val: 'super alpha file', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + { type: 'field', + name: 'file_name_1', + val: 'super beta file', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + ], + events: ['field'], + what: 'Fields and (ignored) files' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="/tmp/1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_1"; filename="C:\\files\\1k_b.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_2"; filename="relative/1k_c.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'), + info: { + filename: '1k_a.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, + { type: 'file', + name: 'upload_file_1', + data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'), + info: { + filename: '1k_b.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, + { type: 'file', + name: 'upload_file_2', + data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'), + info: { + filename: '1k_c.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, + ], + what: 'Files with filenames containing paths' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="/absolute/1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_1"; filename="C:\\absolute\\1k_b.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_2"; filename="relative/1k_c.dat"', + 'Content-Type: application/octet-stream', + '', + 
'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + preservePath: true, + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'), + info: { + filename: '/absolute/1k_a.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, + { type: 'file', + name: 'upload_file_1', + data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'), + info: { + filename: 'C:\\absolute\\1k_b.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, + { type: 'file', + name: 'upload_file_2', + data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'), + info: { + filename: 'relative/1k_c.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, + ], + what: 'Paths to be preserved through the preservePath option' + }, + { source: [ + ['------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 'Content-Disposition: form-data; name="cont"', + 'Content-Type: ', + '', + 'some random content', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 'Content-Disposition: ', + '', + 'some random pass', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY--' + ].join('\r\n') + ], + boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY', + expected: [ + { type: 'field', + name: 'cont', + val: 'some random content', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + ], + what: 'Empty content-type and empty content-disposition' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="file"; filename*=utf-8\'\'n%C3%A4me.txt', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { type: 'file', + name: 'file', + data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'), + info: { + filename: 'näme.txt', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, + ], + what: 'Unicode filenames' + }, + { source: [ + ['--asdasdasdasd\r\n', + 'Content-Type: text/plain\r\n', + 'Content-Disposition: form-data; name="foo"\r\n', + '\r\n', + 'asd\r\n', + '--asdasdasdasd--' + ].join(':)') + ], + boundary: 'asdasdasdasd', + expected: [ + { error: 'Malformed part header' }, + { error: 'Unexpected end of form' }, + ], + what: 'Stopped mid-header' + }, + { source: [ + ['------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 'Content-Disposition: form-data; name="cont"', + 'Content-Type: application/json', + '', + '{}', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY--', + ].join('\r\n') + ], + boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY', + expected: [ + { type: 'field', + name: 'cont', + val: '{}', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'application/json', + }, + }, + ], + what: 'content-type for fields' + }, + { source: [ + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY--', + ], + boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY', + expected: [], + what: 'empty form' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name=upload_file_0; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + 'Content-Transfer-Encoding: 
binary', + '', + '', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.alloc(0), + info: { + filename: '1k_a.dat', + encoding: 'binary', + mimeType: 'application/octet-stream', + }, + limited: false, + err: 'Unexpected end of form', + }, + { error: 'Unexpected end of form' }, + ], + what: 'Stopped mid-file #1' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name=upload_file_0; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'a', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('a'), + info: { + filename: '1k_a.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + err: 'Unexpected end of form', + }, + { error: 'Unexpected end of form' }, + ], + what: 'Stopped mid-file #2' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="notes.txt"', + 'Content-Type: text/plain; charset=utf8', + '', + 'a', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('a'), + info: { + filename: 'notes.txt', + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + ], + what: 'Text file with charset' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="notes.txt"', + 'Content-Type: ', + ' text/plain; charset=utf8', + '', + 'a', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('a'), + info: { + filename: 'notes.txt', + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + ], + what: 'Folded header value' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Type: text/plain; charset=utf8', + '', + 'a', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [], + what: 'No Content-Disposition' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'a'.repeat(64 * 1024), + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="notes.txt"', + 'Content-Type: ', + ' text/plain; charset=utf8', + '', + 'bc', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + fieldSize: Infinity, + }, + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('bc'), + info: { + filename: 'notes.txt', + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + ], + events: [ 'file' ], + what: 'Skip field parts if no listener' + }, + { source: [ + 
['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'a', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="notes.txt"', + 'Content-Type: ', + ' text/plain; charset=utf8', + '', + 'bc', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + parts: 1, + }, + expected: [ + { type: 'field', + name: 'file_name_0', + val: 'a', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + 'partsLimit', + ], + what: 'Parts limit' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'a', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_1"', + '', + 'b', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + fields: 1, + }, + expected: [ + { type: 'field', + name: 'file_name_0', + val: 'a', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + 'fieldsLimit', + ], + what: 'Fields limit' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="notes.txt"', + 'Content-Type: text/plain; charset=utf8', + '', + 'ab', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_1"; filename="notes2.txt"', + 'Content-Type: text/plain; charset=utf8', + '', + 'cd', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + files: 1, + }, + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('ab'), + info: { + filename: 'notes.txt', + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + 'filesLimit', + ], + what: 'Files limit' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + `name="upload_file_0"; filename="${'a'.repeat(64 * 1024)}.txt"`, + 'Content-Type: text/plain; charset=utf8', + '', + 'ab', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_1"; filename="notes2.txt"', + 'Content-Type: text/plain; charset=utf8', + '', + 'cd', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { error: 'Malformed part header' }, + { type: 'file', + name: 'upload_file_1', + data: Buffer.from('cd'), + info: { + filename: 'notes2.txt', + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + ], + what: 'Oversized part header' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="notes.txt"', + 'Content-Type: text/plain; charset=utf8', + '', + 'a'.repeat(31) + '\r', + ].join('\r\n'), + 'b'.repeat(40), + '\r\n-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + 
], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + fileHwm: 32, + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('a'.repeat(31) + '\r' + 'b'.repeat(40)), + info: { + filename: 'notes.txt', + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + ], + what: 'Lookbehind data should not stall file streams' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + `name="upload_file_0"; filename="${'a'.repeat(8 * 1024)}.txt"`, + 'Content-Type: text/plain; charset=utf8', + '', + 'ab', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + `name="upload_file_1"; filename="${'b'.repeat(8 * 1024)}.txt"`, + 'Content-Type: text/plain; charset=utf8', + '', + 'cd', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + `name="upload_file_2"; filename="${'c'.repeat(8 * 1024)}.txt"`, + 'Content-Type: text/plain; charset=utf8', + '', + 'ef', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('ab'), + info: { + filename: `${'a'.repeat(8 * 1024)}.txt`, + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + { type: 'file', + name: 'upload_file_1', + data: Buffer.from('cd'), + info: { + filename: `${'b'.repeat(8 * 1024)}.txt`, + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + { type: 'file', + name: 'upload_file_2', + data: Buffer.from('ef'), + info: { + filename: `${'c'.repeat(8 * 1024)}.txt`, + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + ], + what: 'Header size limit should be per part' + }, + { source: [ + '\r\n--d1bf46b3-aa33-4061-b28d-6c5ced8b08ee\r\n', + 'Content-Type: application/gzip\r\n' + + 'Content-Encoding: gzip\r\n' + + 'Content-Disposition: form-data; name=batch-1; filename=batch-1' + + '\r\n\r\n', + '\r\n--d1bf46b3-aa33-4061-b28d-6c5ced8b08ee--', + ], + boundary: 'd1bf46b3-aa33-4061-b28d-6c5ced8b08ee', + expected: [ + { type: 'file', + name: 'batch-1', + data: Buffer.alloc(0), + info: { + filename: 'batch-1', + encoding: '7bit', + mimeType: 'application/gzip', + }, + limited: false, + }, + ], + what: 'Empty part' + }, +]; + +for (const test of tests) { + active.set(test, 1); + + const { what, boundary, events, limits, preservePath, fileHwm } = test; + const bb = busboy({ + fileHwm, + limits, + preservePath, + headers: { + 'content-type': `multipart/form-data; boundary=${boundary}`, + } + }); + const results = []; + + if (events === undefined || events.includes('field')) { + bb.on('field', (name, val, info) => { + results.push({ type: 'field', name, val, info }); + }); + } + + if (events === undefined || events.includes('file')) { + bb.on('file', (name, stream, info) => { + const data = []; + let nb = 0; + const file = { + type: 'file', + name, + data: null, + info, + limited: false, + }; + results.push(file); + stream.on('data', (d) => { + data.push(d); + nb += d.length; + }).on('limit', () => { + file.limited = true; + }).on('close', () => { + file.data = Buffer.concat(data, nb); + assert.strictEqual(stream.truncated, file.limited); + }).once('error', (err) => { + file.err = err.message; + }); + }); + } + + bb.on('error', (err) => { + results.push({ error: err.message }); + }); + + bb.on('partsLimit', () 
=> { + results.push('partsLimit'); + }); + + bb.on('filesLimit', () => { + results.push('filesLimit'); + }); + + bb.on('fieldsLimit', () => { + results.push('fieldsLimit'); + }); + + bb.on('close', () => { + active.delete(test); + + assert.deepStrictEqual( + results, + test.expected, + `[${what}] Results mismatch.\n` + + `Parsed: ${inspect(results)}\n` + + `Expected: ${inspect(test.expected)}` + ); + }); + + for (const src of test.source) { + const buf = (typeof src === 'string' ? Buffer.from(src, 'utf8') : src); + bb.write(buf); + } + bb.end(); +} + +// Byte-by-byte versions +for (let test of tests) { + test = { ...test }; + test.what += ' (byte-by-byte)'; + active.set(test, 1); + + const { what, boundary, events, limits, preservePath, fileHwm } = test; + const bb = busboy({ + fileHwm, + limits, + preservePath, + headers: { + 'content-type': `multipart/form-data; boundary=${boundary}`, + } + }); + const results = []; + + if (events === undefined || events.includes('field')) { + bb.on('field', (name, val, info) => { + results.push({ type: 'field', name, val, info }); + }); + } + + if (events === undefined || events.includes('file')) { + bb.on('file', (name, stream, info) => { + const data = []; + let nb = 0; + const file = { + type: 'file', + name, + data: null, + info, + limited: false, + }; + results.push(file); + stream.on('data', (d) => { + data.push(d); + nb += d.length; + }).on('limit', () => { + file.limited = true; + }).on('close', () => { + file.data = Buffer.concat(data, nb); + assert.strictEqual(stream.truncated, file.limited); + }).once('error', (err) => { + file.err = err.message; + }); + }); + } + + bb.on('error', (err) => { + results.push({ error: err.message }); + }); + + bb.on('partsLimit', () => { + results.push('partsLimit'); + }); + + bb.on('filesLimit', () => { + results.push('filesLimit'); + }); + + bb.on('fieldsLimit', () => { + results.push('fieldsLimit'); + }); + + bb.on('close', () => { + active.delete(test); + + assert.deepStrictEqual( + results, + test.expected, + `[${what}] Results mismatch.\n` + + `Parsed: ${inspect(results)}\n` + + `Expected: ${inspect(test.expected)}` + ); + }); + + for (const src of test.source) { + const buf = (typeof src === 'string' ? 
Buffer.from(src, 'utf8') : src); + for (let i = 0; i < buf.length; ++i) + bb.write(buf.slice(i, i + 1)); + } + bb.end(); +} + +{ + let exception = false; + process.once('uncaughtException', (ex) => { + exception = true; + throw ex; + }); + process.on('exit', () => { + if (exception || active.size === 0) + return; + process.exitCode = 1; + console.error('=========================='); + console.error(`${active.size} test(s) did not finish:`); + console.error('=========================='); + console.error(Array.from(active.keys()).map((v) => v.what).join('\n')); + }); +} diff --git a/backend/Backend/node_modules/busboy/test/test-types-urlencoded.js b/backend/Backend/node_modules/busboy/test/test-types-urlencoded.js new file mode 100644 index 000000000..c35962b97 --- /dev/null +++ b/backend/Backend/node_modules/busboy/test/test-types-urlencoded.js @@ -0,0 +1,488 @@ +'use strict'; + +const assert = require('assert'); +const { transcode } = require('buffer'); +const { inspect } = require('util'); + +const busboy = require('..'); + +const active = new Map(); + +const tests = [ + { source: ['foo'], + expected: [ + ['foo', + '', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Unassigned value' + }, + { source: ['foo=bar'], + expected: [ + ['foo', + 'bar', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Assigned value' + }, + { source: ['foo&bar=baz'], + expected: [ + ['foo', + '', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['bar', + 'baz', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Unassigned and assigned value' + }, + { source: ['foo=bar&baz'], + expected: [ + ['foo', + 'bar', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['baz', + '', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Assigned and unassigned value' + }, + { source: ['foo=bar&baz=bla'], + expected: [ + ['foo', + 'bar', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['baz', + 'bla', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Two assigned values' + }, + { source: ['foo&bar'], + expected: [ + ['foo', + '', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['bar', + '', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Two unassigned values' + }, + { source: ['foo&bar&'], + expected: [ + ['foo', + '', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['bar', + '', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Two unassigned values and ampersand' + }, + { source: ['foo+1=bar+baz%2Bquux'], + expected: [ + ['foo 1', + 'bar baz+quux', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Assigned key and value with (plus) space' + }, + { source: ['foo=bar%20baz%21'], + expected: [ + ['foo', + 'bar baz!', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 
'text/plain' }, + ], + ], + what: 'Assigned value with encoded bytes' + }, + { source: ['foo%20bar=baz%20bla%21'], + expected: [ + ['foo bar', + 'baz bla!', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Assigned value with encoded bytes #2' + }, + { source: ['foo=bar%20baz%21&num=1000'], + expected: [ + ['foo', + 'bar baz!', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['num', + '1000', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Two assigned values, one with encoded bytes' + }, + { source: [ + Array.from(transcode(Buffer.from('foo'), 'utf8', 'utf16le')).map( + (n) => `%${n.toString(16).padStart(2, '0')}` + ).join(''), + '=', + Array.from(transcode(Buffer.from('😀!'), 'utf8', 'utf16le')).map( + (n) => `%${n.toString(16).padStart(2, '0')}` + ).join(''), + ], + expected: [ + ['foo', + '😀!', + { nameTruncated: false, + valueTruncated: false, + encoding: 'UTF-16LE', + mimeType: 'text/plain' }, + ], + ], + charset: 'UTF-16LE', + what: 'Encoded value with multi-byte charset' + }, + { source: [ + 'foo=<', + Array.from(transcode(Buffer.from('©:^þ'), 'utf8', 'latin1')).map( + (n) => `%${n.toString(16).padStart(2, '0')}` + ).join(''), + ], + expected: [ + ['foo', + '<©:^þ', + { nameTruncated: false, + valueTruncated: false, + encoding: 'ISO-8859-1', + mimeType: 'text/plain' }, + ], + ], + charset: 'ISO-8859-1', + what: 'Encoded value with single-byte, ASCII-compatible, non-UTF8 charset' + }, + { source: ['foo=bar&baz=bla'], + expected: [], + what: 'Limits: zero fields', + limits: { fields: 0 } + }, + { source: ['foo=bar&baz=bla'], + expected: [ + ['foo', + 'bar', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Limits: one field', + limits: { fields: 1 } + }, + { source: ['foo=bar&baz=bla'], + expected: [ + ['foo', + 'bar', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['baz', + 'bla', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Limits: field part lengths match limits', + limits: { fieldNameSize: 3, fieldSize: 3 } + }, + { source: ['foo=bar&baz=bla'], + expected: [ + ['fo', + 'bar', + { nameTruncated: true, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['ba', + 'bla', + { nameTruncated: true, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Limits: truncated field name', + limits: { fieldNameSize: 2 } + }, + { source: ['foo=bar&baz=bla'], + expected: [ + ['foo', + 'ba', + { nameTruncated: false, + valueTruncated: true, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['baz', + 'bl', + { nameTruncated: false, + valueTruncated: true, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Limits: truncated field value', + limits: { fieldSize: 2 } + }, + { source: ['foo=bar&baz=bla'], + expected: [ + ['fo', + 'ba', + { nameTruncated: true, + valueTruncated: true, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['ba', + 'bl', + { nameTruncated: true, + valueTruncated: true, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Limits: truncated field name and value', + limits: { fieldNameSize: 2, fieldSize: 2 } + }, + { source: ['foo=bar&baz=bla'], + expected: [ + ['fo', + '', + 
{ nameTruncated: true, + valueTruncated: true, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['ba', + '', + { nameTruncated: true, + valueTruncated: true, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Limits: truncated field name and zero value limit', + limits: { fieldNameSize: 2, fieldSize: 0 } + }, + { source: ['foo=bar&baz=bla'], + expected: [ + ['', + '', + { nameTruncated: true, + valueTruncated: true, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['', + '', + { nameTruncated: true, + valueTruncated: true, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Limits: truncated zero field name and zero value limit', + limits: { fieldNameSize: 0, fieldSize: 0 } + }, + { source: ['&'], + expected: [], + what: 'Ampersand' + }, + { source: ['&&&&&'], + expected: [], + what: 'Many ampersands' + }, + { source: ['='], + expected: [ + ['', + '', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Assigned value, empty name and value' + }, + { source: [''], + expected: [], + what: 'Nothing' + }, +]; + +for (const test of tests) { + active.set(test, 1); + + const { what } = test; + const charset = test.charset || 'utf-8'; + const bb = busboy({ + limits: test.limits, + headers: { + 'content-type': `application/x-www-form-urlencoded; charset=${charset}`, + }, + }); + const results = []; + + bb.on('field', (key, val, info) => { + results.push([key, val, info]); + }); + + bb.on('file', () => { + throw new Error(`[${what}] Unexpected file`); + }); + + bb.on('close', () => { + active.delete(test); + + assert.deepStrictEqual( + results, + test.expected, + `[${what}] Results mismatch.\n` + + `Parsed: ${inspect(results)}\n` + + `Expected: ${inspect(test.expected)}` + ); + }); + + for (const src of test.source) { + const buf = (typeof src === 'string' ? Buffer.from(src, 'utf8') : src); + bb.write(buf); + } + bb.end(); +} + +// Byte-by-byte versions +for (let test of tests) { + test = { ...test }; + test.what += ' (byte-by-byte)'; + active.set(test, 1); + + const { what } = test; + const charset = test.charset || 'utf-8'; + const bb = busboy({ + limits: test.limits, + headers: { + 'content-type': `application/x-www-form-urlencoded; charset="${charset}"`, + }, + }); + const results = []; + + bb.on('field', (key, val, info) => { + results.push([key, val, info]); + }); + + bb.on('file', () => { + throw new Error(`[${what}] Unexpected file`); + }); + + bb.on('close', () => { + active.delete(test); + + assert.deepStrictEqual( + results, + test.expected, + `[${what}] Results mismatch.\n` + + `Parsed: ${inspect(results)}\n` + + `Expected: ${inspect(test.expected)}` + ); + }); + + for (const src of test.source) { + const buf = (typeof src === 'string' ? 
Buffer.from(src, 'utf8') : src); + for (let i = 0; i < buf.length; ++i) + bb.write(buf.slice(i, i + 1)); + } + bb.end(); +} + +{ + let exception = false; + process.once('uncaughtException', (ex) => { + exception = true; + throw ex; + }); + process.on('exit', () => { + if (exception || active.size === 0) + return; + process.exitCode = 1; + console.error('=========================='); + console.error(`${active.size} test(s) did not finish:`); + console.error('=========================='); + console.error(Array.from(active.keys()).map((v) => v.what).join('\n')); + }); +} diff --git a/backend/Backend/node_modules/busboy/test/test.js b/backend/Backend/node_modules/busboy/test/test.js new file mode 100644 index 000000000..d0380f29d --- /dev/null +++ b/backend/Backend/node_modules/busboy/test/test.js @@ -0,0 +1,20 @@ +'use strict'; + +const { spawnSync } = require('child_process'); +const { readdirSync } = require('fs'); +const { join } = require('path'); + +const files = readdirSync(__dirname).sort(); +for (const filename of files) { + if (filename.startsWith('test-')) { + const path = join(__dirname, filename); + console.log(`> Running ${filename} ...`); + const result = spawnSync(`${process.argv0} ${path}`, { + shell: true, + stdio: 'inherit', + windowsHide: true + }); + if (result.status !== 0) + process.exitCode = 1; + } +} diff --git a/backend/Backend/node_modules/bytes/History.md b/backend/Backend/node_modules/bytes/History.md new file mode 100644 index 000000000..d60ce0e6d --- /dev/null +++ b/backend/Backend/node_modules/bytes/History.md @@ -0,0 +1,97 @@ +3.1.2 / 2022-01-27 +================== + + * Fix return value for un-parsable strings + +3.1.1 / 2021-11-15 +================== + + * Fix "thousandsSeparator" incorrecting formatting fractional part + +3.1.0 / 2019-01-22 +================== + + * Add petabyte (`pb`) support + +3.0.0 / 2017-08-31 +================== + + * Change "kB" to "KB" in format output + * Remove support for Node.js 0.6 + * Remove support for ComponentJS + +2.5.0 / 2017-03-24 +================== + + * Add option "unit" + +2.4.0 / 2016-06-01 +================== + + * Add option "unitSeparator" + +2.3.0 / 2016-02-15 +================== + + * Drop partial bytes on all parsed units + * Fix non-finite numbers to `.format` to return `null` + * Fix parsing byte string that looks like hex + * perf: hoist regular expressions + +2.2.0 / 2015-11-13 +================== + + * add option "decimalPlaces" + * add option "fixedDecimals" + +2.1.0 / 2015-05-21 +================== + + * add `.format` export + * add `.parse` export + +2.0.2 / 2015-05-20 +================== + + * remove map recreation + * remove unnecessary object construction + +2.0.1 / 2015-05-07 +================== + + * fix browserify require + * remove node.extend dependency + +2.0.0 / 2015-04-12 +================== + + * add option "case" + * add option "thousandsSeparator" + * return "null" on invalid parse input + * support proper round-trip: bytes(bytes(num)) === num + * units no longer case sensitive when parsing + +1.0.0 / 2014-05-05 +================== + + * add negative support. 
fixes #6 + +0.3.0 / 2014-03-19 +================== + + * added terabyte support + +0.2.1 / 2013-04-01 +================== + + * add .component + +0.2.0 / 2012-10-28 +================== + + * bytes(200).should.eql('200b') + +0.1.0 / 2012-07-04 +================== + + * add bytes to string conversion [yields] diff --git a/backend/Backend/node_modules/bytes/LICENSE b/backend/Backend/node_modules/bytes/LICENSE new file mode 100644 index 000000000..63e95a963 --- /dev/null +++ b/backend/Backend/node_modules/bytes/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2012-2014 TJ Holowaychuk +Copyright (c) 2015 Jed Watson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/backend/Backend/node_modules/bytes/Readme.md b/backend/Backend/node_modules/bytes/Readme.md new file mode 100644 index 000000000..5790e23e3 --- /dev/null +++ b/backend/Backend/node_modules/bytes/Readme.md @@ -0,0 +1,152 @@ +# Bytes utility + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Build Status][ci-image]][ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Utility to parse a string bytes (ex: `1TB`) to bytes (`1099511627776`) and vice-versa. + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```bash +$ npm install bytes +``` + +## Usage + +```js +var bytes = require('bytes'); +``` + +#### bytes(number|string value, [options]): number|string|null + +Default export function. Delegates to either `bytes.format` or `bytes.parse` based on the type of `value`. + +**Arguments** + +| Name | Type | Description | +|---------|----------|--------------------| +| value | `number`|`string` | Number value to format or string value to parse | +| options | `Object` | Conversion options for `format` | + +**Returns** + +| Name | Type | Description | +|---------|------------------|-------------------------------------------------| +| results | `string`|`number`|`null` | Return null upon error. Numeric value in bytes, or string value otherwise. | + +**Example** + +```js +bytes(1024); +// output: '1KB' + +bytes('1KB'); +// output: 1024 +``` + +#### bytes.format(number value, [options]): string|null + +Format the given value in bytes into a string. If the value is negative, it is kept as such. If it is a float, it is + rounded. 
+ +**Arguments** + +| Name | Type | Description | +|---------|----------|--------------------| +| value | `number` | Value in bytes | +| options | `Object` | Conversion options | + +**Options** + +| Property | Type | Description | +|-------------------|--------|-----------------------------------------------------------------------------------------| +| decimalPlaces | `number`|`null` | Maximum number of decimal places to include in output. Default value to `2`. | +| fixedDecimals | `boolean`|`null` | Whether to always display the maximum number of decimal places. Default value to `false` | +| thousandsSeparator | `string`|`null` | Example of values: `' '`, `','` and `'.'`... Default value to `''`. | +| unit | `string`|`null` | The unit in which the result will be returned (B/KB/MB/GB/TB). Default value to `''` (which means auto detect). | +| unitSeparator | `string`|`null` | Separator to use between number and unit. Default value to `''`. | + +**Returns** + +| Name | Type | Description | +|---------|------------------|-------------------------------------------------| +| results | `string`|`null` | Return null upon error. String value otherwise. | + +**Example** + +```js +bytes.format(1024); +// output: '1KB' + +bytes.format(1000); +// output: '1000B' + +bytes.format(1000, {thousandsSeparator: ' '}); +// output: '1 000B' + +bytes.format(1024 * 1.7, {decimalPlaces: 0}); +// output: '2KB' + +bytes.format(1024, {unitSeparator: ' '}); +// output: '1 KB' +``` + +#### bytes.parse(string|number value): number|null + +Parse the string value into an integer in bytes. If no unit is given, or `value` +is a number, it is assumed the value is in bytes. + +Supported units and abbreviations are as follows and are case-insensitive: + + * `b` for bytes + * `kb` for kilobytes + * `mb` for megabytes + * `gb` for gigabytes + * `tb` for terabytes + * `pb` for petabytes + +The units are in powers of two, not ten. This means 1kb = 1024b according to this parser. + +**Arguments** + +| Name | Type | Description | +|---------------|--------|--------------------| +| value | `string`|`number` | String to parse, or number in bytes. | + +**Returns** + +| Name | Type | Description | +|---------|-------------|-------------------------| +| results | `number`|`null` | Return null upon error. Value in bytes otherwise. | + +**Example** + +```js +bytes.parse('1KB'); +// output: 1024 + +bytes.parse('1024'); +// output: 1024 + +bytes.parse(1024); +// output: 1024 +``` + +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/visionmedia/bytes.js/master?label=ci +[ci-url]: https://github.com/visionmedia/bytes.js/actions?query=workflow%3Aci +[coveralls-image]: https://badgen.net/coveralls/c/github/visionmedia/bytes.js/master +[coveralls-url]: https://coveralls.io/r/visionmedia/bytes.js?branch=master +[downloads-image]: https://badgen.net/npm/dm/bytes +[downloads-url]: https://npmjs.org/package/bytes +[npm-image]: https://badgen.net/npm/v/bytes +[npm-url]: https://npmjs.org/package/bytes diff --git a/backend/Backend/node_modules/bytes/index.js b/backend/Backend/node_modules/bytes/index.js new file mode 100644 index 000000000..6f2d0f89e --- /dev/null +++ b/backend/Backend/node_modules/bytes/index.js @@ -0,0 +1,170 @@ +/*! + * bytes + * Copyright(c) 2012-2014 TJ Holowaychuk + * Copyright(c) 2015 Jed Watson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +module.exports = bytes; +module.exports.format = format; +module.exports.parse = parse; + +/** + * Module variables. 
+ * @private + */ + +var formatThousandsRegExp = /\B(?=(\d{3})+(?!\d))/g; + +var formatDecimalsRegExp = /(?:\.0*|(\.[^0]+)0+)$/; + +var map = { + b: 1, + kb: 1 << 10, + mb: 1 << 20, + gb: 1 << 30, + tb: Math.pow(1024, 4), + pb: Math.pow(1024, 5), +}; + +var parseRegExp = /^((-|\+)?(\d+(?:\.\d+)?)) *(kb|mb|gb|tb|pb)$/i; + +/** + * Convert the given value in bytes into a string or parse to string to an integer in bytes. + * + * @param {string|number} value + * @param {{ + * case: [string], + * decimalPlaces: [number] + * fixedDecimals: [boolean] + * thousandsSeparator: [string] + * unitSeparator: [string] + * }} [options] bytes options. + * + * @returns {string|number|null} + */ + +function bytes(value, options) { + if (typeof value === 'string') { + return parse(value); + } + + if (typeof value === 'number') { + return format(value, options); + } + + return null; +} + +/** + * Format the given value in bytes into a string. + * + * If the value is negative, it is kept as such. If it is a float, + * it is rounded. + * + * @param {number} value + * @param {object} [options] + * @param {number} [options.decimalPlaces=2] + * @param {number} [options.fixedDecimals=false] + * @param {string} [options.thousandsSeparator=] + * @param {string} [options.unit=] + * @param {string} [options.unitSeparator=] + * + * @returns {string|null} + * @public + */ + +function format(value, options) { + if (!Number.isFinite(value)) { + return null; + } + + var mag = Math.abs(value); + var thousandsSeparator = (options && options.thousandsSeparator) || ''; + var unitSeparator = (options && options.unitSeparator) || ''; + var decimalPlaces = (options && options.decimalPlaces !== undefined) ? options.decimalPlaces : 2; + var fixedDecimals = Boolean(options && options.fixedDecimals); + var unit = (options && options.unit) || ''; + + if (!unit || !map[unit.toLowerCase()]) { + if (mag >= map.pb) { + unit = 'PB'; + } else if (mag >= map.tb) { + unit = 'TB'; + } else if (mag >= map.gb) { + unit = 'GB'; + } else if (mag >= map.mb) { + unit = 'MB'; + } else if (mag >= map.kb) { + unit = 'KB'; + } else { + unit = 'B'; + } + } + + var val = value / map[unit.toLowerCase()]; + var str = val.toFixed(decimalPlaces); + + if (!fixedDecimals) { + str = str.replace(formatDecimalsRegExp, '$1'); + } + + if (thousandsSeparator) { + str = str.split('.').map(function (s, i) { + return i === 0 + ? s.replace(formatThousandsRegExp, thousandsSeparator) + : s + }).join('.'); + } + + return str + unitSeparator + unit; +} + +/** + * Parse the string value into an integer in bytes. + * + * If no unit is given, it is assumed the value is in bytes. 
+ * + * @param {number|string} val + * + * @returns {number|null} + * @public + */ + +function parse(val) { + if (typeof val === 'number' && !isNaN(val)) { + return val; + } + + if (typeof val !== 'string') { + return null; + } + + // Test if the string passed is valid + var results = parseRegExp.exec(val); + var floatValue; + var unit = 'b'; + + if (!results) { + // Nothing could be extracted from the given string + floatValue = parseInt(val, 10); + unit = 'b' + } else { + // Retrieve the value and the unit + floatValue = parseFloat(results[1]); + unit = results[4].toLowerCase(); + } + + if (isNaN(floatValue)) { + return null; + } + + return Math.floor(map[unit] * floatValue); +} diff --git a/backend/Backend/node_modules/bytes/package.json b/backend/Backend/node_modules/bytes/package.json new file mode 100644 index 000000000..f2b6a8b0e --- /dev/null +++ b/backend/Backend/node_modules/bytes/package.json @@ -0,0 +1,42 @@ +{ + "name": "bytes", + "description": "Utility to parse a string bytes to bytes and vice-versa", + "version": "3.1.2", + "author": "TJ Holowaychuk (http://tjholowaychuk.com)", + "contributors": [ + "Jed Watson ", + "Théo FIDRY " + ], + "license": "MIT", + "keywords": [ + "byte", + "bytes", + "utility", + "parse", + "parser", + "convert", + "converter" + ], + "repository": "visionmedia/bytes.js", + "devDependencies": { + "eslint": "7.32.0", + "eslint-plugin-markdown": "2.2.1", + "mocha": "9.2.0", + "nyc": "15.1.0" + }, + "files": [ + "History.md", + "LICENSE", + "Readme.md", + "index.js" + ], + "engines": { + "node": ">= 0.8" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --check-leaks --reporter spec", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + } +} diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/.eslintrc b/backend/Backend/node_modules/call-bind-apply-helpers/.eslintrc new file mode 100644 index 000000000..dfa9a6cdc --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/.eslintrc @@ -0,0 +1,16 @@ +{ + "root": true, + + "extends": "@ljharb", + + "rules": { + "func-name-matching": 0, + "id-length": 0, + "new-cap": [2, { + "capIsNewExceptions": [ + "GetIntrinsic", + ], + }], + "no-magic-numbers": 0, + }, +} diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/.github/FUNDING.yml b/backend/Backend/node_modules/call-bind-apply-helpers/.github/FUNDING.yml new file mode 100644 index 000000000..0011e9d65 --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: [ljharb] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/call-bind-apply-helpers +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/.nycrc b/backend/Backend/node_modules/call-bind-apply-helpers/.nycrc new file mode 100644 index 000000000..bdd626ce9 --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/.nycrc @@ -0,0 +1,9 @@ +{ + "all": true, + "check-coverage": false, + "reporter": 
["text-summary", "text", "html", "json"], + "exclude": [ + "coverage", + "test" + ] +} diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/CHANGELOG.md b/backend/Backend/node_modules/call-bind-apply-helpers/CHANGELOG.md new file mode 100644 index 000000000..cf630e88e --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/CHANGELOG.md @@ -0,0 +1,23 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [v1.0.1](https://github.com/ljharb/call-bind-apply-helpers/compare/v1.0.0...v1.0.1) - 2024-12-08 + +### Commits + +- [types] `reflectApply`: fix types [`4efc396`](https://github.com/ljharb/call-bind-apply-helpers/commit/4efc3965351a4f02cc55e836fa391d3d11ef2ef8) +- [Fix] `reflectApply`: oops, Reflect is not a function [`83cc739`](https://github.com/ljharb/call-bind-apply-helpers/commit/83cc7395de6b79b7730bdf092f1436f0b1263c75) +- [Dev Deps] update `@arethetypeswrong/cli` [`80bd5d3`](https://github.com/ljharb/call-bind-apply-helpers/commit/80bd5d3ae58b4f6b6995ce439dd5a1bcb178a940) + +## v1.0.0 - 2024-12-05 + +### Commits + +- Initial implementation, tests, readme [`7879629`](https://github.com/ljharb/call-bind-apply-helpers/commit/78796290f9b7430c9934d6f33d94ae9bc89fce04) +- Initial commit [`3f1dc16`](https://github.com/ljharb/call-bind-apply-helpers/commit/3f1dc164afc43285631b114a5f9dd9137b2b952f) +- npm init [`081df04`](https://github.com/ljharb/call-bind-apply-helpers/commit/081df048c312fcee400922026f6e97281200a603) +- Only apps should have lockfiles [`5b9ca0f`](https://github.com/ljharb/call-bind-apply-helpers/commit/5b9ca0fe8101ebfaf309c549caac4e0a017ed930) diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/LICENSE b/backend/Backend/node_modules/call-bind-apply-helpers/LICENSE new file mode 100644 index 000000000..f82f38963 --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 Jordan Harband + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/README.md b/backend/Backend/node_modules/call-bind-apply-helpers/README.md new file mode 100644 index 000000000..8fc0dae1b --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/README.md @@ -0,0 +1,62 @@ +# call-bind-apply-helpers [![Version Badge][npm-version-svg]][package-url] + +[![github actions][actions-image]][actions-url] +[![coverage][codecov-image]][codecov-url] +[![dependency status][deps-svg]][deps-url] +[![dev dependency status][dev-deps-svg]][dev-deps-url] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][npm-badge-png]][package-url] + +Helper functions around Function call/apply/bind, for use in `call-bind`. + +The only packages that should likely ever use this package directly are `call-bind` and `get-intrinsic`. +Please use `call-bind` unless you have a very good reason not to. + +## Getting started + +```sh +npm install --save call-bind-apply-helpers +``` + +## Usage/Examples + +```js +const assert = require('assert'); +const callBindBasic = require('call-bind-apply-helpers'); + +function f(a, b) { + assert.equal(this, 1); + assert.equal(a, 2); + assert.equal(b, 3); + assert.equal(arguments.length, 2); +} + +const fBound = callBindBasic([f, 1]); + +delete Function.prototype.call; +delete Function.prototype.bind; + +fBound(2, 3); +``` + +## Tests + +Clone the repo, `npm install`, and run `npm test` + +[package-url]: https://npmjs.org/package/call-bind-apply-helpers +[npm-version-svg]: https://versionbadg.es/ljharb/call-bind-apply-helpers.svg +[deps-svg]: https://david-dm.org/ljharb/call-bind-apply-helpers.svg +[deps-url]: https://david-dm.org/ljharb/call-bind-apply-helpers +[dev-deps-svg]: https://david-dm.org/ljharb/call-bind-apply-helpers/dev-status.svg +[dev-deps-url]: https://david-dm.org/ljharb/call-bind-apply-helpers#info=devDependencies +[npm-badge-png]: https://nodei.co/npm/call-bind-apply-helpers.png?downloads=true&stars=true +[license-image]: https://img.shields.io/npm/l/call-bind-apply-helpers.svg +[license-url]: LICENSE +[downloads-image]: https://img.shields.io/npm/dm/call-bind-apply-helpers.svg +[downloads-url]: https://npm-stat.com/charts.html?package=call-bind-apply-helpers +[codecov-image]: https://codecov.io/gh/ljharb/call-bind-apply-helpers/branch/main/graphs/badge.svg +[codecov-url]: https://app.codecov.io/gh/ljharb/call-bind-apply-helpers/ +[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/ljharb/call-bind-apply-helpers +[actions-url]: https://github.com/ljharb/call-bind-apply-helpers/actions diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/actualApply.d.ts b/backend/Backend/node_modules/call-bind-apply-helpers/actualApply.d.ts new file mode 100644 index 000000000..b87286a21 --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/actualApply.d.ts @@ -0,0 +1 @@ +export = Reflect.apply; \ No newline at end of file diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/actualApply.js b/backend/Backend/node_modules/call-bind-apply-helpers/actualApply.js new file mode 100644 index 000000000..ffa51355d --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/actualApply.js @@ -0,0 +1,10 @@ +'use strict'; + +var bind = require('function-bind'); + +var $apply = require('./functionApply'); +var $call = require('./functionCall'); +var $reflectApply = require('./reflectApply'); + +/** @type 
{import('./actualApply')} */ +module.exports = $reflectApply || bind.call($call, $apply); diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/applyBind.d.ts b/backend/Backend/node_modules/call-bind-apply-helpers/applyBind.d.ts new file mode 100644 index 000000000..d176c1ab3 --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/applyBind.d.ts @@ -0,0 +1,19 @@ +import actualApply from './actualApply'; + +type TupleSplitHead = T['length'] extends N + ? T + : T extends [...infer R, any] + ? TupleSplitHead + : never + +type TupleSplitTail = O['length'] extends N + ? T + : T extends [infer F, ...infer R] + ? TupleSplitTail<[...R], N, [...O, F]> + : never + +type TupleSplit = [TupleSplitHead, TupleSplitTail] + +declare function applyBind(...args: TupleSplit, 2>[1]): ReturnType; + +export = applyBind; \ No newline at end of file diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/applyBind.js b/backend/Backend/node_modules/call-bind-apply-helpers/applyBind.js new file mode 100644 index 000000000..d2b772314 --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/applyBind.js @@ -0,0 +1,10 @@ +'use strict'; + +var bind = require('function-bind'); +var $apply = require('./functionApply'); +var actualApply = require('./actualApply'); + +/** @type {import('./applyBind')} */ +module.exports = function applyBind() { + return actualApply(bind, $apply, arguments); +}; diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/functionApply.d.ts b/backend/Backend/node_modules/call-bind-apply-helpers/functionApply.d.ts new file mode 100644 index 000000000..1f6e11b3d --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/functionApply.d.ts @@ -0,0 +1 @@ +export = Function.prototype.apply; \ No newline at end of file diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/functionApply.js b/backend/Backend/node_modules/call-bind-apply-helpers/functionApply.js new file mode 100644 index 000000000..c71df9c2b --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/functionApply.js @@ -0,0 +1,4 @@ +'use strict'; + +/** @type {import('./functionApply')} */ +module.exports = Function.prototype.apply; diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/functionCall.d.ts b/backend/Backend/node_modules/call-bind-apply-helpers/functionCall.d.ts new file mode 100644 index 000000000..15e93df35 --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/functionCall.d.ts @@ -0,0 +1 @@ +export = Function.prototype.call; \ No newline at end of file diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/functionCall.js b/backend/Backend/node_modules/call-bind-apply-helpers/functionCall.js new file mode 100644 index 000000000..7a8d87357 --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/functionCall.js @@ -0,0 +1,4 @@ +'use strict'; + +/** @type {import('./functionCall')} */ +module.exports = Function.prototype.call; diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/index.d.ts b/backend/Backend/node_modules/call-bind-apply-helpers/index.d.ts new file mode 100644 index 000000000..a7ae2c57c --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/index.d.ts @@ -0,0 +1,46 @@ +type RemoveFromTuple< + Tuple extends unknown[], + RemoveCount extends number, + Index extends 1[] = [] +> = Index["length"] extends RemoveCount + ? Tuple + : Tuple extends [first: unknown, ...infer Rest] + ? 
RemoveFromTuple + : Tuple; + +type ConcatTuples< + Prefix extends unknown[], + Suffix extends unknown[] +> = [...Prefix, ...Suffix]; + +type ReplaceThis = T extends (this: infer OldThis, ...args: infer A) => infer R + ? (this: NewThis, ...args: A) => R + : never; + +type BindFunction< + TThis, + T extends (this: TThis, ...args: any[]) => any, // Allow specific types to propagate + TBoundArgs extends unknown[], + ReceiverBound extends boolean +> = ReceiverBound extends true + ? (...args: RemoveFromTuple, TBoundArgs["length"] & number>) => ReturnType> + : (...args: ConcatTuples<[TThis], RemoveFromTuple, TBoundArgs["length"] & number>>) => ReturnType; + +declare function callBind< + TThis, + T extends (this: TThis, ...args: any[]) => any, + TBoundArgs extends Partial> +>( + args: [fn: T, thisArg: TThis, ...boundArgs: TBoundArgs] +): BindFunction; + +declare function callBind< + TThis, + T extends (this: TThis, ...args: any[]) => any, + TBoundArgs extends Partial> +>( + args: [fn: T, ...boundArgs: TBoundArgs] +): BindFunction; + +export as namespace callBind; +export = callBind; diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/index.js b/backend/Backend/node_modules/call-bind-apply-helpers/index.js new file mode 100644 index 000000000..8b6b99463 --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/index.js @@ -0,0 +1,15 @@ +'use strict'; + +var bind = require('function-bind'); +var $TypeError = require('es-errors/type'); + +var $call = require('./functionCall'); +var $actualApply = require('./actualApply'); + +/** @type {import('.')} */ +module.exports = function callBindBasic(args) { + if (args.length < 1 || typeof args[0] !== 'function') { + throw new $TypeError('a function is required'); + } + return $actualApply(bind, $call, args); +}; diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/package.json b/backend/Backend/node_modules/call-bind-apply-helpers/package.json new file mode 100644 index 000000000..7398be704 --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/package.json @@ -0,0 +1,85 @@ +{ + "name": "call-bind-apply-helpers", + "version": "1.0.1", + "description": "Helper functions around Function call/apply/bind, for use in `call-bind`", + "main": "index.js", + "exports": { + ".": "./index.js", + "./actualApply": "./actualApply.js", + "./applyBind": "./applyBind.js", + "./functionApply": "./functionApply.js", + "./functionCall": "./functionCall.js", + "./reflectApply": "./reflectApply.js", + "./package.json": "./package.json" + }, + "scripts": { + "prepack": "npmignore --auto --commentLines=auto", + "prepublish": "not-in-publish || npm run prepublishOnly", + "prepublishOnly": "safe-publish-latest", + "prelint": "evalmd README.md", + "lint": "eslint --ext=.js,.mjs .", + "postlint": "tsc -p . 
&& attw -P", + "pretest": "npm run lint", + "tests-only": "nyc tape 'test/**/*.js'", + "test": "npm run tests-only", + "posttest": "npx npm@'>=10.2' audit --production", + "version": "auto-changelog && git add CHANGELOG.md", + "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/ljharb/call-bind-apply-helpers.git" + }, + "author": "Jordan Harband ", + "license": "MIT", + "bugs": { + "url": "https://github.com/ljharb/call-bind-apply-helpers/issues" + }, + "homepage": "https://github.com/ljharb/call-bind-apply-helpers#readme", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "devDependencies": { + "@arethetypeswrong/cli": "^0.17.1", + "@ljharb/eslint-config": "^21.1.1", + "@ljharb/tsconfig": "^0.2.2", + "@types/for-each": "^0.3.3", + "@types/function-bind": "^1.1.10", + "@types/object-inspect": "^1.13.0", + "@types/tape": "^5.6.5", + "auto-changelog": "^2.5.0", + "encoding": "^0.1.13", + "es-value-fixtures": "^1.5.0", + "eslint": "=8.8.0", + "evalmd": "^0.0.19", + "for-each": "^0.3.3", + "has-strict-mode": "^1.0.1", + "in-publish": "^2.0.1", + "npmignore": "^0.3.1", + "nyc": "^10.3.2", + "object-inspect": "^1.13.3", + "safe-publish-latest": "^2.0.0", + "tape": "^5.9.0", + "typescript": "next" + }, + "testling": { + "files": "test/index.js" + }, + "auto-changelog": { + "output": "CHANGELOG.md", + "template": "keepachangelog", + "unreleased": false, + "commitLimit": false, + "backfillLimit": false, + "hideCredit": true + }, + "publishConfig": { + "ignore": [ + ".github/workflows" + ] + }, + "engines": { + "node": ">= 0.4" + } +} diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/reflectApply.d.ts b/backend/Backend/node_modules/call-bind-apply-helpers/reflectApply.d.ts new file mode 100644 index 000000000..6b2ae764c --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/reflectApply.d.ts @@ -0,0 +1,3 @@ +declare const reflectApply: false | typeof Reflect.apply; + +export = reflectApply; diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/reflectApply.js b/backend/Backend/node_modules/call-bind-apply-helpers/reflectApply.js new file mode 100644 index 000000000..3d03caa69 --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/reflectApply.js @@ -0,0 +1,4 @@ +'use strict'; + +/** @type {import('./reflectApply')} */ +module.exports = typeof Reflect !== 'undefined' && Reflect && Reflect.apply; diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/test/index.js b/backend/Backend/node_modules/call-bind-apply-helpers/test/index.js new file mode 100644 index 000000000..8acc08a68 --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/test/index.js @@ -0,0 +1,63 @@ +'use strict'; + +var callBind = require('../'); +var hasStrictMode = require('has-strict-mode')(); +var forEach = require('for-each'); +var inspect = require('object-inspect'); +var v = require('es-value-fixtures'); + +var test = require('tape'); + +test('callBindBasic', function (t) { + forEach(v.nonFunctions, function (nonFunction) { + t['throws']( + // @ts-expect-error + function () { callBind([nonFunction]); }, + TypeError, + inspect(nonFunction) + ' is not a function' + ); + }); + + var sentinel = { sentinel: true }; + /** @type {(this: T, a: number, b: number) => [T | undefined, number, number]} */ + var func = function (a, b) { 
+ // eslint-disable-next-line no-invalid-this + return [!hasStrictMode && this === global ? undefined : this, a, b]; + }; + t.equal(func.length, 2, 'original function length is 2'); + + /** type {(thisArg: unknown, a: number, b: number) => [unknown, number, number]} */ + var bound = callBind([func]); + /** type {((a: number, b: number) => [sentinel, typeof a, typeof b])} */ + var boundR = callBind([func, sentinel]); + /** type {((b: number) => [sentinel, number, typeof b])} */ + var boundArg = callBind([func, sentinel, 1]); + + // @ts-expect-error + t.deepEqual(bound(), [undefined, undefined, undefined], 'bound func with no args'); + + // @ts-expect-error + t.deepEqual(func(), [undefined, undefined, undefined], 'unbound func with too few args'); + // @ts-expect-error + t.deepEqual(bound(1, 2), [hasStrictMode ? 1 : Object(1), 2, undefined], 'bound func too few args'); + // @ts-expect-error + t.deepEqual(boundR(), [sentinel, undefined, undefined], 'bound func with receiver, with too few args'); + // @ts-expect-error + t.deepEqual(boundArg(), [sentinel, 1, undefined], 'bound func with receiver and arg, with too few args'); + + t.deepEqual(func(1, 2), [undefined, 1, 2], 'unbound func with right args'); + t.deepEqual(bound(1, 2, 3), [hasStrictMode ? 1 : Object(1), 2, 3], 'bound func with right args'); + t.deepEqual(boundR(1, 2), [sentinel, 1, 2], 'bound func with receiver, with right args'); + t.deepEqual(boundArg(2), [sentinel, 1, 2], 'bound func with receiver and arg, with right arg'); + + // @ts-expect-error + t.deepEqual(func(1, 2, 3), [undefined, 1, 2], 'unbound func with too many args'); + // @ts-expect-error + t.deepEqual(bound(1, 2, 3, 4), [hasStrictMode ? 1 : Object(1), 2, 3], 'bound func with too many args'); + // @ts-expect-error + t.deepEqual(boundR(1, 2, 3), [sentinel, 1, 2], 'bound func with receiver, with too many args'); + // @ts-expect-error + t.deepEqual(boundArg(2, 3), [sentinel, 1, 2], 'bound func with receiver and arg, with too many args'); + + t.end(); +}); diff --git a/backend/Backend/node_modules/call-bind-apply-helpers/tsconfig.json b/backend/Backend/node_modules/call-bind-apply-helpers/tsconfig.json new file mode 100644 index 000000000..aef999308 --- /dev/null +++ b/backend/Backend/node_modules/call-bind-apply-helpers/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "@ljharb/tsconfig", + "compilerOptions": { + "target": "es2021", + }, + "exclude": [ + "coverage", + ], +} \ No newline at end of file diff --git a/backend/Backend/node_modules/call-bound/.eslintrc b/backend/Backend/node_modules/call-bound/.eslintrc new file mode 100644 index 000000000..2612ed8fe --- /dev/null +++ b/backend/Backend/node_modules/call-bound/.eslintrc @@ -0,0 +1,13 @@ +{ + "root": true, + + "extends": "@ljharb", + + "rules": { + "new-cap": [2, { + "capIsNewExceptions": [ + "GetIntrinsic", + ], + }], + }, +} diff --git a/backend/Backend/node_modules/call-bound/.github/FUNDING.yml b/backend/Backend/node_modules/call-bound/.github/FUNDING.yml new file mode 100644 index 000000000..2a2a13571 --- /dev/null +++ b/backend/Backend/node_modules/call-bound/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: [ljharb] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/call-bound +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # 
Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/backend/Backend/node_modules/call-bound/.nycrc b/backend/Backend/node_modules/call-bound/.nycrc new file mode 100644 index 000000000..bdd626ce9 --- /dev/null +++ b/backend/Backend/node_modules/call-bound/.nycrc @@ -0,0 +1,9 @@ +{ + "all": true, + "check-coverage": false, + "reporter": ["text-summary", "text", "html", "json"], + "exclude": [ + "coverage", + "test" + ] +} diff --git a/backend/Backend/node_modules/call-bound/CHANGELOG.md b/backend/Backend/node_modules/call-bound/CHANGELOG.md new file mode 100644 index 000000000..25fa7a5e1 --- /dev/null +++ b/backend/Backend/node_modules/call-bound/CHANGELOG.md @@ -0,0 +1,34 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [v1.0.3](https://github.com/ljharb/call-bound/compare/v1.0.2...v1.0.3) - 2024-12-15 + +### Commits + +- [Refactor] use `call-bind-apply-helpers` instead of `call-bind` [`5e0b134`](https://github.com/ljharb/call-bound/commit/5e0b13496df14fb7d05dae9412f088da8d3f75be) +- [Deps] update `get-intrinsic` [`41fc967`](https://github.com/ljharb/call-bound/commit/41fc96732a22c7b7e8f381f93ccc54bb6293be2e) +- [readme] fix example [`79a0137`](https://github.com/ljharb/call-bound/commit/79a0137723f7c6d09c9c05452bbf8d5efb5d6e49) +- [meta] add `sideEffects` flag [`08b07be`](https://github.com/ljharb/call-bound/commit/08b07be7f1c03f67dc6f3cdaf0906259771859f7) + +## [v1.0.2](https://github.com/ljharb/call-bound/compare/v1.0.1...v1.0.2) - 2024-12-10 + +### Commits + +- [Dev Deps] update `@arethetypeswrong/cli`, `@ljharb/tsconfig`, `gopd` [`e6a5ffe`](https://github.com/ljharb/call-bound/commit/e6a5ffe849368fe4f74dfd6cdeca1b9baa39e8d5) +- [Deps] update `call-bind`, `get-intrinsic` [`2aeb5b5`](https://github.com/ljharb/call-bound/commit/2aeb5b521dc2b2683d1345c753ea1161de2d1c14) +- [types] improve return type [`1a0c9fe`](https://github.com/ljharb/call-bound/commit/1a0c9fe3114471e7ca1f57d104e2efe713bb4871) + +## v1.0.1 - 2024-12-05 + +### Commits + +- Initial implementation, tests, readme, types [`6d94121`](https://github.com/ljharb/call-bound/commit/6d94121a9243602e506334069f7a03189fe3363d) +- Initial commit [`0eae867`](https://github.com/ljharb/call-bound/commit/0eae867334ea025c33e6e91cdecfc9df96680cf9) +- npm init [`71b2479`](https://github.com/ljharb/call-bound/commit/71b2479c6723e0b7d91a6b663613067e98b7b275) +- Only apps should have lockfiles [`c3754a9`](https://github.com/ljharb/call-bound/commit/c3754a949b7f9132b47e2d18c1729889736741eb) +- [actions] skip `npm ls` in node < 10 [`74275a5`](https://github.com/ljharb/call-bound/commit/74275a5186b8caf6309b6b97472bdcb0df4683a8) +- [Dev Deps] add missing peer dep [`1354de8`](https://github.com/ljharb/call-bound/commit/1354de8679413e4ae9c523d85f76fa7a5e032d97) diff --git a/backend/Backend/node_modules/call-bound/LICENSE b/backend/Backend/node_modules/call-bound/LICENSE new file mode 100644 index 000000000..f82f38963 --- /dev/null +++ b/backend/Backend/node_modules/call-bound/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 Jordan Harband + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal 
+in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/backend/Backend/node_modules/call-bound/README.md b/backend/Backend/node_modules/call-bound/README.md new file mode 100644 index 000000000..a44e43e56 --- /dev/null +++ b/backend/Backend/node_modules/call-bound/README.md @@ -0,0 +1,53 @@ +# call-bound [![Version Badge][npm-version-svg]][package-url] + +[![github actions][actions-image]][actions-url] +[![coverage][codecov-image]][codecov-url] +[![dependency status][deps-svg]][deps-url] +[![dev dependency status][dev-deps-svg]][dev-deps-url] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][npm-badge-png]][package-url] + +Robust call-bound JavaScript intrinsics, using `call-bind` and `get-intrinsic`. + +## Getting started + +```sh +npm install --save call-bound +``` + +## Usage/Examples + +```js +const assert = require('assert'); +const callBound = require('call-bound'); + +const slice = callBound('Array.prototype.slice'); + +delete Function.prototype.call; +delete Function.prototype.bind; +delete Array.prototype.slice; + +assert.deepEqual(slice([1, 2, 3, 4], 1, -1), [2, 3]); +``` + +## Tests + +Clone the repo, `npm install`, and run `npm test` + +[package-url]: https://npmjs.org/package/call-bound +[npm-version-svg]: https://versionbadg.es/ljharb/call-bound.svg +[deps-svg]: https://david-dm.org/ljharb/call-bound.svg +[deps-url]: https://david-dm.org/ljharb/call-bound +[dev-deps-svg]: https://david-dm.org/ljharb/call-bound/dev-status.svg +[dev-deps-url]: https://david-dm.org/ljharb/call-bound#info=devDependencies +[npm-badge-png]: https://nodei.co/npm/call-bound.png?downloads=true&stars=true +[license-image]: https://img.shields.io/npm/l/call-bound.svg +[license-url]: LICENSE +[downloads-image]: https://img.shields.io/npm/dm/call-bound.svg +[downloads-url]: https://npm-stat.com/charts.html?package=call-bound +[codecov-image]: https://codecov.io/gh/ljharb/call-bound/branch/main/graphs/badge.svg +[codecov-url]: https://app.codecov.io/gh/ljharb/call-bound/ +[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/ljharb/call-bound +[actions-url]: https://github.com/ljharb/call-bound/actions diff --git a/backend/Backend/node_modules/call-bound/index.d.ts b/backend/Backend/node_modules/call-bound/index.d.ts new file mode 100644 index 000000000..e3d772ce5 --- /dev/null +++ b/backend/Backend/node_modules/call-bound/index.d.ts @@ -0,0 +1,13 @@ +import callBind from 'call-bind-apply-helpers'; + +declare function callBoundIntrinsic( + name: string, + allowMissing?: false +): ReturnType; + +declare function callBoundIntrinsic( + name: string, + allowMissing: true +): 
undefined | ReturnType; + +export = callBoundIntrinsic; \ No newline at end of file diff --git a/backend/Backend/node_modules/call-bound/index.js b/backend/Backend/node_modules/call-bound/index.js new file mode 100644 index 000000000..3bb40121a --- /dev/null +++ b/backend/Backend/node_modules/call-bound/index.js @@ -0,0 +1,18 @@ +'use strict'; + +var GetIntrinsic = require('get-intrinsic'); + +var callBindBasic = require('call-bind-apply-helpers'); + +/** @type {(thisArg: string, searchString: string, position?: number) => number} */ +var $indexOf = callBindBasic([GetIntrinsic('%String.prototype.indexOf%')]); + +/** @type {import('.')} */ +module.exports = function callBoundIntrinsic(name, allowMissing) { + // eslint-disable-next-line no-extra-parens + var intrinsic = /** @type {Parameters[0][0]} */ (GetIntrinsic(name, !!allowMissing)); + if (typeof intrinsic === 'function' && $indexOf(name, '.prototype.') > -1) { + return callBindBasic([intrinsic]); + } + return intrinsic; +}; diff --git a/backend/Backend/node_modules/call-bound/package.json b/backend/Backend/node_modules/call-bound/package.json new file mode 100644 index 000000000..2893ed11a --- /dev/null +++ b/backend/Backend/node_modules/call-bound/package.json @@ -0,0 +1,99 @@ +{ + "name": "call-bound", + "version": "1.0.3", + "description": "Robust call-bound JavaScript intrinsics, using `call-bind` and `get-intrinsic`.", + "main": "index.js", + "exports": { + ".": "./index.js", + "./package.json": "./package.json" + }, + "sideEffects": false, + "scripts": { + "prepack": "npmignore --auto --commentLines=auto", + "prepublish": "not-in-publish || npm run prepublishOnly", + "prepublishOnly": "safe-publish-latest", + "prelint": "evalmd README.md", + "lint": "eslint --ext=.js,.mjs .", + "postlint": "tsc -p . 
&& attw -P", + "pretest": "npm run lint", + "tests-only": "nyc tape 'test/**/*.js'", + "test": "npm run tests-only", + "posttest": "npx npm@'>=10.2' audit --production", + "version": "auto-changelog && git add CHANGELOG.md", + "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/ljharb/call-bound.git" + }, + "keywords": [ + "javascript", + "ecmascript", + "es", + "js", + "callbind", + "callbound", + "call", + "bind", + "bound", + "call-bind", + "call-bound", + "function", + "es-abstract" + ], + "author": "Jordan Harband ", + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/ljharb/call-bound/issues" + }, + "homepage": "https://github.com/ljharb/call-bound#readme", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "get-intrinsic": "^1.2.6" + }, + "devDependencies": { + "@arethetypeswrong/cli": "^0.17.1", + "@ljharb/eslint-config": "^21.1.1", + "@ljharb/tsconfig": "^0.2.2", + "@types/call-bind": "^1.0.5", + "@types/get-intrinsic": "^1.2.3", + "@types/tape": "^5.6.5", + "auto-changelog": "^2.5.0", + "encoding": "^0.1.13", + "es-value-fixtures": "^1.5.0", + "eslint": "=8.8.0", + "evalmd": "^0.0.19", + "for-each": "^0.3.3", + "gopd": "^1.2.0", + "has-strict-mode": "^1.0.1", + "in-publish": "^2.0.1", + "npmignore": "^0.3.1", + "nyc": "^10.3.2", + "object-inspect": "^1.13.3", + "safe-publish-latest": "^2.0.0", + "tape": "^5.9.0", + "typescript": "next" + }, + "testling": { + "files": "test/index.js" + }, + "auto-changelog": { + "output": "CHANGELOG.md", + "template": "keepachangelog", + "unreleased": false, + "commitLimit": false, + "backfillLimit": false, + "hideCredit": true + }, + "publishConfig": { + "ignore": [ + ".github/workflows" + ] + }, + "engines": { + "node": ">= 0.4" + } +} diff --git a/backend/Backend/node_modules/call-bound/test/index.js b/backend/Backend/node_modules/call-bound/test/index.js new file mode 100644 index 000000000..36f5f0b97 --- /dev/null +++ b/backend/Backend/node_modules/call-bound/test/index.js @@ -0,0 +1,54 @@ +'use strict'; + +var test = require('tape'); + +var callBound = require('../'); + +test('callBound', function (t) { + // static primitive + t.equal(callBound('Array.length'), Array.length, 'Array.length yields itself'); + t.equal(callBound('%Array.length%'), Array.length, '%Array.length% yields itself'); + + // static non-function object + t.equal(callBound('Array.prototype'), Array.prototype, 'Array.prototype yields itself'); + t.equal(callBound('%Array.prototype%'), Array.prototype, '%Array.prototype% yields itself'); + t.equal(callBound('Array.constructor'), Array.constructor, 'Array.constructor yields itself'); + t.equal(callBound('%Array.constructor%'), Array.constructor, '%Array.constructor% yields itself'); + + // static function + t.equal(callBound('Date.parse'), Date.parse, 'Date.parse yields itself'); + t.equal(callBound('%Date.parse%'), Date.parse, '%Date.parse% yields itself'); + + // prototype primitive + t.equal(callBound('Error.prototype.message'), Error.prototype.message, 'Error.prototype.message yields itself'); + t.equal(callBound('%Error.prototype.message%'), Error.prototype.message, '%Error.prototype.message% yields itself'); + + // prototype function + t.notEqual(callBound('Object.prototype.toString'), Object.prototype.toString, 'Object.prototype.toString does not 
yield itself'); + t.notEqual(callBound('%Object.prototype.toString%'), Object.prototype.toString, '%Object.prototype.toString% does not yield itself'); + t.equal(callBound('Object.prototype.toString')(true), Object.prototype.toString.call(true), 'call-bound Object.prototype.toString calls into the original'); + t.equal(callBound('%Object.prototype.toString%')(true), Object.prototype.toString.call(true), 'call-bound %Object.prototype.toString% calls into the original'); + + t['throws']( + function () { callBound('does not exist'); }, + SyntaxError, + 'nonexistent intrinsic throws' + ); + t['throws']( + function () { callBound('does not exist', true); }, + SyntaxError, + 'allowMissing arg still throws for unknown intrinsic' + ); + + t.test('real but absent intrinsic', { skip: typeof WeakRef !== 'undefined' }, function (st) { + st['throws']( + function () { callBound('WeakRef'); }, + TypeError, + 'real but absent intrinsic throws' + ); + st.equal(callBound('WeakRef', true), undefined, 'allowMissing arg avoids exception'); + st.end(); + }); + + t.end(); +}); diff --git a/backend/Backend/node_modules/call-bound/tsconfig.json b/backend/Backend/node_modules/call-bound/tsconfig.json new file mode 100644 index 000000000..d9a6668c3 --- /dev/null +++ b/backend/Backend/node_modules/call-bound/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "@ljharb/tsconfig", + "compilerOptions": { + "target": "es2021", + }, + "exclude": [ + "coverage", + ], +} diff --git a/backend/Backend/node_modules/chokidar/LICENSE b/backend/Backend/node_modules/chokidar/LICENSE new file mode 100644 index 000000000..fa9162b51 --- /dev/null +++ b/backend/Backend/node_modules/chokidar/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2012-2019 Paul Miller (https://paulmillr.com), Elan Shanker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the “Software”), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/backend/Backend/node_modules/chokidar/README.md b/backend/Backend/node_modules/chokidar/README.md new file mode 100644 index 000000000..8e25decb4 --- /dev/null +++ b/backend/Backend/node_modules/chokidar/README.md @@ -0,0 +1,308 @@ +# Chokidar [![Weekly downloads](https://img.shields.io/npm/dw/chokidar.svg)](https://github.com/paulmillr/chokidar) [![Yearly downloads](https://img.shields.io/npm/dy/chokidar.svg)](https://github.com/paulmillr/chokidar) + +> Minimal and efficient cross-platform file watching library + +[![NPM](https://nodei.co/npm/chokidar.png)](https://www.npmjs.com/package/chokidar) + +## Why? + +Node.js `fs.watch`: + +* Doesn't report filenames on MacOS. 
+* Doesn't report events at all when using editors like Sublime on MacOS. +* Often reports events twice. +* Emits most changes as `rename`. +* Does not provide an easy way to recursively watch file trees. +* Does not support recursive watching on Linux. + +Node.js `fs.watchFile`: + +* Almost as bad at event handling. +* Also does not provide any recursive watching. +* Results in high CPU utilization. + +Chokidar resolves these problems. + +Initially made for **[Brunch](https://brunch.io/)** (an ultra-swift web app build tool), it is now used in +[Microsoft's Visual Studio Code](https://github.com/microsoft/vscode), +[gulp](https://github.com/gulpjs/gulp/), +[karma](https://karma-runner.github.io/), +[PM2](https://github.com/Unitech/PM2), +[browserify](http://browserify.org/), +[webpack](https://webpack.github.io/), +[BrowserSync](https://www.browsersync.io/), +and [many others](https://www.npmjs.com/browse/depended/chokidar). +It has proven itself in production environments. + +Version 3 is out! Check out our blog post about it: [Chokidar 3: How to save 32TB of traffic every week](https://paulmillr.com/posts/chokidar-3-save-32tb-of-traffic/) + +## How? + +Chokidar does still rely on the Node.js core `fs` module, but when using +`fs.watch` and `fs.watchFile` for watching, it normalizes the events it +receives, often checking for truth by getting file stats and/or dir contents. + +On MacOS, chokidar by default uses a native extension exposing the Darwin +`FSEvents` API. This provides very efficient recursive watching compared with +implementations like `kqueue` available on most \*nix platforms. Chokidar still +does have to do some work to normalize the events received that way as well. + +On most other platforms, the `fs.watch`-based implementation is the default, which +avoids polling and keeps CPU usage down. Be advised that chokidar will initiate +watchers recursively for everything within scope of the paths that have been +specified, so be judicious about not wasting system resources by watching much +more than needed. + +## Getting started + +Install with npm: + +```sh +npm install chokidar +``` + +Then `require` and use it in your code: + +```javascript +const chokidar = require('chokidar'); + +// One-liner for current directory +chokidar.watch('.').on('all', (event, path) => { + console.log(event, path); +}); +``` + +## API + +```javascript +// Example of a more typical implementation structure + +// Initialize watcher. +const watcher = chokidar.watch('file, dir, glob, or array', { + ignored: /(^|[\/\\])\../, // ignore dotfiles + persistent: true +}); + +// Something to use when events are received. +const log = console.log.bind(console); +// Add event listeners. +watcher + .on('add', path => log(`File ${path} has been added`)) + .on('change', path => log(`File ${path} has been changed`)) + .on('unlink', path => log(`File ${path} has been removed`)); + +// More possible events. +watcher + .on('addDir', path => log(`Directory ${path} has been added`)) + .on('unlinkDir', path => log(`Directory ${path} has been removed`)) + .on('error', error => log(`Watcher error: ${error}`)) + .on('ready', () => log('Initial scan complete. 
Ready for changes')) + .on('raw', (event, path, details) => { // internal + log('Raw event info:', event, path, details); + }); + +// 'add', 'addDir' and 'change' events also receive stat() results as second +// argument when available: https://nodejs.org/api/fs.html#fs_class_fs_stats +watcher.on('change', (path, stats) => { + if (stats) console.log(`File ${path} changed size to ${stats.size}`); +}); + +// Watch new files. +watcher.add('new-file'); +watcher.add(['new-file-2', 'new-file-3', '**/other-file*']); + +// Get list of actual paths being watched on the filesystem +var watchedPaths = watcher.getWatched(); + +// Un-watch some files. +await watcher.unwatch('new-file*'); + +// Stop watching. +// The method is async! +watcher.close().then(() => console.log('closed')); + +// Full list of options. See below for descriptions. +// Do not use this example! +chokidar.watch('file', { + persistent: true, + + ignored: '*.txt', + ignoreInitial: false, + followSymlinks: true, + cwd: '.', + disableGlobbing: false, + + usePolling: false, + interval: 100, + binaryInterval: 300, + alwaysStat: false, + depth: 99, + awaitWriteFinish: { + stabilityThreshold: 2000, + pollInterval: 100 + }, + + ignorePermissionErrors: false, + atomic: true // or a custom 'atomicity delay', in milliseconds (default 100) +}); + +``` + +`chokidar.watch(paths, [options])` + +* `paths` (string or array of strings). Paths to files, dirs to be watched +recursively, or glob patterns. + - Note: globs must not contain windows separators (`\`), + because that's how they work by the standard — + you'll need to replace them with forward slashes (`/`). + - Note 2: for additional glob documentation, check out low-level + library: [picomatch](https://github.com/micromatch/picomatch). +* `options` (object) Options object as defined below: + +#### Persistence + +* `persistent` (default: `true`). Indicates whether the process +should continue to run as long as files are being watched. If set to +`false` when using `fsevents` to watch, no more events will be emitted +after `ready`, even if the process continues to run. + +#### Path filtering + +* `ignored` ([anymatch](https://github.com/es128/anymatch)-compatible definition) +Defines files/paths to be ignored. The whole relative or absolute path is +tested, not just filename. If a function with two arguments is provided, it +gets called twice per path - once with a single argument (the path), second +time with two arguments (the path and the +[`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) +object of that path). +* `ignoreInitial` (default: `false`). If set to `false` then `add`/`addDir` events are also emitted for matching paths while +instantiating the watching as chokidar discovers these file paths (before the `ready` event). +* `followSymlinks` (default: `true`). When `false`, only the +symlinks themselves will be watched for changes instead of following +the link references and bubbling events through the link's path. +* `cwd` (no default). The base directory from which watch `paths` are to be +derived. Paths emitted with events will be relative to this. +* `disableGlobbing` (default: `false`). If set to `true` then the strings passed to `.watch()` and `.add()` are treated as +literal path names, even if they look like globs. + +#### Performance + +* `usePolling` (default: `false`). +Whether to use fs.watchFile (backed by polling), or fs.watch. If polling +leads to high CPU utilization, consider setting this to `false`. 
It is +typically necessary to **set this to `true` to successfully watch files over +a network**, and it may be necessary to successfully watch files in other +non-standard situations. Setting to `true` explicitly on MacOS overrides the +`useFsEvents` default. You may also set the CHOKIDAR_USEPOLLING env variable +to true (1) or false (0) in order to override this option. +* _Polling-specific settings_ (effective when `usePolling: true`) + * `interval` (default: `100`). Interval of file system polling, in milliseconds. You may also + set the CHOKIDAR_INTERVAL env variable to override this option. + * `binaryInterval` (default: `300`). Interval of file system + polling for binary files. + ([see list of binary extensions](https://github.com/sindresorhus/binary-extensions/blob/master/binary-extensions.json)) +* `useFsEvents` (default: `true` on MacOS). Whether to use the +`fsevents` watching interface if available. When set to `true` explicitly +and `fsevents` is available this supercedes the `usePolling` setting. When +set to `false` on MacOS, `usePolling: true` becomes the default. +* `alwaysStat` (default: `false`). If relying upon the +[`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) +object that may get passed with `add`, `addDir`, and `change` events, set +this to `true` to ensure it is provided even in cases where it wasn't +already available from the underlying watch events. +* `depth` (default: `undefined`). If set, limits how many levels of +subdirectories will be traversed. +* `awaitWriteFinish` (default: `false`). +By default, the `add` event will fire when a file first appears on disk, before +the entire file has been written. Furthermore, in some cases some `change` +events will be emitted while the file is being written. In some cases, +especially when watching for large files there will be a need to wait for the +write operation to finish before responding to a file creation or modification. +Setting `awaitWriteFinish` to `true` (or a truthy value) will poll file size, +holding its `add` and `change` events until the size does not change for a +configurable amount of time. The appropriate duration setting is heavily +dependent on the OS and hardware. For accurate detection this parameter should +be relatively high, making file watching much less responsive. +Use with caution. + * *`options.awaitWriteFinish` can be set to an object in order to adjust + timing params:* + * `awaitWriteFinish.stabilityThreshold` (default: 2000). Amount of time in + milliseconds for a file size to remain constant before emitting its event. + * `awaitWriteFinish.pollInterval` (default: 100). File size polling interval, in milliseconds. + +#### Errors + +* `ignorePermissionErrors` (default: `false`). Indicates whether to watch files +that don't have read permissions if possible. If watching fails due to `EPERM` +or `EACCES` with this set to `true`, the errors will be suppressed silently. +* `atomic` (default: `true` if `useFsEvents` and `usePolling` are `false`). +Automatically filters out artifacts that occur when using editors that use +"atomic writes" instead of writing directly to the source file. If a file is +re-added within 100 ms of being deleted, Chokidar emits a `change` event +rather than `unlink` then `add`. If the default of 100 ms does not work well +for you, you can override it by setting `atomic` to a custom value, in +milliseconds. + +### Methods & Events + +`chokidar.watch()` produces an instance of `FSWatcher`. 
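+
+As an illustrative sketch tying together the methods listed below (the `src` and `src/vendor` paths are hypothetical; adjust them to your own layout):
+
+```javascript
+const chokidar = require('chokidar');
+
+// Watch a directory without emitting events for the initial scan.
+const watcher = chokidar.watch('src', { ignoreInitial: true });
+
+watcher
+  .on('add', p => console.log(`added: ${p}`))
+  .on('change', p => console.log(`changed: ${p}`))
+  .on('ready', () => {
+    // Inspect which directories and files are currently tracked.
+    console.log(watcher.getWatched());
+    // Stop tracking part of the tree, then shut down; close() is async.
+    watcher.unwatch('src/vendor');
+    watcher.close().then(() => console.log('watcher closed'));
+  });
+```
+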
Methods of `FSWatcher`: + +* `.add(path / paths)`: Add files, directories, or glob patterns for tracking. +Takes an array of strings or just one string. +* `.on(event, callback)`: Listen for an FS event. +Available events: `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `ready`, +`raw`, `error`. +Additionally `all` is available which gets emitted with the underlying event +name and path for every event other than `ready`, `raw`, and `error`. `raw` is internal, use it carefully. +* `.unwatch(path / paths)`: Stop watching files, directories, or glob patterns. +Takes an array of strings or just one string. +* `.close()`: **async** Removes all listeners from watched files. Asynchronous, returns Promise. Use with `await` to ensure bugs don't happen. +* `.getWatched()`: Returns an object representing all the paths on the file +system being watched by this `FSWatcher` instance. The object's keys are all the +directories (using absolute paths unless the `cwd` option was used), and the +values are arrays of the names of the items contained in each directory. + +## CLI + +If you need a CLI interface for your file watching, check out +[chokidar-cli](https://github.com/open-cli-tools/chokidar-cli), allowing you to +execute a command on each change, or get a stdio stream of change events. + +## Install Troubleshooting + +* `npm WARN optional dep failed, continuing fsevents@n.n.n` + * This message is normal part of how `npm` handles optional dependencies and is + not indicative of a problem. Even if accompanied by other related error messages, + Chokidar should function properly. + +* `TypeError: fsevents is not a constructor` + * Update chokidar by doing `rm -rf node_modules package-lock.json yarn.lock && npm install`, or update your dependency that uses chokidar. + +* Chokidar is producing `ENOSP` error on Linux, like this: + * `bash: cannot set terminal process group (-1): Inappropriate ioctl for device bash: no job control in this shell` + `Error: watch /home/ ENOSPC` + * This means Chokidar ran out of file handles and you'll need to increase their count by executing the following command in Terminal: + `echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p` + +## Changelog + +For more detailed changelog, see [`full_changelog.md`](.github/full_changelog.md). +- **v3.5 (Jan 6, 2021):** Support for ARM Macs with Apple Silicon. Fixes for deleted symlinks. +- **v3.4 (Apr 26, 2020):** Support for directory-based symlinks. Fixes for macos file replacement. +- **v3.3 (Nov 2, 2019):** `FSWatcher#close()` method became async. That fixes IO race conditions related to close method. +- **v3.2 (Oct 1, 2019):** Improve Linux RAM usage by 50%. Race condition fixes. Windows glob fixes. Improve stability by using tight range of dependency versions. +- **v3.1 (Sep 16, 2019):** dotfiles are no longer filtered out by default. Use `ignored` option if needed. Improve initial Linux scan time by 50%. +- **v3 (Apr 30, 2019):** massive CPU & RAM consumption improvements; reduces deps / package size by a factor of 17x and bumps Node.js requirement to v8.16 and higher. +- **v2 (Dec 29, 2017):** Globs are now posix-style-only; without windows support. Tons of bugfixes. +- **v1 (Apr 7, 2015):** Glob support, symlink support, tons of bugfixes. Node 0.8+ is supported +- **v0.1 (Apr 20, 2012):** Initial release, extracted from [Brunch](https://github.com/brunch/brunch/blob/9847a065aea300da99bd0753f90354cde9de1261/src/helpers.coffee#L66) + +## Also + +Why was chokidar named this way? 
What's the meaning behind it? + +>Chowkidar is a transliteration of a Hindi word meaning 'watchman, gatekeeper', चौकीदार. This ultimately comes from Sanskrit _ चतुष्क_ (crossway, quadrangle, consisting-of-four). This word is also used in other languages like Urdu as (چوکیدار) which is widely used in Pakistan and India. + +## License + +MIT (c) Paul Miller (), see [LICENSE](LICENSE) file. diff --git a/backend/Backend/node_modules/chokidar/index.js b/backend/Backend/node_modules/chokidar/index.js new file mode 100644 index 000000000..8752893ca --- /dev/null +++ b/backend/Backend/node_modules/chokidar/index.js @@ -0,0 +1,973 @@ +'use strict'; + +const { EventEmitter } = require('events'); +const fs = require('fs'); +const sysPath = require('path'); +const { promisify } = require('util'); +const readdirp = require('readdirp'); +const anymatch = require('anymatch').default; +const globParent = require('glob-parent'); +const isGlob = require('is-glob'); +const braces = require('braces'); +const normalizePath = require('normalize-path'); + +const NodeFsHandler = require('./lib/nodefs-handler'); +const FsEventsHandler = require('./lib/fsevents-handler'); +const { + EV_ALL, + EV_READY, + EV_ADD, + EV_CHANGE, + EV_UNLINK, + EV_ADD_DIR, + EV_UNLINK_DIR, + EV_RAW, + EV_ERROR, + + STR_CLOSE, + STR_END, + + BACK_SLASH_RE, + DOUBLE_SLASH_RE, + SLASH_OR_BACK_SLASH_RE, + DOT_RE, + REPLACER_RE, + + SLASH, + SLASH_SLASH, + BRACE_START, + BANG, + ONE_DOT, + TWO_DOTS, + GLOBSTAR, + SLASH_GLOBSTAR, + ANYMATCH_OPTS, + STRING_TYPE, + FUNCTION_TYPE, + EMPTY_STR, + EMPTY_FN, + + isWindows, + isMacos, + isIBMi +} = require('./lib/constants'); + +const stat = promisify(fs.stat); +const readdir = promisify(fs.readdir); + +/** + * @typedef {String} Path + * @typedef {'all'|'add'|'addDir'|'change'|'unlink'|'unlinkDir'|'raw'|'error'|'ready'} EventName + * @typedef {'readdir'|'watch'|'add'|'remove'|'change'} ThrottleType + */ + +/** + * + * @typedef {Object} WatchHelpers + * @property {Boolean} followSymlinks + * @property {'stat'|'lstat'} statMethod + * @property {Path} path + * @property {Path} watchPath + * @property {Function} entryPath + * @property {Boolean} hasGlob + * @property {Object} globFilter + * @property {Function} filterPath + * @property {Function} filterDir + */ + +const arrify = (value = []) => Array.isArray(value) ? 
value : [value]; +const flatten = (list, result = []) => { + list.forEach(item => { + if (Array.isArray(item)) { + flatten(item, result); + } else { + result.push(item); + } + }); + return result; +}; + +const unifyPaths = (paths_) => { + /** + * @type {Array} + */ + const paths = flatten(arrify(paths_)); + if (!paths.every(p => typeof p === STRING_TYPE)) { + throw new TypeError(`Non-string provided as watch path: ${paths}`); + } + return paths.map(normalizePathToUnix); +}; + +// If SLASH_SLASH occurs at the beginning of path, it is not replaced +// because "//StoragePC/DrivePool/Movies" is a valid network path +const toUnix = (string) => { + let str = string.replace(BACK_SLASH_RE, SLASH); + let prepend = false; + if (str.startsWith(SLASH_SLASH)) { + prepend = true; + } + while (str.match(DOUBLE_SLASH_RE)) { + str = str.replace(DOUBLE_SLASH_RE, SLASH); + } + if (prepend) { + str = SLASH + str; + } + return str; +}; + +// Our version of upath.normalize +// TODO: this is not equal to path-normalize module - investigate why +const normalizePathToUnix = (path) => toUnix(sysPath.normalize(toUnix(path))); + +const normalizeIgnored = (cwd = EMPTY_STR) => (path) => { + if (typeof path !== STRING_TYPE) return path; + return normalizePathToUnix(sysPath.isAbsolute(path) ? path : sysPath.join(cwd, path)); +}; + +const getAbsolutePath = (path, cwd) => { + if (sysPath.isAbsolute(path)) { + return path; + } + if (path.startsWith(BANG)) { + return BANG + sysPath.join(cwd, path.slice(1)); + } + return sysPath.join(cwd, path); +}; + +const undef = (opts, key) => opts[key] === undefined; + +/** + * Directory entry. + * @property {Path} path + * @property {Set} items + */ +class DirEntry { + /** + * @param {Path} dir + * @param {Function} removeWatcher + */ + constructor(dir, removeWatcher) { + this.path = dir; + this._removeWatcher = removeWatcher; + /** @type {Set} */ + this.items = new Set(); + } + + add(item) { + const {items} = this; + if (!items) return; + if (item !== ONE_DOT && item !== TWO_DOTS) items.add(item); + } + + async remove(item) { + const {items} = this; + if (!items) return; + items.delete(item); + if (items.size > 0) return; + + const dir = this.path; + try { + await readdir(dir); + } catch (err) { + if (this._removeWatcher) { + this._removeWatcher(sysPath.dirname(dir), sysPath.basename(dir)); + } + } + } + + has(item) { + const {items} = this; + if (!items) return; + return items.has(item); + } + + /** + * @returns {Array} + */ + getChildren() { + const {items} = this; + if (!items) return; + return [...items.values()]; + } + + dispose() { + this.items.clear(); + delete this.path; + delete this._removeWatcher; + delete this.items; + Object.freeze(this); + } +} + +const STAT_METHOD_F = 'stat'; +const STAT_METHOD_L = 'lstat'; +class WatchHelper { + constructor(path, watchPath, follow, fsw) { + this.fsw = fsw; + this.path = path = path.replace(REPLACER_RE, EMPTY_STR); + this.watchPath = watchPath; + this.fullWatchPath = sysPath.resolve(watchPath); + this.hasGlob = watchPath !== path; + /** @type {object|boolean} */ + if (path === EMPTY_STR) this.hasGlob = false; + this.globSymlink = this.hasGlob && follow ? undefined : false; + this.globFilter = this.hasGlob ? anymatch(path, undefined, ANYMATCH_OPTS) : false; + this.dirParts = this.getDirParts(path); + this.dirParts.forEach((parts) => { + if (parts.length > 1) parts.pop(); + }); + this.followSymlinks = follow; + this.statMethod = follow ? 
STAT_METHOD_F : STAT_METHOD_L; + } + + checkGlobSymlink(entry) { + // only need to resolve once + // first entry should always have entry.parentDir === EMPTY_STR + if (this.globSymlink === undefined) { + this.globSymlink = entry.fullParentDir === this.fullWatchPath ? + false : {realPath: entry.fullParentDir, linkPath: this.fullWatchPath}; + } + + if (this.globSymlink) { + return entry.fullPath.replace(this.globSymlink.realPath, this.globSymlink.linkPath); + } + + return entry.fullPath; + } + + entryPath(entry) { + return sysPath.join(this.watchPath, + sysPath.relative(this.watchPath, this.checkGlobSymlink(entry)) + ); + } + + filterPath(entry) { + const {stats} = entry; + if (stats && stats.isSymbolicLink()) return this.filterDir(entry); + const resolvedPath = this.entryPath(entry); + const matchesGlob = this.hasGlob && typeof this.globFilter === FUNCTION_TYPE ? + this.globFilter(resolvedPath) : true; + return matchesGlob && + this.fsw._isntIgnored(resolvedPath, stats) && + this.fsw._hasReadPermissions(stats); + } + + getDirParts(path) { + if (!this.hasGlob) return []; + const parts = []; + const expandedPath = path.includes(BRACE_START) ? braces.expand(path) : [path]; + expandedPath.forEach((path) => { + parts.push(sysPath.relative(this.watchPath, path).split(SLASH_OR_BACK_SLASH_RE)); + }); + return parts; + } + + filterDir(entry) { + if (this.hasGlob) { + const entryParts = this.getDirParts(this.checkGlobSymlink(entry)); + let globstar = false; + this.unmatchedGlob = !this.dirParts.some((parts) => { + return parts.every((part, i) => { + if (part === GLOBSTAR) globstar = true; + return globstar || !entryParts[0][i] || anymatch(part, entryParts[0][i], ANYMATCH_OPTS); + }); + }); + } + return !this.unmatchedGlob && this.fsw._isntIgnored(this.entryPath(entry), entry.stats); + } +} + +/** + * Watches files & directories for changes. Emitted events: + * `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `all`, `error` + * + * new FSWatcher() + * .add(directories) + * .on('add', path => log('File', path, 'was added')) + */ +class FSWatcher extends EventEmitter { +// Not indenting methods for history sake; for now. +constructor(_opts) { + super(); + + const opts = {}; + if (_opts) Object.assign(opts, _opts); // for frozen objects + + /** @type {Map} */ + this._watched = new Map(); + /** @type {Map} */ + this._closers = new Map(); + /** @type {Set} */ + this._ignoredPaths = new Set(); + + /** @type {Map} */ + this._throttled = new Map(); + + /** @type {Map} */ + this._symlinkPaths = new Map(); + + this._streams = new Set(); + this.closed = false; + + // Set up default options. + if (undef(opts, 'persistent')) opts.persistent = true; + if (undef(opts, 'ignoreInitial')) opts.ignoreInitial = false; + if (undef(opts, 'ignorePermissionErrors')) opts.ignorePermissionErrors = false; + if (undef(opts, 'interval')) opts.interval = 100; + if (undef(opts, 'binaryInterval')) opts.binaryInterval = 300; + if (undef(opts, 'disableGlobbing')) opts.disableGlobbing = false; + opts.enableBinaryInterval = opts.binaryInterval !== opts.interval; + + // Enable fsevents on OS X when polling isn't explicitly enabled. + if (undef(opts, 'useFsEvents')) opts.useFsEvents = !opts.usePolling; + + // If we can't use fsevents, ensure the options reflect it's disabled. + const canUseFsEvents = FsEventsHandler.canUse(); + if (!canUseFsEvents) opts.useFsEvents = false; + + // Use polling on Mac if not using fsevents. + // Other platforms use non-polling fs_watch. 
+ if (undef(opts, 'usePolling') && !opts.useFsEvents) { + opts.usePolling = isMacos; + } + + // Always default to polling on IBM i because fs.watch() is not available on IBM i. + if(isIBMi) { + opts.usePolling = true; + } + + // Global override (useful for end-developers that need to force polling for all + // instances of chokidar, regardless of usage/dependency depth) + const envPoll = process.env.CHOKIDAR_USEPOLLING; + if (envPoll !== undefined) { + const envLower = envPoll.toLowerCase(); + + if (envLower === 'false' || envLower === '0') { + opts.usePolling = false; + } else if (envLower === 'true' || envLower === '1') { + opts.usePolling = true; + } else { + opts.usePolling = !!envLower; + } + } + const envInterval = process.env.CHOKIDAR_INTERVAL; + if (envInterval) { + opts.interval = Number.parseInt(envInterval, 10); + } + + // Editor atomic write normalization enabled by default with fs.watch + if (undef(opts, 'atomic')) opts.atomic = !opts.usePolling && !opts.useFsEvents; + if (opts.atomic) this._pendingUnlinks = new Map(); + + if (undef(opts, 'followSymlinks')) opts.followSymlinks = true; + + if (undef(opts, 'awaitWriteFinish')) opts.awaitWriteFinish = false; + if (opts.awaitWriteFinish === true) opts.awaitWriteFinish = {}; + const awf = opts.awaitWriteFinish; + if (awf) { + if (!awf.stabilityThreshold) awf.stabilityThreshold = 2000; + if (!awf.pollInterval) awf.pollInterval = 100; + this._pendingWrites = new Map(); + } + if (opts.ignored) opts.ignored = arrify(opts.ignored); + + let readyCalls = 0; + this._emitReady = () => { + readyCalls++; + if (readyCalls >= this._readyCount) { + this._emitReady = EMPTY_FN; + this._readyEmitted = true; + // use process.nextTick to allow time for listener to be bound + process.nextTick(() => this.emit(EV_READY)); + } + }; + this._emitRaw = (...args) => this.emit(EV_RAW, ...args); + this._readyEmitted = false; + this.options = opts; + + // Initialize with proper watcher. + if (opts.useFsEvents) { + this._fsEventsHandler = new FsEventsHandler(this); + } else { + this._nodeFsHandler = new NodeFsHandler(this); + } + + // You’re frozen when your heart’s not open. 
+ Object.freeze(opts); +} + +// Public methods + +/** + * Adds paths to be watched on an existing FSWatcher instance + * @param {Path|Array} paths_ + * @param {String=} _origAdd private; for handling non-existent paths to be watched + * @param {Boolean=} _internal private; indicates a non-user add + * @returns {FSWatcher} for chaining + */ +add(paths_, _origAdd, _internal) { + const {cwd, disableGlobbing} = this.options; + this.closed = false; + let paths = unifyPaths(paths_); + if (cwd) { + paths = paths.map((path) => { + const absPath = getAbsolutePath(path, cwd); + + // Check `path` instead of `absPath` because the cwd portion can't be a glob + if (disableGlobbing || !isGlob(path)) { + return absPath; + } + return normalizePath(absPath); + }); + } + + // set aside negated glob strings + paths = paths.filter((path) => { + if (path.startsWith(BANG)) { + this._ignoredPaths.add(path.slice(1)); + return false; + } + + // if a path is being added that was previously ignored, stop ignoring it + this._ignoredPaths.delete(path); + this._ignoredPaths.delete(path + SLASH_GLOBSTAR); + + // reset the cached userIgnored anymatch fn + // to make ignoredPaths changes effective + this._userIgnored = undefined; + + return true; + }); + + if (this.options.useFsEvents && this._fsEventsHandler) { + if (!this._readyCount) this._readyCount = paths.length; + if (this.options.persistent) this._readyCount += paths.length; + paths.forEach((path) => this._fsEventsHandler._addToFsEvents(path)); + } else { + if (!this._readyCount) this._readyCount = 0; + this._readyCount += paths.length; + Promise.all( + paths.map(async path => { + const res = await this._nodeFsHandler._addToNodeFs(path, !_internal, 0, 0, _origAdd); + if (res) this._emitReady(); + return res; + }) + ).then(results => { + if (this.closed) return; + results.filter(item => item).forEach(item => { + this.add(sysPath.dirname(item), sysPath.basename(_origAdd || item)); + }); + }); + } + + return this; +} + +/** + * Close watchers or start ignoring events from specified paths. + * @param {Path|Array} paths_ - string or array of strings, file/directory paths and/or globs + * @returns {FSWatcher} for chaining +*/ +unwatch(paths_) { + if (this.closed) return this; + const paths = unifyPaths(paths_); + const {cwd} = this.options; + + paths.forEach((path) => { + // convert to absolute path unless relative path already matches + if (!sysPath.isAbsolute(path) && !this._closers.has(path)) { + if (cwd) path = sysPath.join(cwd, path); + path = sysPath.resolve(path); + } + + this._closePath(path); + + this._ignoredPaths.add(path); + if (this._watched.has(path)) { + this._ignoredPaths.add(path + SLASH_GLOBSTAR); + } + + // reset the cached userIgnored anymatch fn + // to make ignoredPaths changes effective + this._userIgnored = undefined; + }); + + return this; +} + +/** + * Close watchers and remove all listeners from watched paths. + * @returns {Promise}. +*/ +close() { + if (this.closed) return this._closePromise; + this.closed = true; + + // Memory management. 
+ this.removeAllListeners(); + const closers = []; + this._closers.forEach(closerList => closerList.forEach(closer => { + const promise = closer(); + if (promise instanceof Promise) closers.push(promise); + })); + this._streams.forEach(stream => stream.destroy()); + this._userIgnored = undefined; + this._readyCount = 0; + this._readyEmitted = false; + this._watched.forEach(dirent => dirent.dispose()); + ['closers', 'watched', 'streams', 'symlinkPaths', 'throttled'].forEach(key => { + this[`_${key}`].clear(); + }); + + this._closePromise = closers.length ? Promise.all(closers).then(() => undefined) : Promise.resolve(); + return this._closePromise; +} + +/** + * Expose list of watched paths + * @returns {Object} for chaining +*/ +getWatched() { + const watchList = {}; + this._watched.forEach((entry, dir) => { + const key = this.options.cwd ? sysPath.relative(this.options.cwd, dir) : dir; + watchList[key || ONE_DOT] = entry.getChildren().sort(); + }); + return watchList; +} + +emitWithAll(event, args) { + this.emit(...args); + if (event !== EV_ERROR) this.emit(EV_ALL, ...args); +} + +// Common helpers +// -------------- + +/** + * Normalize and emit events. + * Calling _emit DOES NOT MEAN emit() would be called! + * @param {EventName} event Type of event + * @param {Path} path File or directory path + * @param {*=} val1 arguments to be passed with event + * @param {*=} val2 + * @param {*=} val3 + * @returns the error if defined, otherwise the value of the FSWatcher instance's `closed` flag + */ +async _emit(event, path, val1, val2, val3) { + if (this.closed) return; + + const opts = this.options; + if (isWindows) path = sysPath.normalize(path); + if (opts.cwd) path = sysPath.relative(opts.cwd, path); + /** @type Array */ + const args = [event, path]; + if (val3 !== undefined) args.push(val1, val2, val3); + else if (val2 !== undefined) args.push(val1, val2); + else if (val1 !== undefined) args.push(val1); + + const awf = opts.awaitWriteFinish; + let pw; + if (awf && (pw = this._pendingWrites.get(path))) { + pw.lastChange = new Date(); + return this; + } + + if (opts.atomic) { + if (event === EV_UNLINK) { + this._pendingUnlinks.set(path, args); + setTimeout(() => { + this._pendingUnlinks.forEach((entry, path) => { + this.emit(...entry); + this.emit(EV_ALL, ...entry); + this._pendingUnlinks.delete(path); + }); + }, typeof opts.atomic === 'number' ? opts.atomic : 100); + return this; + } + if (event === EV_ADD && this._pendingUnlinks.has(path)) { + event = args[0] = EV_CHANGE; + this._pendingUnlinks.delete(path); + } + } + + if (awf && (event === EV_ADD || event === EV_CHANGE) && this._readyEmitted) { + const awfEmit = (err, stats) => { + if (err) { + event = args[0] = EV_ERROR; + args[1] = err; + this.emitWithAll(event, args); + } else if (stats) { + // if stats doesn't exist the file must have been deleted + if (args.length > 2) { + args[2] = stats; + } else { + args.push(stats); + } + this.emitWithAll(event, args); + } + }; + + this._awaitWriteFinish(path, awf.stabilityThreshold, event, awfEmit); + return this; + } + + if (event === EV_CHANGE) { + const isThrottled = !this._throttle(EV_CHANGE, path, 50); + if (isThrottled) return this; + } + + if (opts.alwaysStat && val1 === undefined && + (event === EV_ADD || event === EV_ADD_DIR || event === EV_CHANGE) + ) { + const fullPath = opts.cwd ? 
sysPath.join(opts.cwd, path) : path; + let stats; + try { + stats = await stat(fullPath); + } catch (err) {} + // Suppress event when fs_stat fails, to avoid sending undefined 'stat' + if (!stats || this.closed) return; + args.push(stats); + } + this.emitWithAll(event, args); + + return this; +} + +/** + * Common handler for errors + * @param {Error} error + * @returns {Error|Boolean} The error if defined, otherwise the value of the FSWatcher instance's `closed` flag + */ +_handleError(error) { + const code = error && error.code; + if (error && code !== 'ENOENT' && code !== 'ENOTDIR' && + (!this.options.ignorePermissionErrors || (code !== 'EPERM' && code !== 'EACCES')) + ) { + this.emit(EV_ERROR, error); + } + return error || this.closed; +} + +/** + * Helper utility for throttling + * @param {ThrottleType} actionType type being throttled + * @param {Path} path being acted upon + * @param {Number} timeout duration of time to suppress duplicate actions + * @returns {Object|false} tracking object or false if action should be suppressed + */ +_throttle(actionType, path, timeout) { + if (!this._throttled.has(actionType)) { + this._throttled.set(actionType, new Map()); + } + + /** @type {Map} */ + const action = this._throttled.get(actionType); + /** @type {Object} */ + const actionPath = action.get(path); + + if (actionPath) { + actionPath.count++; + return false; + } + + let timeoutObject; + const clear = () => { + const item = action.get(path); + const count = item ? item.count : 0; + action.delete(path); + clearTimeout(timeoutObject); + if (item) clearTimeout(item.timeoutObject); + return count; + }; + timeoutObject = setTimeout(clear, timeout); + const thr = {timeoutObject, clear, count: 0}; + action.set(path, thr); + return thr; +} + +_incrReadyCount() { + return this._readyCount++; +} + +/** + * Awaits write operation to finish. + * Polls a newly created file for size variations. When files size does not change for 'threshold' milliseconds calls callback. + * @param {Path} path being acted upon + * @param {Number} threshold Time in milliseconds a file size must be fixed before acknowledging write OP is finished + * @param {EventName} event + * @param {Function} awfEmit Callback to be called when ready for event to be emitted. 
+ */ +_awaitWriteFinish(path, threshold, event, awfEmit) { + let timeoutHandler; + + let fullPath = path; + if (this.options.cwd && !sysPath.isAbsolute(path)) { + fullPath = sysPath.join(this.options.cwd, path); + } + + const now = new Date(); + + const awaitWriteFinish = (prevStat) => { + fs.stat(fullPath, (err, curStat) => { + if (err || !this._pendingWrites.has(path)) { + if (err && err.code !== 'ENOENT') awfEmit(err); + return; + } + + const now = Number(new Date()); + + if (prevStat && curStat.size !== prevStat.size) { + this._pendingWrites.get(path).lastChange = now; + } + const pw = this._pendingWrites.get(path); + const df = now - pw.lastChange; + + if (df >= threshold) { + this._pendingWrites.delete(path); + awfEmit(undefined, curStat); + } else { + timeoutHandler = setTimeout( + awaitWriteFinish, + this.options.awaitWriteFinish.pollInterval, + curStat + ); + } + }); + }; + + if (!this._pendingWrites.has(path)) { + this._pendingWrites.set(path, { + lastChange: now, + cancelWait: () => { + this._pendingWrites.delete(path); + clearTimeout(timeoutHandler); + return event; + } + }); + timeoutHandler = setTimeout( + awaitWriteFinish, + this.options.awaitWriteFinish.pollInterval + ); + } +} + +_getGlobIgnored() { + return [...this._ignoredPaths.values()]; +} + +/** + * Determines whether user has asked to ignore this path. + * @param {Path} path filepath or dir + * @param {fs.Stats=} stats result of fs.stat + * @returns {Boolean} + */ +_isIgnored(path, stats) { + if (this.options.atomic && DOT_RE.test(path)) return true; + if (!this._userIgnored) { + const {cwd} = this.options; + const ign = this.options.ignored; + + const ignored = ign && ign.map(normalizeIgnored(cwd)); + const paths = arrify(ignored) + .filter((path) => typeof path === STRING_TYPE && !isGlob(path)) + .map((path) => path + SLASH_GLOBSTAR); + const list = this._getGlobIgnored().map(normalizeIgnored(cwd)).concat(ignored, paths); + this._userIgnored = anymatch(list, undefined, ANYMATCH_OPTS); + } + + return this._userIgnored([path, stats]); +} + +_isntIgnored(path, stat) { + return !this._isIgnored(path, stat); +} + +/** + * Provides a set of common helpers and properties relating to symlink and glob handling. + * @param {Path} path file, directory, or glob pattern being watched + * @param {Number=} depth at any depth > 0, this isn't a glob + * @returns {WatchHelper} object containing helpers for this path + */ +_getWatchHelpers(path, depth) { + const watchPath = depth || this.options.disableGlobbing || !isGlob(path) ? path : globParent(path); + const follow = this.options.followSymlinks; + + return new WatchHelper(path, watchPath, follow, this); +} + +// Directory helpers +// ----------------- + +/** + * Provides directory tracking objects + * @param {String} directory path of the directory + * @returns {DirEntry} the directory's tracking object + */ +_getWatchedDir(directory) { + if (!this._boundRemove) this._boundRemove = this._remove.bind(this); + const dir = sysPath.resolve(directory); + if (!this._watched.has(dir)) this._watched.set(dir, new DirEntry(dir, this._boundRemove)); + return this._watched.get(dir); +} + +// File helpers +// ------------ + +/** + * Check for read permissions. 
+ * Based on this answer on SO: https://stackoverflow.com/a/11781404/1358405 + * @param {fs.Stats} stats - object, result of fs_stat + * @returns {Boolean} indicates whether the file can be read +*/ +_hasReadPermissions(stats) { + if (this.options.ignorePermissionErrors) return true; + + // stats.mode may be bigint + const md = stats && Number.parseInt(stats.mode, 10); + const st = md & 0o777; + const it = Number.parseInt(st.toString(8)[0], 10); + return Boolean(4 & it); +} + +/** + * Handles emitting unlink events for + * files and directories, and via recursion, for + * files and directories within directories that are unlinked + * @param {String} directory within which the following item is located + * @param {String} item base path of item/directory + * @returns {void} +*/ +_remove(directory, item, isDirectory) { + // if what is being deleted is a directory, get that directory's paths + // for recursive deleting and cleaning of watched object + // if it is not a directory, nestedDirectoryChildren will be empty array + const path = sysPath.join(directory, item); + const fullPath = sysPath.resolve(path); + isDirectory = isDirectory != null + ? isDirectory + : this._watched.has(path) || this._watched.has(fullPath); + + // prevent duplicate handling in case of arriving here nearly simultaneously + // via multiple paths (such as _handleFile and _handleDir) + if (!this._throttle('remove', path, 100)) return; + + // if the only watched file is removed, watch for its return + if (!isDirectory && !this.options.useFsEvents && this._watched.size === 1) { + this.add(directory, item, true); + } + + // This will create a new entry in the watched object in either case + // so we got to do the directory check beforehand + const wp = this._getWatchedDir(path); + const nestedDirectoryChildren = wp.getChildren(); + + // Recursively remove children directories / files. + nestedDirectoryChildren.forEach(nested => this._remove(path, nested)); + + // Check if item was on the watched list and remove it + const parent = this._getWatchedDir(directory); + const wasTracked = parent.has(item); + parent.remove(item); + + // Fixes issue #1042 -> Relative paths were detected and added as symlinks + // (https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L612), + // but never removed from the map in case the path was deleted. + // This leads to an incorrect state if the path was recreated: + // https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L553 + if (this._symlinkPaths.has(fullPath)) { + this._symlinkPaths.delete(fullPath); + } + + // If we wait for this file to be fully written, cancel the wait. + let relPath = path; + if (this.options.cwd) relPath = sysPath.relative(this.options.cwd, path); + if (this.options.awaitWriteFinish && this._pendingWrites.has(relPath)) { + const event = this._pendingWrites.get(relPath).cancelWait(); + if (event === EV_ADD) return; + } + + // The Entry will either be a directory that just got removed + // or a bogus entry to a file, in either case we have to remove it + this._watched.delete(path); + this._watched.delete(fullPath); + const eventName = isDirectory ? 
EV_UNLINK_DIR : EV_UNLINK; + if (wasTracked && !this._isIgnored(path)) this._emit(eventName, path); + + // Avoid conflicts if we later create another file with the same name + if (!this.options.useFsEvents) { + this._closePath(path); + } +} + +/** + * Closes all watchers for a path + * @param {Path} path + */ +_closePath(path) { + this._closeFile(path) + const dir = sysPath.dirname(path); + this._getWatchedDir(dir).remove(sysPath.basename(path)); +} + +/** + * Closes only file-specific watchers + * @param {Path} path + */ +_closeFile(path) { + const closers = this._closers.get(path); + if (!closers) return; + closers.forEach(closer => closer()); + this._closers.delete(path); +} + +/** + * + * @param {Path} path + * @param {Function} closer + */ +_addPathCloser(path, closer) { + if (!closer) return; + let list = this._closers.get(path); + if (!list) { + list = []; + this._closers.set(path, list); + } + list.push(closer); +} + +_readdirp(root, opts) { + if (this.closed) return; + const options = {type: EV_ALL, alwaysStat: true, lstat: true, ...opts}; + let stream = readdirp(root, options); + this._streams.add(stream); + stream.once(STR_CLOSE, () => { + stream = undefined; + }); + stream.once(STR_END, () => { + if (stream) { + this._streams.delete(stream); + stream = undefined; + } + }); + return stream; +} + +} + +// Export FSWatcher class +exports.FSWatcher = FSWatcher; + +/** + * Instantiates watcher with paths to be tracked. + * @param {String|Array} paths file/directory paths and/or globs + * @param {Object=} options chokidar opts + * @returns an instance of FSWatcher for chaining. + */ +const watch = (paths, options) => { + const watcher = new FSWatcher(options); + watcher.add(paths); + return watcher; +}; + +exports.watch = watch; diff --git a/backend/Backend/node_modules/chokidar/lib/constants.js b/backend/Backend/node_modules/chokidar/lib/constants.js new file mode 100644 index 000000000..4743865d6 --- /dev/null +++ b/backend/Backend/node_modules/chokidar/lib/constants.js @@ -0,0 +1,66 @@ +'use strict'; + +const {sep} = require('path'); +const {platform} = process; +const os = require('os'); + +exports.EV_ALL = 'all'; +exports.EV_READY = 'ready'; +exports.EV_ADD = 'add'; +exports.EV_CHANGE = 'change'; +exports.EV_ADD_DIR = 'addDir'; +exports.EV_UNLINK = 'unlink'; +exports.EV_UNLINK_DIR = 'unlinkDir'; +exports.EV_RAW = 'raw'; +exports.EV_ERROR = 'error'; + +exports.STR_DATA = 'data'; +exports.STR_END = 'end'; +exports.STR_CLOSE = 'close'; + +exports.FSEVENT_CREATED = 'created'; +exports.FSEVENT_MODIFIED = 'modified'; +exports.FSEVENT_DELETED = 'deleted'; +exports.FSEVENT_MOVED = 'moved'; +exports.FSEVENT_CLONED = 'cloned'; +exports.FSEVENT_UNKNOWN = 'unknown'; +exports.FSEVENT_FLAG_MUST_SCAN_SUBDIRS = 1; +exports.FSEVENT_TYPE_FILE = 'file'; +exports.FSEVENT_TYPE_DIRECTORY = 'directory'; +exports.FSEVENT_TYPE_SYMLINK = 'symlink'; + +exports.KEY_LISTENERS = 'listeners'; +exports.KEY_ERR = 'errHandlers'; +exports.KEY_RAW = 'rawEmitters'; +exports.HANDLER_KEYS = [exports.KEY_LISTENERS, exports.KEY_ERR, exports.KEY_RAW]; + +exports.DOT_SLASH = `.${sep}`; + +exports.BACK_SLASH_RE = /\\/g; +exports.DOUBLE_SLASH_RE = /\/\//; +exports.SLASH_OR_BACK_SLASH_RE = /[/\\]/; +exports.DOT_RE = /\..*\.(sw[px])$|~$|\.subl.*\.tmp/; +exports.REPLACER_RE = /^\.[/\\]/; + +exports.SLASH = '/'; +exports.SLASH_SLASH = '//'; +exports.BRACE_START = '{'; +exports.BANG = '!'; +exports.ONE_DOT = '.'; +exports.TWO_DOTS = '..'; +exports.STAR = '*'; +exports.GLOBSTAR = '**'; +exports.ROOT_GLOBSTAR = '/**/*'; 
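+// Note: ROOT_GLOBSTAR ('/**/*') and SLASH_GLOBSTAR ('/**') are appended to
+// ignored directory paths elsewhere in the codebase (see FSWatcher#unwatch,
+// _isIgnored and the fsevents handler) so that their contents are ignored too.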
+exports.SLASH_GLOBSTAR = '/**'; +exports.DIR_SUFFIX = 'Dir'; +exports.ANYMATCH_OPTS = {dot: true}; +exports.STRING_TYPE = 'string'; +exports.FUNCTION_TYPE = 'function'; +exports.EMPTY_STR = ''; +exports.EMPTY_FN = () => {}; +exports.IDENTITY_FN = val => val; + +exports.isWindows = platform === 'win32'; +exports.isMacos = platform === 'darwin'; +exports.isLinux = platform === 'linux'; +exports.isIBMi = os.type() === 'OS400'; diff --git a/backend/Backend/node_modules/chokidar/lib/fsevents-handler.js b/backend/Backend/node_modules/chokidar/lib/fsevents-handler.js new file mode 100644 index 000000000..fe29393c1 --- /dev/null +++ b/backend/Backend/node_modules/chokidar/lib/fsevents-handler.js @@ -0,0 +1,526 @@ +'use strict'; + +const fs = require('fs'); +const sysPath = require('path'); +const { promisify } = require('util'); + +let fsevents; +try { + fsevents = require('fsevents'); +} catch (error) { + if (process.env.CHOKIDAR_PRINT_FSEVENTS_REQUIRE_ERROR) console.error(error); +} + +if (fsevents) { + // TODO: real check + const mtch = process.version.match(/v(\d+)\.(\d+)/); + if (mtch && mtch[1] && mtch[2]) { + const maj = Number.parseInt(mtch[1], 10); + const min = Number.parseInt(mtch[2], 10); + if (maj === 8 && min < 16) { + fsevents = undefined; + } + } +} + +const { + EV_ADD, + EV_CHANGE, + EV_ADD_DIR, + EV_UNLINK, + EV_ERROR, + STR_DATA, + STR_END, + FSEVENT_CREATED, + FSEVENT_MODIFIED, + FSEVENT_DELETED, + FSEVENT_MOVED, + // FSEVENT_CLONED, + FSEVENT_UNKNOWN, + FSEVENT_FLAG_MUST_SCAN_SUBDIRS, + FSEVENT_TYPE_FILE, + FSEVENT_TYPE_DIRECTORY, + FSEVENT_TYPE_SYMLINK, + + ROOT_GLOBSTAR, + DIR_SUFFIX, + DOT_SLASH, + FUNCTION_TYPE, + EMPTY_FN, + IDENTITY_FN +} = require('./constants'); + +const Depth = (value) => isNaN(value) ? {} : {depth: value}; + +const stat = promisify(fs.stat); +const lstat = promisify(fs.lstat); +const realpath = promisify(fs.realpath); + +const statMethods = { stat, lstat }; + +/** + * @typedef {String} Path + */ + +/** + * @typedef {Object} FsEventsWatchContainer + * @property {Set} listeners + * @property {Function} rawEmitter + * @property {{stop: Function}} watcher + */ + +// fsevents instance helper functions +/** + * Object to hold per-process fsevents instances (may be shared across chokidar FSWatcher instances) + * @type {Map} + */ +const FSEventsWatchers = new Map(); + +// Threshold of duplicate path prefixes at which to start +// consolidating going forward +const consolidateThreshhold = 10; + +const wrongEventFlags = new Set([ + 69888, 70400, 71424, 72704, 73472, 131328, 131840, 262912 +]); + +/** + * Instantiates the fsevents interface + * @param {Path} path path to be watched + * @param {Function} callback called when fsevents is bound and ready + * @returns {{stop: Function}} new fsevents instance + */ +const createFSEventsInstance = (path, callback) => { + const stop = fsevents.watch(path, callback); + return {stop}; +}; + +/** + * Instantiates the fsevents interface or binds listeners to an existing one covering + * the same file tree. + * @param {Path} path - to be watched + * @param {Path} realPath - real path for symlinks + * @param {Function} listener - called when fsevents emits events + * @param {Function} rawEmitter - passes data to listeners of the 'raw' event + * @returns {Function} closer + */ +function setFSEventsListener(path, realPath, listener, rawEmitter) { + let watchPath = sysPath.extname(realPath) ? 
sysPath.dirname(realPath) : realPath; + + const parentPath = sysPath.dirname(watchPath); + let cont = FSEventsWatchers.get(watchPath); + + // If we've accumulated a substantial number of paths that + // could have been consolidated by watching one directory + // above the current one, create a watcher on the parent + // path instead, so that we do consolidate going forward. + if (couldConsolidate(parentPath)) { + watchPath = parentPath; + } + + const resolvedPath = sysPath.resolve(path); + const hasSymlink = resolvedPath !== realPath; + + const filteredListener = (fullPath, flags, info) => { + if (hasSymlink) fullPath = fullPath.replace(realPath, resolvedPath); + if ( + fullPath === resolvedPath || + !fullPath.indexOf(resolvedPath + sysPath.sep) + ) listener(fullPath, flags, info); + }; + + // check if there is already a watcher on a parent path + // modifies `watchPath` to the parent path when it finds a match + let watchedParent = false; + for (const watchedPath of FSEventsWatchers.keys()) { + if (realPath.indexOf(sysPath.resolve(watchedPath) + sysPath.sep) === 0) { + watchPath = watchedPath; + cont = FSEventsWatchers.get(watchPath); + watchedParent = true; + break; + } + } + + if (cont || watchedParent) { + cont.listeners.add(filteredListener); + } else { + cont = { + listeners: new Set([filteredListener]), + rawEmitter, + watcher: createFSEventsInstance(watchPath, (fullPath, flags) => { + if (!cont.listeners.size) return; + if (flags & FSEVENT_FLAG_MUST_SCAN_SUBDIRS) return; + const info = fsevents.getInfo(fullPath, flags); + cont.listeners.forEach(list => { + list(fullPath, flags, info); + }); + + cont.rawEmitter(info.event, fullPath, info); + }) + }; + FSEventsWatchers.set(watchPath, cont); + } + + // removes this instance's listeners and closes the underlying fsevents + // instance if there are no more listeners left + return () => { + const lst = cont.listeners; + + lst.delete(filteredListener); + if (!lst.size) { + FSEventsWatchers.delete(watchPath); + if (cont.watcher) return cont.watcher.stop().then(() => { + cont.rawEmitter = cont.watcher = undefined; + Object.freeze(cont); + }); + } + }; +} + +// Decide whether or not we should start a new higher-level +// parent watcher +const couldConsolidate = (path) => { + let count = 0; + for (const watchPath of FSEventsWatchers.keys()) { + if (watchPath.indexOf(path) === 0) { + count++; + if (count >= consolidateThreshhold) { + return true; + } + } + } + + return false; +}; + +// returns boolean indicating whether fsevents can be used +const canUse = () => fsevents && FSEventsWatchers.size < 128; + +// determines subdirectory traversal levels from root to path +const calcDepth = (path, root) => { + let i = 0; + while (!path.indexOf(root) && (path = sysPath.dirname(path)) !== root) i++; + return i; +}; + +// returns boolean indicating whether the fsevents' event info has the same type +// as the one returned by fs.stat +const sameTypes = (info, stats) => ( + info.type === FSEVENT_TYPE_DIRECTORY && stats.isDirectory() || + info.type === FSEVENT_TYPE_SYMLINK && stats.isSymbolicLink() || + info.type === FSEVENT_TYPE_FILE && stats.isFile() +) + +/** + * @mixin + */ +class FsEventsHandler { + +/** + * @param {import('../index').FSWatcher} fsw + */ +constructor(fsw) { + this.fsw = fsw; +} +checkIgnored(path, stats) { + const ipaths = this.fsw._ignoredPaths; + if (this.fsw._isIgnored(path, stats)) { + ipaths.add(path); + if (stats && stats.isDirectory()) { + ipaths.add(path + ROOT_GLOBSTAR); + } + return true; + } + + ipaths.delete(path); + 
ipaths.delete(path + ROOT_GLOBSTAR); +} + +addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts) { + const event = watchedDir.has(item) ? EV_CHANGE : EV_ADD; + this.handleEvent(event, path, fullPath, realPath, parent, watchedDir, item, info, opts); +} + +async checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts) { + try { + const stats = await stat(path) + if (this.fsw.closed) return; + if (sameTypes(info, stats)) { + this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts); + } else { + this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts); + } + } catch (error) { + if (error.code === 'EACCES') { + this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts); + } else { + this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts); + } + } +} + +handleEvent(event, path, fullPath, realPath, parent, watchedDir, item, info, opts) { + if (this.fsw.closed || this.checkIgnored(path)) return; + + if (event === EV_UNLINK) { + const isDirectory = info.type === FSEVENT_TYPE_DIRECTORY + // suppress unlink events on never before seen files + if (isDirectory || watchedDir.has(item)) { + this.fsw._remove(parent, item, isDirectory); + } + } else { + if (event === EV_ADD) { + // track new directories + if (info.type === FSEVENT_TYPE_DIRECTORY) this.fsw._getWatchedDir(path); + + if (info.type === FSEVENT_TYPE_SYMLINK && opts.followSymlinks) { + // push symlinks back to the top of the stack to get handled + const curDepth = opts.depth === undefined ? + undefined : calcDepth(fullPath, realPath) + 1; + return this._addToFsEvents(path, false, true, curDepth); + } + + // track new paths + // (other than symlinks being followed, which will be tracked soon) + this.fsw._getWatchedDir(parent).add(item); + } + /** + * @type {'add'|'addDir'|'unlink'|'unlinkDir'} + */ + const eventName = info.type === FSEVENT_TYPE_DIRECTORY ? event + DIR_SUFFIX : event; + this.fsw._emit(eventName, path); + if (eventName === EV_ADD_DIR) this._addToFsEvents(path, false, true); + } +} + +/** + * Handle symlinks encountered during directory scan + * @param {String} watchPath - file/dir path to be watched with fsevents + * @param {String} realPath - real path (in case of symlinks) + * @param {Function} transform - path transformer + * @param {Function} globFilter - path filter in case a glob pattern was provided + * @returns {Function} closer for the watcher instance +*/ +_watchWithFsEvents(watchPath, realPath, transform, globFilter) { + if (this.fsw.closed || this.fsw._isIgnored(watchPath)) return; + const opts = this.fsw.options; + const watchCallback = async (fullPath, flags, info) => { + if (this.fsw.closed) return; + if ( + opts.depth !== undefined && + calcDepth(fullPath, realPath) > opts.depth + ) return; + const path = transform(sysPath.join( + watchPath, sysPath.relative(watchPath, fullPath) + )); + if (globFilter && !globFilter(path)) return; + // ensure directories are tracked + const parent = sysPath.dirname(path); + const item = sysPath.basename(path); + const watchedDir = this.fsw._getWatchedDir( + info.type === FSEVENT_TYPE_DIRECTORY ? 
path : parent + ); + + // correct for wrong events emitted + if (wrongEventFlags.has(flags) || info.event === FSEVENT_UNKNOWN) { + if (typeof opts.ignored === FUNCTION_TYPE) { + let stats; + try { + stats = await stat(path); + } catch (error) {} + if (this.fsw.closed) return; + if (this.checkIgnored(path, stats)) return; + if (sameTypes(info, stats)) { + this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts); + } else { + this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts); + } + } else { + this.checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts); + } + } else { + switch (info.event) { + case FSEVENT_CREATED: + case FSEVENT_MODIFIED: + return this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts); + case FSEVENT_DELETED: + case FSEVENT_MOVED: + return this.checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts); + } + } + }; + + const closer = setFSEventsListener( + watchPath, + realPath, + watchCallback, + this.fsw._emitRaw + ); + + this.fsw._emitReady(); + return closer; +} + +/** + * Handle symlinks encountered during directory scan + * @param {String} linkPath path to symlink + * @param {String} fullPath absolute path to the symlink + * @param {Function} transform pre-existing path transformer + * @param {Number} curDepth level of subdirectories traversed to where symlink is + * @returns {Promise} + */ +async _handleFsEventsSymlink(linkPath, fullPath, transform, curDepth) { + // don't follow the same symlink more than once + if (this.fsw.closed || this.fsw._symlinkPaths.has(fullPath)) return; + + this.fsw._symlinkPaths.set(fullPath, true); + this.fsw._incrReadyCount(); + + try { + const linkTarget = await realpath(linkPath); + if (this.fsw.closed) return; + if (this.fsw._isIgnored(linkTarget)) { + return this.fsw._emitReady(); + } + + this.fsw._incrReadyCount(); + + // add the linkTarget for watching with a wrapper for transform + // that causes emitted paths to incorporate the link's path + this._addToFsEvents(linkTarget || linkPath, (path) => { + let aliasedPath = linkPath; + if (linkTarget && linkTarget !== DOT_SLASH) { + aliasedPath = path.replace(linkTarget, linkPath); + } else if (path !== DOT_SLASH) { + aliasedPath = sysPath.join(linkPath, path); + } + return transform(aliasedPath); + }, false, curDepth); + } catch(error) { + if (this.fsw._handleError(error)) { + return this.fsw._emitReady(); + } + } +} + +/** + * + * @param {Path} newPath + * @param {fs.Stats} stats + */ +emitAdd(newPath, stats, processPath, opts, forceAdd) { + const pp = processPath(newPath); + const isDir = stats.isDirectory(); + const dirObj = this.fsw._getWatchedDir(sysPath.dirname(pp)); + const base = sysPath.basename(pp); + + // ensure empty dirs get tracked + if (isDir) this.fsw._getWatchedDir(pp); + if (dirObj.has(base)) return; + dirObj.add(base); + + if (!opts.ignoreInitial || forceAdd === true) { + this.fsw._emit(isDir ? 
EV_ADD_DIR : EV_ADD, pp, stats); + } +} + +initWatch(realPath, path, wh, processPath) { + if (this.fsw.closed) return; + const closer = this._watchWithFsEvents( + wh.watchPath, + sysPath.resolve(realPath || wh.watchPath), + processPath, + wh.globFilter + ); + this.fsw._addPathCloser(path, closer); +} + +/** + * Handle added path with fsevents + * @param {String} path file/dir path or glob pattern + * @param {Function|Boolean=} transform converts working path to what the user expects + * @param {Boolean=} forceAdd ensure add is emitted + * @param {Number=} priorDepth Level of subdirectories already traversed. + * @returns {Promise} + */ +async _addToFsEvents(path, transform, forceAdd, priorDepth) { + if (this.fsw.closed) { + return; + } + const opts = this.fsw.options; + const processPath = typeof transform === FUNCTION_TYPE ? transform : IDENTITY_FN; + + const wh = this.fsw._getWatchHelpers(path); + + // evaluate what is at the path we're being asked to watch + try { + const stats = await statMethods[wh.statMethod](wh.watchPath); + if (this.fsw.closed) return; + if (this.fsw._isIgnored(wh.watchPath, stats)) { + throw null; + } + if (stats.isDirectory()) { + // emit addDir unless this is a glob parent + if (!wh.globFilter) this.emitAdd(processPath(path), stats, processPath, opts, forceAdd); + + // don't recurse further if it would exceed depth setting + if (priorDepth && priorDepth > opts.depth) return; + + // scan the contents of the dir + this.fsw._readdirp(wh.watchPath, { + fileFilter: entry => wh.filterPath(entry), + directoryFilter: entry => wh.filterDir(entry), + ...Depth(opts.depth - (priorDepth || 0)) + }).on(STR_DATA, (entry) => { + // need to check filterPath on dirs b/c filterDir is less restrictive + if (this.fsw.closed) { + return; + } + if (entry.stats.isDirectory() && !wh.filterPath(entry)) return; + + const joinedPath = sysPath.join(wh.watchPath, entry.path); + const {fullPath} = entry; + + if (wh.followSymlinks && entry.stats.isSymbolicLink()) { + // preserve the current depth here since it can't be derived from + // real paths past the symlink + const curDepth = opts.depth === undefined ? 
+ undefined : calcDepth(joinedPath, sysPath.resolve(wh.watchPath)) + 1; + + this._handleFsEventsSymlink(joinedPath, fullPath, processPath, curDepth); + } else { + this.emitAdd(joinedPath, entry.stats, processPath, opts, forceAdd); + } + }).on(EV_ERROR, EMPTY_FN).on(STR_END, () => { + this.fsw._emitReady(); + }); + } else { + this.emitAdd(wh.watchPath, stats, processPath, opts, forceAdd); + this.fsw._emitReady(); + } + } catch (error) { + if (!error || this.fsw._handleError(error)) { + // TODO: Strange thing: "should not choke on an ignored watch path" will be failed without 2 ready calls -__- + this.fsw._emitReady(); + this.fsw._emitReady(); + } + } + + if (opts.persistent && forceAdd !== true) { + if (typeof transform === FUNCTION_TYPE) { + // realpath has already been resolved + this.initWatch(undefined, path, wh, processPath); + } else { + let realPath; + try { + realPath = await realpath(wh.watchPath); + } catch (e) {} + this.initWatch(realPath, path, wh, processPath); + } + } +} + +} + +module.exports = FsEventsHandler; +module.exports.canUse = canUse; diff --git a/backend/Backend/node_modules/chokidar/lib/nodefs-handler.js b/backend/Backend/node_modules/chokidar/lib/nodefs-handler.js new file mode 100644 index 000000000..199cfe9f9 --- /dev/null +++ b/backend/Backend/node_modules/chokidar/lib/nodefs-handler.js @@ -0,0 +1,654 @@ +'use strict'; + +const fs = require('fs'); +const sysPath = require('path'); +const { promisify } = require('util'); +const isBinaryPath = require('is-binary-path'); +const { + isWindows, + isLinux, + EMPTY_FN, + EMPTY_STR, + KEY_LISTENERS, + KEY_ERR, + KEY_RAW, + HANDLER_KEYS, + EV_CHANGE, + EV_ADD, + EV_ADD_DIR, + EV_ERROR, + STR_DATA, + STR_END, + BRACE_START, + STAR +} = require('./constants'); + +const THROTTLE_MODE_WATCH = 'watch'; + +const open = promisify(fs.open); +const stat = promisify(fs.stat); +const lstat = promisify(fs.lstat); +const close = promisify(fs.close); +const fsrealpath = promisify(fs.realpath); + +const statMethods = { lstat, stat }; + +// TODO: emit errors properly. Example: EMFILE on Macos. +const foreach = (val, fn) => { + if (val instanceof Set) { + val.forEach(fn); + } else { + fn(val); + } +}; + +const addAndConvert = (main, prop, item) => { + let container = main[prop]; + if (!(container instanceof Set)) { + main[prop] = container = new Set([container]); + } + container.add(item); +}; + +const clearItem = cont => key => { + const set = cont[key]; + if (set instanceof Set) { + set.clear(); + } else { + delete cont[key]; + } +}; + +const delFromSet = (main, prop, item) => { + const container = main[prop]; + if (container instanceof Set) { + container.delete(item); + } else if (container === item) { + delete main[prop]; + } +}; + +const isEmptySet = (val) => val instanceof Set ? 
val.size === 0 : !val; + +/** + * @typedef {String} Path + */ + +// fs_watch helpers + +// object to hold per-process fs_watch instances +// (may be shared across chokidar FSWatcher instances) + +/** + * @typedef {Object} FsWatchContainer + * @property {Set} listeners + * @property {Set} errHandlers + * @property {Set} rawEmitters + * @property {fs.FSWatcher=} watcher + * @property {Boolean=} watcherUnusable + */ + +/** + * @type {Map} + */ +const FsWatchInstances = new Map(); + +/** + * Instantiates the fs_watch interface + * @param {String} path to be watched + * @param {Object} options to be passed to fs_watch + * @param {Function} listener main event handler + * @param {Function} errHandler emits info about errors + * @param {Function} emitRaw emits raw event data + * @returns {fs.FSWatcher} new fsevents instance + */ +function createFsWatchInstance(path, options, listener, errHandler, emitRaw) { + const handleEvent = (rawEvent, evPath) => { + listener(path); + emitRaw(rawEvent, evPath, {watchedPath: path}); + + // emit based on events occurring for files from a directory's watcher in + // case the file's watcher misses it (and rely on throttling to de-dupe) + if (evPath && path !== evPath) { + fsWatchBroadcast( + sysPath.resolve(path, evPath), KEY_LISTENERS, sysPath.join(path, evPath) + ); + } + }; + try { + return fs.watch(path, options, handleEvent); + } catch (error) { + errHandler(error); + } +} + +/** + * Helper for passing fs_watch event data to a collection of listeners + * @param {Path} fullPath absolute path bound to fs_watch instance + * @param {String} type listener type + * @param {*=} val1 arguments to be passed to listeners + * @param {*=} val2 + * @param {*=} val3 + */ +const fsWatchBroadcast = (fullPath, type, val1, val2, val3) => { + const cont = FsWatchInstances.get(fullPath); + if (!cont) return; + foreach(cont[type], (listener) => { + listener(val1, val2, val3); + }); +}; + +/** + * Instantiates the fs_watch interface or binds listeners + * to an existing one covering the same file system entry + * @param {String} path + * @param {String} fullPath absolute path + * @param {Object} options to be passed to fs_watch + * @param {Object} handlers container for event listener functions + */ +const setFsWatchListener = (path, fullPath, options, handlers) => { + const {listener, errHandler, rawEmitter} = handlers; + let cont = FsWatchInstances.get(fullPath); + + /** @type {fs.FSWatcher=} */ + let watcher; + if (!options.persistent) { + watcher = createFsWatchInstance( + path, options, listener, errHandler, rawEmitter + ); + return watcher.close.bind(watcher); + } + if (cont) { + addAndConvert(cont, KEY_LISTENERS, listener); + addAndConvert(cont, KEY_ERR, errHandler); + addAndConvert(cont, KEY_RAW, rawEmitter); + } else { + watcher = createFsWatchInstance( + path, + options, + fsWatchBroadcast.bind(null, fullPath, KEY_LISTENERS), + errHandler, // no need to use broadcast here + fsWatchBroadcast.bind(null, fullPath, KEY_RAW) + ); + if (!watcher) return; + watcher.on(EV_ERROR, async (error) => { + const broadcastErr = fsWatchBroadcast.bind(null, fullPath, KEY_ERR); + cont.watcherUnusable = true; // documented since Node 10.4.1 + // Workaround for https://github.com/joyent/node/issues/4337 + if (isWindows && error.code === 'EPERM') { + try { + const fd = await open(path, 'r'); + await close(fd); + broadcastErr(error); + } catch (err) {} + } else { + broadcastErr(error); + } + }); + cont = { + listeners: listener, + errHandlers: errHandler, + rawEmitters: rawEmitter, + watcher 
+ }; + FsWatchInstances.set(fullPath, cont); + } + // const index = cont.listeners.indexOf(listener); + + // removes this instance's listeners and closes the underlying fs_watch + // instance if there are no more listeners left + return () => { + delFromSet(cont, KEY_LISTENERS, listener); + delFromSet(cont, KEY_ERR, errHandler); + delFromSet(cont, KEY_RAW, rawEmitter); + if (isEmptySet(cont.listeners)) { + // Check to protect against issue gh-730. + // if (cont.watcherUnusable) { + cont.watcher.close(); + // } + FsWatchInstances.delete(fullPath); + HANDLER_KEYS.forEach(clearItem(cont)); + cont.watcher = undefined; + Object.freeze(cont); + } + }; +}; + +// fs_watchFile helpers + +// object to hold per-process fs_watchFile instances +// (may be shared across chokidar FSWatcher instances) +const FsWatchFileInstances = new Map(); + +/** + * Instantiates the fs_watchFile interface or binds listeners + * to an existing one covering the same file system entry + * @param {String} path to be watched + * @param {String} fullPath absolute path + * @param {Object} options options to be passed to fs_watchFile + * @param {Object} handlers container for event listener functions + * @returns {Function} closer + */ +const setFsWatchFileListener = (path, fullPath, options, handlers) => { + const {listener, rawEmitter} = handlers; + let cont = FsWatchFileInstances.get(fullPath); + + /* eslint-disable no-unused-vars, prefer-destructuring */ + let listeners = new Set(); + let rawEmitters = new Set(); + + const copts = cont && cont.options; + if (copts && (copts.persistent < options.persistent || copts.interval > options.interval)) { + // "Upgrade" the watcher to persistence or a quicker interval. + // This creates some unlikely edge case issues if the user mixes + // settings in a very weird way, but solving for those cases + // doesn't seem worthwhile for the added complexity. + listeners = cont.listeners; + rawEmitters = cont.rawEmitters; + fs.unwatchFile(fullPath); + cont = undefined; + } + + /* eslint-enable no-unused-vars, prefer-destructuring */ + + if (cont) { + addAndConvert(cont, KEY_LISTENERS, listener); + addAndConvert(cont, KEY_RAW, rawEmitter); + } else { + // TODO + // listeners.add(listener); + // rawEmitters.add(rawEmitter); + cont = { + listeners: listener, + rawEmitters: rawEmitter, + options, + watcher: fs.watchFile(fullPath, options, (curr, prev) => { + foreach(cont.rawEmitters, (rawEmitter) => { + rawEmitter(EV_CHANGE, fullPath, {curr, prev}); + }); + const currmtime = curr.mtimeMs; + if (curr.size !== prev.size || currmtime > prev.mtimeMs || currmtime === 0) { + foreach(cont.listeners, (listener) => listener(path, curr)); + } + }) + }; + FsWatchFileInstances.set(fullPath, cont); + } + // const index = cont.listeners.indexOf(listener); + + // Removes this instance's listeners and closes the underlying fs_watchFile + // instance if there are no more listeners left. + return () => { + delFromSet(cont, KEY_LISTENERS, listener); + delFromSet(cont, KEY_RAW, rawEmitter); + if (isEmptySet(cont.listeners)) { + FsWatchFileInstances.delete(fullPath); + fs.unwatchFile(fullPath); + cont.options = cont.watcher = undefined; + Object.freeze(cont); + } + }; +}; + +/** + * @mixin + */ +class NodeFsHandler { + +/** + * @param {import("../index").FSWatcher} fsW + */ +constructor(fsW) { + this.fsw = fsW; + this._boundHandleError = (error) => fsW._handleError(error); +} + +/** + * Watch file for changes with fs_watchFile or fs_watch. 
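The `setFsWatchListener` helper above multiplexes listeners: it keeps one `fs.watch` instance per resolved path, registers every interested callback in a `Set`, and closes the instance when the last listener is removed. A minimal standalone sketch of that sharing pattern, with illustrative names only (not chokidar's API):

```js
const fs = require('fs');

// fullPath -> { watcher, listeners: Set<Function> }, mirroring FsWatchInstances
const shared = new Map();

function watchShared(fullPath, listener) {
  let entry = shared.get(fullPath);
  if (!entry) {
    entry = { listeners: new Set() };
    // one underlying fs.watch per path; events fan out to every listener
    entry.watcher = fs.watch(fullPath, (event, filename) => {
      entry.listeners.forEach((fn) => fn(event, filename));
    });
    shared.set(fullPath, entry);
  }
  entry.listeners.add(listener);
  // returned closer: drop this listener, close the watcher once none remain
  return () => {
    entry.listeners.delete(listener);
    if (entry.listeners.size === 0) {
      entry.watcher.close();
      shared.delete(fullPath);
    }
  };
}
```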
+ * @param {String} path to file or dir + * @param {Function} listener on fs change + * @returns {Function} closer for the watcher instance + */ +_watchWithNodeFs(path, listener) { + const opts = this.fsw.options; + const directory = sysPath.dirname(path); + const basename = sysPath.basename(path); + const parent = this.fsw._getWatchedDir(directory); + parent.add(basename); + const absolutePath = sysPath.resolve(path); + const options = {persistent: opts.persistent}; + if (!listener) listener = EMPTY_FN; + + let closer; + if (opts.usePolling) { + options.interval = opts.enableBinaryInterval && isBinaryPath(basename) ? + opts.binaryInterval : opts.interval; + closer = setFsWatchFileListener(path, absolutePath, options, { + listener, + rawEmitter: this.fsw._emitRaw + }); + } else { + closer = setFsWatchListener(path, absolutePath, options, { + listener, + errHandler: this._boundHandleError, + rawEmitter: this.fsw._emitRaw + }); + } + return closer; +} + +/** + * Watch a file and emit add event if warranted. + * @param {Path} file Path + * @param {fs.Stats} stats result of fs_stat + * @param {Boolean} initialAdd was the file added at watch instantiation? + * @returns {Function} closer for the watcher instance + */ +_handleFile(file, stats, initialAdd) { + if (this.fsw.closed) { + return; + } + const dirname = sysPath.dirname(file); + const basename = sysPath.basename(file); + const parent = this.fsw._getWatchedDir(dirname); + // stats is always present + let prevStats = stats; + + // if the file is already being watched, do nothing + if (parent.has(basename)) return; + + const listener = async (path, newStats) => { + if (!this.fsw._throttle(THROTTLE_MODE_WATCH, file, 5)) return; + if (!newStats || newStats.mtimeMs === 0) { + try { + const newStats = await stat(file); + if (this.fsw.closed) return; + // Check that change event was not fired because of changed only accessTime. + const at = newStats.atimeMs; + const mt = newStats.mtimeMs; + if (!at || at <= mt || mt !== prevStats.mtimeMs) { + this.fsw._emit(EV_CHANGE, file, newStats); + } + if (isLinux && prevStats.ino !== newStats.ino) { + this.fsw._closeFile(path) + prevStats = newStats; + this.fsw._addPathCloser(path, this._watchWithNodeFs(file, listener)); + } else { + prevStats = newStats; + } + } catch (error) { + // Fix issues where mtime is null but file is still present + this.fsw._remove(dirname, basename); + } + // add is about to be emitted if file not already tracked in parent + } else if (parent.has(basename)) { + // Check that change event was not fired because of changed only accessTime. + const at = newStats.atimeMs; + const mt = newStats.mtimeMs; + if (!at || at <= mt || mt !== prevStats.mtimeMs) { + this.fsw._emit(EV_CHANGE, file, newStats); + } + prevStats = newStats; + } + } + // kick off the watcher + const closer = this._watchWithNodeFs(file, listener); + + // emit an add event if we're supposed to + if (!(initialAdd && this.fsw.options.ignoreInitial) && this.fsw._isntIgnored(file)) { + if (!this.fsw._throttle(EV_ADD, file, 0)) return; + this.fsw._emit(EV_ADD, file, stats); + } + + return closer; +} + +/** + * Handle symlinks encountered while reading a dir. + * @param {Object} entry returned by readdirp + * @param {String} directory path of dir being read + * @param {String} path of this item + * @param {String} item basename of this item + * @returns {Promise} true if no more processing is needed for this entry. 
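The file listener in `_handleFile` above deliberately ignores events caused only by access-time updates: a `change` is emitted only when the modification time moved. That check, pulled out as a standalone helper (hypothetical name, same expression as the code above):

```js
// True when newStats reflects a real content change relative to prevStats,
// i.e. the event was not triggered by an atime-only update.
function isRealChange(prevStats, newStats) {
  const at = newStats.atimeMs;
  const mt = newStats.mtimeMs;
  return !at || at <= mt || mt !== prevStats.mtimeMs;
}
```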
+ */ +async _handleSymlink(entry, directory, path, item) { + if (this.fsw.closed) { + return; + } + const full = entry.fullPath; + const dir = this.fsw._getWatchedDir(directory); + + if (!this.fsw.options.followSymlinks) { + // watch symlink directly (don't follow) and detect changes + this.fsw._incrReadyCount(); + + let linkPath; + try { + linkPath = await fsrealpath(path); + } catch (e) { + this.fsw._emitReady(); + return true; + } + + if (this.fsw.closed) return; + if (dir.has(item)) { + if (this.fsw._symlinkPaths.get(full) !== linkPath) { + this.fsw._symlinkPaths.set(full, linkPath); + this.fsw._emit(EV_CHANGE, path, entry.stats); + } + } else { + dir.add(item); + this.fsw._symlinkPaths.set(full, linkPath); + this.fsw._emit(EV_ADD, path, entry.stats); + } + this.fsw._emitReady(); + return true; + } + + // don't follow the same symlink more than once + if (this.fsw._symlinkPaths.has(full)) { + return true; + } + + this.fsw._symlinkPaths.set(full, true); +} + +_handleRead(directory, initialAdd, wh, target, dir, depth, throttler) { + // Normalize the directory name on Windows + directory = sysPath.join(directory, EMPTY_STR); + + if (!wh.hasGlob) { + throttler = this.fsw._throttle('readdir', directory, 1000); + if (!throttler) return; + } + + const previous = this.fsw._getWatchedDir(wh.path); + const current = new Set(); + + let stream = this.fsw._readdirp(directory, { + fileFilter: entry => wh.filterPath(entry), + directoryFilter: entry => wh.filterDir(entry), + depth: 0 + }).on(STR_DATA, async (entry) => { + if (this.fsw.closed) { + stream = undefined; + return; + } + const item = entry.path; + let path = sysPath.join(directory, item); + current.add(item); + + if (entry.stats.isSymbolicLink() && await this._handleSymlink(entry, directory, path, item)) { + return; + } + + if (this.fsw.closed) { + stream = undefined; + return; + } + // Files that present in current directory snapshot + // but absent in previous are added to watch list and + // emit `add` event. + if (item === target || !target && !previous.has(item)) { + this.fsw._incrReadyCount(); + + // ensure relativeness of path is preserved in case of watcher reuse + path = sysPath.join(dir, sysPath.relative(dir, path)); + + this._addToNodeFs(path, initialAdd, wh, depth + 1); + } + }).on(EV_ERROR, this._boundHandleError); + + return new Promise(resolve => + stream.once(STR_END, () => { + if (this.fsw.closed) { + stream = undefined; + return; + } + const wasThrottled = throttler ? throttler.clear() : false; + + resolve(); + + // Files that absent in current directory snapshot + // but present in previous emit `remove` event + // and are removed from @watched[directory]. + previous.getChildren().filter((item) => { + return item !== directory && + !current.has(item) && + // in case of intersecting globs; + // a path may have been filtered out of this readdir, but + // shouldn't be removed because it matches a different glob + (!wh.hasGlob || wh.filterPath({ + fullPath: sysPath.resolve(directory, item) + })); + }).forEach((item) => { + this.fsw._remove(directory, item); + }); + + stream = undefined; + + // one more time for any missed in case changes came in extremely quickly + if (wasThrottled) this._handleRead(directory, false, wh, target, dir, depth, throttler); + }) + ); +} + +/** + * Read directory to add / remove files from `@watched` list and re-read it on change. 
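`_handleRead` above works by snapshot diffing: it re-reads the directory, compares the fresh listing with the previously watched children, emits `add` for names that appeared, and removes names that vanished. The comparison reduced to a sketch (hypothetical helper, not chokidar's API):

```js
// previous and current are Sets of basenames from two scans of one directory.
function diffListing(previous, current) {
  return {
    added: [...current].filter((item) => !previous.has(item)),
    removed: [...previous].filter((item) => !current.has(item)),
  };
}

// diffListing(new Set(['a.js']), new Set(['a.js', 'b.js']))
// -> { added: ['b.js'], removed: [] }
```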
+ * @param {String} dir fs path + * @param {fs.Stats} stats + * @param {Boolean} initialAdd + * @param {Number} depth relative to user-supplied path + * @param {String} target child path targeted for watch + * @param {Object} wh Common watch helpers for this path + * @param {String} realpath + * @returns {Promise} closer for the watcher instance. + */ +async _handleDir(dir, stats, initialAdd, depth, target, wh, realpath) { + const parentDir = this.fsw._getWatchedDir(sysPath.dirname(dir)); + const tracked = parentDir.has(sysPath.basename(dir)); + if (!(initialAdd && this.fsw.options.ignoreInitial) && !target && !tracked) { + if (!wh.hasGlob || wh.globFilter(dir)) this.fsw._emit(EV_ADD_DIR, dir, stats); + } + + // ensure dir is tracked (harmless if redundant) + parentDir.add(sysPath.basename(dir)); + this.fsw._getWatchedDir(dir); + let throttler; + let closer; + + const oDepth = this.fsw.options.depth; + if ((oDepth == null || depth <= oDepth) && !this.fsw._symlinkPaths.has(realpath)) { + if (!target) { + await this._handleRead(dir, initialAdd, wh, target, dir, depth, throttler); + if (this.fsw.closed) return; + } + + closer = this._watchWithNodeFs(dir, (dirPath, stats) => { + // if current directory is removed, do nothing + if (stats && stats.mtimeMs === 0) return; + + this._handleRead(dirPath, false, wh, target, dir, depth, throttler); + }); + } + return closer; +} + +/** + * Handle added file, directory, or glob pattern. + * Delegates call to _handleFile / _handleDir after checks. + * @param {String} path to file or ir + * @param {Boolean} initialAdd was the file added at watch instantiation? + * @param {Object} priorWh depth relative to user-supplied path + * @param {Number} depth Child path actually targeted for watch + * @param {String=} target Child path actually targeted for watch + * @returns {Promise} + */ +async _addToNodeFs(path, initialAdd, priorWh, depth, target) { + const ready = this.fsw._emitReady; + if (this.fsw._isIgnored(path) || this.fsw.closed) { + ready(); + return false; + } + + const wh = this.fsw._getWatchHelpers(path, depth); + if (!wh.hasGlob && priorWh) { + wh.hasGlob = priorWh.hasGlob; + wh.globFilter = priorWh.globFilter; + wh.filterPath = entry => priorWh.filterPath(entry); + wh.filterDir = entry => priorWh.filterDir(entry); + } + + // evaluate what is at the path we're being asked to watch + try { + const stats = await statMethods[wh.statMethod](wh.watchPath); + if (this.fsw.closed) return; + if (this.fsw._isIgnored(wh.watchPath, stats)) { + ready(); + return false; + } + + const follow = this.fsw.options.followSymlinks && !path.includes(STAR) && !path.includes(BRACE_START); + let closer; + if (stats.isDirectory()) { + const absPath = sysPath.resolve(path); + const targetPath = follow ? await fsrealpath(path) : path; + if (this.fsw.closed) return; + closer = await this._handleDir(wh.watchPath, stats, initialAdd, depth, target, wh, targetPath); + if (this.fsw.closed) return; + // preserve this symlink's target path + if (absPath !== targetPath && targetPath !== undefined) { + this.fsw._symlinkPaths.set(absPath, targetPath); + } + } else if (stats.isSymbolicLink()) { + const targetPath = follow ? 
await fsrealpath(path) : path; + if (this.fsw.closed) return; + const parent = sysPath.dirname(wh.watchPath); + this.fsw._getWatchedDir(parent).add(wh.watchPath); + this.fsw._emit(EV_ADD, wh.watchPath, stats); + closer = await this._handleDir(parent, stats, initialAdd, depth, path, wh, targetPath); + if (this.fsw.closed) return; + + // preserve this symlink's target path + if (targetPath !== undefined) { + this.fsw._symlinkPaths.set(sysPath.resolve(path), targetPath); + } + } else { + closer = this._handleFile(wh.watchPath, stats, initialAdd); + } + ready(); + + this.fsw._addPathCloser(path, closer); + return false; + + } catch (error) { + if (this.fsw._handleError(error)) { + ready(); + return path; + } + } +} + +} + +module.exports = NodeFsHandler; diff --git a/backend/Backend/node_modules/chokidar/package.json b/backend/Backend/node_modules/chokidar/package.json new file mode 100644 index 000000000..e8f8b3d99 --- /dev/null +++ b/backend/Backend/node_modules/chokidar/package.json @@ -0,0 +1,70 @@ +{ + "name": "chokidar", + "description": "Minimal and efficient cross-platform file watching library", + "version": "3.6.0", + "homepage": "https://github.com/paulmillr/chokidar", + "author": "Paul Miller (https://paulmillr.com)", + "contributors": [ + "Paul Miller (https://paulmillr.com)", + "Elan Shanker" + ], + "engines": { + "node": ">= 8.10.0" + }, + "main": "index.js", + "types": "./types/index.d.ts", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + }, + "devDependencies": { + "@types/node": "^14", + "chai": "^4.3", + "dtslint": "^3.3.0", + "eslint": "^7.0.0", + "mocha": "^7.0.0", + "rimraf": "^3.0.0", + "sinon": "^9.0.1", + "sinon-chai": "^3.3.0", + "typescript": "^4.4.3", + "upath": "^1.2.0" + }, + "files": [ + "index.js", + "lib/*.js", + "types/index.d.ts" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/paulmillr/chokidar.git" + }, + "bugs": { + "url": "https://github.com/paulmillr/chokidar/issues" + }, + "license": "MIT", + "scripts": { + "dtslint": "dtslint types", + "lint": "eslint --report-unused-disable-directives --ignore-path .gitignore .", + "build": "npm ls", + "mocha": "mocha --exit --timeout 90000", + "test": "npm run lint && npm run mocha" + }, + "keywords": [ + "fs", + "watch", + "watchFile", + "watcher", + "watching", + "file", + "fsevents" + ], + "funding": "https://paulmillr.com/funding/" +} diff --git a/backend/Backend/node_modules/chokidar/types/index.d.ts b/backend/Backend/node_modules/chokidar/types/index.d.ts new file mode 100644 index 000000000..455806638 --- /dev/null +++ b/backend/Backend/node_modules/chokidar/types/index.d.ts @@ -0,0 +1,192 @@ +// TypeScript Version: 3.0 + +/// + +import * as fs from "fs"; +import { EventEmitter } from "events"; +import { Matcher } from 'anymatch'; + +export class FSWatcher extends EventEmitter implements fs.FSWatcher { + options: WatchOptions; + + /** + * Constructs a new FSWatcher instance with optional WatchOptions parameter. + */ + constructor(options?: WatchOptions); + + /** + * Add files, directories, or glob patterns for tracking. Takes an array of strings or just one + * string. + */ + add(paths: string | ReadonlyArray): this; + + /** + * Stop watching files, directories, or glob patterns. Takes an array of strings or just one + * string. 
+ */ + unwatch(paths: string | ReadonlyArray): this; + + /** + * Returns an object representing all the paths on the file system being watched by this + * `FSWatcher` instance. The object's keys are all the directories (using absolute paths unless + * the `cwd` option was used), and the values are arrays of the names of the items contained in + * each directory. + */ + getWatched(): { + [directory: string]: string[]; + }; + + /** + * Removes all listeners from watched files. + */ + close(): Promise; + + on(event: 'add'|'addDir'|'change', listener: (path: string, stats?: fs.Stats) => void): this; + + on(event: 'all', listener: (eventName: 'add'|'addDir'|'change'|'unlink'|'unlinkDir', path: string, stats?: fs.Stats) => void): this; + + /** + * Error occurred + */ + on(event: 'error', listener: (error: Error) => void): this; + + /** + * Exposes the native Node `fs.FSWatcher events` + */ + on(event: 'raw', listener: (eventName: string, path: string, details: any) => void): this; + + /** + * Fires when the initial scan is complete + */ + on(event: 'ready', listener: () => void): this; + + on(event: 'unlink'|'unlinkDir', listener: (path: string) => void): this; + + on(event: string, listener: (...args: any[]) => void): this; + + ref(): this; + + unref(): this; +} + +export interface WatchOptions { + /** + * Indicates whether the process should continue to run as long as files are being watched. If + * set to `false` when using `fsevents` to watch, no more events will be emitted after `ready`, + * even if the process continues to run. + */ + persistent?: boolean; + + /** + * ([anymatch](https://github.com/micromatch/anymatch)-compatible definition) Defines files/paths to + * be ignored. The whole relative or absolute path is tested, not just filename. If a function + * with two arguments is provided, it gets called twice per path - once with a single argument + * (the path), second time with two arguments (the path and the + * [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) object of that path). + */ + ignored?: Matcher; + + /** + * If set to `false` then `add`/`addDir` events are also emitted for matching paths while + * instantiating the watching as chokidar discovers these file paths (before the `ready` event). + */ + ignoreInitial?: boolean; + + /** + * When `false`, only the symlinks themselves will be watched for changes instead of following + * the link references and bubbling events through the link's path. + */ + followSymlinks?: boolean; + + /** + * The base directory from which watch `paths` are to be derived. Paths emitted with events will + * be relative to this. + */ + cwd?: string; + + /** + * If set to true then the strings passed to .watch() and .add() are treated as literal path + * names, even if they look like globs. Default: false. + */ + disableGlobbing?: boolean; + + /** + * Whether to use fs.watchFile (backed by polling), or fs.watch. If polling leads to high CPU + * utilization, consider setting this to `false`. It is typically necessary to **set this to + * `true` to successfully watch files over a network**, and it may be necessary to successfully + * watch files in other non-standard situations. Setting to `true` explicitly on OS X overrides + * the `useFsEvents` default. + */ + usePolling?: boolean; + + /** + * Whether to use the `fsevents` watching interface if available. When set to `true` explicitly + * and `fsevents` is available this supercedes the `usePolling` setting. When set to `false` on + * OS X, `usePolling: true` becomes the default. 
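The options documented here are consumed by the `watch()` export declared at the end of this file. A small usage sketch exercising a few of them (the watched path and handlers are illustrative):

```js
const chokidar = require('chokidar');

const watcher = chokidar.watch('src', {
  persistent: true,          // keep the process alive while watching
  ignored: /(^|[\/\\])\../,  // skip dotfiles
  ignoreInitial: false,      // also emit add/addDir during the initial scan
  followSymlinks: true,
  usePolling: false          // consider true for network file systems
});

watcher
  .on('add', (path) => console.log('add', path))
  .on('change', (path) => console.log('change', path))
  .on('unlink', (path) => console.log('unlink', path))
  .on('ready', () => console.log('initial scan complete'));
```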
+ */ + useFsEvents?: boolean; + + /** + * If relying upon the [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) object that + * may get passed with `add`, `addDir`, and `change` events, set this to `true` to ensure it is + * provided even in cases where it wasn't already available from the underlying watch events. + */ + alwaysStat?: boolean; + + /** + * If set, limits how many levels of subdirectories will be traversed. + */ + depth?: number; + + /** + * Interval of file system polling. + */ + interval?: number; + + /** + * Interval of file system polling for binary files. ([see list of binary extensions](https://gi + * thub.com/sindresorhus/binary-extensions/blob/master/binary-extensions.json)) + */ + binaryInterval?: number; + + /** + * Indicates whether to watch files that don't have read permissions if possible. If watching + * fails due to `EPERM` or `EACCES` with this set to `true`, the errors will be suppressed + * silently. + */ + ignorePermissionErrors?: boolean; + + /** + * `true` if `useFsEvents` and `usePolling` are `false`). Automatically filters out artifacts + * that occur when using editors that use "atomic writes" instead of writing directly to the + * source file. If a file is re-added within 100 ms of being deleted, Chokidar emits a `change` + * event rather than `unlink` then `add`. If the default of 100 ms does not work well for you, + * you can override it by setting `atomic` to a custom value, in milliseconds. + */ + atomic?: boolean | number; + + /** + * can be set to an object in order to adjust timing params: + */ + awaitWriteFinish?: AwaitWriteFinishOptions | boolean; +} + +export interface AwaitWriteFinishOptions { + /** + * Amount of time in milliseconds for a file size to remain constant before emitting its event. + */ + stabilityThreshold?: number; + + /** + * File size polling interval. + */ + pollInterval?: number; +} + +/** + * produces an instance of `FSWatcher`. + */ +export function watch( + paths: string | ReadonlyArray, + options?: WatchOptions +): FSWatcher; diff --git a/backend/Backend/node_modules/concat-map/.travis.yml b/backend/Backend/node_modules/concat-map/.travis.yml new file mode 100644 index 000000000..f1d0f13c8 --- /dev/null +++ b/backend/Backend/node_modules/concat-map/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.4 + - 0.6 diff --git a/backend/Backend/node_modules/concat-map/LICENSE b/backend/Backend/node_modules/concat-map/LICENSE new file mode 100644 index 000000000..ee27ba4b4 --- /dev/null +++ b/backend/Backend/node_modules/concat-map/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/backend/Backend/node_modules/concat-map/README.markdown b/backend/Backend/node_modules/concat-map/README.markdown new file mode 100644 index 000000000..408f70a1b --- /dev/null +++ b/backend/Backend/node_modules/concat-map/README.markdown @@ -0,0 +1,62 @@ +concat-map +========== + +Concatenative mapdashery. + +[![browser support](http://ci.testling.com/substack/node-concat-map.png)](http://ci.testling.com/substack/node-concat-map) + +[![build status](https://secure.travis-ci.org/substack/node-concat-map.png)](http://travis-ci.org/substack/node-concat-map) + +example +======= + +``` js +var concatMap = require('concat-map'); +var xs = [ 1, 2, 3, 4, 5, 6 ]; +var ys = concatMap(xs, function (x) { + return x % 2 ? [ x - 0.1, x, x + 0.1 ] : []; +}); +console.dir(ys); +``` + +*** + +``` +[ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ] +``` + +methods +======= + +``` js +var concatMap = require('concat-map') +``` + +concatMap(xs, fn) +----------------- + +Return an array of concatenated elements by calling `fn(x, i)` for each element +`x` and each index `i` in the array `xs`. + +When `fn(x, i)` returns an array, its result will be concatenated with the +result array. If `fn(x, i)` returns anything else, that value will be pushed +onto the end of the result array. + +install +======= + +With [npm](http://npmjs.org) do: + +``` +npm install concat-map +``` + +license +======= + +MIT + +notes +===== + +This module was written while sitting high above the ground in a tree. diff --git a/backend/Backend/node_modules/concat-map/example/map.js b/backend/Backend/node_modules/concat-map/example/map.js new file mode 100644 index 000000000..33656217b --- /dev/null +++ b/backend/Backend/node_modules/concat-map/example/map.js @@ -0,0 +1,6 @@ +var concatMap = require('../'); +var xs = [ 1, 2, 3, 4, 5, 6 ]; +var ys = concatMap(xs, function (x) { + return x % 2 ? 
[ x - 0.1, x, x + 0.1 ] : []; +}); +console.dir(ys); diff --git a/backend/Backend/node_modules/concat-map/index.js b/backend/Backend/node_modules/concat-map/index.js new file mode 100644 index 000000000..b29a7812e --- /dev/null +++ b/backend/Backend/node_modules/concat-map/index.js @@ -0,0 +1,13 @@ +module.exports = function (xs, fn) { + var res = []; + for (var i = 0; i < xs.length; i++) { + var x = fn(xs[i], i); + if (isArray(x)) res.push.apply(res, x); + else res.push(x); + } + return res; +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; diff --git a/backend/Backend/node_modules/concat-map/package.json b/backend/Backend/node_modules/concat-map/package.json new file mode 100644 index 000000000..d3640e6b0 --- /dev/null +++ b/backend/Backend/node_modules/concat-map/package.json @@ -0,0 +1,43 @@ +{ + "name" : "concat-map", + "description" : "concatenative mapdashery", + "version" : "0.0.1", + "repository" : { + "type" : "git", + "url" : "git://github.com/substack/node-concat-map.git" + }, + "main" : "index.js", + "keywords" : [ + "concat", + "concatMap", + "map", + "functional", + "higher-order" + ], + "directories" : { + "example" : "example", + "test" : "test" + }, + "scripts" : { + "test" : "tape test/*.js" + }, + "devDependencies" : { + "tape" : "~2.4.0" + }, + "license" : "MIT", + "author" : { + "name" : "James Halliday", + "email" : "mail@substack.net", + "url" : "http://substack.net" + }, + "testling" : { + "files" : "test/*.js", + "browsers" : { + "ie" : [ 6, 7, 8, 9 ], + "ff" : [ 3.5, 10, 15.0 ], + "chrome" : [ 10, 22 ], + "safari" : [ 5.1 ], + "opera" : [ 12 ] + } + } +} diff --git a/backend/Backend/node_modules/concat-map/test/map.js b/backend/Backend/node_modules/concat-map/test/map.js new file mode 100644 index 000000000..fdbd7022f --- /dev/null +++ b/backend/Backend/node_modules/concat-map/test/map.js @@ -0,0 +1,39 @@ +var concatMap = require('../'); +var test = require('tape'); + +test('empty or not', function (t) { + var xs = [ 1, 2, 3, 4, 5, 6 ]; + var ixes = []; + var ys = concatMap(xs, function (x, ix) { + ixes.push(ix); + return x % 2 ? [ x - 0.1, x, x + 0.1 ] : []; + }); + t.same(ys, [ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ]); + t.same(ixes, [ 0, 1, 2, 3, 4, 5 ]); + t.end(); +}); + +test('always something', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function (x) { + return x === 'b' ? [ 'B', 'B', 'B' ] : [ x ]; + }); + t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]); + t.end(); +}); + +test('scalars', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function (x) { + return x === 'b' ? 
[ 'B', 'B', 'B' ] : x; + }); + t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]); + t.end(); +}); + +test('undefs', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function () {}); + t.same(ys, [ undefined, undefined, undefined, undefined ]); + t.end(); +}); diff --git a/backend/Backend/node_modules/concat-stream/LICENSE b/backend/Backend/node_modules/concat-stream/LICENSE new file mode 100644 index 000000000..99c130e1d --- /dev/null +++ b/backend/Backend/node_modules/concat-stream/LICENSE @@ -0,0 +1,24 @@ +The MIT License + +Copyright (c) 2013 Max Ogden + +Permission is hereby granted, free of charge, +to any person obtaining a copy of this software and +associated documentation files (the "Software"), to +deal in the Software without restriction, including +without limitation the rights to use, copy, modify, +merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom +the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/backend/Backend/node_modules/concat-stream/index.js b/backend/Backend/node_modules/concat-stream/index.js new file mode 100644 index 000000000..dd672a761 --- /dev/null +++ b/backend/Backend/node_modules/concat-stream/index.js @@ -0,0 +1,144 @@ +var Writable = require('readable-stream').Writable +var inherits = require('inherits') +var bufferFrom = require('buffer-from') + +if (typeof Uint8Array === 'undefined') { + var U8 = require('typedarray').Uint8Array +} else { + var U8 = Uint8Array +} + +function ConcatStream(opts, cb) { + if (!(this instanceof ConcatStream)) return new ConcatStream(opts, cb) + + if (typeof opts === 'function') { + cb = opts + opts = {} + } + if (!opts) opts = {} + + var encoding = opts.encoding + var shouldInferEncoding = false + + if (!encoding) { + shouldInferEncoding = true + } else { + encoding = String(encoding).toLowerCase() + if (encoding === 'u8' || encoding === 'uint8') { + encoding = 'uint8array' + } + } + + Writable.call(this, { objectMode: true }) + + this.encoding = encoding + this.shouldInferEncoding = shouldInferEncoding + + if (cb) this.on('finish', function () { cb(this.getBody()) }) + this.body = [] +} + +module.exports = ConcatStream +inherits(ConcatStream, Writable) + +ConcatStream.prototype._write = function(chunk, enc, next) { + this.body.push(chunk) + next() +} + +ConcatStream.prototype.inferEncoding = function (buff) { + var firstBuffer = buff === undefined ? 
this.body[0] : buff; + if (Buffer.isBuffer(firstBuffer)) return 'buffer' + if (typeof Uint8Array !== 'undefined' && firstBuffer instanceof Uint8Array) return 'uint8array' + if (Array.isArray(firstBuffer)) return 'array' + if (typeof firstBuffer === 'string') return 'string' + if (Object.prototype.toString.call(firstBuffer) === "[object Object]") return 'object' + return 'buffer' +} + +ConcatStream.prototype.getBody = function () { + if (!this.encoding && this.body.length === 0) return [] + if (this.shouldInferEncoding) this.encoding = this.inferEncoding() + if (this.encoding === 'array') return arrayConcat(this.body) + if (this.encoding === 'string') return stringConcat(this.body) + if (this.encoding === 'buffer') return bufferConcat(this.body) + if (this.encoding === 'uint8array') return u8Concat(this.body) + return this.body +} + +var isArray = Array.isArray || function (arr) { + return Object.prototype.toString.call(arr) == '[object Array]' +} + +function isArrayish (arr) { + return /Array\]$/.test(Object.prototype.toString.call(arr)) +} + +function isBufferish (p) { + return typeof p === 'string' || isArrayish(p) || (p && typeof p.subarray === 'function') +} + +function stringConcat (parts) { + var strings = [] + var needsToString = false + for (var i = 0; i < parts.length; i++) { + var p = parts[i] + if (typeof p === 'string') { + strings.push(p) + } else if (Buffer.isBuffer(p)) { + strings.push(p) + } else if (isBufferish(p)) { + strings.push(bufferFrom(p)) + } else { + strings.push(bufferFrom(String(p))) + } + } + if (Buffer.isBuffer(parts[0])) { + strings = Buffer.concat(strings) + strings = strings.toString('utf8') + } else { + strings = strings.join('') + } + return strings +} + +function bufferConcat (parts) { + var bufs = [] + for (var i = 0; i < parts.length; i++) { + var p = parts[i] + if (Buffer.isBuffer(p)) { + bufs.push(p) + } else if (isBufferish(p)) { + bufs.push(bufferFrom(p)) + } else { + bufs.push(bufferFrom(String(p))) + } + } + return Buffer.concat(bufs) +} + +function arrayConcat (parts) { + var res = [] + for (var i = 0; i < parts.length; i++) { + res.push.apply(res, parts[i]) + } + return res +} + +function u8Concat (parts) { + var len = 0 + for (var i = 0; i < parts.length; i++) { + if (typeof parts[i] === 'string') { + parts[i] = bufferFrom(parts[i]) + } + len += parts[i].length + } + var u8 = new U8(len) + for (var i = 0, offset = 0; i < parts.length; i++) { + var part = parts[i] + for (var j = 0; j < part.length; j++) { + u8[offset++] = part[j] + } + } + return u8 +} diff --git a/backend/Backend/node_modules/concat-stream/package.json b/backend/Backend/node_modules/concat-stream/package.json new file mode 100644 index 000000000..f70902234 --- /dev/null +++ b/backend/Backend/node_modules/concat-stream/package.json @@ -0,0 +1,55 @@ +{ + "name": "concat-stream", + "version": "1.6.2", + "description": "writable stream that concatenates strings or binary data and calls a callback with the result", + "tags": [ + "stream", + "simple", + "util", + "utility" + ], + "author": "Max Ogden ", + "repository": { + "type": "git", + "url": "http://github.com/maxogden/concat-stream.git" + }, + "bugs": { + "url": "http://github.com/maxogden/concat-stream/issues" + }, + "engines": [ + "node >= 0.8" + ], + "main": "index.js", + "files": [ + "index.js" + ], + "scripts": { + "test": "tape test/*.js test/server/*.js" + }, + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + }, + 
"devDependencies": { + "tape": "^4.6.3" + }, + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/17..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/backend/Backend/node_modules/concat-stream/readme.md b/backend/Backend/node_modules/concat-stream/readme.md new file mode 100644 index 000000000..7aa19c4fb --- /dev/null +++ b/backend/Backend/node_modules/concat-stream/readme.md @@ -0,0 +1,102 @@ +# concat-stream + +Writable stream that concatenates all the data from a stream and calls a callback with the result. Use this when you want to collect all the data from a stream into a single buffer. + +[![Build Status](https://travis-ci.org/maxogden/concat-stream.svg?branch=master)](https://travis-ci.org/maxogden/concat-stream) + +[![NPM](https://nodei.co/npm/concat-stream.png)](https://nodei.co/npm/concat-stream/) + +### description + +Streams emit many buffers. If you want to collect all of the buffers, and when the stream ends concatenate all of the buffers together and receive a single buffer then this is the module for you. + +Only use this if you know you can fit all of the output of your stream into a single Buffer (e.g. in RAM). + +There are also `objectMode` streams that emit things other than Buffers, and you can concatenate these too. See below for details. + +## Related + +`concat-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. + +### examples + +#### Buffers + +```js +var fs = require('fs') +var concat = require('concat-stream') + +var readStream = fs.createReadStream('cat.png') +var concatStream = concat(gotPicture) + +readStream.on('error', handleError) +readStream.pipe(concatStream) + +function gotPicture(imageBuffer) { + // imageBuffer is all of `cat.png` as a node.js Buffer +} + +function handleError(err) { + // handle your error appropriately here, e.g.: + console.error(err) // print the error to STDERR + process.exit(1) // exit program with non-zero exit code +} + +``` + +#### Arrays + +```js +var write = concat(function(data) {}) +write.write([1,2,3]) +write.write([4,5,6]) +write.end() +// data will be [1,2,3,4,5,6] in the above callback +``` + +#### Uint8Arrays + +```js +var write = concat(function(data) {}) +var a = new Uint8Array(3) +a[0] = 97; a[1] = 98; a[2] = 99 +write.write(a) +write.write('!') +write.end(Buffer.from('!!1')) +``` + +See `test/` for more examples + +# methods + +```js +var concat = require('concat-stream') +``` + +## var writable = concat(opts={}, cb) + +Return a `writable` stream that will fire `cb(data)` with all of the data that +was written to the stream. Data can be written to `writable` as strings, +Buffers, arrays of byte integers, and Uint8Arrays. + +By default `concat-stream` will give you back the same data type as the type of the first buffer written to the stream. Use `opts.encoding` to set what format `data` should be returned as, e.g. if you if you don't want to rely on the built-in type checking or for some other reason. + +* `string` - get a string +* `buffer` - get back a Buffer +* `array` - get an array of byte integers +* `uint8array`, `u8`, `uint8` - get back a Uint8Array +* `object`, get back an array of Objects + +If you don't specify an encoding, and the types can't be inferred (e.g. 
you write things that aren't in the list above), it will try to convert concat them into a `Buffer`. + +If nothing is written to `writable` then `data` will be an empty array `[]`. + +# error handling + +`concat-stream` does not handle errors for you, so you must handle errors on whatever streams you pipe into `concat-stream`. This is a general rule when programming with node.js streams: always handle errors on each and every stream. Since `concat-stream` is not itself a stream it does not emit errors. + +We recommend using [`end-of-stream`](https://npmjs.org/end-of-stream) or [`pump`](https://npmjs.org/pump) for writing error tolerant stream code. + +# license + +MIT LICENSE diff --git a/backend/Backend/node_modules/content-disposition/HISTORY.md b/backend/Backend/node_modules/content-disposition/HISTORY.md new file mode 100644 index 000000000..488effa0c --- /dev/null +++ b/backend/Backend/node_modules/content-disposition/HISTORY.md @@ -0,0 +1,60 @@ +0.5.4 / 2021-12-10 +================== + + * deps: safe-buffer@5.2.1 + +0.5.3 / 2018-12-17 +================== + + * Use `safe-buffer` for improved Buffer API + +0.5.2 / 2016-12-08 +================== + + * Fix `parse` to accept any linear whitespace character + +0.5.1 / 2016-01-17 +================== + + * perf: enable strict mode + +0.5.0 / 2014-10-11 +================== + + * Add `parse` function + +0.4.0 / 2014-09-21 +================== + + * Expand non-Unicode `filename` to the full ISO-8859-1 charset + +0.3.0 / 2014-09-20 +================== + + * Add `fallback` option + * Add `type` option + +0.2.0 / 2014-09-19 +================== + + * Reduce ambiguity of file names with hex escape in buggy browsers + +0.1.2 / 2014-09-19 +================== + + * Fix periodic invalid Unicode filename header + +0.1.1 / 2014-09-19 +================== + + * Fix invalid characters appearing in `filename*` parameter + +0.1.0 / 2014-09-18 +================== + + * Make the `filename` argument optional + +0.0.0 / 2014-09-18 +================== + + * Initial release diff --git a/backend/Backend/node_modules/content-disposition/LICENSE b/backend/Backend/node_modules/content-disposition/LICENSE new file mode 100644 index 000000000..84441fbb5 --- /dev/null +++ b/backend/Backend/node_modules/content-disposition/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2014-2017 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/backend/Backend/node_modules/content-disposition/README.md b/backend/Backend/node_modules/content-disposition/README.md new file mode 100644 index 000000000..3a0bb0559 --- /dev/null +++ b/backend/Backend/node_modules/content-disposition/README.md @@ -0,0 +1,142 @@ +# content-disposition + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][github-actions-ci-image]][github-actions-ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Create and parse HTTP `Content-Disposition` header + +## Installation + +```sh +$ npm install content-disposition +``` + +## API + +```js +var contentDisposition = require('content-disposition') +``` + +### contentDisposition(filename, options) + +Create an attachment `Content-Disposition` header value using the given file name, +if supplied. The `filename` is optional and if no file name is desired, but you +want to specify `options`, set `filename` to `undefined`. + +```js +res.setHeader('Content-Disposition', contentDisposition('∫ maths.pdf')) +``` + +**note** HTTP headers are of the ISO-8859-1 character set. If you are writing this +header through a means different from `setHeader` in Node.js, you'll want to specify +the `'binary'` encoding in Node.js. + +#### Options + +`contentDisposition` accepts these properties in the options object. + +##### fallback + +If the `filename` option is outside ISO-8859-1, then the file name is actually +stored in a supplemental field for clients that support Unicode file names and +a ISO-8859-1 version of the file name is automatically generated. + +This specifies the ISO-8859-1 file name to override the automatic generation or +disables the generation all together, defaults to `true`. + + - A string will specify the ISO-8859-1 file name to use in place of automatic + generation. + - `false` will disable including a ISO-8859-1 file name and only include the + Unicode version (unless the file name is already ISO-8859-1). + - `true` will enable automatic generation if the file name is outside ISO-8859-1. + +If the `filename` option is ISO-8859-1 and this option is specified and has a +different value, then the `filename` option is encoded in the extended field +and this set as the fallback field, even though they are both ISO-8859-1. + +##### type + +Specifies the disposition type, defaults to `"attachment"`. This can also be +`"inline"`, or any other value (all values except inline are treated like +`attachment`, but can convey additional information if both parties agree to +it). The type is normalized to lower-case. + +### contentDisposition.parse(string) + +```js +var disposition = contentDisposition.parse('attachment; filename="EURO rates.txt"; filename*=UTF-8\'\'%e2%82%ac%20rates.txt') +``` + +Parse a `Content-Disposition` header string. This automatically handles extended +("Unicode") parameters by decoding them and providing them under the standard +parameter name. This will return an object with the following properties (examples +are shown for the string `'attachment; filename="EURO rates.txt"; filename*=UTF-8\'\'%e2%82%ac%20rates.txt'`): + + - `type`: The disposition type (always lower case). Example: `'attachment'` + + - `parameters`: An object of the parameters in the disposition (name of parameter + always lower case and extended versions replace non-extended versions). 
Example: + `{filename: "€ rates.txt"}` + +## Examples + +### Send a file for download + +```js +var contentDisposition = require('content-disposition') +var destroy = require('destroy') +var fs = require('fs') +var http = require('http') +var onFinished = require('on-finished') + +var filePath = '/path/to/public/plans.pdf' + +http.createServer(function onRequest (req, res) { + // set headers + res.setHeader('Content-Type', 'application/pdf') + res.setHeader('Content-Disposition', contentDisposition(filePath)) + + // send file + var stream = fs.createReadStream(filePath) + stream.pipe(res) + onFinished(res, function () { + destroy(stream) + }) +}) +``` + +## Testing + +```sh +$ npm test +``` + +## References + +- [RFC 2616: Hypertext Transfer Protocol -- HTTP/1.1][rfc-2616] +- [RFC 5987: Character Set and Language Encoding for Hypertext Transfer Protocol (HTTP) Header Field Parameters][rfc-5987] +- [RFC 6266: Use of the Content-Disposition Header Field in the Hypertext Transfer Protocol (HTTP)][rfc-6266] +- [Test Cases for HTTP Content-Disposition header field (RFC 6266) and the Encodings defined in RFCs 2047, 2231 and 5987][tc-2231] + +[rfc-2616]: https://tools.ietf.org/html/rfc2616 +[rfc-5987]: https://tools.ietf.org/html/rfc5987 +[rfc-6266]: https://tools.ietf.org/html/rfc6266 +[tc-2231]: http://greenbytes.de/tech/tc2231/ + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/content-disposition.svg +[npm-url]: https://npmjs.org/package/content-disposition +[node-version-image]: https://img.shields.io/node/v/content-disposition.svg +[node-version-url]: https://nodejs.org/en/download +[coveralls-image]: https://img.shields.io/coveralls/jshttp/content-disposition.svg +[coveralls-url]: https://coveralls.io/r/jshttp/content-disposition?branch=master +[downloads-image]: https://img.shields.io/npm/dm/content-disposition.svg +[downloads-url]: https://npmjs.org/package/content-disposition +[github-actions-ci-image]: https://img.shields.io/github/workflow/status/jshttp/content-disposition/ci/master?label=ci +[github-actions-ci-url]: https://github.com/jshttp/content-disposition?query=workflow%3Aci diff --git a/backend/Backend/node_modules/content-disposition/index.js b/backend/Backend/node_modules/content-disposition/index.js new file mode 100644 index 000000000..ecec899a9 --- /dev/null +++ b/backend/Backend/node_modules/content-disposition/index.js @@ -0,0 +1,458 @@ +/*! + * content-disposition + * Copyright(c) 2014-2017 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = contentDisposition +module.exports.parse = parse + +/** + * Module dependencies. + * @private + */ + +var basename = require('path').basename +var Buffer = require('safe-buffer').Buffer + +/** + * RegExp to match non attr-char, *after* encodeURIComponent (i.e. not including "%") + * @private + */ + +var ENCODE_URL_ATTR_CHAR_REGEXP = /[\x00-\x20"'()*,/:;<=>?@[\\\]{}\x7f]/g // eslint-disable-line no-control-regex + +/** + * RegExp to match percent encoding escape. + * @private + */ + +var HEX_ESCAPE_REGEXP = /%[0-9A-Fa-f]{2}/ +var HEX_ESCAPE_REPLACE_REGEXP = /%([0-9A-Fa-f]{2})/g + +/** + * RegExp to match non-latin1 characters. 
+ * @private + */ + +var NON_LATIN1_REGEXP = /[^\x20-\x7e\xa0-\xff]/g + +/** + * RegExp to match quoted-pair in RFC 2616 + * + * quoted-pair = "\" CHAR + * CHAR = + * @private + */ + +var QESC_REGEXP = /\\([\u0000-\u007f])/g // eslint-disable-line no-control-regex + +/** + * RegExp to match chars that must be quoted-pair in RFC 2616 + * @private + */ + +var QUOTE_REGEXP = /([\\"])/g + +/** + * RegExp for various RFC 2616 grammar + * + * parameter = token "=" ( token | quoted-string ) + * token = 1* + * separators = "(" | ")" | "<" | ">" | "@" + * | "," | ";" | ":" | "\" | <"> + * | "/" | "[" | "]" | "?" | "=" + * | "{" | "}" | SP | HT + * quoted-string = ( <"> *(qdtext | quoted-pair ) <"> ) + * qdtext = > + * quoted-pair = "\" CHAR + * CHAR = + * TEXT = + * LWS = [CRLF] 1*( SP | HT ) + * CRLF = CR LF + * CR = + * LF = + * SP = + * HT = + * CTL = + * OCTET = + * @private + */ + +var PARAM_REGEXP = /;[\x09\x20]*([!#$%&'*+.0-9A-Z^_`a-z|~-]+)[\x09\x20]*=[\x09\x20]*("(?:[\x20!\x23-\x5b\x5d-\x7e\x80-\xff]|\\[\x20-\x7e])*"|[!#$%&'*+.0-9A-Z^_`a-z|~-]+)[\x09\x20]*/g // eslint-disable-line no-control-regex +var TEXT_REGEXP = /^[\x20-\x7e\x80-\xff]+$/ +var TOKEN_REGEXP = /^[!#$%&'*+.0-9A-Z^_`a-z|~-]+$/ + +/** + * RegExp for various RFC 5987 grammar + * + * ext-value = charset "'" [ language ] "'" value-chars + * charset = "UTF-8" / "ISO-8859-1" / mime-charset + * mime-charset = 1*mime-charsetc + * mime-charsetc = ALPHA / DIGIT + * / "!" / "#" / "$" / "%" / "&" + * / "+" / "-" / "^" / "_" / "`" + * / "{" / "}" / "~" + * language = ( 2*3ALPHA [ extlang ] ) + * / 4ALPHA + * / 5*8ALPHA + * extlang = *3( "-" 3ALPHA ) + * value-chars = *( pct-encoded / attr-char ) + * pct-encoded = "%" HEXDIG HEXDIG + * attr-char = ALPHA / DIGIT + * / "!" / "#" / "$" / "&" / "+" / "-" / "." + * / "^" / "_" / "`" / "|" / "~" + * @private + */ + +var EXT_VALUE_REGEXP = /^([A-Za-z0-9!#$%&+\-^_`{}~]+)'(?:[A-Za-z]{2,3}(?:-[A-Za-z]{3}){0,3}|[A-Za-z]{4,8}|)'((?:%[0-9A-Fa-f]{2}|[A-Za-z0-9!#$&+.^_`|~-])+)$/ + +/** + * RegExp for various RFC 6266 grammar + * + * disposition-type = "inline" | "attachment" | disp-ext-type + * disp-ext-type = token + * disposition-parm = filename-parm | disp-ext-parm + * filename-parm = "filename" "=" value + * | "filename*" "=" ext-value + * disp-ext-parm = token "=" value + * | ext-token "=" ext-value + * ext-token = + * @private + */ + +var DISPOSITION_TYPE_REGEXP = /^([!#$%&'*+.0-9A-Z^_`a-z|~-]+)[\x09\x20]*(?:$|;)/ // eslint-disable-line no-control-regex + +/** + * Create an attachment Content-Disposition header. + * + * @param {string} [filename] + * @param {object} [options] + * @param {string} [options.type=attachment] + * @param {string|boolean} [options.fallback=true] + * @return {string} + * @public + */ + +function contentDisposition (filename, options) { + var opts = options || {} + + // get type + var type = opts.type || 'attachment' + + // get parameters + var params = createparams(filename, opts.fallback) + + // format into string + return format(new ContentDisposition(type, params)) +} + +/** + * Create parameters object from filename and fallback. 
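+ * May set `filename`, `filename*`, or both; an ISO-8859-1 fallback is generated for non-Latin-1 names unless `fallback` is disabled.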
+ * + * @param {string} [filename] + * @param {string|boolean} [fallback=true] + * @return {object} + * @private + */ + +function createparams (filename, fallback) { + if (filename === undefined) { + return + } + + var params = {} + + if (typeof filename !== 'string') { + throw new TypeError('filename must be a string') + } + + // fallback defaults to true + if (fallback === undefined) { + fallback = true + } + + if (typeof fallback !== 'string' && typeof fallback !== 'boolean') { + throw new TypeError('fallback must be a string or boolean') + } + + if (typeof fallback === 'string' && NON_LATIN1_REGEXP.test(fallback)) { + throw new TypeError('fallback must be ISO-8859-1 string') + } + + // restrict to file base name + var name = basename(filename) + + // determine if name is suitable for quoted string + var isQuotedString = TEXT_REGEXP.test(name) + + // generate fallback name + var fallbackName = typeof fallback !== 'string' + ? fallback && getlatin1(name) + : basename(fallback) + var hasFallback = typeof fallbackName === 'string' && fallbackName !== name + + // set extended filename parameter + if (hasFallback || !isQuotedString || HEX_ESCAPE_REGEXP.test(name)) { + params['filename*'] = name + } + + // set filename parameter + if (isQuotedString || hasFallback) { + params.filename = hasFallback + ? fallbackName + : name + } + + return params +} + +/** + * Format object to Content-Disposition header. + * + * @param {object} obj + * @param {string} obj.type + * @param {object} [obj.parameters] + * @return {string} + * @private + */ + +function format (obj) { + var parameters = obj.parameters + var type = obj.type + + if (!type || typeof type !== 'string' || !TOKEN_REGEXP.test(type)) { + throw new TypeError('invalid type') + } + + // start with normalized type + var string = String(type).toLowerCase() + + // append parameters + if (parameters && typeof parameters === 'object') { + var param + var params = Object.keys(parameters).sort() + + for (var i = 0; i < params.length; i++) { + param = params[i] + + var val = param.substr(-1) === '*' + ? ustring(parameters[param]) + : qstring(parameters[param]) + + string += '; ' + param + '=' + val + } + } + + return string +} + +/** + * Decode a RFC 5987 field value (gracefully). + * + * @param {string} str + * @return {string} + * @private + */ + +function decodefield (str) { + var match = EXT_VALUE_REGEXP.exec(str) + + if (!match) { + throw new TypeError('invalid extended field value') + } + + var charset = match[1].toLowerCase() + var encoded = match[2] + var value + + // to binary string + var binary = encoded.replace(HEX_ESCAPE_REPLACE_REGEXP, pdecode) + + switch (charset) { + case 'iso-8859-1': + value = getlatin1(binary) + break + case 'utf-8': + value = Buffer.from(binary, 'binary').toString('utf8') + break + default: + throw new TypeError('unsupported charset in extended field') + } + + return value +} + +/** + * Get ISO-8859-1 version of string. + * + * @param {string} val + * @return {string} + * @private + */ + +function getlatin1 (val) { + // simple Unicode -> ISO-8859-1 transformation + return String(val).replace(NON_LATIN1_REGEXP, '?') +} + +/** + * Parse Content-Disposition header string. 
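+ * Extended (RFC 5987) parameter values are decoded and take precedence over their plain variants.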
+ * + * @param {string} string + * @return {object} + * @public + */ + +function parse (string) { + if (!string || typeof string !== 'string') { + throw new TypeError('argument string is required') + } + + var match = DISPOSITION_TYPE_REGEXP.exec(string) + + if (!match) { + throw new TypeError('invalid type format') + } + + // normalize type + var index = match[0].length + var type = match[1].toLowerCase() + + var key + var names = [] + var params = {} + var value + + // calculate index to start at + index = PARAM_REGEXP.lastIndex = match[0].substr(-1) === ';' + ? index - 1 + : index + + // match parameters + while ((match = PARAM_REGEXP.exec(string))) { + if (match.index !== index) { + throw new TypeError('invalid parameter format') + } + + index += match[0].length + key = match[1].toLowerCase() + value = match[2] + + if (names.indexOf(key) !== -1) { + throw new TypeError('invalid duplicate parameter') + } + + names.push(key) + + if (key.indexOf('*') + 1 === key.length) { + // decode extended value + key = key.slice(0, -1) + value = decodefield(value) + + // overwrite existing value + params[key] = value + continue + } + + if (typeof params[key] === 'string') { + continue + } + + if (value[0] === '"') { + // remove quotes and escapes + value = value + .substr(1, value.length - 2) + .replace(QESC_REGEXP, '$1') + } + + params[key] = value + } + + if (index !== -1 && index !== string.length) { + throw new TypeError('invalid parameter format') + } + + return new ContentDisposition(type, params) +} + +/** + * Percent decode a single character. + * + * @param {string} str + * @param {string} hex + * @return {string} + * @private + */ + +function pdecode (str, hex) { + return String.fromCharCode(parseInt(hex, 16)) +} + +/** + * Percent encode a single character. + * + * @param {string} char + * @return {string} + * @private + */ + +function pencode (char) { + return '%' + String(char) + .charCodeAt(0) + .toString(16) + .toUpperCase() +} + +/** + * Quote a string for HTTP. + * + * @param {string} val + * @return {string} + * @private + */ + +function qstring (val) { + var str = String(val) + + return '"' + str.replace(QUOTE_REGEXP, '\\$1') + '"' +} + +/** + * Encode a Unicode string for HTTP (RFC 5987). 
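+ * Produces the `UTF-8''<percent-encoded>` ext-value form used for parameters such as `filename*`.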
+ * + * @param {string} val + * @return {string} + * @private + */ + +function ustring (val) { + var str = String(val) + + // percent encode as UTF-8 + var encoded = encodeURIComponent(str) + .replace(ENCODE_URL_ATTR_CHAR_REGEXP, pencode) + + return 'UTF-8\'\'' + encoded +} + +/** + * Class for parsed Content-Disposition header for v8 optimization + * + * @public + * @param {string} type + * @param {object} parameters + * @constructor + */ + +function ContentDisposition (type, parameters) { + this.type = type + this.parameters = parameters +} diff --git a/backend/Backend/node_modules/content-disposition/package.json b/backend/Backend/node_modules/content-disposition/package.json new file mode 100644 index 000000000..43c70ce24 --- /dev/null +++ b/backend/Backend/node_modules/content-disposition/package.json @@ -0,0 +1,44 @@ +{ + "name": "content-disposition", + "description": "Create and parse Content-Disposition header", + "version": "0.5.4", + "author": "Douglas Christopher Wilson ", + "license": "MIT", + "keywords": [ + "content-disposition", + "http", + "rfc6266", + "res" + ], + "repository": "jshttp/content-disposition", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "devDependencies": { + "deep-equal": "1.0.1", + "eslint": "7.32.0", + "eslint-config-standard": "13.0.1", + "eslint-plugin-import": "2.25.3", + "eslint-plugin-markdown": "2.2.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "5.2.0", + "eslint-plugin-standard": "4.1.0", + "istanbul": "0.4.5", + "mocha": "9.1.3" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-ci": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/" + } +} diff --git a/backend/Backend/node_modules/content-type/HISTORY.md b/backend/Backend/node_modules/content-type/HISTORY.md new file mode 100644 index 000000000..458367139 --- /dev/null +++ b/backend/Backend/node_modules/content-type/HISTORY.md @@ -0,0 +1,29 @@ +1.0.5 / 2023-01-29 +================== + + * perf: skip value escaping when unnecessary + +1.0.4 / 2017-09-11 +================== + + * perf: skip parameter parsing when no parameters + +1.0.3 / 2017-09-10 +================== + + * perf: remove argument reassignment + +1.0.2 / 2016-05-09 +================== + + * perf: enable strict mode + +1.0.1 / 2015-02-13 +================== + + * Improve missing `Content-Type` header error message + +1.0.0 / 2015-02-01 +================== + + * Initial implementation, derived from `media-typer@0.3.0` diff --git a/backend/Backend/node_modules/content-type/LICENSE b/backend/Backend/node_modules/content-type/LICENSE new file mode 100644 index 000000000..34b1a2de3 --- /dev/null +++ b/backend/Backend/node_modules/content-type/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright 
notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/backend/Backend/node_modules/content-type/README.md b/backend/Backend/node_modules/content-type/README.md new file mode 100644 index 000000000..c1a922a9a --- /dev/null +++ b/backend/Backend/node_modules/content-type/README.md @@ -0,0 +1,94 @@ +# content-type + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][ci-image]][ci-url] +[![Coverage Status][coveralls-image]][coveralls-url] + +Create and parse HTTP Content-Type header according to RFC 7231 + +## Installation + +```sh +$ npm install content-type +``` + +## API + +```js +var contentType = require('content-type') +``` + +### contentType.parse(string) + +```js +var obj = contentType.parse('image/svg+xml; charset=utf-8') +``` + +Parse a `Content-Type` header. This will return an object with the following +properties (examples are shown for the string `'image/svg+xml; charset=utf-8'`): + + - `type`: The media type (the type and subtype, always lower case). + Example: `'image/svg+xml'` + + - `parameters`: An object of the parameters in the media type (name of parameter + always lower case). Example: `{charset: 'utf-8'}` + +Throws a `TypeError` if the string is missing or invalid. + +### contentType.parse(req) + +```js +var obj = contentType.parse(req) +``` + +Parse the `Content-Type` header from the given `req`. Short-cut for +`contentType.parse(req.headers['content-type'])`. + +Throws a `TypeError` if the `Content-Type` header is missing or invalid. + +### contentType.parse(res) + +```js +var obj = contentType.parse(res) +``` + +Parse the `Content-Type` header set on the given `res`. Short-cut for +`contentType.parse(res.getHeader('content-type'))`. + +Throws a `TypeError` if the `Content-Type` header is missing or invalid. + +### contentType.format(obj) + +```js +var str = contentType.format({ + type: 'image/svg+xml', + parameters: { charset: 'utf-8' } +}) +``` + +Format an object into a `Content-Type` header. This will return a string of the +content type for the given object with the following properties (examples are +shown that produce the string `'image/svg+xml; charset=utf-8'`): + + - `type`: The media type (will be lower-cased). Example: `'image/svg+xml'` + + - `parameters`: An object of the parameters in the media type (name of the + parameter will be lower-cased). Example: `{charset: 'utf-8'}` + +Throws a `TypeError` if the object contains an invalid type or parameter names. 
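+ +As an illustrative sketch (not part of the upstream README; the media type and charset below are arbitrary examples), `parse` and `format` can be combined to rewrite a parameter: + +```js +var contentType = require('content-type') + +// parse an incoming header, adjust a parameter, then re-serialize it +var obj = contentType.parse('text/html; charset=utf-8') +obj.parameters.charset = 'iso-8859-1' + +var str = contentType.format(obj) +// => 'text/html; charset=iso-8859-1' +```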
+ +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/jshttp/content-type/master?label=ci +[ci-url]: https://github.com/jshttp/content-type/actions/workflows/ci.yml +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/content-type/master +[coveralls-url]: https://coveralls.io/r/jshttp/content-type?branch=master +[node-image]: https://badgen.net/npm/node/content-type +[node-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/content-type +[npm-url]: https://npmjs.org/package/content-type +[npm-version-image]: https://badgen.net/npm/v/content-type diff --git a/backend/Backend/node_modules/content-type/index.js b/backend/Backend/node_modules/content-type/index.js new file mode 100644 index 000000000..41840e7bc --- /dev/null +++ b/backend/Backend/node_modules/content-type/index.js @@ -0,0 +1,225 @@ +/*! + * content-type + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * RegExp to match *( ";" parameter ) in RFC 7231 sec 3.1.1.1 + * + * parameter = token "=" ( token / quoted-string ) + * token = 1*tchar + * tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" + * / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" + * / DIGIT / ALPHA + * ; any VCHAR, except delimiters + * quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE + * qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text + * obs-text = %x80-FF + * quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) + */ +var PARAM_REGEXP = /; *([!#$%&'*+.^_`|~0-9A-Za-z-]+) *= *("(?:[\u000b\u0020\u0021\u0023-\u005b\u005d-\u007e\u0080-\u00ff]|\\[\u000b\u0020-\u00ff])*"|[!#$%&'*+.^_`|~0-9A-Za-z-]+) */g // eslint-disable-line no-control-regex +var TEXT_REGEXP = /^[\u000b\u0020-\u007e\u0080-\u00ff]+$/ // eslint-disable-line no-control-regex +var TOKEN_REGEXP = /^[!#$%&'*+.^_`|~0-9A-Za-z-]+$/ + +/** + * RegExp to match quoted-pair in RFC 7230 sec 3.2.6 + * + * quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) + * obs-text = %x80-FF + */ +var QESC_REGEXP = /\\([\u000b\u0020-\u00ff])/g // eslint-disable-line no-control-regex + +/** + * RegExp to match chars that must be quoted-pair in RFC 7230 sec 3.2.6 + */ +var QUOTE_REGEXP = /([\\"])/g + +/** + * RegExp to match type in RFC 7231 sec 3.1.1.1 + * + * media-type = type "/" subtype + * type = token + * subtype = token + */ +var TYPE_REGEXP = /^[!#$%&'*+.^_`|~0-9A-Za-z-]+\/[!#$%&'*+.^_`|~0-9A-Za-z-]+$/ + +/** + * Module exports. + * @public + */ + +exports.format = format +exports.parse = parse + +/** + * Format object to media type. + * + * @param {object} obj + * @return {string} + * @public + */ + +function format (obj) { + if (!obj || typeof obj !== 'object') { + throw new TypeError('argument obj is required') + } + + var parameters = obj.parameters + var type = obj.type + + if (!type || !TYPE_REGEXP.test(type)) { + throw new TypeError('invalid type') + } + + var string = type + + // append parameters + if (parameters && typeof parameters === 'object') { + var param + var params = Object.keys(parameters).sort() + + for (var i = 0; i < params.length; i++) { + param = params[i] + + if (!TOKEN_REGEXP.test(param)) { + throw new TypeError('invalid parameter name') + } + + string += '; ' + param + '=' + qstring(parameters[param]) + } + } + + return string +} + +/** + * Parse media type to object. 
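+ * Accepts either a header string or a req/res-like object (see getcontenttype below).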
+ * + * @param {string|object} string + * @return {Object} + * @public + */ + +function parse (string) { + if (!string) { + throw new TypeError('argument string is required') + } + + // support req/res-like objects as argument + var header = typeof string === 'object' + ? getcontenttype(string) + : string + + if (typeof header !== 'string') { + throw new TypeError('argument string is required to be a string') + } + + var index = header.indexOf(';') + var type = index !== -1 + ? header.slice(0, index).trim() + : header.trim() + + if (!TYPE_REGEXP.test(type)) { + throw new TypeError('invalid media type') + } + + var obj = new ContentType(type.toLowerCase()) + + // parse parameters + if (index !== -1) { + var key + var match + var value + + PARAM_REGEXP.lastIndex = index + + while ((match = PARAM_REGEXP.exec(header))) { + if (match.index !== index) { + throw new TypeError('invalid parameter format') + } + + index += match[0].length + key = match[1].toLowerCase() + value = match[2] + + if (value.charCodeAt(0) === 0x22 /* " */) { + // remove quotes + value = value.slice(1, -1) + + // remove escapes + if (value.indexOf('\\') !== -1) { + value = value.replace(QESC_REGEXP, '$1') + } + } + + obj.parameters[key] = value + } + + if (index !== header.length) { + throw new TypeError('invalid parameter format') + } + } + + return obj +} + +/** + * Get content-type from req/res objects. + * + * @param {object} + * @return {Object} + * @private + */ + +function getcontenttype (obj) { + var header + + if (typeof obj.getHeader === 'function') { + // res-like + header = obj.getHeader('content-type') + } else if (typeof obj.headers === 'object') { + // req-like + header = obj.headers && obj.headers['content-type'] + } + + if (typeof header !== 'string') { + throw new TypeError('content-type header is missing from object') + } + + return header +} + +/** + * Quote a string if necessary. + * + * @param {string} val + * @return {string} + * @private + */ + +function qstring (val) { + var str = String(val) + + // no need to quote tokens + if (TOKEN_REGEXP.test(str)) { + return str + } + + if (str.length > 0 && !TEXT_REGEXP.test(str)) { + throw new TypeError('invalid parameter value') + } + + return '"' + str.replace(QUOTE_REGEXP, '\\$1') + '"' +} + +/** + * Class to represent a content type. 
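+ * `parameters` is a null-prototype object keyed by lower-cased parameter name.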
+ * @private + */ +function ContentType (type) { + this.parameters = Object.create(null) + this.type = type +} diff --git a/backend/Backend/node_modules/content-type/package.json b/backend/Backend/node_modules/content-type/package.json new file mode 100644 index 000000000..9db19f63f --- /dev/null +++ b/backend/Backend/node_modules/content-type/package.json @@ -0,0 +1,42 @@ +{ + "name": "content-type", + "description": "Create and parse HTTP Content-Type header", + "version": "1.0.5", + "author": "Douglas Christopher Wilson ", + "license": "MIT", + "keywords": [ + "content-type", + "http", + "req", + "res", + "rfc7231" + ], + "repository": "jshttp/content-type", + "devDependencies": { + "deep-equal": "1.0.1", + "eslint": "8.32.0", + "eslint-config-standard": "15.0.1", + "eslint-plugin-import": "2.27.5", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "6.1.1", + "eslint-plugin-standard": "4.1.0", + "mocha": "10.2.0", + "nyc": "15.1.0" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --reporter spec --check-leaks --bail test/", + "test-ci": "nyc --reporter=lcovonly --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "version": "node scripts/version-history.js && git add HISTORY.md" + } +} diff --git a/backend/Backend/node_modules/cookie-signature/.npmignore b/backend/Backend/node_modules/cookie-signature/.npmignore new file mode 100644 index 000000000..f1250e584 --- /dev/null +++ b/backend/Backend/node_modules/cookie-signature/.npmignore @@ -0,0 +1,4 @@ +support +test +examples +*.sock diff --git a/backend/Backend/node_modules/cookie-signature/History.md b/backend/Backend/node_modules/cookie-signature/History.md new file mode 100644 index 000000000..78513cc3d --- /dev/null +++ b/backend/Backend/node_modules/cookie-signature/History.md @@ -0,0 +1,38 @@ +1.0.6 / 2015-02-03 +================== + +* use `npm test` instead of `make test` to run tests +* clearer assertion messages when checking input + + +1.0.5 / 2014-09-05 +================== + +* add license to package.json + +1.0.4 / 2014-06-25 +================== + + * corrected avoidance of timing attacks (thanks @tenbits!) + +1.0.3 / 2014-01-28 +================== + + * [incorrect] fix for timing attacks + +1.0.2 / 2014-01-28 +================== + + * fix missing repository warning + * fix typo in test + +1.0.1 / 2013-04-15 +================== + + * Revert "Changed underlying HMAC algo. to sha512." + * Revert "Fix for timing attacks on MAC verification." + +0.0.1 / 2010-01-03 +================== + + * Initial release diff --git a/backend/Backend/node_modules/cookie-signature/Readme.md b/backend/Backend/node_modules/cookie-signature/Readme.md new file mode 100644 index 000000000..2559e841b --- /dev/null +++ b/backend/Backend/node_modules/cookie-signature/Readme.md @@ -0,0 +1,42 @@ + +# cookie-signature + + Sign and unsign cookies. 
+ +## Example + +```js +var cookie = require('cookie-signature'); + +var val = cookie.sign('hello', 'tobiiscool'); +val.should.equal('hello.DGDUkGlIkCzPz+C0B064FNgHdEjox7ch8tOBGslZ5QI'); + +var val = cookie.sign('hello', 'tobiiscool'); +cookie.unsign(val, 'tobiiscool').should.equal('hello'); +cookie.unsign(val, 'luna').should.be.false; +``` + +## License + +(The MIT License) + +Copyright (c) 2012 LearnBoost <tj@learnboost.com> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/backend/Backend/node_modules/cookie-signature/index.js b/backend/Backend/node_modules/cookie-signature/index.js new file mode 100644 index 000000000..b8c9463a2 --- /dev/null +++ b/backend/Backend/node_modules/cookie-signature/index.js @@ -0,0 +1,51 @@ +/** + * Module dependencies. + */ + +var crypto = require('crypto'); + +/** + * Sign the given `val` with `secret`. + * + * @param {String} val + * @param {String} secret + * @return {String} + * @api private + */ + +exports.sign = function(val, secret){ + if ('string' != typeof val) throw new TypeError("Cookie value must be provided as a string."); + if ('string' != typeof secret) throw new TypeError("Secret string must be provided."); + return val + '.' + crypto + .createHmac('sha256', secret) + .update(val) + .digest('base64') + .replace(/\=+$/, ''); +}; + +/** + * Unsign and decode the given `val` with `secret`, + * returning `false` if the signature is invalid. + * + * @param {String} val + * @param {String} secret + * @return {String|Boolean} + * @api private + */ + +exports.unsign = function(val, secret){ + if ('string' != typeof val) throw new TypeError("Signed cookie string must be provided."); + if ('string' != typeof secret) throw new TypeError("Secret string must be provided."); + var str = val.slice(0, val.lastIndexOf('.')) + , mac = exports.sign(str, secret); + + return sha1(mac) == sha1(val) ? 
str : false; +}; + +/** + * Private + */ + +function sha1(str){ + return crypto.createHash('sha1').update(str).digest('hex'); +} diff --git a/backend/Backend/node_modules/cookie-signature/package.json b/backend/Backend/node_modules/cookie-signature/package.json new file mode 100644 index 000000000..29c4498e0 --- /dev/null +++ b/backend/Backend/node_modules/cookie-signature/package.json @@ -0,0 +1,18 @@ +{ + "name": "cookie-signature", + "version": "1.0.6", + "description": "Sign and unsign cookies", + "keywords": ["cookie", "sign", "unsign"], + "author": "TJ Holowaychuk ", + "license": "MIT", + "repository": { "type": "git", "url": "https://github.com/visionmedia/node-cookie-signature.git"}, + "dependencies": {}, + "devDependencies": { + "mocha": "*", + "should": "*" + }, + "scripts": { + "test": "mocha --require should --reporter spec" + }, + "main": "index" +} diff --git a/backend/Backend/node_modules/cookie/LICENSE b/backend/Backend/node_modules/cookie/LICENSE new file mode 100644 index 000000000..058b6b4ef --- /dev/null +++ b/backend/Backend/node_modules/cookie/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2012-2014 Roman Shtylman +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/backend/Backend/node_modules/cookie/README.md b/backend/Backend/node_modules/cookie/README.md new file mode 100644 index 000000000..71fdac111 --- /dev/null +++ b/backend/Backend/node_modules/cookie/README.md @@ -0,0 +1,317 @@ +# cookie + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][ci-image]][ci-url] +[![Coverage Status][coveralls-image]][coveralls-url] + +Basic HTTP cookie parser and serializer for HTTP servers. + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install cookie +``` + +## API + +```js +var cookie = require('cookie'); +``` + +### cookie.parse(str, options) + +Parse an HTTP `Cookie` header string and returning an object of all cookie name-value pairs. +The `str` argument is the string representing a `Cookie` header value and `options` is an +optional object containing additional parsing options. 
+ +```js +var cookies = cookie.parse('foo=bar; equation=E%3Dmc%5E2'); +// { foo: 'bar', equation: 'E=mc^2' } +``` + +#### Options + +`cookie.parse` accepts these properties in the options object. + +##### decode + +Specifies a function that will be used to decode a cookie's value. Since the value of a cookie +has a limited character set (and must be a simple string), this function can be used to decode +a previously-encoded cookie value into a JavaScript string or other object. + +The default function is the global `decodeURIComponent`, which will decode any URL-encoded +sequences into their byte representations. + +**note** if an error is thrown from this function, the original, non-decoded cookie value will +be returned as the cookie's value. + +### cookie.serialize(name, value, options) + +Serialize a cookie name-value pair into a `Set-Cookie` header string. The `name` argument is the +name for the cookie, the `value` argument is the value to set the cookie to, and the `options` +argument is an optional object containing additional serialization options. + +```js +var setCookie = cookie.serialize('foo', 'bar'); +// foo=bar +``` + +#### Options + +`cookie.serialize` accepts these properties in the options object. + +##### domain + +Specifies the value for the [`Domain` `Set-Cookie` attribute][rfc-6265-5.2.3]. By default, no +domain is set, and most clients will consider the cookie to apply to only the current domain. + +##### encode + +Specifies a function that will be used to encode a cookie's value. Since value of a cookie +has a limited character set (and must be a simple string), this function can be used to encode +a value into a string suited for a cookie's value. + +The default function is the global `encodeURIComponent`, which will encode a JavaScript string +into UTF-8 byte sequences and then URL-encode any that fall outside of the cookie range. + +##### expires + +Specifies the `Date` object to be the value for the [`Expires` `Set-Cookie` attribute][rfc-6265-5.2.1]. +By default, no expiration is set, and most clients will consider this a "non-persistent cookie" and +will delete it on a condition like exiting a web browser application. + +**note** the [cookie storage model specification][rfc-6265-5.3] states that if both `expires` and +`maxAge` are set, then `maxAge` takes precedence, but it is possible not all clients by obey this, +so if both are set, they should point to the same date and time. + +##### httpOnly + +Specifies the `boolean` value for the [`HttpOnly` `Set-Cookie` attribute][rfc-6265-5.2.6]. When truthy, +the `HttpOnly` attribute is set, otherwise it is not. By default, the `HttpOnly` attribute is not set. + +**note** be careful when setting this to `true`, as compliant clients will not allow client-side +JavaScript to see the cookie in `document.cookie`. + +##### maxAge + +Specifies the `number` (in seconds) to be the value for the [`Max-Age` `Set-Cookie` attribute][rfc-6265-5.2.2]. +The given number will be converted to an integer by rounding down. By default, no maximum age is set. + +**note** the [cookie storage model specification][rfc-6265-5.3] states that if both `expires` and +`maxAge` are set, then `maxAge` takes precedence, but it is possible not all clients by obey this, +so if both are set, they should point to the same date and time. + +##### partitioned + +Specifies the `boolean` value for the [`Partitioned` `Set-Cookie`](rfc-cutler-httpbis-partitioned-cookies) +attribute. When truthy, the `Partitioned` attribute is set, otherwise it is not. 
By default, the +`Partitioned` attribute is not set. + +**note** This is an attribute that has not yet been fully standardized, and may change in the future. +This also means many clients may ignore this attribute until they understand it. + +More information about can be found in [the proposal](https://github.com/privacycg/CHIPS). + +##### path + +Specifies the value for the [`Path` `Set-Cookie` attribute][rfc-6265-5.2.4]. By default, the path +is considered the ["default path"][rfc-6265-5.1.4]. + +##### priority + +Specifies the `string` to be the value for the [`Priority` `Set-Cookie` attribute][rfc-west-cookie-priority-00-4.1]. + + - `'low'` will set the `Priority` attribute to `Low`. + - `'medium'` will set the `Priority` attribute to `Medium`, the default priority when not set. + - `'high'` will set the `Priority` attribute to `High`. + +More information about the different priority levels can be found in +[the specification][rfc-west-cookie-priority-00-4.1]. + +**note** This is an attribute that has not yet been fully standardized, and may change in the future. +This also means many clients may ignore this attribute until they understand it. + +##### sameSite + +Specifies the `boolean` or `string` to be the value for the [`SameSite` `Set-Cookie` attribute][rfc-6265bis-09-5.4.7]. + + - `true` will set the `SameSite` attribute to `Strict` for strict same site enforcement. + - `false` will not set the `SameSite` attribute. + - `'lax'` will set the `SameSite` attribute to `Lax` for lax same site enforcement. + - `'none'` will set the `SameSite` attribute to `None` for an explicit cross-site cookie. + - `'strict'` will set the `SameSite` attribute to `Strict` for strict same site enforcement. + +More information about the different enforcement levels can be found in +[the specification][rfc-6265bis-09-5.4.7]. + +**note** This is an attribute that has not yet been fully standardized, and may change in the future. +This also means many clients may ignore this attribute until they understand it. + +##### secure + +Specifies the `boolean` value for the [`Secure` `Set-Cookie` attribute][rfc-6265-5.2.5]. When truthy, +the `Secure` attribute is set, otherwise it is not. By default, the `Secure` attribute is not set. + +**note** be careful when setting this to `true`, as compliant clients will not send the cookie back to +the server in the future if the browser does not have an HTTPS connection. + +## Example + +The following example uses this module in conjunction with the Node.js core HTTP server +to prompt a user for their name and display it back on future visits. + +```js +var cookie = require('cookie'); +var escapeHtml = require('escape-html'); +var http = require('http'); +var url = require('url'); + +function onRequest(req, res) { + // Parse the query string + var query = url.parse(req.url, true, true).query; + + if (query && query.name) { + // Set a new cookie with the name + res.setHeader('Set-Cookie', cookie.serialize('name', String(query.name), { + httpOnly: true, + maxAge: 60 * 60 * 24 * 7 // 1 week + })); + + // Redirect back after setting cookie + res.statusCode = 302; + res.setHeader('Location', req.headers.referer || '/'); + res.end(); + return; + } + + // Parse the cookies on the request + var cookies = cookie.parse(req.headers.cookie || ''); + + // Get the visitor name set in the cookie + var name = cookies.name; + + res.setHeader('Content-Type', 'text/html; charset=UTF-8'); + + if (name) { + res.write('
<p>Welcome back, <b>' + escapeHtml(name) + '</b>!</p>'); + } else { + res.write('<p>Hello, new visitor!</p>'); + } + + res.write('<form method="GET">'); + res.write('<input placeholder="enter your name" name="name"> <input type="submit" value="Set Name">'); + res.end('</form>
'); +} + +http.createServer(onRequest).listen(3000); +``` + +## Testing + +```sh +$ npm test +``` + +## Benchmark + +``` +$ npm run bench + +> cookie@0.5.0 bench +> node benchmark/index.js + + node@18.18.2 + acorn@8.10.0 + ada@2.6.0 + ares@1.19.1 + brotli@1.0.9 + cldr@43.1 + icu@73.2 + llhttp@6.0.11 + modules@108 + napi@9 + nghttp2@1.57.0 + nghttp3@0.7.0 + ngtcp2@0.8.1 + openssl@3.0.10+quic + simdutf@3.2.14 + tz@2023c + undici@5.26.3 + unicode@15.0 + uv@1.44.2 + uvwasi@0.0.18 + v8@10.2.154.26-node.26 + zlib@1.2.13.1-motley + +> node benchmark/parse-top.js + + cookie.parse - top sites + + 14 tests completed. + + parse accounts.google.com x 2,588,913 ops/sec ±0.74% (186 runs sampled) + parse apple.com x 2,370,002 ops/sec ±0.69% (186 runs sampled) + parse cloudflare.com x 2,213,102 ops/sec ±0.88% (188 runs sampled) + parse docs.google.com x 2,194,157 ops/sec ±1.03% (184 runs sampled) + parse drive.google.com x 2,265,084 ops/sec ±0.79% (187 runs sampled) + parse en.wikipedia.org x 457,099 ops/sec ±0.81% (186 runs sampled) + parse linkedin.com x 504,407 ops/sec ±0.89% (186 runs sampled) + parse maps.google.com x 1,230,959 ops/sec ±0.98% (186 runs sampled) + parse microsoft.com x 926,294 ops/sec ±0.88% (184 runs sampled) + parse play.google.com x 2,311,338 ops/sec ±0.83% (185 runs sampled) + parse support.google.com x 1,508,850 ops/sec ±0.86% (186 runs sampled) + parse www.google.com x 1,022,582 ops/sec ±1.32% (182 runs sampled) + parse youtu.be x 332,136 ops/sec ±1.02% (185 runs sampled) + parse youtube.com x 323,833 ops/sec ±0.77% (183 runs sampled) + +> node benchmark/parse.js + + cookie.parse - generic + + 6 tests completed. + + simple x 3,214,032 ops/sec ±1.61% (183 runs sampled) + decode x 587,237 ops/sec ±1.16% (187 runs sampled) + unquote x 2,954,618 ops/sec ±1.35% (183 runs sampled) + duplicates x 857,008 ops/sec ±0.89% (187 runs sampled) + 10 cookies x 292,133 ops/sec ±0.89% (187 runs sampled) + 100 cookies x 22,610 ops/sec ±0.68% (187 runs sampled) +``` + +## References + +- [RFC 6265: HTTP State Management Mechanism][rfc-6265] +- [Same-site Cookies][rfc-6265bis-09-5.4.7] + +[rfc-cutler-httpbis-partitioned-cookies]: https://tools.ietf.org/html/draft-cutler-httpbis-partitioned-cookies/ +[rfc-west-cookie-priority-00-4.1]: https://tools.ietf.org/html/draft-west-cookie-priority-00#section-4.1 +[rfc-6265bis-09-5.4.7]: https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-09#section-5.4.7 +[rfc-6265]: https://tools.ietf.org/html/rfc6265 +[rfc-6265-5.1.4]: https://tools.ietf.org/html/rfc6265#section-5.1.4 +[rfc-6265-5.2.1]: https://tools.ietf.org/html/rfc6265#section-5.2.1 +[rfc-6265-5.2.2]: https://tools.ietf.org/html/rfc6265#section-5.2.2 +[rfc-6265-5.2.3]: https://tools.ietf.org/html/rfc6265#section-5.2.3 +[rfc-6265-5.2.4]: https://tools.ietf.org/html/rfc6265#section-5.2.4 +[rfc-6265-5.2.5]: https://tools.ietf.org/html/rfc6265#section-5.2.5 +[rfc-6265-5.2.6]: https://tools.ietf.org/html/rfc6265#section-5.2.6 +[rfc-6265-5.3]: https://tools.ietf.org/html/rfc6265#section-5.3 + +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/jshttp/cookie/master?label=ci +[ci-url]: https://github.com/jshttp/cookie/actions/workflows/ci.yml +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/cookie/master +[coveralls-url]: https://coveralls.io/r/jshttp/cookie?branch=master +[node-image]: https://badgen.net/npm/node/cookie +[node-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/cookie +[npm-url]: https://npmjs.org/package/cookie 
+[npm-version-image]: https://badgen.net/npm/v/cookie diff --git a/backend/Backend/node_modules/cookie/SECURITY.md b/backend/Backend/node_modules/cookie/SECURITY.md new file mode 100644 index 000000000..fd4a6c53a --- /dev/null +++ b/backend/Backend/node_modules/cookie/SECURITY.md @@ -0,0 +1,25 @@ +# Security Policies and Procedures + +## Reporting a Bug + +The `cookie` team and community take all security bugs seriously. Thank +you for improving the security of the project. We appreciate your efforts and +responsible disclosure and will make every effort to acknowledge your +contributions. + +Report security bugs by emailing the current owner(s) of `cookie`. This +information can be found in the npm registry using the command +`npm owner ls cookie`. +If unsure or unable to get the information from the above, open an issue +in the [project issue tracker](https://github.com/jshttp/cookie/issues) +asking for the current contact information. + +To ensure the timely response to your report, please ensure that the entirety +of the report is contained within the email body and not solely behind a web +link or an attachment. + +At least one owner will acknowledge your email within 48 hours, and will send a +more detailed response within 48 hours indicating the next steps in handling +your report. After the initial reply to your report, the owners will +endeavor to keep you informed of the progress towards a fix and full +announcement, and may ask for additional information or guidance. diff --git a/backend/Backend/node_modules/cookie/index.js b/backend/Backend/node_modules/cookie/index.js new file mode 100644 index 000000000..51a58cbe9 --- /dev/null +++ b/backend/Backend/node_modules/cookie/index.js @@ -0,0 +1,334 @@ +/*! + * cookie + * Copyright(c) 2012-2014 Roman Shtylman + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +exports.parse = parse; +exports.serialize = serialize; + +/** + * Module variables. + * @private + */ + +var __toString = Object.prototype.toString + +/** + * RegExp to match cookie-name in RFC 6265 sec 4.1.1 + * This refers out to the obsoleted definition of token in RFC 2616 sec 2.2 + * which has been replaced by the token definition in RFC 7230 appendix B. + * + * cookie-name = token + * token = 1*tchar + * tchar = "!" / "#" / "$" / "%" / "&" / "'" / + * "*" / "+" / "-" / "." / "^" / "_" / + * "`" / "|" / "~" / DIGIT / ALPHA + */ + +var cookieNameRegExp = /^[!#$%&'*+\-.^_`|~0-9A-Za-z]+$/; + +/** + * RegExp to match cookie-value in RFC 6265 sec 4.1.1 + * + * cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE ) + * cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E + * ; US-ASCII characters excluding CTLs, + * ; whitespace DQUOTE, comma, semicolon, + * ; and backslash + */ + +var cookieValueRegExp = /^("?)[\u0021\u0023-\u002B\u002D-\u003A\u003C-\u005B\u005D-\u007E]*\1$/; + +/** + * RegExp to match domain-value in RFC 6265 sec 4.1.1 + * + * domain-value = + * ; defined in [RFC1034], Section 3.5, as + * ; enhanced by [RFC1123], Section 2.1 + * =