parent 347989f5c1
commit 5cde94997e
@ -0,0 +1,5 @@
|
||||
# Default ignored files
|
||||
/shelf/
|
||||
/workspace.xml
|
||||
# Editor-based HTTP Client requests
|
||||
/httpRequests/
|
@ -0,0 +1,12 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<module type="WEB_MODULE" version="4">
|
||||
<component name="NewModuleRootManager">
|
||||
<content url="file://$MODULE_DIR$">
|
||||
<excludeFolder url="file://$MODULE_DIR$/temp" />
|
||||
<excludeFolder url="file://$MODULE_DIR$/.tmp" />
|
||||
<excludeFolder url="file://$MODULE_DIR$/tmp" />
|
||||
</content>
|
||||
<orderEntry type="inheritedJdk" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
</component>
|
||||
</module>
|
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="ProjectModuleManager">
|
||||
<modules>
|
||||
<module fileurl="file://$PROJECT_DIR$/.idea/demo.iml" filepath="$PROJECT_DIR$/.idea/demo.iml" />
|
||||
</modules>
|
||||
</component>
|
||||
</project>
|
@ -0,0 +1,16 @@
|
||||
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*|*MINGW*|*MSYS*)
|
||||
if command -v cygpath > /dev/null 2>&1; then
|
||||
basedir=`cygpath -w "$basedir"`
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -x "$basedir/node" ]; then
|
||||
exec "$basedir/node" "$basedir/../crc-32/bin/crc32.njs" "$@"
|
||||
else
|
||||
exec node "$basedir/../crc-32/bin/crc32.njs" "$@"
|
||||
fi
|
@ -0,0 +1,17 @@
|
||||
@ECHO off
|
||||
GOTO start
|
||||
:find_dp0
|
||||
SET dp0=%~dp0
|
||||
EXIT /b
|
||||
:start
|
||||
SETLOCAL
|
||||
CALL :find_dp0
|
||||
|
||||
IF EXIST "%dp0%\node.exe" (
|
||||
SET "_prog=%dp0%\node.exe"
|
||||
) ELSE (
|
||||
SET "_prog=node"
|
||||
SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||
)
|
||||
|
||||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\crc-32\bin\crc32.njs" %*
|
@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env pwsh
|
||||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
|
||||
|
||||
$exe=""
|
||||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
|
||||
# Fix case when both the Windows and Linux builds of Node
|
||||
# are installed in the same directory
|
||||
$exe=".exe"
|
||||
}
|
||||
$ret=0
|
||||
if (Test-Path "$basedir/node$exe") {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "$basedir/node$exe" "$basedir/../crc-32/bin/crc32.njs" $args
|
||||
} else {
|
||||
& "$basedir/node$exe" "$basedir/../crc-32/bin/crc32.njs" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
} else {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "node$exe" "$basedir/../crc-32/bin/crc32.njs" $args
|
||||
} else {
|
||||
& "node$exe" "$basedir/../crc-32/bin/crc32.njs" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
}
|
||||
exit $ret
|
@ -0,0 +1,16 @@
|
||||
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*|*MINGW*|*MSYS*)
|
||||
if command -v cygpath > /dev/null 2>&1; then
|
||||
basedir=`cygpath -w "$basedir"`
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -x "$basedir/node" ]; then
|
||||
exec "$basedir/node" "$basedir/../mime/cli.js" "$@"
|
||||
else
|
||||
exec node "$basedir/../mime/cli.js" "$@"
|
||||
fi
|
@ -0,0 +1,17 @@
|
||||
@ECHO off
|
||||
GOTO start
|
||||
:find_dp0
|
||||
SET dp0=%~dp0
|
||||
EXIT /b
|
||||
:start
|
||||
SETLOCAL
|
||||
CALL :find_dp0
|
||||
|
||||
IF EXIST "%dp0%\node.exe" (
|
||||
SET "_prog=%dp0%\node.exe"
|
||||
) ELSE (
|
||||
SET "_prog=node"
|
||||
SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||
)
|
||||
|
||||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\mime\cli.js" %*
|
@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env pwsh
|
||||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
|
||||
|
||||
$exe=""
|
||||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
|
||||
# Fix case when both the Windows and Linux builds of Node
|
||||
# are installed in the same directory
|
||||
$exe=".exe"
|
||||
}
|
||||
$ret=0
|
||||
if (Test-Path "$basedir/node$exe") {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "$basedir/node$exe" "$basedir/../mime/cli.js" $args
|
||||
} else {
|
||||
& "$basedir/node$exe" "$basedir/../mime/cli.js" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
} else {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "node$exe" "$basedir/../mime/cli.js" $args
|
||||
} else {
|
||||
& "node$exe" "$basedir/../mime/cli.js" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
}
|
||||
exit $ret
|
@ -0,0 +1,16 @@
|
||||
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*|*MINGW*|*MSYS*)
|
||||
if command -v cygpath > /dev/null 2>&1; then
|
||||
basedir=`cygpath -w "$basedir"`
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -x "$basedir/node" ]; then
|
||||
exec "$basedir/node" "$basedir/../mkdirp/bin/cmd.js" "$@"
|
||||
else
|
||||
exec node "$basedir/../mkdirp/bin/cmd.js" "$@"
|
||||
fi
|
@ -0,0 +1,17 @@
|
||||
@ECHO off
|
||||
GOTO start
|
||||
:find_dp0
|
||||
SET dp0=%~dp0
|
||||
EXIT /b
|
||||
:start
|
||||
SETLOCAL
|
||||
CALL :find_dp0
|
||||
|
||||
IF EXIST "%dp0%\node.exe" (
|
||||
SET "_prog=%dp0%\node.exe"
|
||||
) ELSE (
|
||||
SET "_prog=node"
|
||||
SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||
)
|
||||
|
||||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\mkdirp\bin\cmd.js" %*
|
@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env pwsh
|
||||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
|
||||
|
||||
$exe=""
|
||||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
|
||||
# Fix case when both the Windows and Linux builds of Node
|
||||
# are installed in the same directory
|
||||
$exe=".exe"
|
||||
}
|
||||
$ret=0
|
||||
if (Test-Path "$basedir/node$exe") {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "$basedir/node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
|
||||
} else {
|
||||
& "$basedir/node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
} else {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
|
||||
} else {
|
||||
& "node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
}
|
||||
exit $ret
|
@ -0,0 +1,16 @@
|
||||
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*|*MINGW*|*MSYS*)
|
||||
if command -v cygpath > /dev/null 2>&1; then
|
||||
basedir=`cygpath -w "$basedir"`
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -x "$basedir/node" ]; then
|
||||
exec "$basedir/node" "$basedir/../xlsx/bin/xlsx.njs" "$@"
|
||||
else
|
||||
exec node "$basedir/../xlsx/bin/xlsx.njs" "$@"
|
||||
fi
|
@ -0,0 +1,17 @@
|
||||
@ECHO off
|
||||
GOTO start
|
||||
:find_dp0
|
||||
SET dp0=%~dp0
|
||||
EXIT /b
|
||||
:start
|
||||
SETLOCAL
|
||||
CALL :find_dp0
|
||||
|
||||
IF EXIST "%dp0%\node.exe" (
|
||||
SET "_prog=%dp0%\node.exe"
|
||||
) ELSE (
|
||||
SET "_prog=node"
|
||||
SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||
)
|
||||
|
||||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\xlsx\bin\xlsx.njs" %*
|
@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env pwsh
|
||||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
|
||||
|
||||
$exe=""
|
||||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
|
||||
# Fix case when both the Windows and Linux builds of Node
|
||||
# are installed in the same directory
|
||||
$exe=".exe"
|
||||
}
|
||||
$ret=0
|
||||
if (Test-Path "$basedir/node$exe") {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "$basedir/node$exe" "$basedir/../xlsx/bin/xlsx.njs" $args
|
||||
} else {
|
||||
& "$basedir/node$exe" "$basedir/../xlsx/bin/xlsx.njs" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
} else {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "node$exe" "$basedir/../xlsx/bin/xlsx.njs" $args
|
||||
} else {
|
||||
& "node$exe" "$basedir/../xlsx/bin/xlsx.njs" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
}
|
||||
exit $ret
|
File diff suppressed because it is too large
@ -0,0 +1,243 @@
|
||||
1.3.8 / 2022-02-02
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.34
|
||||
- deps: mime-db@~1.51.0
|
||||
* deps: negotiator@0.6.3
|
||||
|
||||
1.3.7 / 2019-04-29
|
||||
==================
|
||||
|
||||
* deps: negotiator@0.6.2
|
||||
- Fix sorting charset, encoding, and language with extra parameters
|
||||
|
||||
1.3.6 / 2019-04-28
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.24
|
||||
- deps: mime-db@~1.40.0
|
||||
|
||||
1.3.5 / 2018-02-28
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.18
|
||||
- deps: mime-db@~1.33.0
|
||||
|
||||
1.3.4 / 2017-08-22
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.16
|
||||
- deps: mime-db@~1.29.0
|
||||
|
||||
1.3.3 / 2016-05-02
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.11
|
||||
- deps: mime-db@~1.23.0
|
||||
* deps: negotiator@0.6.1
|
||||
- perf: improve `Accept` parsing speed
|
||||
- perf: improve `Accept-Charset` parsing speed
|
||||
- perf: improve `Accept-Encoding` parsing speed
|
||||
- perf: improve `Accept-Language` parsing speed
|
||||
|
||||
1.3.2 / 2016-03-08
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.10
|
||||
- Fix extension of `application/dash+xml`
|
||||
- Update primary extension for `audio/mp4`
|
||||
- deps: mime-db@~1.22.0
|
||||
|
||||
1.3.1 / 2016-01-19
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.9
|
||||
- deps: mime-db@~1.21.0
|
||||
|
||||
1.3.0 / 2015-09-29
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.7
|
||||
- deps: mime-db@~1.19.0
|
||||
* deps: negotiator@0.6.0
|
||||
- Fix including type extensions in parameters in `Accept` parsing
|
||||
- Fix parsing `Accept` parameters with quoted equals
|
||||
- Fix parsing `Accept` parameters with quoted semicolons
|
||||
- Lazy-load modules from main entry point
|
||||
- perf: delay type concatenation until needed
|
||||
- perf: enable strict mode
|
||||
- perf: hoist regular expressions
|
||||
- perf: remove closures getting spec properties
|
||||
- perf: remove a closure from media type parsing
|
||||
- perf: remove property delete from media type parsing
|
||||
|
||||
1.2.13 / 2015-09-06
|
||||
===================
|
||||
|
||||
* deps: mime-types@~2.1.6
|
||||
- deps: mime-db@~1.18.0
|
||||
|
||||
1.2.12 / 2015-07-30
|
||||
===================
|
||||
|
||||
* deps: mime-types@~2.1.4
|
||||
- deps: mime-db@~1.16.0
|
||||
|
||||
1.2.11 / 2015-07-16
|
||||
===================
|
||||
|
||||
* deps: mime-types@~2.1.3
|
||||
- deps: mime-db@~1.15.0
|
||||
|
||||
1.2.10 / 2015-07-01
|
||||
===================
|
||||
|
||||
* deps: mime-types@~2.1.2
|
||||
- deps: mime-db@~1.14.0
|
||||
|
||||
1.2.9 / 2015-06-08
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.1
|
||||
- perf: fix deopt during mapping
|
||||
|
||||
1.2.8 / 2015-06-07
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.0
|
||||
- deps: mime-db@~1.13.0
|
||||
* perf: avoid argument reassignment & argument slice
|
||||
* perf: avoid negotiator recursive construction
|
||||
* perf: enable strict mode
|
||||
* perf: remove unnecessary bitwise operator
|
||||
|
||||
1.2.7 / 2015-05-10
|
||||
==================
|
||||
|
||||
* deps: negotiator@0.5.3
|
||||
- Fix media type parameter matching to be case-insensitive
|
||||
|
||||
1.2.6 / 2015-05-07
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.0.11
|
||||
- deps: mime-db@~1.9.1
|
||||
* deps: negotiator@0.5.2
|
||||
- Fix comparing media types with quoted values
|
||||
- Fix splitting media types with quoted commas
|
||||
|
||||
1.2.5 / 2015-03-13
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.0.10
|
||||
- deps: mime-db@~1.8.0
|
||||
|
||||
1.2.4 / 2015-02-14
|
||||
==================
|
||||
|
||||
* Support Node.js 0.6
|
||||
* deps: mime-types@~2.0.9
|
||||
- deps: mime-db@~1.7.0
|
||||
* deps: negotiator@0.5.1
|
||||
- Fix preference sorting to be stable for long acceptable lists
|
||||
|
||||
1.2.3 / 2015-01-31
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.0.8
|
||||
- deps: mime-db@~1.6.0
|
||||
|
||||
1.2.2 / 2014-12-30
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.0.7
|
||||
- deps: mime-db@~1.5.0
|
||||
|
||||
1.2.1 / 2014-12-30
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.0.5
|
||||
- deps: mime-db@~1.3.1
|
||||
|
||||
1.2.0 / 2014-12-19
|
||||
==================
|
||||
|
||||
* deps: negotiator@0.5.0
|
||||
- Fix list return order when large accepted list
|
||||
- Fix missing identity encoding when q=0 exists
|
||||
- Remove dynamic building of Negotiator class
|
||||
|
||||
1.1.4 / 2014-12-10
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.0.4
|
||||
- deps: mime-db@~1.3.0
|
||||
|
||||
1.1.3 / 2014-11-09
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.0.3
|
||||
- deps: mime-db@~1.2.0
|
||||
|
||||
1.1.2 / 2014-10-14
|
||||
==================
|
||||
|
||||
* deps: negotiator@0.4.9
|
||||
- Fix error when media type has invalid parameter
|
||||
|
||||
1.1.1 / 2014-09-28
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.0.2
|
||||
- deps: mime-db@~1.1.0
|
||||
* deps: negotiator@0.4.8
|
||||
- Fix all negotiations to be case-insensitive
|
||||
- Stable sort preferences of same quality according to client order
|
||||
|
||||
1.1.0 / 2014-09-02
|
||||
==================
|
||||
|
||||
* update `mime-types`
|
||||
|
||||
1.0.7 / 2014-07-04
|
||||
==================
|
||||
|
||||
* Fix wrong type returned from `type` when match after unknown extension
|
||||
|
||||
1.0.6 / 2014-06-24
|
||||
==================
|
||||
|
||||
* deps: negotiator@0.4.7
|
||||
|
||||
1.0.5 / 2014-06-20
|
||||
==================
|
||||
|
||||
* fix crash when unknown extension given
|
||||
|
||||
1.0.4 / 2014-06-19
|
||||
==================
|
||||
|
||||
* use `mime-types`
|
||||
|
||||
1.0.3 / 2014-06-11
|
||||
==================
|
||||
|
||||
* deps: negotiator@0.4.6
|
||||
- Order by specificity when quality is the same
|
||||
|
||||
1.0.2 / 2014-05-29
|
||||
==================
|
||||
|
||||
* Fix interpretation when header not in request
|
||||
* deps: pin negotiator@0.4.5
|
||||
|
||||
1.0.1 / 2014-01-18
|
||||
==================
|
||||
|
||||
* Identity encoding isn't always acceptable
|
||||
* deps: negotiator@~0.4.0
|
||||
|
||||
1.0.0 / 2013-12-27
|
||||
==================
|
||||
|
||||
* Genesis
|
@ -0,0 +1,23 @@
|
||||
(The MIT License)
|
||||
|
||||
Copyright (c) 2014 Jonathan Ong <me@jongleberry.com>
|
||||
Copyright (c) 2015 Douglas Christopher Wilson <doug@somethingdoug.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
'Software'), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
@ -0,0 +1,140 @@
|
||||
# accepts
|
||||
|
||||
[![NPM Version][npm-version-image]][npm-url]
|
||||
[![NPM Downloads][npm-downloads-image]][npm-url]
|
||||
[![Node.js Version][node-version-image]][node-version-url]
|
||||
[![Build Status][github-actions-ci-image]][github-actions-ci-url]
|
||||
[![Test Coverage][coveralls-image]][coveralls-url]
|
||||
|
||||
Higher level content negotiation based on [negotiator](https://www.npmjs.com/package/negotiator).
|
||||
Extracted from [koa](https://www.npmjs.com/package/koa) for general use.
|
||||
|
||||
In addition to negotiator, it allows:
|
||||
|
||||
- Allows types as an array or arguments list, i.e. `(['text/html', 'application/json'])`
|
||||
as well as `('text/html', 'application/json')`.
|
||||
- Allows type shorthands such as `json`.
|
||||
- Returns `false` when no types match
|
||||
- Treats non-existent headers as `*`
|
||||
|
||||
## Installation
|
||||
|
||||
This is a [Node.js](https://nodejs.org/en/) module available through the
|
||||
[npm registry](https://www.npmjs.com/). Installation is done using the
|
||||
[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally):
|
||||
|
||||
```sh
|
||||
$ npm install accepts
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
```js
|
||||
var accepts = require('accepts')
|
||||
```
|
||||
|
||||
### accepts(req)
|
||||
|
||||
Create a new `Accepts` object for the given `req`.
|
||||
|
||||
#### .charset(charsets)
|
||||
|
||||
Return the first accepted charset. If nothing in `charsets` is accepted,
|
||||
then `false` is returned.
|
||||
|
||||
#### .charsets()
|
||||
|
||||
Return the charsets that the request accepts, in the order of the client's
|
||||
preference (most preferred first).
|
||||
|
||||
#### .encoding(encodings)
|
||||
|
||||
Return the first accepted encoding. If nothing in `encodings` is accepted,
|
||||
then `false` is returned.
|
||||
|
||||
#### .encodings()
|
||||
|
||||
Return the encodings that the request accepts, in the order of the client's
|
||||
preference (most preferred first).
|
||||
|
||||
#### .language(languages)
|
||||
|
||||
Return the first accepted language. If nothing in `languages` is accepted,
|
||||
then `false` is returned.
|
||||
|
||||
#### .languages()
|
||||
|
||||
Return the languages that the request accepts, in the order of the client's
|
||||
preference (most preferred first).
|
||||
|
||||
#### .type(types)
|
||||
|
||||
Return the first accepted type (and it is returned as the same text as what
|
||||
appears in the `types` array). If nothing in `types` is accepted, then `false`
|
||||
is returned.
|
||||
|
||||
The `types` array can contain full MIME types or file extensions. Any value
|
||||
that is not a full MIME type is passed to `require('mime-types').lookup`.
|
||||
|
||||
#### .types()
|
||||
|
||||
Return the types that the request accepts, in the order of the client's
|
||||
preference (most preferred first).
|
||||
|
||||
## Examples
|
||||
|
||||
### Simple type negotiation
|
||||
|
||||
This simple example shows how to use `accepts` to return a different typed
|
||||
response body based on what the client wants to accept. The server lists its
|
||||
preferences in order and will get back the best match between the client and
|
||||
server.
|
||||
|
||||
```js
|
||||
var accepts = require('accepts')
|
||||
var http = require('http')
|
||||
|
||||
function app (req, res) {
|
||||
var accept = accepts(req)
|
||||
|
||||
// the order of this list is significant; should be server preferred order
|
||||
switch (accept.type(['json', 'html'])) {
|
||||
case 'json':
|
||||
res.setHeader('Content-Type', 'application/json')
|
||||
res.write('{"hello":"world!"}')
|
||||
break
|
||||
case 'html':
|
||||
res.setHeader('Content-Type', 'text/html')
|
||||
res.write('<b>hello, world!</b>')
|
||||
break
|
||||
default:
|
||||
// the fallback is text/plain, so no need to specify it above
|
||||
res.setHeader('Content-Type', 'text/plain')
|
||||
res.write('hello, world!')
|
||||
break
|
||||
}
|
||||
|
||||
res.end()
|
||||
}
|
||||
|
||||
http.createServer(app).listen(3000)
|
||||
```
|
||||
|
||||
You can test this out with the cURL program:
|
||||
```sh
|
||||
curl -I -H'Accept: text/html' http://localhost:3000/
|
||||
```
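
### Negotiating language and encoding

The other accessors follow the same pattern as `.type()`. A minimal sketch (the
header values, port, and response text here are illustrative, not from the
original README):

```js
var accepts = require('accepts')
var http = require('http')

function app (req, res) {
  var accept = accepts(req)

  // server-preferred order, as with .type(); false is returned when nothing matches
  var lang = accept.language(['en', 'es']) // 'es' for "Accept-Language: es, en;q=0.8"
  var enc = accept.encoding(['gzip', 'identity'])

  res.setHeader('Content-Type', 'text/plain')
  res.end('language: ' + lang + ', encoding: ' + enc)
}

http.createServer(app).listen(3000)
```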
|
||||
|
||||
## License
|
||||
|
||||
[MIT](LICENSE)
|
||||
|
||||
[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/accepts/master
|
||||
[coveralls-url]: https://coveralls.io/r/jshttp/accepts?branch=master
|
||||
[github-actions-ci-image]: https://badgen.net/github/checks/jshttp/accepts/master?label=ci
|
||||
[github-actions-ci-url]: https://github.com/jshttp/accepts/actions/workflows/ci.yml
|
||||
[node-version-image]: https://badgen.net/npm/node/accepts
|
||||
[node-version-url]: https://nodejs.org/en/download
|
||||
[npm-downloads-image]: https://badgen.net/npm/dm/accepts
|
||||
[npm-url]: https://npmjs.org/package/accepts
|
||||
[npm-version-image]: https://badgen.net/npm/v/accepts
|
@ -0,0 +1,238 @@
|
||||
/*!
|
||||
* accepts
|
||||
* Copyright(c) 2014 Jonathan Ong
|
||||
* Copyright(c) 2015 Douglas Christopher Wilson
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
* @private
|
||||
*/
|
||||
|
||||
var Negotiator = require('negotiator')
|
||||
var mime = require('mime-types')
|
||||
|
||||
/**
|
||||
* Module exports.
|
||||
* @public
|
||||
*/
|
||||
|
||||
module.exports = Accepts
|
||||
|
||||
/**
|
||||
* Create a new Accepts object for the given req.
|
||||
*
|
||||
* @param {object} req
|
||||
* @public
|
||||
*/
|
||||
|
||||
function Accepts (req) {
|
||||
if (!(this instanceof Accepts)) {
|
||||
return new Accepts(req)
|
||||
}
|
||||
|
||||
this.headers = req.headers
|
||||
this.negotiator = new Negotiator(req)
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the given `type(s)` is acceptable, returning
|
||||
* the best match when true, otherwise `undefined`, in which
|
||||
* case you should respond with 406 "Not Acceptable".
|
||||
*
|
||||
* The `type` value may be a single mime type string
|
||||
* such as "application/json", the extension name
|
||||
* such as "json" or an array `["json", "html", "text/plain"]`. When a list
|
||||
 * or array is given, the _best_ match, if any, is returned.
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* // Accept: text/html
|
||||
* this.types('html');
|
||||
* // => "html"
|
||||
*
|
||||
* // Accept: text/*, application/json
|
||||
* this.types('html');
|
||||
* // => "html"
|
||||
* this.types('text/html');
|
||||
* // => "text/html"
|
||||
* this.types('json', 'text');
|
||||
* // => "json"
|
||||
* this.types('application/json');
|
||||
* // => "application/json"
|
||||
*
|
||||
* // Accept: text/*, application/json
|
||||
* this.types('image/png');
|
||||
* this.types('png');
|
||||
* // => undefined
|
||||
*
|
||||
* // Accept: text/*;q=.5, application/json
|
||||
* this.types(['html', 'json']);
|
||||
* this.types('html', 'json');
|
||||
* // => "json"
|
||||
*
|
||||
* @param {String|Array} types...
|
||||
* @return {String|Array|Boolean}
|
||||
* @public
|
||||
*/
|
||||
|
||||
Accepts.prototype.type =
|
||||
Accepts.prototype.types = function (types_) {
|
||||
var types = types_
|
||||
|
||||
// support flattened arguments
|
||||
if (types && !Array.isArray(types)) {
|
||||
types = new Array(arguments.length)
|
||||
for (var i = 0; i < types.length; i++) {
|
||||
types[i] = arguments[i]
|
||||
}
|
||||
}
|
||||
|
||||
// no types, return all requested types
|
||||
if (!types || types.length === 0) {
|
||||
return this.negotiator.mediaTypes()
|
||||
}
|
||||
|
||||
// no accept header, return first given type
|
||||
if (!this.headers.accept) {
|
||||
return types[0]
|
||||
}
|
||||
|
||||
var mimes = types.map(extToMime)
|
||||
var accepts = this.negotiator.mediaTypes(mimes.filter(validMime))
|
||||
var first = accepts[0]
|
||||
|
||||
return first
|
||||
? types[mimes.indexOf(first)]
|
||||
: false
|
||||
}
|
||||
|
||||
/**
|
||||
* Return accepted encodings or best fit based on `encodings`.
|
||||
*
|
||||
* Given `Accept-Encoding: gzip, deflate`
|
||||
* an array sorted by quality is returned:
|
||||
*
|
||||
* ['gzip', 'deflate']
|
||||
*
|
||||
* @param {String|Array} encodings...
|
||||
* @return {String|Array}
|
||||
* @public
|
||||
*/
|
||||
|
||||
Accepts.prototype.encoding =
|
||||
Accepts.prototype.encodings = function (encodings_) {
|
||||
var encodings = encodings_
|
||||
|
||||
// support flattened arguments
|
||||
if (encodings && !Array.isArray(encodings)) {
|
||||
encodings = new Array(arguments.length)
|
||||
for (var i = 0; i < encodings.length; i++) {
|
||||
encodings[i] = arguments[i]
|
||||
}
|
||||
}
|
||||
|
||||
// no encodings, return all requested encodings
|
||||
if (!encodings || encodings.length === 0) {
|
||||
return this.negotiator.encodings()
|
||||
}
|
||||
|
||||
return this.negotiator.encodings(encodings)[0] || false
|
||||
}
|
||||
|
||||
/**
|
||||
* Return accepted charsets or best fit based on `charsets`.
|
||||
*
|
||||
* Given `Accept-Charset: utf-8, iso-8859-1;q=0.2, utf-7;q=0.5`
|
||||
* an array sorted by quality is returned:
|
||||
*
|
||||
* ['utf-8', 'utf-7', 'iso-8859-1']
|
||||
*
|
||||
* @param {String|Array} charsets...
|
||||
* @return {String|Array}
|
||||
* @public
|
||||
*/
|
||||
|
||||
Accepts.prototype.charset =
|
||||
Accepts.prototype.charsets = function (charsets_) {
|
||||
var charsets = charsets_
|
||||
|
||||
// support flattened arguments
|
||||
if (charsets && !Array.isArray(charsets)) {
|
||||
charsets = new Array(arguments.length)
|
||||
for (var i = 0; i < charsets.length; i++) {
|
||||
charsets[i] = arguments[i]
|
||||
}
|
||||
}
|
||||
|
||||
// no charsets, return all requested charsets
|
||||
if (!charsets || charsets.length === 0) {
|
||||
return this.negotiator.charsets()
|
||||
}
|
||||
|
||||
return this.negotiator.charsets(charsets)[0] || false
|
||||
}
|
||||
|
||||
/**
|
||||
* Return accepted languages or best fit based on `langs`.
|
||||
*
|
||||
* Given `Accept-Language: en;q=0.8, es, pt`
|
||||
* an array sorted by quality is returned:
|
||||
*
|
||||
* ['es', 'pt', 'en']
|
||||
*
|
||||
* @param {String|Array} langs...
|
||||
* @return {Array|String}
|
||||
* @public
|
||||
*/
|
||||
|
||||
Accepts.prototype.lang =
|
||||
Accepts.prototype.langs =
|
||||
Accepts.prototype.language =
|
||||
Accepts.prototype.languages = function (languages_) {
|
||||
var languages = languages_
|
||||
|
||||
// support flattened arguments
|
||||
if (languages && !Array.isArray(languages)) {
|
||||
languages = new Array(arguments.length)
|
||||
for (var i = 0; i < languages.length; i++) {
|
||||
languages[i] = arguments[i]
|
||||
}
|
||||
}
|
||||
|
||||
// no languages, return all requested languages
|
||||
if (!languages || languages.length === 0) {
|
||||
return this.negotiator.languages()
|
||||
}
|
||||
|
||||
return this.negotiator.languages(languages)[0] || false
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert extnames to mime.
|
||||
*
|
||||
* @param {String} type
|
||||
* @return {String}
|
||||
* @private
|
||||
*/
|
||||
|
||||
function extToMime (type) {
|
||||
return type.indexOf('/') === -1
|
||||
? mime.lookup(type)
|
||||
: type
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if mime is valid.
|
||||
*
|
||||
* @param {String} type
|
||||
* @return {Boolean}
|
||||
* @private
|
||||
*/
|
||||
|
||||
function validMime (type) {
|
||||
return typeof type === 'string'
|
||||
}
|
@ -0,0 +1,47 @@
|
||||
{
|
||||
"name": "accepts",
|
||||
"description": "Higher-level content negotiation",
|
||||
"version": "1.3.8",
|
||||
"contributors": [
|
||||
"Douglas Christopher Wilson <doug@somethingdoug.com>",
|
||||
"Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)"
|
||||
],
|
||||
"license": "MIT",
|
||||
"repository": "jshttp/accepts",
|
||||
"dependencies": {
|
||||
"mime-types": "~2.1.34",
|
||||
"negotiator": "0.6.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"deep-equal": "1.0.1",
|
||||
"eslint": "7.32.0",
|
||||
"eslint-config-standard": "14.1.1",
|
||||
"eslint-plugin-import": "2.25.4",
|
||||
"eslint-plugin-markdown": "2.2.1",
|
||||
"eslint-plugin-node": "11.1.0",
|
||||
"eslint-plugin-promise": "4.3.1",
|
||||
"eslint-plugin-standard": "4.1.0",
|
||||
"mocha": "9.2.0",
|
||||
"nyc": "15.1.0"
|
||||
},
|
||||
"files": [
|
||||
"LICENSE",
|
||||
"HISTORY.md",
|
||||
"index.js"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
},
|
||||
"scripts": {
|
||||
"lint": "eslint .",
|
||||
"test": "mocha --reporter spec --check-leaks --bail test/",
|
||||
"test-ci": "nyc --reporter=lcov --reporter=text npm test",
|
||||
"test-cov": "nyc --reporter=html --reporter=text npm test"
|
||||
},
|
||||
"keywords": [
|
||||
"content",
|
||||
"negotiation",
|
||||
"accept",
|
||||
"accepts"
|
||||
]
|
||||
}
|
@ -0,0 +1,201 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright (C) 2014-present SheetJS LLC
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
@ -0,0 +1,140 @@
|
||||
# adler32
|
||||
|
||||
Signed ADLER-32 algorithm implementation in JS (for the browser and nodejs).
|
||||
Emphasis on correctness, performance, and IE6+ support.
|
||||
|
||||
## Installation
|
||||
|
||||
With [npm](https://www.npmjs.org/package/adler-32):
|
||||
|
||||
```bash
|
||||
$ npm install adler-32
|
||||
```
|
||||
|
||||
In the browser:
|
||||
|
||||
```html
|
||||
<script src="adler32.js"></script>
|
||||
```
|
||||
|
||||
The browser exposes a variable `ADLER32`.
|
||||
|
||||
When installed globally, npm installs a script `adler32` that computes the
|
||||
checksum for a specified file or standard input.
|
||||
|
||||
The script will manipulate `module.exports` if available. This is not always
|
||||
desirable. To prevent the behavior, define `DO_NOT_EXPORT_ADLER`.
|
||||
|
||||
## Usage
|
||||
|
||||
In all cases, the relevant function takes an argument representing data and an
|
||||
optional second argument representing the starting "seed" (for running hash).
|
||||
|
||||
The return value is a signed 32-bit integer.
|
||||
|
||||
- `ADLER32.buf(byte array or buffer[, seed])` assumes the argument is a sequence
|
||||
of 8-bit unsigned integers (nodejs `Buffer`, `Uint8Array` or array of bytes).
|
||||
|
||||
- `ADLER32.bstr(binary string[, seed])` assumes the argument is a binary string
|
||||
where byte `i` is the low byte of the UCS-2 char: `str.charCodeAt(i) & 0xFF`
|
||||
|
||||
- `ADLER32.str(string[, seed])` assumes the argument is a standard JS string and
|
||||
calculates the hash of the UTF-8 encoding.
|
||||
|
||||
For example:
|
||||
|
||||
```js
|
||||
// var ADLER32 = require('adler-32'); // uncomment if in node
|
||||
ADLER32.str("SheetJS") // 176947863
|
||||
ADLER32.bstr("SheetJS") // 176947863
|
||||
ADLER32.buf([ 83, 104, 101, 101, 116, 74, 83 ]) // 176947863
|
||||
|
||||
adler32 = ADLER32.buf([83, 104]) // 17825980 "Sh"
|
||||
adler32 = ADLER32.str("eet", adler32) // 95486458 "Sheet"
|
||||
ADLER32.bstr("JS", adler32) // 176947863 "SheetJS"
|
||||
|
||||
[ADLER32.str("\u2603"), ADLER32.str("\u0003")] // [ 73138686, 262148 ]
|
||||
[ADLER32.bstr("\u2603"), ADLER32.bstr("\u0003")] // [ 262148, 262148 ]
|
||||
[ADLER32.buf([0x2603]), ADLER32.buf([0x0003])] // [ 262148, 262148 ]
|
||||
```
|
||||
|
||||
## Testing
|
||||
|
||||
`make test` will run the nodejs-based test.
|
||||
|
||||
To run the in-browser tests, run a local server and go to the `ctest` directory.
|
||||
`make ctestserv` will start a python `SimpleHTTPServer` server on port 8000.
|
||||
|
||||
To update the browser artifacts, run `make ctest`.
|
||||
|
||||
To generate the bits file, use the `adler32` function from python `zlib`:
|
||||
|
||||
```python
|
||||
>>> from zlib import adler32
|
||||
>>> x="foo bar baz٪☃🍣"
|
||||
>>> adler32(x)
|
||||
1543572022
|
||||
>>> adler32(x+x)
|
||||
-2076896149
|
||||
>>> adler32(x+x+x)
|
||||
2023497376
|
||||
```
|
||||
|
||||
The [`adler32-cli`](https://www.npmjs.com/package/adler32-cli) package includes
|
||||
scripts for processing files or text on standard input:
|
||||
|
||||
```bash
|
||||
$ echo "this is a test" > t.txt
|
||||
$ adler32-cli t.txt
|
||||
726861088
|
||||
```
|
||||
|
||||
For comparison, the `adler32.py` script in the subdirectory uses python `zlib`:
|
||||
|
||||
```bash
|
||||
$ packages/adler32-cli/bin/adler32.py t.txt
|
||||
726861088
|
||||
```
|
||||
|
||||
## Performance
|
||||
|
||||
`make perf` will run algorithmic performance tests (which should justify certain
|
||||
decisions in the code).
|
||||
|
||||
Bit twiddling is much faster than taking the mod in Safari and Firefox browsers.
|
||||
Instead of taking the literal mod 65521, it is faster to keep it in the integers
|
||||
by bit-shifting: `65536 ~ 15 mod 65521` so for nonnegative integer `a`:
|
||||
|
||||
```
|
||||
a = (a >>> 16) * 65536 + (a & 65535) [equality]
|
||||
a ~ (a >>> 16) * 15 + (a & 65535) mod 65521
|
||||
```
|
||||
|
||||
The mod is taken at the very end, since the intermediate result may exceed 65521.
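
As a standalone sanity check of that identity (not part of the library itself):

```js
// 65536 ≡ 15 (mod 65521), so folding the high 16 bits back into the low 16
// bits preserves the value mod 65521 without a division on every step.
function fold (a) { return 15 * (a >>> 16) + (a & 65535) }

var a = 123456789
console.log(fold(a) % 65521 === a % 65521) // true
```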
|
||||
|
||||
## Magic Number
|
||||
|
||||
The magic numbers were chosen so as to not overflow a 31-bit integer:
|
||||
|
||||
```mathematica
|
||||
F[n_] := Reduce[x*(x + 1)*n/2 + (x + 1)*(65521) < (2^31 - 1) && x > 0, x, Integers]
|
||||
F[255] (* bstr: x \[Element] Integers && 1 <= x <= 3854 *)
|
||||
F[127] (* ascii: x \[Element] Integers && 1 <= x <= 5321 *)
|
||||
```
|
||||
|
||||
Subtract up to 4 elements for the Unicode case.
|
||||
|
||||
## License
|
||||
|
||||
Please consult the attached LICENSE file for details. All rights not explicitly
|
||||
granted by the Apache 2.0 license are reserved by the Original Author.
|
||||
|
||||
## Badges
|
||||
|
||||
[![Sauce Test Status](https://saucelabs.com/browser-matrix/adler32.svg)](https://saucelabs.com/u/adler32)
|
||||
|
||||
[![Build Status](https://img.shields.io/github/workflow/status/sheetjs/js-adler32/Tests:%20node.js)](https://github.com/SheetJS/js-adler32/actions)
|
||||
|
||||
[![Coverage Status](http://img.shields.io/coveralls/SheetJS/js-adler32/master.svg)](https://coveralls.io/r/SheetJS/js-adler32?branch=master)
|
||||
|
||||
[![Analytics](https://ga-beacon.appspot.com/UA-36810333-1/SheetJS/js-adler32?pixel)](https://github.com/SheetJS/js-adler32)
|
@ -0,0 +1,92 @@
|
||||
/* adler32.js (C) 2014-present SheetJS -- http://sheetjs.com */
|
||||
/* vim: set ts=2: */
|
||||
/*exported ADLER32 */
|
||||
var ADLER32;
|
||||
(function (factory) {
|
||||
/*jshint ignore:start */
|
||||
/*eslint-disable */
|
||||
if(typeof DO_NOT_EXPORT_ADLER === 'undefined') {
|
||||
if('object' === typeof exports) {
|
||||
factory(exports);
|
||||
} else if ('function' === typeof define && define.amd) {
|
||||
define(function () {
|
||||
var module = {};
|
||||
factory(module);
|
||||
return module;
|
||||
});
|
||||
} else {
|
||||
factory(ADLER32 = {});
|
||||
}
|
||||
} else {
|
||||
factory(ADLER32 = {});
|
||||
}
|
||||
/*eslint-enable */
|
||||
/*jshint ignore:end */
|
||||
}(function(ADLER32) {
|
||||
ADLER32.version = '1.3.1';
|
||||
function adler32_bstr(bstr, seed) {
|
||||
var a = 1, b = 0, L = bstr.length, M = 0;
|
||||
if(typeof seed === 'number') { a = seed & 0xFFFF; b = seed >>> 16; }
|
||||
for(var i = 0; i < L;) {
|
||||
M = Math.min(L-i, 2654)+i;
|
||||
for(;i<M;i++) {
|
||||
a += bstr.charCodeAt(i)&0xFF;
|
||||
b += a;
|
||||
}
|
||||
a = (15*(a>>>16)+(a&65535));
|
||||
b = (15*(b>>>16)+(b&65535));
|
||||
}
|
||||
return ((b%65521) << 16) | (a%65521);
|
||||
}
|
||||
|
||||
function adler32_buf(buf, seed) {
|
||||
var a = 1, b = 0, L = buf.length, M = 0;
|
||||
if(typeof seed === 'number') { a = seed & 0xFFFF; b = (seed >>> 16) & 0xFFFF; }
|
||||
for(var i = 0; i < L;) {
|
||||
M = Math.min(L-i, 2654)+i;
|
||||
for(;i<M;i++) {
|
||||
a += buf[i]&0xFF;
|
||||
b += a;
|
||||
}
|
||||
a = (15*(a>>>16)+(a&65535));
|
||||
b = (15*(b>>>16)+(b&65535));
|
||||
}
|
||||
return ((b%65521) << 16) | (a%65521);
|
||||
}
|
||||
|
||||
function adler32_str(str, seed) {
|
||||
var a = 1, b = 0, L = str.length, M = 0, c = 0, d = 0;
|
||||
if(typeof seed === 'number') { a = seed & 0xFFFF; b = seed >>> 16; }
|
||||
for(var i = 0; i < L;) {
|
||||
M = Math.min(L-i, 2918);
|
||||
while(M>0) {
|
||||
c = str.charCodeAt(i++);
|
||||
if(c < 0x80) { a += c; }
|
||||
else if(c < 0x800) {
|
||||
a += 192|((c>>6)&31); b += a; --M;
|
||||
a += 128|(c&63);
|
||||
} else if(c >= 0xD800 && c < 0xE000) {
|
||||
c = (c&1023)+64; d = str.charCodeAt(i++) & 1023;
|
||||
a += 240|((c>>8)&7); b += a; --M;
|
||||
a += 128|((c>>2)&63); b += a; --M;
|
||||
a += 128|((d>>6)&15)|((c&3)<<4); b += a; --M;
|
||||
a += 128|(d&63);
|
||||
} else {
|
||||
a += 224|((c>>12)&15); b += a; --M;
|
||||
a += 128|((c>>6)&63); b += a; --M;
|
||||
a += 128|(c&63);
|
||||
}
|
||||
b += a; --M;
|
||||
}
|
||||
a = (15*(a>>>16)+(a&65535));
|
||||
b = (15*(b>>>16)+(b&65535));
|
||||
}
|
||||
return ((b%65521) << 16) | (a%65521);
|
||||
}
|
||||
// $FlowIgnore
|
||||
ADLER32.bstr = adler32_bstr;
|
||||
// $FlowIgnore
|
||||
ADLER32.buf = adler32_buf;
|
||||
// $FlowIgnore
|
||||
ADLER32.str = adler32_str;
|
||||
}));
|
@ -0,0 +1,35 @@
|
||||
{
|
||||
"name": "adler-32",
|
||||
"version": "1.3.1",
|
||||
"author": "sheetjs",
|
||||
"description": "Pure-JS ADLER-32",
|
||||
"keywords": [ "adler32", "checksum" ],
|
||||
"main": "./adler32",
|
||||
"types": "types/index.d.ts",
|
||||
"devDependencies": {
|
||||
"mocha": "~2.5.3",
|
||||
"blanket": "~1.2.3",
|
||||
"codepage": "~1.10.0",
|
||||
"@sheetjs/uglify-js": "~2.7.3",
|
||||
"@types/node": "^8.0.7",
|
||||
"dtslint": "^0.1.2",
|
||||
"typescript": "2.2.0"
|
||||
},
|
||||
"repository": { "type": "git", "url": "git://github.com/SheetJS/js-adler32.git" },
|
||||
"scripts": {
|
||||
"test": "make test",
|
||||
"build": "make",
|
||||
"lint": "make fullint",
|
||||
"dtslint": "dtslint types"
|
||||
},
|
||||
"config": {
|
||||
"blanket": {
|
||||
"pattern": "adler32.js"
|
||||
}
|
||||
},
|
||||
"homepage": "http://sheetjs.com/opensource",
|
||||
"files": ["adler32.js", "LICENSE", "README.md", "types/index.d.ts", "types/*.json"],
|
||||
"bugs": { "url": "https://github.com/SheetJS/js-adler32/issues" },
|
||||
"license": "Apache-2.0",
|
||||
"engines": { "node": ">=0.8" }
|
||||
}
|
@ -0,0 +1,14 @@
|
||||
/* adler32.js (C) 2014-present SheetJS -- http://sheetjs.com */
|
||||
// TypeScript Version: 2.2
|
||||
|
||||
/** Version string */
|
||||
export const version: string;
|
||||
|
||||
/** Process a node buffer or byte array */
|
||||
export function buf(data: number[] | Uint8Array, seed?: number): number;
|
||||
|
||||
/** Process a binary string */
|
||||
export function bstr(data: string, seed?: number): number;
|
||||
|
||||
/** Process a JS string based on the UTF8 encoding */
|
||||
export function str(data: string, seed?: number): number;
|
@ -0,0 +1,15 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"module": "commonjs",
|
||||
"lib": [ "es5" ],
|
||||
"noImplicitAny": true,
|
||||
"noImplicitThis": true,
|
||||
"strictNullChecks": false,
|
||||
"baseUrl": ".",
|
||||
"paths": { "adler-32": ["."] },
|
||||
"types": [],
|
||||
"noEmit": true,
|
||||
"strictFunctionTypes": true,
|
||||
"forceConsistentCasingInFileNames": true
|
||||
}
|
||||
}
|
@ -0,0 +1,14 @@
|
||||
{
|
||||
"extends": "dtslint/dtslint.json",
|
||||
"rules": {
|
||||
"no-implicit-dependencies": false,
|
||||
"whitespace": false,
|
||||
"no-sparse-arrays": false,
|
||||
"only-arrow-functions": false,
|
||||
"no-consecutive-blank-lines": false,
|
||||
"prefer-conditional-expression": false,
|
||||
"one-variable-per-declaration": false,
|
||||
"strict-export-declare-modifiers": false,
|
||||
"prefer-template": false
|
||||
}
|
||||
}
|
@ -0,0 +1 @@
|
||||
node_modules/
|
@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015 Linus Unnebäck
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
@ -0,0 +1,44 @@
|
||||
# `append-field`
|
||||
|
||||
A [W3C HTML JSON forms spec](http://www.w3.org/TR/html-json-forms/) compliant
|
||||
field appender (for lack of a better name). Useful for people implementing
|
||||
`application/x-www-form-urlencoded` and `multipart/form-data` parsers.
|
||||
|
||||
It works best on objects created with `Object.create(null)`. Otherwise it might
|
||||
conflict with variables from the prototype (e.g. `hasOwnProperty`).
|
||||
|
||||
## Installation
|
||||
|
||||
```sh
|
||||
npm install --save append-field
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```javascript
|
||||
var appendField = require('append-field')
|
||||
var obj = Object.create(null)
|
||||
|
||||
appendField(obj, 'pets[0][species]', 'Dahut')
|
||||
appendField(obj, 'pets[0][name]', 'Hypatia')
|
||||
appendField(obj, 'pets[1][species]', 'Felis Stultus')
|
||||
appendField(obj, 'pets[1][name]', 'Billie')
|
||||
|
||||
console.log(obj)
|
||||
```
|
||||
|
||||
```text
|
||||
{ pets:
|
||||
[ { species: 'Dahut', name: 'Hypatia' },
|
||||
{ species: 'Felis Stultus', name: 'Billie' } ] }
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### `appendField(store, key, value)`
|
||||
|
||||
Adds the field named `key` with the value `value` to the object `store`.
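
The bracket syntax follows the W3C spec; for example, a trailing `[]` appends
successive values under the same key (a small sketch; the key names are
illustrative):

```javascript
var appendField = require('append-field')
var store = Object.create(null)

appendField(store, 'tags[]', 'node')
appendField(store, 'tags[]', 'forms')

console.log(store.tags) //=> [ 'node', 'forms' ]
```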
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
@ -0,0 +1,12 @@
|
||||
var parsePath = require('./lib/parse-path')
|
||||
var setValue = require('./lib/set-value')
|
||||
|
||||
function appendField (store, key, value) {
|
||||
var steps = parsePath(key)
|
||||
|
||||
steps.reduce(function (context, step) {
|
||||
return setValue(context, step, context[step.key], value)
|
||||
}, store)
|
||||
}
|
||||
|
||||
module.exports = appendField
|
@ -0,0 +1,53 @@
|
||||
var reFirstKey = /^[^\[]*/
|
||||
var reDigitPath = /^\[(\d+)\]/
|
||||
var reNormalPath = /^\[([^\]]+)\]/
|
||||
|
||||
function parsePath (key) {
|
||||
function failure () {
|
||||
return [{ type: 'object', key: key, last: true }]
|
||||
}
|
||||
|
||||
var firstKey = reFirstKey.exec(key)[0]
|
||||
if (!firstKey) return failure()
|
||||
|
||||
var len = key.length
|
||||
var pos = firstKey.length
|
||||
var tail = { type: 'object', key: firstKey }
|
||||
var steps = [tail]
|
||||
|
||||
while (pos < len) {
|
||||
var m
|
||||
|
||||
if (key[pos] === '[' && key[pos + 1] === ']') {
|
||||
pos += 2
|
||||
tail.append = true
|
||||
if (pos !== len) return failure()
|
||||
continue
|
||||
}
|
||||
|
||||
m = reDigitPath.exec(key.substring(pos))
|
||||
if (m !== null) {
|
||||
pos += m[0].length
|
||||
tail.nextType = 'array'
|
||||
tail = { type: 'array', key: parseInt(m[1], 10) }
|
||||
steps.push(tail)
|
||||
continue
|
||||
}
|
||||
|
||||
m = reNormalPath.exec(key.substring(pos))
|
||||
if (m !== null) {
|
||||
pos += m[0].length
|
||||
tail.nextType = 'object'
|
||||
tail = { type: 'object', key: m[1] }
|
||||
steps.push(tail)
|
||||
continue
|
||||
}
|
||||
|
||||
return failure()
|
||||
}
|
||||
|
||||
tail.last = true
|
||||
return steps
|
||||
}
|
||||
|
||||
module.exports = parsePath
|
@ -0,0 +1,64 @@
|
||||
function valueType (value) {
|
||||
if (value === undefined) return 'undefined'
|
||||
if (Array.isArray(value)) return 'array'
|
||||
if (typeof value === 'object') return 'object'
|
||||
return 'scalar'
|
||||
}
|
||||
|
||||
function setLastValue (context, step, currentValue, entryValue) {
|
||||
switch (valueType(currentValue)) {
|
||||
case 'undefined':
|
||||
if (step.append) {
|
||||
context[step.key] = [entryValue]
|
||||
} else {
|
||||
context[step.key] = entryValue
|
||||
}
|
||||
break
|
||||
case 'array':
|
||||
context[step.key].push(entryValue)
|
||||
break
|
||||
case 'object':
|
||||
return setLastValue(currentValue, { type: 'object', key: '', last: true }, currentValue[''], entryValue)
|
||||
case 'scalar':
|
||||
context[step.key] = [context[step.key], entryValue]
|
||||
break
|
||||
}
|
||||
|
||||
return context
|
||||
}
|
||||
|
||||
function setValue (context, step, currentValue, entryValue) {
|
||||
if (step.last) return setLastValue(context, step, currentValue, entryValue)
|
||||
|
||||
var obj
|
||||
switch (valueType(currentValue)) {
|
||||
case 'undefined':
|
||||
if (step.nextType === 'array') {
|
||||
context[step.key] = []
|
||||
} else {
|
||||
context[step.key] = Object.create(null)
|
||||
}
|
||||
return context[step.key]
|
||||
case 'object':
|
||||
return context[step.key]
|
||||
case 'array':
|
||||
if (step.nextType === 'array') {
|
||||
return currentValue
|
||||
}
|
||||
|
||||
obj = Object.create(null)
|
||||
context[step.key] = obj
|
||||
currentValue.forEach(function (item, i) {
|
||||
if (item !== undefined) obj['' + i] = item
|
||||
})
|
||||
|
||||
return obj
|
||||
case 'scalar':
|
||||
obj = Object.create(null)
|
||||
obj[''] = currentValue
|
||||
context[step.key] = obj
|
||||
return obj
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = setValue
|
@ -0,0 +1,19 @@
|
||||
{
|
||||
"name": "append-field",
|
||||
"version": "1.0.0",
|
||||
"license": "MIT",
|
||||
"author": "Linus Unnebäck <linus@folkdatorn.se>",
|
||||
"main": "index.js",
|
||||
"devDependencies": {
|
||||
"mocha": "^2.2.4",
|
||||
"standard": "^6.0.5",
|
||||
"testdata-w3c-json-form": "^0.2.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "standard && mocha"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "http://github.com/LinusU/node-append-field.git"
|
||||
}
|
||||
}
|
@ -0,0 +1,19 @@
|
||||
/* eslint-env mocha */
|
||||
|
||||
var assert = require('assert')
|
||||
var appendField = require('../')
|
||||
var testData = require('testdata-w3c-json-form')
|
||||
|
||||
describe('Append Field', function () {
|
||||
for (var test of testData) {
|
||||
it('handles ' + test.name, function () {
|
||||
var store = Object.create(null)
|
||||
|
||||
for (var field of test.fields) {
|
||||
appendField(store, field.key, field.value)
|
||||
}
|
||||
|
||||
assert.deepEqual(store, test.expected)
|
||||
})
|
||||
}
|
||||
})
|
@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
@ -0,0 +1,43 @@
|
||||
# Array Flatten
|
||||
|
||||
[![NPM version][npm-image]][npm-url]
|
||||
[![NPM downloads][downloads-image]][downloads-url]
|
||||
[![Build status][travis-image]][travis-url]
|
||||
[![Test coverage][coveralls-image]][coveralls-url]
|
||||
|
||||
> Flatten an array of nested arrays into a single flat array. Accepts an optional depth.
|
||||
|
||||
## Installation
|
||||
|
||||
```
|
||||
npm install array-flatten --save
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```javascript
|
||||
var flatten = require('array-flatten')
|
||||
|
||||
flatten([1, [2, [3, [4, [5], 6], 7], 8], 9])
|
||||
//=> [1, 2, 3, 4, 5, 6, 7, 8, 9]
|
||||
|
||||
flatten([1, [2, [3, [4, [5], 6], 7], 8], 9], 2)
|
||||
//=> [1, 2, 3, [4, [5], 6], 7, 8, 9]
|
||||
|
||||
(function () {
|
||||
flatten(arguments) //=> [1, 2, 3]
|
||||
})(1, [2, 3])
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
|
||||
[npm-image]: https://img.shields.io/npm/v/array-flatten.svg?style=flat
|
||||
[npm-url]: https://npmjs.org/package/array-flatten
|
||||
[downloads-image]: https://img.shields.io/npm/dm/array-flatten.svg?style=flat
|
||||
[downloads-url]: https://npmjs.org/package/array-flatten
|
||||
[travis-image]: https://img.shields.io/travis/blakeembrey/array-flatten.svg?style=flat
|
||||
[travis-url]: https://travis-ci.org/blakeembrey/array-flatten
|
||||
[coveralls-image]: https://img.shields.io/coveralls/blakeembrey/array-flatten.svg?style=flat
|
||||
[coveralls-url]: https://coveralls.io/r/blakeembrey/array-flatten?branch=master
|
@ -0,0 +1,64 @@
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Expose `arrayFlatten`.
|
||||
*/
|
||||
module.exports = arrayFlatten
|
||||
|
||||
/**
|
||||
* Recursive flatten function with depth.
|
||||
*
|
||||
* @param {Array} array
|
||||
* @param {Array} result
|
||||
* @param {Number} depth
|
||||
* @return {Array}
|
||||
*/
|
||||
function flattenWithDepth (array, result, depth) {
|
||||
for (var i = 0; i < array.length; i++) {
|
||||
var value = array[i]
|
||||
|
||||
if (depth > 0 && Array.isArray(value)) {
|
||||
flattenWithDepth(value, result, depth - 1)
|
||||
} else {
|
||||
result.push(value)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursive flatten function. Omitting depth is slightly faster.
|
||||
*
|
||||
* @param {Array} array
|
||||
* @param {Array} result
|
||||
* @return {Array}
|
||||
*/
|
||||
function flattenForever (array, result) {
|
||||
for (var i = 0; i < array.length; i++) {
|
||||
var value = array[i]
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
flattenForever(value, result)
|
||||
} else {
|
||||
result.push(value)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* Flatten an array, with the ability to define a depth.
|
||||
*
|
||||
* @param {Array} array
|
||||
* @param {Number} depth
|
||||
* @return {Array}
|
||||
*/
|
||||
function arrayFlatten (array, depth) {
|
||||
if (depth == null) {
|
||||
return flattenForever(array, [])
|
||||
}
|
||||
|
||||
return flattenWithDepth(array, [], depth)
|
||||
}
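
// Example (illustrative): the depth argument counts how many levels of
// nesting are unwrapped; a depth of 0 returns a shallow copy.
//   arrayFlatten([1, [2, [3]]], 1) //=> [1, 2, [3]]
//   arrayFlatten([1, [2, [3]]], 0) //=> [1, [2, [3]]]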
|
@ -0,0 +1,39 @@
|
||||
{
|
||||
"name": "array-flatten",
|
||||
"version": "1.1.1",
|
||||
"description": "Flatten an array of nested arrays into a single flat array",
|
||||
"main": "array-flatten.js",
|
||||
"files": [
|
||||
"array-flatten.js",
|
||||
"LICENSE"
|
||||
],
|
||||
"scripts": {
|
||||
"test": "istanbul cover _mocha -- -R spec"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/blakeembrey/array-flatten.git"
|
||||
},
|
||||
"keywords": [
|
||||
"array",
|
||||
"flatten",
|
||||
"arguments",
|
||||
"depth"
|
||||
],
|
||||
"author": {
|
||||
"name": "Blake Embrey",
|
||||
"email": "hello@blakeembrey.com",
|
||||
"url": "http://blakeembrey.me"
|
||||
},
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/blakeembrey/array-flatten/issues"
|
||||
},
|
||||
"homepage": "https://github.com/blakeembrey/array-flatten",
|
||||
"devDependencies": {
|
||||
"istanbul": "^0.3.13",
|
||||
"mocha": "^2.2.4",
|
||||
"pre-commit": "^1.0.7",
|
||||
"standard": "^3.7.3"
|
||||
}
|
||||
}
|
@ -0,0 +1,672 @@
|
||||
1.20.3 / 2024-09-10
|
||||
===================
|
||||
|
||||
* deps: qs@6.13.0
|
||||
* add `depth` option to customize the depth level in the parser
|
||||
* IMPORTANT: The default `depth` level for parsing URL-encoded data is now `32` (previously was `Infinity`)
|
||||
|
||||
1.20.2 / 2023-02-21
|
||||
===================
|
||||
|
||||
* Fix strict json error message on Node.js 19+
|
||||
* deps: content-type@~1.0.5
|
||||
- perf: skip value escaping when unnecessary
|
||||
* deps: raw-body@2.5.2
|
||||
|
||||
1.20.1 / 2022-10-06
|
||||
===================
|
||||
|
||||
* deps: qs@6.11.0
|
||||
* perf: remove unnecessary object clone
|
||||
|
||||
1.20.0 / 2022-04-02
|
||||
===================
|
||||
|
||||
* Fix error message for json parse whitespace in `strict`
|
||||
* Fix internal error when inflated body exceeds limit
|
||||
* Prevent loss of async hooks context
|
||||
* Prevent hanging when request already read
|
||||
* deps: depd@2.0.0
|
||||
- Replace internal `eval` usage with `Function` constructor
|
||||
- Use instance methods on `process` to check for listeners
|
||||
* deps: http-errors@2.0.0
|
||||
- deps: depd@2.0.0
|
||||
- deps: statuses@2.0.1
|
||||
* deps: on-finished@2.4.1
|
||||
* deps: qs@6.10.3
|
||||
* deps: raw-body@2.5.1
|
||||
- deps: http-errors@2.0.0
|
||||
|
||||
1.19.2 / 2022-02-15
|
||||
===================
|
||||
|
||||
* deps: bytes@3.1.2
|
||||
* deps: qs@6.9.7
|
||||
* Fix handling of `__proto__` keys
|
||||
* deps: raw-body@2.4.3
|
||||
- deps: bytes@3.1.2
|
||||
|
||||
1.19.1 / 2021-12-10
|
||||
===================
|
||||
|
||||
* deps: bytes@3.1.1
|
||||
* deps: http-errors@1.8.1
|
||||
- deps: inherits@2.0.4
|
||||
- deps: toidentifier@1.0.1
|
||||
- deps: setprototypeof@1.2.0
|
||||
* deps: qs@6.9.6
|
||||
* deps: raw-body@2.4.2
|
||||
- deps: bytes@3.1.1
|
||||
- deps: http-errors@1.8.1
|
||||
* deps: safe-buffer@5.2.1
|
||||
* deps: type-is@~1.6.18
|
||||
|
||||
1.19.0 / 2019-04-25
|
||||
===================
|
||||
|
||||
* deps: bytes@3.1.0
|
||||
- Add petabyte (`pb`) support
|
||||
* deps: http-errors@1.7.2
|
||||
- Set constructor name when possible
|
||||
- deps: setprototypeof@1.1.1
|
||||
- deps: statuses@'>= 1.5.0 < 2'
|
||||
* deps: iconv-lite@0.4.24
|
||||
- Added encoding MIK
|
||||
* deps: qs@6.7.0
|
||||
- Fix parsing array brackets after index
|
||||
* deps: raw-body@2.4.0
|
||||
- deps: bytes@3.1.0
|
||||
- deps: http-errors@1.7.2
|
||||
- deps: iconv-lite@0.4.24
|
||||
* deps: type-is@~1.6.17
|
||||
- deps: mime-types@~2.1.24
|
||||
- perf: prevent internal `throw` on invalid type
|
||||
|
||||
1.18.3 / 2018-05-14
|
||||
===================
|
||||
|
||||
* Fix stack trace for strict json parse error
|
||||
* deps: depd@~1.1.2
|
||||
- perf: remove argument reassignment
|
||||
* deps: http-errors@~1.6.3
|
||||
- deps: depd@~1.1.2
|
||||
- deps: setprototypeof@1.1.0
|
||||
- deps: statuses@'>= 1.3.1 < 2'
|
||||
* deps: iconv-lite@0.4.23
|
||||
- Fix loading encoding with year appended
|
||||
- Fix deprecation warnings on Node.js 10+
|
||||
* deps: qs@6.5.2
|
||||
* deps: raw-body@2.3.3
|
||||
- deps: http-errors@1.6.3
|
||||
- deps: iconv-lite@0.4.23
|
||||
* deps: type-is@~1.6.16
|
||||
- deps: mime-types@~2.1.18
|
||||
|
||||
1.18.2 / 2017-09-22
|
||||
===================
|
||||
|
||||
* deps: debug@2.6.9
|
||||
* perf: remove argument reassignment
|
||||
|
||||
1.18.1 / 2017-09-12
|
||||
===================
|
||||
|
||||
* deps: content-type@~1.0.4
|
||||
- perf: remove argument reassignment
|
||||
- perf: skip parameter parsing when no parameters
|
||||
* deps: iconv-lite@0.4.19
|
||||
- Fix ISO-8859-1 regression
|
||||
- Update Windows-1255
|
||||
* deps: qs@6.5.1
|
||||
- Fix parsing & compacting very deep objects
|
||||
* deps: raw-body@2.3.2
|
||||
- deps: iconv-lite@0.4.19
|
||||
|
||||
1.18.0 / 2017-09-08
|
||||
===================
|
||||
|
||||
* Fix JSON strict violation error to match native parse error
|
||||
* Include the `body` property on verify errors
|
||||
* Include the `type` property on all generated errors
|
||||
* Use `http-errors` to set status code on errors
|
||||
* deps: bytes@3.0.0
|
||||
* deps: debug@2.6.8
|
||||
* deps: depd@~1.1.1
|
||||
- Remove unnecessary `Buffer` loading
|
||||
* deps: http-errors@~1.6.2
|
||||
- deps: depd@1.1.1
|
||||
* deps: iconv-lite@0.4.18
|
||||
- Add support for React Native
|
||||
- Add a warning if not loaded as utf-8
|
||||
- Fix CESU-8 decoding in Node.js 8
|
||||
- Improve speed of ISO-8859-1 encoding
|
||||
* deps: qs@6.5.0
|
||||
* deps: raw-body@2.3.1
|
||||
- Use `http-errors` for standard emitted errors
|
||||
- deps: bytes@3.0.0
|
||||
- deps: iconv-lite@0.4.18
|
||||
- perf: skip buffer decoding on overage chunk
|
||||
* perf: prevent internal `throw` when missing charset
|
||||
|
||||
1.17.2 / 2017-05-17
|
||||
===================
|
||||
|
||||
* deps: debug@2.6.7
|
||||
- Fix `DEBUG_MAX_ARRAY_LENGTH`
|
||||
- deps: ms@2.0.0
|
||||
* deps: type-is@~1.6.15
|
||||
- deps: mime-types@~2.1.15
|
||||
|
||||
1.17.1 / 2017-03-06
|
||||
===================
|
||||
|
||||
* deps: qs@6.4.0
|
||||
- Fix regression parsing keys starting with `[`
|
||||
|
||||
1.17.0 / 2017-03-01
|
||||
===================
|
||||
|
||||
* deps: http-errors@~1.6.1
|
||||
- Make `message` property enumerable for `HttpError`s
|
||||
- deps: setprototypeof@1.0.3
|
||||
* deps: qs@6.3.1
|
||||
- Fix compacting nested arrays
|
||||
|
||||
1.16.1 / 2017-02-10
|
||||
===================
|
||||
|
||||
* deps: debug@2.6.1
|
||||
- Fix deprecation messages in WebStorm and other editors
|
||||
- Undeprecate `DEBUG_FD` set to `1` or `2`
|
||||
|
||||
1.16.0 / 2017-01-17
|
||||
===================
|
||||
|
||||
* deps: debug@2.6.0
|
||||
- Allow colors in workers
|
||||
- Deprecated `DEBUG_FD` environment variable
|
||||
- Fix error when running under React Native
|
||||
- Use same color for same namespace
|
||||
- deps: ms@0.7.2
|
||||
* deps: http-errors@~1.5.1
|
||||
- deps: inherits@2.0.3
|
||||
- deps: setprototypeof@1.0.2
|
||||
- deps: statuses@'>= 1.3.1 < 2'
|
||||
* deps: iconv-lite@0.4.15
|
||||
- Added encoding MS-31J
|
||||
- Added encoding MS-932
|
||||
- Added encoding MS-936
|
||||
- Added encoding MS-949
|
||||
- Added encoding MS-950
|
||||
- Fix GBK/GB18030 handling of Euro character
|
||||
* deps: qs@6.2.1
|
||||
- Fix array parsing from skipping empty values
|
||||
* deps: raw-body@~2.2.0
|
||||
- deps: iconv-lite@0.4.15
|
||||
* deps: type-is@~1.6.14
|
||||
- deps: mime-types@~2.1.13
|
||||
|
||||
1.15.2 / 2016-06-19
|
||||
===================
|
||||
|
||||
* deps: bytes@2.4.0
|
||||
* deps: content-type@~1.0.2
|
||||
- perf: enable strict mode
|
||||
* deps: http-errors@~1.5.0
|
||||
- Use `setprototypeof` module to replace `__proto__` setting
|
||||
- deps: statuses@'>= 1.3.0 < 2'
|
||||
- perf: enable strict mode
|
||||
* deps: qs@6.2.0
|
||||
* deps: raw-body@~2.1.7
|
||||
- deps: bytes@2.4.0
|
||||
- perf: remove double-cleanup on happy path
|
||||
* deps: type-is@~1.6.13
|
||||
- deps: mime-types@~2.1.11
|
||||
|
||||
1.15.1 / 2016-05-05
|
||||
===================
|
||||
|
||||
* deps: bytes@2.3.0
|
||||
- Drop partial bytes on all parsed units
|
||||
- Fix parsing byte string that looks like hex
|
||||
* deps: raw-body@~2.1.6
|
||||
- deps: bytes@2.3.0
|
||||
* deps: type-is@~1.6.12
|
||||
- deps: mime-types@~2.1.10
|
||||
|
||||
1.15.0 / 2016-02-10
|
||||
===================
|
||||
|
||||
* deps: http-errors@~1.4.0
|
||||
- Add `HttpError` export, for `err instanceof createError.HttpError`
|
||||
- deps: inherits@2.0.1
|
||||
- deps: statuses@'>= 1.2.1 < 2'
|
||||
* deps: qs@6.1.0
|
||||
* deps: type-is@~1.6.11
|
||||
- deps: mime-types@~2.1.9
|
||||
|
||||
1.14.2 / 2015-12-16
|
||||
===================
|
||||
|
||||
* deps: bytes@2.2.0
|
||||
* deps: iconv-lite@0.4.13
|
||||
* deps: qs@5.2.0
|
||||
* deps: raw-body@~2.1.5
|
||||
- deps: bytes@2.2.0
|
||||
- deps: iconv-lite@0.4.13
|
||||
* deps: type-is@~1.6.10
|
||||
- deps: mime-types@~2.1.8
|
||||
|
||||
1.14.1 / 2015-09-27
|
||||
===================
|
||||
|
||||
* Fix issue where invalid charset results in 400 when `verify` used
|
||||
* deps: iconv-lite@0.4.12
|
||||
- Fix CESU-8 decoding in Node.js 4.x
|
||||
* deps: raw-body@~2.1.4
|
||||
- Fix masking critical errors from `iconv-lite`
|
||||
- deps: iconv-lite@0.4.12
|
||||
* deps: type-is@~1.6.9
|
||||
- deps: mime-types@~2.1.7
|
||||
|
||||
1.14.0 / 2015-09-16
|
||||
===================
|
||||
|
||||
* Fix JSON strict parse error to match syntax errors
|
||||
* Provide static `require` analysis in `urlencoded` parser
|
||||
* deps: depd@~1.1.0
|
||||
- Support web browser loading
|
||||
* deps: qs@5.1.0
|
||||
* deps: raw-body@~2.1.3
|
||||
- Fix sync callback when attaching data listener causes sync read
|
||||
* deps: type-is@~1.6.8
|
||||
- Fix type error when given invalid type to match against
|
||||
- deps: mime-types@~2.1.6
|
||||
|
||||
1.13.3 / 2015-07-31
|
||||
===================
|
||||
|
||||
* deps: type-is@~1.6.6
|
||||
- deps: mime-types@~2.1.4
|
||||
|
||||
1.13.2 / 2015-07-05
|
||||
===================
|
||||
|
||||
* deps: iconv-lite@0.4.11
|
||||
* deps: qs@4.0.0
|
||||
- Fix dropping parameters like `hasOwnProperty`
|
||||
- Fix user-visible incompatibilities from 3.1.0
|
||||
- Fix various parsing edge cases
|
||||
* deps: raw-body@~2.1.2
|
||||
- Fix error stack traces to skip `makeError`
|
||||
- deps: iconv-lite@0.4.11
|
||||
* deps: type-is@~1.6.4
|
||||
- deps: mime-types@~2.1.2
|
||||
- perf: enable strict mode
|
||||
- perf: remove argument reassignment
|
||||
|
||||
1.13.1 / 2015-06-16
|
||||
===================
|
||||
|
||||
* deps: qs@2.4.2
|
||||
- Downgraded from 3.1.0 because of user-visible incompatibilities
|
||||
|
||||
1.13.0 / 2015-06-14
|
||||
===================
|
||||
|
||||
* Add `statusCode` property on `Error`s, in addition to `status`
|
||||
* Change `type` default to `application/json` for JSON parser
|
||||
* Change `type` default to `application/x-www-form-urlencoded` for urlencoded parser
|
||||
* Provide static `require` analysis
|
||||
* Use the `http-errors` module to generate errors
|
||||
* deps: bytes@2.1.0
|
||||
- Slight optimizations
|
||||
* deps: iconv-lite@0.4.10
|
||||
- The encoding UTF-16 without BOM now defaults to UTF-16LE when detection fails
|
||||
- Leading BOM is now removed when decoding
|
||||
* deps: on-finished@~2.3.0
|
||||
- Add defined behavior for HTTP `CONNECT` requests
|
||||
- Add defined behavior for HTTP `Upgrade` requests
|
||||
- deps: ee-first@1.1.1
|
||||
* deps: qs@3.1.0
|
||||
- Fix dropping parameters like `hasOwnProperty`
|
||||
- Fix various parsing edge cases
|
||||
- Parsed object now has `null` prototype
|
||||
* deps: raw-body@~2.1.1
|
||||
- Use `unpipe` module for unpiping requests
|
||||
- deps: iconv-lite@0.4.10
|
||||
* deps: type-is@~1.6.3
|
||||
- deps: mime-types@~2.1.1
|
||||
- perf: reduce try block size
|
||||
- perf: remove bitwise operations
|
||||
* perf: enable strict mode
|
||||
* perf: remove argument reassignment
|
||||
* perf: remove delete call
|
||||
|
||||
1.12.4 / 2015-05-10
|
||||
===================
|
||||
|
||||
* deps: debug@~2.2.0
|
||||
* deps: qs@2.4.2
|
||||
- Fix allowing parameters like `constructor`
|
||||
* deps: on-finished@~2.2.1
|
||||
* deps: raw-body@~2.0.1
|
||||
- Fix a false-positive when unpiping in Node.js 0.8
|
||||
- deps: bytes@2.0.1
|
||||
* deps: type-is@~1.6.2
|
||||
- deps: mime-types@~2.0.11
|
||||
|
||||
1.12.3 / 2015-04-15
|
||||
===================
|
||||
|
||||
* Slight efficiency improvement when not debugging
|
||||
* deps: depd@~1.0.1
|
||||
* deps: iconv-lite@0.4.8
|
||||
- Add encoding alias UNICODE-1-1-UTF-7
|
||||
* deps: raw-body@1.3.4
|
||||
- Fix hanging callback if request aborts during read
|
||||
- deps: iconv-lite@0.4.8
|
||||
|
||||
1.12.2 / 2015-03-16
|
||||
===================
|
||||
|
||||
* deps: qs@2.4.1
|
||||
- Fix error when parameter `hasOwnProperty` is present
|
||||
|
||||
1.12.1 / 2015-03-15
|
||||
===================
|
||||
|
||||
* deps: debug@~2.1.3
|
||||
- Fix high intensity foreground color for bold
|
||||
- deps: ms@0.7.0
|
||||
* deps: type-is@~1.6.1
|
||||
- deps: mime-types@~2.0.10
|
||||
|
||||
1.12.0 / 2015-02-13
|
||||
===================
|
||||
|
||||
* add `debug` messages
|
||||
* accept a function for the `type` option
|
||||
* use `content-type` to parse `Content-Type` headers
|
||||
* deps: iconv-lite@0.4.7
|
||||
- Gracefully support enumerables on `Object.prototype`
|
||||
* deps: raw-body@1.3.3
|
||||
- deps: iconv-lite@0.4.7
|
||||
* deps: type-is@~1.6.0
|
||||
- fix argument reassignment
|
||||
- fix false-positives in `hasBody` `Transfer-Encoding` check
|
||||
- support wildcard for both type and subtype (`*/*`)
|
||||
- deps: mime-types@~2.0.9
|
||||
|
||||
1.11.0 / 2015-01-30
|
||||
===================
|
||||
|
||||
* make internal `extended: true` depth limit infinity
|
||||
* deps: type-is@~1.5.6
|
||||
- deps: mime-types@~2.0.8
|
||||
|
||||
1.10.2 / 2015-01-20
|
||||
===================
|
||||
|
||||
* deps: iconv-lite@0.4.6
|
||||
- Fix rare aliases of single-byte encodings
|
||||
* deps: raw-body@1.3.2
|
||||
- deps: iconv-lite@0.4.6
|
||||
|
||||
1.10.1 / 2015-01-01
|
||||
===================
|
||||
|
||||
* deps: on-finished@~2.2.0
|
||||
* deps: type-is@~1.5.5
|
||||
- deps: mime-types@~2.0.7
|
||||
|
||||
1.10.0 / 2014-12-02
|
||||
===================
|
||||
|
||||
* make internal `extended: true` array limit dynamic
|
||||
|
||||
1.9.3 / 2014-11-21
|
||||
==================
|
||||
|
||||
* deps: iconv-lite@0.4.5
|
||||
- Fix Windows-31J and X-SJIS encoding support
|
||||
* deps: qs@2.3.3
|
||||
- Fix `arrayLimit` behavior
|
||||
* deps: raw-body@1.3.1
|
||||
- deps: iconv-lite@0.4.5
|
||||
* deps: type-is@~1.5.3
|
||||
- deps: mime-types@~2.0.3
|
||||
|
||||
1.9.2 / 2014-10-27
|
||||
==================
|
||||
|
||||
* deps: qs@2.3.2
|
||||
- Fix parsing of mixed objects and values
|
||||
|
||||
1.9.1 / 2014-10-22
|
||||
==================
|
||||
|
||||
* deps: on-finished@~2.1.1
|
||||
- Fix handling of pipelined requests
|
||||
* deps: qs@2.3.0
|
||||
- Fix parsing of mixed implicit and explicit arrays
|
||||
* deps: type-is@~1.5.2
|
||||
- deps: mime-types@~2.0.2
|
||||
|
||||
1.9.0 / 2014-09-24
|
||||
==================
|
||||
|
||||
* include the charset in "unsupported charset" error message
|
||||
* include the encoding in "unsupported content encoding" error message
|
||||
* deps: depd@~1.0.0
|
||||
|
||||
1.8.4 / 2014-09-23
|
||||
==================
|
||||
|
||||
* fix content encoding to be case-insensitive
|
||||
|
||||
1.8.3 / 2014-09-19
|
||||
==================
|
||||
|
||||
* deps: qs@2.2.4
|
||||
- Fix issue with object keys starting with numbers truncated
|
||||
|
||||
1.8.2 / 2014-09-15
|
||||
==================
|
||||
|
||||
* deps: depd@0.4.5
|
||||
|
||||
1.8.1 / 2014-09-07
|
||||
==================
|
||||
|
||||
* deps: media-typer@0.3.0
|
||||
* deps: type-is@~1.5.1
|
||||
|
||||
1.8.0 / 2014-09-05
|
||||
==================
|
||||
|
||||
* make empty-body-handling consistent between chunked requests
|
||||
- empty `json` produces `{}`
|
||||
- empty `raw` produces `new Buffer(0)`
|
||||
- empty `text` produces `''`
|
||||
- empty `urlencoded` produces `{}`
|
||||
* deps: qs@2.2.3
|
||||
- Fix issue where first empty value in array is discarded
|
||||
* deps: type-is@~1.5.0
|
||||
- fix `hasbody` to be true for `content-length: 0`
|
||||
|
||||
1.7.0 / 2014-09-01
|
||||
==================
|
||||
|
||||
* add `parameterLimit` option to `urlencoded` parser
|
||||
* change `urlencoded` extended array limit to 100
|
||||
* respond with 413 when over `parameterLimit` in `urlencoded`
|
||||
|
||||
1.6.7 / 2014-08-29
|
||||
==================
|
||||
|
||||
* deps: qs@2.2.2
|
||||
- Remove unnecessary cloning
|
||||
|
||||
1.6.6 / 2014-08-27
|
||||
==================
|
||||
|
||||
* deps: qs@2.2.0
|
||||
- Array parsing fix
|
||||
- Performance improvements
|
||||
|
||||
1.6.5 / 2014-08-16
|
||||
==================
|
||||
|
||||
* deps: on-finished@2.1.0
|
||||
|
||||
1.6.4 / 2014-08-14
|
||||
==================
|
||||
|
||||
* deps: qs@1.2.2
|
||||
|
||||
1.6.3 / 2014-08-10
|
||||
==================
|
||||
|
||||
* deps: qs@1.2.1
|
||||
|
||||
1.6.2 / 2014-08-07
|
||||
==================
|
||||
|
||||
* deps: qs@1.2.0
|
||||
- Fix parsing array of objects
|
||||
|
||||
1.6.1 / 2014-08-06
|
||||
==================
|
||||
|
||||
* deps: qs@1.1.0
|
||||
- Accept urlencoded square brackets
|
||||
- Accept empty values in implicit array notation
|
||||
|
||||
1.6.0 / 2014-08-05
|
||||
==================
|
||||
|
||||
* deps: qs@1.0.2
|
||||
- Complete rewrite
|
||||
- Limits array length to 20
|
||||
- Limits object depth to 5
|
||||
- Limits parameters to 1,000
|
||||
|
||||
1.5.2 / 2014-07-27
|
||||
==================
|
||||
|
||||
* deps: depd@0.4.4
|
||||
- Work-around v8 generating empty stack traces
|
||||
|
||||
1.5.1 / 2014-07-26
|
||||
==================
|
||||
|
||||
* deps: depd@0.4.3
|
||||
- Fix exception when global `Error.stackTraceLimit` is too low
|
||||
|
||||
1.5.0 / 2014-07-20
|
||||
==================
|
||||
|
||||
* deps: depd@0.4.2
|
||||
- Add `TRACE_DEPRECATION` environment variable
|
||||
- Remove non-standard grey color from color output
|
||||
- Support `--no-deprecation` argument
|
||||
- Support `--trace-deprecation` argument
|
||||
* deps: iconv-lite@0.4.4
|
||||
- Added encoding UTF-7
|
||||
* deps: raw-body@1.3.0
|
||||
- deps: iconv-lite@0.4.4
|
||||
- Added encoding UTF-7
|
||||
- Fix `Cannot switch to old mode now` error on Node.js 0.10+
|
||||
* deps: type-is@~1.3.2
|
||||
|
||||
1.4.3 / 2014-06-19
|
||||
==================
|
||||
|
||||
* deps: type-is@1.3.1
|
||||
- fix global variable leak
|
||||
|
||||
1.4.2 / 2014-06-19
|
||||
==================
|
||||
|
||||
* deps: type-is@1.3.0
|
||||
- improve type parsing
|
||||
|
||||
1.4.1 / 2014-06-19
|
||||
==================
|
||||
|
||||
* fix urlencoded extended deprecation message
|
||||
|
||||
1.4.0 / 2014-06-19
|
||||
==================
|
||||
|
||||
* add `text` parser
|
||||
* add `raw` parser
|
||||
* check accepted charset in content-type (accepts utf-8)
|
||||
* check accepted encoding in content-encoding (accepts identity)
|
||||
* deprecate `bodyParser()` middleware; use `.json()` and `.urlencoded()` as needed
|
||||
* deprecate `urlencoded()` without provided `extended` option
|
||||
* lazy-load urlencoded parsers
|
||||
* parsers split into files for reduced mem usage
|
||||
* support gzip and deflate bodies
|
||||
- set `inflate: false` to turn off
|
||||
* deps: raw-body@1.2.2
|
||||
- Support all encodings from `iconv-lite`
|
||||
|
||||
1.3.1 / 2014-06-11
|
||||
==================
|
||||
|
||||
* deps: type-is@1.2.1
|
||||
- Switch dependency from mime to mime-types@1.0.0
|
||||
|
||||
1.3.0 / 2014-05-31
|
||||
==================
|
||||
|
||||
* add `extended` option to urlencoded parser
|
||||
|
||||
1.2.2 / 2014-05-27
|
||||
==================
|
||||
|
||||
* deps: raw-body@1.1.6
|
||||
- assert stream encoding on node.js 0.8
|
||||
- assert stream encoding on node.js < 0.10.6
|
||||
- deps: bytes@1
|
||||
|
||||
1.2.1 / 2014-05-26
|
||||
==================
|
||||
|
||||
* invoke `next(err)` after request fully read
|
||||
- prevents hung responses and socket hang ups
|
||||
|
||||
1.2.0 / 2014-05-11
|
||||
==================
|
||||
|
||||
* add `verify` option
|
||||
* deps: type-is@1.2.0
|
||||
- support suffix matching
|
||||
|
||||
1.1.2 / 2014-05-11
|
||||
==================
|
||||
|
||||
* improve json parser speed
|
||||
|
||||
1.1.1 / 2014-05-11
|
||||
==================
|
||||
|
||||
* fix repeated limit parsing with every request
|
||||
|
||||
1.1.0 / 2014-05-10
|
||||
==================
|
||||
|
||||
* add `type` option
|
||||
* deps: pin for safety and consistency
|
||||
|
||||
1.0.2 / 2014-04-14
|
||||
==================
|
||||
|
||||
* use `type-is` module
|
||||
|
||||
1.0.1 / 2014-03-20
|
||||
==================
|
||||
|
||||
* lower default limits to 100kb
|
@ -0,0 +1,23 @@
|
||||
(The MIT License)
|
||||
|
||||
Copyright (c) 2014 Jonathan Ong <me@jongleberry.com>
|
||||
Copyright (c) 2014-2015 Douglas Christopher Wilson <doug@somethingdoug.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
'Software'), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
@ -0,0 +1,476 @@
|
||||
# body-parser
|
||||
|
||||
[![NPM Version][npm-version-image]][npm-url]
|
||||
[![NPM Downloads][npm-downloads-image]][npm-url]
|
||||
[![Build Status][ci-image]][ci-url]
|
||||
[![Test Coverage][coveralls-image]][coveralls-url]
|
||||
[![OpenSSF Scorecard Badge][ossf-scorecard-badge]][ossf-scorecard-visualizer]
|
||||
|
||||
Node.js body parsing middleware.
|
||||
|
||||
Parse incoming request bodies in a middleware before your handlers, available
|
||||
under the `req.body` property.
|
||||
|
||||
**Note** As `req.body`'s shape is based on user-controlled input, all
|
||||
properties and values in this object are untrusted and should be validated
|
||||
before trusting. For example, `req.body.foo.toString()` may fail in multiple
|
||||
ways: the `foo` property may not be there or may not be a string,
|
||||
and `toString` may not be a function but instead a string or other user input.
|
||||
|
||||
[Learn about the anatomy of an HTTP transaction in Node.js](https://nodejs.org/en/docs/guides/anatomy-of-an-http-transaction/).
|
||||
|
||||
_This does not handle multipart bodies_, due to their complex and typically
|
||||
large nature. For multipart bodies, you may be interested in the following
|
||||
modules:
|
||||
|
||||
* [busboy](https://www.npmjs.org/package/busboy#readme) and
|
||||
[connect-busboy](https://www.npmjs.org/package/connect-busboy#readme)
|
||||
* [multiparty](https://www.npmjs.org/package/multiparty#readme) and
|
||||
[connect-multiparty](https://www.npmjs.org/package/connect-multiparty#readme)
|
||||
* [formidable](https://www.npmjs.org/package/formidable#readme)
|
||||
* [multer](https://www.npmjs.org/package/multer#readme)
|
||||
|
||||
This module provides the following parsers:
|
||||
|
||||
* [JSON body parser](#bodyparserjsonoptions)
|
||||
* [Raw body parser](#bodyparserrawoptions)
|
||||
* [Text body parser](#bodyparsertextoptions)
|
||||
* [URL-encoded form body parser](#bodyparserurlencodedoptions)
|
||||
|
||||
Other body parsers you might be interested in:
|
||||
|
||||
- [body](https://www.npmjs.org/package/body#readme)
|
||||
- [co-body](https://www.npmjs.org/package/co-body#readme)
|
||||
|
||||
## Installation
|
||||
|
||||
```sh
|
||||
$ npm install body-parser
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
```js
|
||||
var bodyParser = require('body-parser')
|
||||
```
|
||||
|
||||
The `bodyParser` object exposes various factories to create middlewares. All
|
||||
middlewares will populate the `req.body` property with the parsed body when
|
||||
the `Content-Type` request header matches the `type` option, or an empty
|
||||
object (`{}`) if there was no body to parse, the `Content-Type` was not matched,
|
||||
or an error occurred.
|
||||
|
||||
The various errors returned by this module are described in the
|
||||
[errors section](#errors).
|
||||
|
||||
### bodyParser.json([options])
|
||||
|
||||
Returns middleware that only parses `json` and only looks at requests where
|
||||
the `Content-Type` header matches the `type` option. This parser accepts any
|
||||
Unicode encoding of the body and supports automatic inflation of `gzip` and
|
||||
`deflate` encodings.
|
||||
|
||||
A new `body` object containing the parsed data is populated on the `request`
|
||||
object after the middleware (i.e. `req.body`).
|
||||
|
||||
#### Options
|
||||
|
||||
The `json` function takes an optional `options` object that may contain any of
|
||||
the following keys:
|
||||
|
||||
##### inflate
|
||||
|
||||
When set to `true`, then deflated (compressed) bodies will be inflated; when
|
||||
`false`, deflated bodies are rejected. Defaults to `true`.
|
||||
|
||||
##### limit
|
||||
|
||||
Controls the maximum request body size. If this is a number, then the value
|
||||
specifies the number of bytes; if it is a string, the value is passed to the
|
||||
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
|
||||
to `'100kb'`.
|
||||
|
||||
##### reviver
|
||||
|
||||
The `reviver` option is passed directly to `JSON.parse` as the second
|
||||
argument. You can find more information on this argument
|
||||
[in the MDN documentation about JSON.parse](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#Example.3A_Using_the_reviver_parameter).
|
||||
|
||||
##### strict
|
||||
|
||||
When set to `true`, will only accept arrays and objects; when `false` will
|
||||
accept anything `JSON.parse` accepts. Defaults to `true`.
|
||||
|
||||
##### type
|
||||
|
||||
The `type` option is used to determine what media type the middleware will
|
||||
parse. This option can be a string, array of strings, or a function. If not a
|
||||
function, the `type` option is passed directly to the
|
||||
[type-is](https://www.npmjs.org/package/type-is#readme) library and this can
|
||||
be an extension name (like `json`), a mime type (like `application/json`), or
|
||||
a mime type with a wildcard (like `*/*` or `*/json`). If a function, the `type`
|
||||
option is called as `fn(req)` and the request is parsed if it returns a truthy
|
||||
value. Defaults to `application/json`.
|
||||
|
||||
##### verify
|
||||
|
||||
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
|
||||
where `buf` is a `Buffer` of the raw request body and `encoding` is the
|
||||
encoding of the request. The parsing can be aborted by throwing an error.
|
||||
|
||||
### bodyParser.raw([options])
|
||||
|
||||
Returns middleware that parses all bodies as a `Buffer` and only looks at
|
||||
requests where the `Content-Type` header matches the `type` option. This
|
||||
parser supports automatic inflation of `gzip` and `deflate` encodings.
|
||||
|
||||
A new `body` object containing the parsed data is populated on the `request`
|
||||
object after the middleware (i.e. `req.body`). This will be a `Buffer` object
|
||||
of the body.
|
||||
|
||||
#### Options
|
||||
|
||||
The `raw` function takes an optional `options` object that may contain any of
|
||||
the following keys:
|
||||
|
||||
##### inflate
|
||||
|
||||
When set to `true`, then deflated (compressed) bodies will be inflated; when
|
||||
`false`, deflated bodies are rejected. Defaults to `true`.
|
||||
|
||||
##### limit
|
||||
|
||||
Controls the maximum request body size. If this is a number, then the value
|
||||
specifies the number of bytes; if it is a string, the value is passed to the
|
||||
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
|
||||
to `'100kb'`.
|
||||
|
||||
##### type
|
||||
|
||||
The `type` option is used to determine what media type the middleware will
|
||||
parse. This option can be a string, array of strings, or a function.
|
||||
If not a function, the `type` option is passed directly to the
|
||||
[type-is](https://www.npmjs.org/package/type-is#readme) library and this
|
||||
can be an extension name (like `bin`), a mime type (like
|
||||
`application/octet-stream`), or a mime type with a wildcard (like `*/*` or
|
||||
`application/*`). If a function, the `type` option is called as `fn(req)`
|
||||
and the request is parsed if it returns a truthy value. Defaults to
|
||||
`application/octet-stream`.
|
||||
|
||||
##### verify
|
||||
|
||||
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
|
||||
where `buf` is a `Buffer` of the raw request body and `encoding` is the
|
||||
encoding of the request. The parsing can be aborted by throwing an error.
|
||||
|
||||
### bodyParser.text([options])
|
||||
|
||||
Returns middleware that parses all bodies as a string and only looks at
|
||||
requests where the `Content-Type` header matches the `type` option. This
|
||||
parser supports automatic inflation of `gzip` and `deflate` encodings.
|
||||
|
||||
A new `body` string containing the parsed data is populated on the `request`
|
||||
object after the middleware (i.e. `req.body`). This will be a string of the
|
||||
body.
|
||||
|
||||
#### Options
|
||||
|
||||
The `text` function takes an optional `options` object that may contain any of
|
||||
the following keys:
|
||||
|
||||
##### defaultCharset
|
||||
|
||||
Specify the default character set for the text content if the charset is not
|
||||
specified in the `Content-Type` header of the request. Defaults to `utf-8`.
|
||||
|
||||
##### inflate
|
||||
|
||||
When set to `true`, then deflated (compressed) bodies will be inflated; when
|
||||
`false`, deflated bodies are rejected. Defaults to `true`.
|
||||
|
||||
##### limit
|
||||
|
||||
Controls the maximum request body size. If this is a number, then the value
|
||||
specifies the number of bytes; if it is a string, the value is passed to the
|
||||
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
|
||||
to `'100kb'`.
|
||||
|
||||
##### type
|
||||
|
||||
The `type` option is used to determine what media type the middleware will
|
||||
parse. This option can be a string, array of strings, or a function. If not
|
||||
a function, the `type` option is passed directly to the
|
||||
[type-is](https://www.npmjs.org/package/type-is#readme) library and this can
|
||||
be an extension name (like `txt`), a mime type (like `text/plain`), or a mime
|
||||
type with a wildcard (like `*/*` or `text/*`). If a function, the `type`
|
||||
option is called as `fn(req)` and the request is parsed if it returns a
|
||||
truthy value. Defaults to `text/plain`.
|
||||
|
||||
##### verify
|
||||
|
||||
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
|
||||
where `buf` is a `Buffer` of the raw request body and `encoding` is the
|
||||
encoding of the request. The parsing can be aborted by throwing an error.
|
||||
|
||||
### bodyParser.urlencoded([options])
|
||||
|
||||
Returns middleware that only parses `urlencoded` bodies and only looks at
|
||||
requests where the `Content-Type` header matches the `type` option. This
|
||||
parser accepts only UTF-8 encoding of the body and supports automatic
|
||||
inflation of `gzip` and `deflate` encodings.
|
||||
|
||||
A new `body` object containing the parsed data is populated on the `request`
|
||||
object after the middleware (i.e. `req.body`). This object will contain
|
||||
key-value pairs, where the value can be a string or array (when `extended` is
|
||||
`false`), or any type (when `extended` is `true`).
|
||||
|
||||
#### Options
|
||||
|
||||
The `urlencoded` function takes an optional `options` object that may contain
|
||||
any of the following keys:
|
||||
|
||||
##### extended
|
||||
|
||||
The `extended` option allows you to choose between parsing the URL-encoded data
|
||||
with the `querystring` library (when `false`) or the `qs` library (when
|
||||
`true`). The "extended" syntax allows for rich objects and arrays to be
|
||||
encoded into the URL-encoded format, allowing for a JSON-like experience
|
||||
with URL-encoded. For more information, please
|
||||
[see the qs library](https://www.npmjs.org/package/qs#readme).
|
||||
|
||||
Defaults to `true`, but using the default has been deprecated. Please
|
||||
read up on the difference between `qs` and `querystring` and choose the
|
||||
appropriate setting.
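
As a rough illustration of the difference (a sketch; see the `qs` and `querystring` documentation for the full parsing rules):

```js
var bodyParser = require('body-parser')

// extended: false uses the querystring module, so bracket syntax
// stays a literal key:  "a[b]=c"  ->  { 'a[b]': 'c' }
var simpleParser = bodyParser.urlencoded({ extended: false })

// extended: true uses the qs module, so brackets build nested
// objects:              "a[b]=c"  ->  { a: { b: 'c' } }
var richParser = bodyParser.urlencoded({ extended: true })
```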
|
||||
|
||||
##### inflate
|
||||
|
||||
When set to `true`, then deflated (compressed) bodies will be inflated; when
|
||||
`false`, deflated bodies are rejected. Defaults to `true`.
|
||||
|
||||
##### limit
|
||||
|
||||
Controls the maximum request body size. If this is a number, then the value
|
||||
specifies the number of bytes; if it is a string, the value is passed to the
|
||||
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
|
||||
to `'100kb'`.
|
||||
|
||||
##### parameterLimit
|
||||
|
||||
The `parameterLimit` option controls the maximum number of parameters that
|
||||
are allowed in the URL-encoded data. If a request contains more parameters
|
||||
than this value, a 413 will be returned to the client. Defaults to `1000`.
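
For example, to accept larger forms (a sketch):

```js
var bodyParser = require('body-parser')

// allow up to 5000 parameters instead of the default 1000
var formParser = bodyParser.urlencoded({ extended: false, parameterLimit: 5000 })
```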
|
||||
|
||||
##### type
|
||||
|
||||
The `type` option is used to determine what media type the middleware will
|
||||
parse. This option can be a string, array of strings, or a function. If not
|
||||
a function, the `type` option is passed directly to the
|
||||
[type-is](https://www.npmjs.org/package/type-is#readme) library and this can
|
||||
be an extension name (like `urlencoded`), a mime type (like
|
||||
`application/x-www-form-urlencoded`), or a mime type with a wildcard (like
|
||||
`*/x-www-form-urlencoded`). If a function, the `type` option is called as
|
||||
`fn(req)` and the request is parsed if it returns a truthy value. Defaults
|
||||
to `application/x-www-form-urlencoded`.
|
||||
|
||||
##### verify
|
||||
|
||||
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
|
||||
where `buf` is a `Buffer` of the raw request body and `encoding` is the
|
||||
encoding of the request. The parsing can be aborted by throwing an error.
|
||||
|
||||
##### depth
|
||||
|
||||
The `depth` option is used to configure the maximum depth of the `qs` library when `extended` is `true`. This allows you to limit the number of keys that are parsed and can be useful to prevent certain types of abuse. Defaults to `32`. It is recommended to keep this value as low as possible.
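
For example (a sketch; the option only applies when `extended` is `true`):

```js
var bodyParser = require('body-parser')

// respond with a 400 error if the URL-encoded body nests deeper than 10 levels
var parser = bodyParser.urlencoded({ extended: true, depth: 10 })
```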
|
||||
|
||||
## Errors
|
||||
|
||||
The middlewares provided by this module create errors using the
|
||||
[`http-errors` module](https://www.npmjs.com/package/http-errors). The errors
|
||||
will typically have a `status`/`statusCode` property that contains the suggested
|
||||
HTTP response code, an `expose` property to determine if the `message` property
|
||||
should be displayed to the client, a `type` property to determine the type of
|
||||
error without matching against the `message`, and a `body` property containing
|
||||
the read body, if available.
|
||||
|
||||
The following are the common errors created, though any error can come through
|
||||
for various reasons.
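
Because the errors expose these properties, they can be handled in an ordinary Express error handler. A minimal sketch (assuming an Express app; the checks mirror the error descriptions below):

```js
var express = require('express')
var bodyParser = require('body-parser')

var app = express()

app.use(bodyParser.json())

// error-handling middleware takes four arguments
app.use(function (err, req, res, next) {
  if (err.type === 'entity.parse.failed') {
    return res.status(err.status).send('invalid JSON in request body')
  }
  res.status(err.status || 500).send(err.expose ? err.message : 'request error')
})
```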
|
||||
|
||||
### content encoding unsupported
|
||||
|
||||
This error will occur when the request had a `Content-Encoding` header that
|
||||
contained an encoding but the `inflate` option was set to `false`. The
|
||||
`status` property is set to `415`, the `type` property is set to
|
||||
`'encoding.unsupported'`, and the `encoding` property will be set to the
|
||||
encoding that is unsupported.
|
||||
|
||||
### entity parse failed
|
||||
|
||||
This error will occur when the request contained an entity that could not be
|
||||
parsed by the middleware. The `status` property is set to `400`, the `type`
|
||||
property is set to `'entity.parse.failed'`, and the `body` property is set to
|
||||
the entity value that failed parsing.
|
||||
|
||||
### entity verify failed
|
||||
|
||||
This error will occur when the request contained an entity that failed
|
||||
verification by the defined `verify` option. The `status` property is
|
||||
set to `403`, the `type` property is set to `'entity.verify.failed'`, and the
|
||||
`body` property is set to the entity value that failed verification.
|
||||
|
||||
### request aborted
|
||||
|
||||
This error will occur when the request is aborted by the client before reading
|
||||
the body has finished. The `received` property will be set to the number of
|
||||
bytes received before the request was aborted and the `expected` property is
|
||||
set to the number of expected bytes. The `status` property is set to `400`
|
||||
and `type` property is set to `'request.aborted'`.
|
||||
|
||||
### request entity too large
|
||||
|
||||
This error will occur when the request body's size is larger than the "limit"
|
||||
option. The `limit` property will be set to the byte limit and the `length`
|
||||
property will be set to the request body's length. The `status` property is
|
||||
set to `413` and the `type` property is set to `'entity.too.large'`.
|
||||
|
||||
### request size did not match content length
|
||||
|
||||
This error will occur when the request's length did not match the length from
|
||||
the `Content-Length` header. This typically occurs when the request is malformed,
|
||||
most often because the `Content-Length` header was calculated based on characters
|
||||
instead of bytes. The `status` property is set to `400` and the `type` property
|
||||
is set to `'request.size.invalid'`.
|
||||
|
||||
### stream encoding should not be set
|
||||
|
||||
This error will occur when something called the `req.setEncoding` method prior
|
||||
to this middleware. This module operates directly on bytes only and you cannot
|
||||
call `req.setEncoding` when using this module. The `status` property is set to
|
||||
`500` and the `type` property is set to `'stream.encoding.set'`.
|
||||
|
||||
### stream is not readable
|
||||
|
||||
This error will occur when the request is no longer readable when this middleware
|
||||
attempts to read it. This typically means something other than a middleware from
|
||||
this module read the request body already and the middleware was also configured to
|
||||
read the same request. The `status` property is set to `500` and the `type`
|
||||
property is set to `'stream.not.readable'`.
|
||||
|
||||
### too many parameters
|
||||
|
||||
This error will occur when the content of the request exceeds the configured
|
||||
`parameterLimit` for the `urlencoded` parser. The `status` property is set to
|
||||
`413` and the `type` property is set to `'parameters.too.many'`.
|
||||
|
||||
### unsupported charset "BOGUS"
|
||||
|
||||
This error will occur when the request had a charset parameter in the
|
||||
`Content-Type` header, but the `iconv-lite` module does not support it OR the
|
||||
parser does not support it. The charset is contained in the message as well
|
||||
as in the `charset` property. The `status` property is set to `415`, the
|
||||
`type` property is set to `'charset.unsupported'`, and the `charset` property
|
||||
is set to the charset that is unsupported.
|
||||
|
||||
### unsupported content encoding "bogus"
|
||||
|
||||
This error will occur when the request had a `Content-Encoding` header that
|
||||
contained an unsupported encoding. The encoding is contained in the message
|
||||
as well as in the `encoding` property. The `status` property is set to `415`,
|
||||
the `type` property is set to `'encoding.unsupported'`, and the `encoding`
|
||||
property is set to the encoding that is unsupported.
|
||||
|
||||
### The input exceeded the depth
|
||||
|
||||
This error occurs when using `bodyParser.urlencoded` with the `extended` property set to `true` and the input exceeds the configured `depth` option. The `status` property is set to `400`. It is recommended to review the `depth` option and evaluate if it requires a higher value. When the `depth` option is set to `32` (default value), the error will not be thrown.
|
||||
|
||||
## Examples
|
||||
|
||||
### Express/Connect top-level generic
|
||||
|
||||
This example demonstrates adding a generic JSON and URL-encoded parser as a
|
||||
top-level middleware, which will parse the bodies of all incoming requests.
|
||||
This is the simplest setup.
|
||||
|
||||
```js
|
||||
var express = require('express')
|
||||
var bodyParser = require('body-parser')
|
||||
|
||||
var app = express()
|
||||
|
||||
// parse application/x-www-form-urlencoded
|
||||
app.use(bodyParser.urlencoded({ extended: false }))
|
||||
|
||||
// parse application/json
|
||||
app.use(bodyParser.json())
|
||||
|
||||
app.use(function (req, res) {
|
||||
res.setHeader('Content-Type', 'text/plain')
|
||||
res.write('you posted:\n')
|
||||
res.end(JSON.stringify(req.body, null, 2))
|
||||
})
|
||||
```
|
||||
|
||||
### Express route-specific
|
||||
|
||||
This example demonstrates adding body parsers specifically to the routes that
|
||||
need them. In general, this is the recommended way to use body-parser with
|
||||
Express.
|
||||
|
||||
```js
|
||||
var express = require('express')
|
||||
var bodyParser = require('body-parser')
|
||||
|
||||
var app = express()
|
||||
|
||||
// create application/json parser
|
||||
var jsonParser = bodyParser.json()
|
||||
|
||||
// create application/x-www-form-urlencoded parser
|
||||
var urlencodedParser = bodyParser.urlencoded({ extended: false })
|
||||
|
||||
// POST /login gets urlencoded bodies
|
||||
app.post('/login', urlencodedParser, function (req, res) {
|
||||
res.send('welcome, ' + req.body.username)
|
||||
})
|
||||
|
||||
// POST /api/users gets JSON bodies
|
||||
app.post('/api/users', jsonParser, function (req, res) {
|
||||
// create user in req.body
|
||||
})
|
||||
```
|
||||
|
||||
### Change accepted type for parsers
|
||||
|
||||
All the parsers accept a `type` option which allows you to change the
|
||||
`Content-Type` that the middleware will parse.
|
||||
|
||||
```js
|
||||
var express = require('express')
|
||||
var bodyParser = require('body-parser')
|
||||
|
||||
var app = express()
|
||||
|
||||
// parse various different custom JSON types as JSON
|
||||
app.use(bodyParser.json({ type: 'application/*+json' }))
|
||||
|
||||
// parse some custom thing into a Buffer
|
||||
app.use(bodyParser.raw({ type: 'application/vnd.custom-type' }))
|
||||
|
||||
// parse an HTML body into a string
|
||||
app.use(bodyParser.text({ type: 'text/html' }))
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
[MIT](LICENSE)
|
||||
|
||||
[ci-image]: https://badgen.net/github/checks/expressjs/body-parser/master?label=ci
|
||||
[ci-url]: https://github.com/expressjs/body-parser/actions/workflows/ci.yml
|
||||
[coveralls-image]: https://badgen.net/coveralls/c/github/expressjs/body-parser/master
|
||||
[coveralls-url]: https://coveralls.io/r/expressjs/body-parser?branch=master
|
||||
[node-version-image]: https://badgen.net/npm/node/body-parser
|
||||
[node-version-url]: https://nodejs.org/en/download
|
||||
[npm-downloads-image]: https://badgen.net/npm/dm/body-parser
|
||||
[npm-url]: https://npmjs.org/package/body-parser
|
||||
[npm-version-image]: https://badgen.net/npm/v/body-parser
|
||||
[ossf-scorecard-badge]: https://api.scorecard.dev/projects/github.com/expressjs/body-parser/badge
|
||||
[ossf-scorecard-visualizer]: https://ossf.github.io/scorecard-visualizer/#/projects/github.com/expressjs/body-parser
|
@ -0,0 +1,25 @@
|
||||
# Security Policies and Procedures
|
||||
|
||||
## Reporting a Bug
|
||||
|
||||
The Express team and community take all security bugs seriously. Thank you
|
||||
for improving the security of Express. We appreciate your efforts and
|
||||
responsible disclosure and will make every effort to acknowledge your
|
||||
contributions.
|
||||
|
||||
Report security bugs by emailing the current owner(s) of `body-parser`. This
|
||||
information can be found in the npm registry using the command
|
||||
`npm owner ls body-parser`.
|
||||
If unsure or unable to get the information from the above, open an issue
|
||||
in the [project issue tracker](https://github.com/expressjs/body-parser/issues)
|
||||
asking for the current contact information.
|
||||
|
||||
To ensure a timely response to your report, please ensure that the entirety
|
||||
of the report is contained within the email body and not solely behind a web
|
||||
link or an attachment.
|
||||
|
||||
At least one owner will acknowledge your email within 48 hours, and will send a
|
||||
more detailed response within 48 hours indicating the next steps in handling
|
||||
your report. After the initial reply to your report, the owners will
|
||||
endeavor to keep you informed of the progress towards a fix and full
|
||||
announcement, and may ask for additional information or guidance.
|
@ -0,0 +1,156 @@
|
||||
/*!
|
||||
* body-parser
|
||||
* Copyright(c) 2014-2015 Douglas Christopher Wilson
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
* @private
|
||||
*/
|
||||
|
||||
var deprecate = require('depd')('body-parser')
|
||||
|
||||
/**
|
||||
* Cache of loaded parsers.
|
||||
* @private
|
||||
*/
|
||||
|
||||
var parsers = Object.create(null)
|
||||
|
||||
/**
|
||||
* @typedef Parsers
|
||||
* @type {function}
|
||||
* @property {function} json
|
||||
* @property {function} raw
|
||||
* @property {function} text
|
||||
* @property {function} urlencoded
|
||||
*/
|
||||
|
||||
/**
|
||||
* Module exports.
|
||||
* @type {Parsers}
|
||||
*/
|
||||
|
||||
exports = module.exports = deprecate.function(bodyParser,
|
||||
'bodyParser: use individual json/urlencoded middlewares')
|
||||
|
||||
/**
|
||||
* JSON parser.
|
||||
* @public
|
||||
*/
|
||||
|
||||
Object.defineProperty(exports, 'json', {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get: createParserGetter('json')
|
||||
})
|
||||
|
||||
/**
|
||||
* Raw parser.
|
||||
* @public
|
||||
*/
|
||||
|
||||
Object.defineProperty(exports, 'raw', {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get: createParserGetter('raw')
|
||||
})
|
||||
|
||||
/**
|
||||
* Text parser.
|
||||
* @public
|
||||
*/
|
||||
|
||||
Object.defineProperty(exports, 'text', {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get: createParserGetter('text')
|
||||
})
|
||||
|
||||
/**
|
||||
* URL-encoded parser.
|
||||
* @public
|
||||
*/
|
||||
|
||||
Object.defineProperty(exports, 'urlencoded', {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get: createParserGetter('urlencoded')
|
||||
})
|
||||
|
||||
/**
|
||||
* Create a middleware to parse json and urlencoded bodies.
|
||||
*
|
||||
* @param {object} [options]
|
||||
* @return {function}
|
||||
* @deprecated
|
||||
* @public
|
||||
*/
|
||||
|
||||
function bodyParser (options) {
|
||||
// use default type for parsers
|
||||
var opts = Object.create(options || null, {
|
||||
type: {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
value: undefined,
|
||||
writable: true
|
||||
}
|
||||
})
|
||||
|
||||
var _urlencoded = exports.urlencoded(opts)
|
||||
var _json = exports.json(opts)
|
||||
|
||||
return function bodyParser (req, res, next) {
|
||||
_json(req, res, function (err) {
|
||||
if (err) return next(err)
|
||||
_urlencoded(req, res, next)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a getter for loading a parser.
|
||||
* @private
|
||||
*/
|
||||
|
||||
function createParserGetter (name) {
|
||||
return function get () {
|
||||
return loadParser(name)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load a parser module.
|
||||
* @private
|
||||
*/
|
||||
|
||||
function loadParser (parserName) {
|
||||
var parser = parsers[parserName]
|
||||
|
||||
if (parser !== undefined) {
|
||||
return parser
|
||||
}
|
||||
|
||||
// this uses a switch for static require analysis
|
||||
switch (parserName) {
|
||||
case 'json':
|
||||
parser = require('./lib/types/json')
|
||||
break
|
||||
case 'raw':
|
||||
parser = require('./lib/types/raw')
|
||||
break
|
||||
case 'text':
|
||||
parser = require('./lib/types/text')
|
||||
break
|
||||
case 'urlencoded':
|
||||
parser = require('./lib/types/urlencoded')
|
||||
break
|
||||
}
|
||||
|
||||
// store to prevent invoking require()
|
||||
return (parsers[parserName] = parser)
|
||||
}
|
@ -0,0 +1,205 @@
|
||||
/*!
|
||||
* body-parser
|
||||
* Copyright(c) 2014-2015 Douglas Christopher Wilson
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
* @private
|
||||
*/
|
||||
|
||||
var createError = require('http-errors')
|
||||
var destroy = require('destroy')
|
||||
var getBody = require('raw-body')
|
||||
var iconv = require('iconv-lite')
|
||||
var onFinished = require('on-finished')
|
||||
var unpipe = require('unpipe')
|
||||
var zlib = require('zlib')
|
||||
|
||||
/**
|
||||
* Module exports.
|
||||
*/
|
||||
|
||||
module.exports = read
|
||||
|
||||
/**
|
||||
* Read a request into a buffer and parse.
|
||||
*
|
||||
* @param {object} req
|
||||
* @param {object} res
|
||||
* @param {function} next
|
||||
* @param {function} parse
|
||||
* @param {function} debug
|
||||
* @param {object} options
|
||||
* @private
|
||||
*/
|
||||
|
||||
function read (req, res, next, parse, debug, options) {
|
||||
var length
|
||||
var opts = options
|
||||
var stream
|
||||
|
||||
// flag as parsed
|
||||
req._body = true
|
||||
|
||||
// read options
|
||||
var encoding = opts.encoding !== null
|
||||
? opts.encoding
|
||||
: null
|
||||
var verify = opts.verify
|
||||
|
||||
try {
|
||||
// get the content stream
|
||||
stream = contentstream(req, debug, opts.inflate)
|
||||
length = stream.length
|
||||
stream.length = undefined
|
||||
} catch (err) {
|
||||
return next(err)
|
||||
}
|
||||
|
||||
// set raw-body options
|
||||
opts.length = length
|
||||
opts.encoding = verify
|
||||
? null
|
||||
: encoding
|
||||
|
||||
// assert charset is supported
|
||||
if (opts.encoding === null && encoding !== null && !iconv.encodingExists(encoding)) {
|
||||
return next(createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', {
|
||||
charset: encoding.toLowerCase(),
|
||||
type: 'charset.unsupported'
|
||||
}))
|
||||
}
|
||||
|
||||
// read body
|
||||
debug('read body')
|
||||
getBody(stream, opts, function (error, body) {
|
||||
if (error) {
|
||||
var _error
|
||||
|
||||
if (error.type === 'encoding.unsupported') {
|
||||
// echo back charset
|
||||
_error = createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', {
|
||||
charset: encoding.toLowerCase(),
|
||||
type: 'charset.unsupported'
|
||||
})
|
||||
} else {
|
||||
// set status code on error
|
||||
_error = createError(400, error)
|
||||
}
|
||||
|
||||
// unpipe from stream and destroy
|
||||
if (stream !== req) {
|
||||
unpipe(req)
|
||||
destroy(stream, true)
|
||||
}
|
||||
|
||||
// read off entire request
|
||||
dump(req, function onfinished () {
|
||||
next(createError(400, _error))
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// verify
|
||||
if (verify) {
|
||||
try {
|
||||
debug('verify body')
|
||||
verify(req, res, body, encoding)
|
||||
} catch (err) {
|
||||
next(createError(403, err, {
|
||||
body: body,
|
||||
type: err.type || 'entity.verify.failed'
|
||||
}))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// parse
|
||||
var str = body
|
||||
try {
|
||||
debug('parse body')
|
||||
str = typeof body !== 'string' && encoding !== null
|
||||
? iconv.decode(body, encoding)
|
||||
: body
|
||||
req.body = parse(str)
|
||||
} catch (err) {
|
||||
next(createError(400, err, {
|
||||
body: str,
|
||||
type: err.type || 'entity.parse.failed'
|
||||
}))
|
||||
return
|
||||
}
|
||||
|
||||
next()
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the content stream of the request.
|
||||
*
|
||||
* @param {object} req
|
||||
* @param {function} debug
|
||||
* @param {boolean} [inflate=true]
|
||||
* @return {object}
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function contentstream (req, debug, inflate) {
|
||||
var encoding = (req.headers['content-encoding'] || 'identity').toLowerCase()
|
||||
var length = req.headers['content-length']
|
||||
var stream
|
||||
|
||||
debug('content-encoding "%s"', encoding)
|
||||
|
||||
if (inflate === false && encoding !== 'identity') {
|
||||
throw createError(415, 'content encoding unsupported', {
|
||||
encoding: encoding,
|
||||
type: 'encoding.unsupported'
|
||||
})
|
||||
}
|
||||
|
||||
switch (encoding) {
|
||||
case 'deflate':
|
||||
stream = zlib.createInflate()
|
||||
debug('inflate body')
|
||||
req.pipe(stream)
|
||||
break
|
||||
case 'gzip':
|
||||
stream = zlib.createGunzip()
|
||||
debug('gunzip body')
|
||||
req.pipe(stream)
|
||||
break
|
||||
case 'identity':
|
||||
stream = req
|
||||
stream.length = length
|
||||
break
|
||||
default:
|
||||
throw createError(415, 'unsupported content encoding "' + encoding + '"', {
|
||||
encoding: encoding,
|
||||
type: 'encoding.unsupported'
|
||||
})
|
||||
}
|
||||
|
||||
return stream
|
||||
}
|
||||
|
||||
/**
|
||||
* Dump the contents of a request.
|
||||
*
|
||||
* @param {object} req
|
||||
* @param {function} callback
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function dump (req, callback) {
|
||||
if (onFinished.isFinished(req)) {
|
||||
callback(null)
|
||||
} else {
|
||||
onFinished(req, callback)
|
||||
req.resume()
|
||||
}
|
||||
}
|
@ -0,0 +1,247 @@
|
||||
/*!
|
||||
* body-parser
|
||||
* Copyright(c) 2014 Jonathan Ong
|
||||
* Copyright(c) 2014-2015 Douglas Christopher Wilson
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
* @private
|
||||
*/
|
||||
|
||||
var bytes = require('bytes')
|
||||
var contentType = require('content-type')
|
||||
var createError = require('http-errors')
|
||||
var debug = require('debug')('body-parser:json')
|
||||
var read = require('../read')
|
||||
var typeis = require('type-is')
|
||||
|
||||
/**
|
||||
* Module exports.
|
||||
*/
|
||||
|
||||
module.exports = json
|
||||
|
||||
/**
|
||||
* RegExp to match the first non-space in a string.
|
||||
*
|
||||
* Allowed whitespace is defined in RFC 7159:
|
||||
*
|
||||
* ws = *(
|
||||
* %x20 / ; Space
|
||||
* %x09 / ; Horizontal tab
|
||||
* %x0A / ; Line feed or New line
|
||||
* %x0D ) ; Carriage return
|
||||
*/
|
||||
|
||||
var FIRST_CHAR_REGEXP = /^[\x20\x09\x0a\x0d]*([^\x20\x09\x0a\x0d])/ // eslint-disable-line no-control-regex
|
||||
|
||||
var JSON_SYNTAX_CHAR = '#'
|
||||
var JSON_SYNTAX_REGEXP = /#+/g
|
||||
|
||||
/**
|
||||
* Create a middleware to parse JSON bodies.
|
||||
*
|
||||
* @param {object} [options]
|
||||
* @return {function}
|
||||
* @public
|
||||
*/
|
||||
|
||||
function json (options) {
|
||||
var opts = options || {}
|
||||
|
||||
var limit = typeof opts.limit !== 'number'
|
||||
? bytes.parse(opts.limit || '100kb')
|
||||
: opts.limit
|
||||
var inflate = opts.inflate !== false
|
||||
var reviver = opts.reviver
|
||||
var strict = opts.strict !== false
|
||||
var type = opts.type || 'application/json'
|
||||
var verify = opts.verify || false
|
||||
|
||||
if (verify !== false && typeof verify !== 'function') {
|
||||
throw new TypeError('option verify must be function')
|
||||
}
|
||||
|
||||
// create the appropriate type checking function
|
||||
var shouldParse = typeof type !== 'function'
|
||||
? typeChecker(type)
|
||||
: type
|
||||
|
||||
function parse (body) {
|
||||
if (body.length === 0) {
|
||||
// special-case empty json body, as it's a common client-side mistake
|
||||
// TODO: maybe make this configurable or part of "strict" option
|
||||
return {}
|
||||
}
|
||||
|
||||
if (strict) {
|
||||
var first = firstchar(body)
|
||||
|
||||
if (first !== '{' && first !== '[') {
|
||||
debug('strict violation')
|
||||
throw createStrictSyntaxError(body, first)
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
debug('parse json')
|
||||
return JSON.parse(body, reviver)
|
||||
} catch (e) {
|
||||
throw normalizeJsonSyntaxError(e, {
|
||||
message: e.message,
|
||||
stack: e.stack
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return function jsonParser (req, res, next) {
|
||||
if (req._body) {
|
||||
debug('body already parsed')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
req.body = req.body || {}
|
||||
|
||||
// skip requests without bodies
|
||||
if (!typeis.hasBody(req)) {
|
||||
debug('skip empty body')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
debug('content-type %j', req.headers['content-type'])
|
||||
|
||||
// determine if request should be parsed
|
||||
if (!shouldParse(req)) {
|
||||
debug('skip parsing')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
// assert charset per RFC 7159 sec 8.1
|
||||
var charset = getCharset(req) || 'utf-8'
|
||||
if (charset.slice(0, 4) !== 'utf-') {
|
||||
debug('invalid charset')
|
||||
next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', {
|
||||
charset: charset,
|
||||
type: 'charset.unsupported'
|
||||
}))
|
||||
return
|
||||
}
|
||||
|
||||
// read
|
||||
read(req, res, next, parse, debug, {
|
||||
encoding: charset,
|
||||
inflate: inflate,
|
||||
limit: limit,
|
||||
verify: verify
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create strict violation syntax error matching native error.
|
||||
*
|
||||
* @param {string} str
|
||||
* @param {string} char
|
||||
* @return {Error}
|
||||
* @private
|
||||
*/
|
||||
|
||||
function createStrictSyntaxError (str, char) {
|
||||
var index = str.indexOf(char)
|
||||
var partial = ''
|
||||
|
||||
if (index !== -1) {
|
||||
partial = str.substring(0, index) + JSON_SYNTAX_CHAR
|
||||
|
||||
for (var i = index + 1; i < str.length; i++) {
|
||||
partial += JSON_SYNTAX_CHAR
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
JSON.parse(partial); /* istanbul ignore next */ throw new SyntaxError('strict violation')
|
||||
} catch (e) {
|
||||
return normalizeJsonSyntaxError(e, {
|
||||
message: e.message.replace(JSON_SYNTAX_REGEXP, function (placeholder) {
|
||||
return str.substring(index, index + placeholder.length)
|
||||
}),
|
||||
stack: e.stack
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the first non-whitespace character in a string.
|
||||
*
|
||||
* @param {string} str
|
||||
* @return {function}
|
||||
* @private
|
||||
*/
|
||||
|
||||
function firstchar (str) {
|
||||
var match = FIRST_CHAR_REGEXP.exec(str)
|
||||
|
||||
return match
|
||||
? match[1]
|
||||
: undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the charset of a request.
|
||||
*
|
||||
* @param {object} req
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function getCharset (req) {
|
||||
try {
|
||||
return (contentType.parse(req).parameters.charset || '').toLowerCase()
|
||||
} catch (e) {
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize a SyntaxError for JSON.parse.
|
||||
*
|
||||
* @param {SyntaxError} error
|
||||
* @param {object} obj
|
||||
* @return {SyntaxError}
|
||||
*/
|
||||
|
||||
function normalizeJsonSyntaxError (error, obj) {
|
||||
var keys = Object.getOwnPropertyNames(error)
|
||||
|
||||
for (var i = 0; i < keys.length; i++) {
|
||||
var key = keys[i]
|
||||
if (key !== 'stack' && key !== 'message') {
|
||||
delete error[key]
|
||||
}
|
||||
}
|
||||
|
||||
// replace stack before message for Node.js 0.10 and below
|
||||
error.stack = obj.stack.replace(error.message, obj.message)
|
||||
error.message = obj.message
|
||||
|
||||
return error
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the simple type checker.
|
||||
*
|
||||
* @param {string} type
|
||||
* @return {function}
|
||||
*/
|
||||
|
||||
function typeChecker (type) {
|
||||
return function checkType (req) {
|
||||
return Boolean(typeis(req, type))
|
||||
}
|
||||
}
|
@ -0,0 +1,101 @@
|
||||
/*!
|
||||
* body-parser
|
||||
* Copyright(c) 2014-2015 Douglas Christopher Wilson
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
var bytes = require('bytes')
|
||||
var debug = require('debug')('body-parser:raw')
|
||||
var read = require('../read')
|
||||
var typeis = require('type-is')
|
||||
|
||||
/**
|
||||
* Module exports.
|
||||
*/
|
||||
|
||||
module.exports = raw
|
||||
|
||||
/**
|
||||
* Create a middleware to parse raw bodies.
|
||||
*
|
||||
* @param {object} [options]
|
||||
* @return {function}
|
||||
* @api public
|
||||
*/
|
||||
|
||||
function raw (options) {
|
||||
var opts = options || {}
|
||||
|
||||
var inflate = opts.inflate !== false
|
||||
var limit = typeof opts.limit !== 'number'
|
||||
? bytes.parse(opts.limit || '100kb')
|
||||
: opts.limit
|
||||
var type = opts.type || 'application/octet-stream'
|
||||
var verify = opts.verify || false
|
||||
|
||||
if (verify !== false && typeof verify !== 'function') {
|
||||
throw new TypeError('option verify must be function')
|
||||
}
|
||||
|
||||
// create the appropriate type checking function
|
||||
var shouldParse = typeof type !== 'function'
|
||||
? typeChecker(type)
|
||||
: type
|
||||
|
||||
function parse (buf) {
|
||||
return buf
|
||||
}
|
||||
|
||||
return function rawParser (req, res, next) {
|
||||
if (req._body) {
|
||||
debug('body already parsed')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
req.body = req.body || {}
|
||||
|
||||
// skip requests without bodies
|
||||
if (!typeis.hasBody(req)) {
|
||||
debug('skip empty body')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
debug('content-type %j', req.headers['content-type'])
|
||||
|
||||
// determine if request should be parsed
|
||||
if (!shouldParse(req)) {
|
||||
debug('skip parsing')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
// read
|
||||
read(req, res, next, parse, debug, {
|
||||
encoding: null,
|
||||
inflate: inflate,
|
||||
limit: limit,
|
||||
verify: verify
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the simple type checker.
|
||||
*
|
||||
* @param {string} type
|
||||
* @return {function}
|
||||
*/
|
||||
|
||||
function typeChecker (type) {
|
||||
return function checkType (req) {
|
||||
return Boolean(typeis(req, type))
|
||||
}
|
||||
}
|
@ -0,0 +1,121 @@
|
||||
/*!
|
||||
* body-parser
|
||||
* Copyright(c) 2014-2015 Douglas Christopher Wilson
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
var bytes = require('bytes')
|
||||
var contentType = require('content-type')
|
||||
var debug = require('debug')('body-parser:text')
|
||||
var read = require('../read')
|
||||
var typeis = require('type-is')
|
||||
|
||||
/**
|
||||
* Module exports.
|
||||
*/
|
||||
|
||||
module.exports = text
|
||||
|
||||
/**
|
||||
* Create a middleware to parse text bodies.
|
||||
*
|
||||
* @param {object} [options]
|
||||
* @return {function}
|
||||
* @api public
|
||||
*/
|
||||
|
||||
function text (options) {
|
||||
var opts = options || {}
|
||||
|
||||
var defaultCharset = opts.defaultCharset || 'utf-8'
|
||||
var inflate = opts.inflate !== false
|
||||
var limit = typeof opts.limit !== 'number'
|
||||
? bytes.parse(opts.limit || '100kb')
|
||||
: opts.limit
|
||||
var type = opts.type || 'text/plain'
|
||||
var verify = opts.verify || false
|
||||
|
||||
if (verify !== false && typeof verify !== 'function') {
|
||||
throw new TypeError('option verify must be function')
|
||||
}
|
||||
|
||||
// create the appropriate type checking function
|
||||
var shouldParse = typeof type !== 'function'
|
||||
? typeChecker(type)
|
||||
: type
|
||||
|
||||
function parse (buf) {
|
||||
return buf
|
||||
}
|
||||
|
||||
return function textParser (req, res, next) {
|
||||
if (req._body) {
|
||||
debug('body already parsed')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
req.body = req.body || {}
|
||||
|
||||
// skip requests without bodies
|
||||
if (!typeis.hasBody(req)) {
|
||||
debug('skip empty body')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
debug('content-type %j', req.headers['content-type'])
|
||||
|
||||
// determine if request should be parsed
|
||||
if (!shouldParse(req)) {
|
||||
debug('skip parsing')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
// get charset
|
||||
var charset = getCharset(req) || defaultCharset
|
||||
|
||||
// read
|
||||
read(req, res, next, parse, debug, {
|
||||
encoding: charset,
|
||||
inflate: inflate,
|
||||
limit: limit,
|
||||
verify: verify
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the charset of a request.
|
||||
*
|
||||
* @param {object} req
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function getCharset (req) {
|
||||
try {
|
||||
return (contentType.parse(req).parameters.charset || '').toLowerCase()
|
||||
} catch (e) {
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the simple type checker.
|
||||
*
|
||||
* @param {string} type
|
||||
* @return {function}
|
||||
*/
|
||||
|
||||
function typeChecker (type) {
|
||||
return function checkType (req) {
|
||||
return Boolean(typeis(req, type))
|
||||
}
|
||||
}
|
@ -0,0 +1,307 @@
|
||||
/*!
|
||||
* body-parser
|
||||
* Copyright(c) 2014 Jonathan Ong
|
||||
* Copyright(c) 2014-2015 Douglas Christopher Wilson
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
* @private
|
||||
*/
|
||||
|
||||
var bytes = require('bytes')
|
||||
var contentType = require('content-type')
|
||||
var createError = require('http-errors')
|
||||
var debug = require('debug')('body-parser:urlencoded')
|
||||
var deprecate = require('depd')('body-parser')
|
||||
var read = require('../read')
|
||||
var typeis = require('type-is')
|
||||
|
||||
/**
|
||||
* Module exports.
|
||||
*/
|
||||
|
||||
module.exports = urlencoded
|
||||
|
||||
/**
|
||||
* Cache of parser modules.
|
||||
*/
|
||||
|
||||
var parsers = Object.create(null)
|
||||
|
||||
/**
|
||||
* Create a middleware to parse urlencoded bodies.
|
||||
*
|
||||
* @param {object} [options]
|
||||
* @return {function}
|
||||
* @public
|
||||
*/
|
||||
|
||||
function urlencoded (options) {
|
||||
var opts = options || {}
|
||||
|
||||
// notice because option default will flip in next major
|
||||
if (opts.extended === undefined) {
|
||||
deprecate('undefined extended: provide extended option')
|
||||
}
|
||||
|
||||
var extended = opts.extended !== false
|
||||
var inflate = opts.inflate !== false
|
||||
var limit = typeof opts.limit !== 'number'
|
||||
? bytes.parse(opts.limit || '100kb')
|
||||
: opts.limit
|
||||
var type = opts.type || 'application/x-www-form-urlencoded'
|
||||
var verify = opts.verify || false
|
||||
var depth = typeof opts.depth !== 'number'
|
||||
? Number(opts.depth || 32)
|
||||
: opts.depth
|
||||
|
||||
if (verify !== false && typeof verify !== 'function') {
|
||||
throw new TypeError('option verify must be function')
|
||||
}
|
||||
|
||||
// create the appropriate query parser
|
||||
var queryparse = extended
|
||||
? extendedparser(opts)
|
||||
: simpleparser(opts)
|
||||
|
||||
// create the appropriate type checking function
|
||||
var shouldParse = typeof type !== 'function'
|
||||
? typeChecker(type)
|
||||
: type
|
||||
|
||||
function parse (body) {
|
||||
return body.length
|
||||
? queryparse(body)
|
||||
: {}
|
||||
}
|
||||
|
||||
return function urlencodedParser (req, res, next) {
|
||||
if (req._body) {
|
||||
debug('body already parsed')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
req.body = req.body || {}
|
||||
|
||||
// skip requests without bodies
|
||||
if (!typeis.hasBody(req)) {
|
||||
debug('skip empty body')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
debug('content-type %j', req.headers['content-type'])
|
||||
|
||||
// determine if request should be parsed
|
||||
if (!shouldParse(req)) {
|
||||
debug('skip parsing')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
// assert charset
|
||||
var charset = getCharset(req) || 'utf-8'
|
||||
if (charset !== 'utf-8') {
|
||||
debug('invalid charset')
|
||||
next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', {
|
||||
charset: charset,
|
||||
type: 'charset.unsupported'
|
||||
}))
|
||||
return
|
||||
}
|
||||
|
||||
// read
|
||||
read(req, res, next, parse, debug, {
|
||||
debug: debug,
|
||||
encoding: charset,
|
||||
inflate: inflate,
|
||||
limit: limit,
|
||||
verify: verify,
|
||||
depth: depth
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the extended query parser.
|
||||
*
|
||||
* @param {object} options
|
||||
*/
|
||||
|
||||
function extendedparser (options) {
|
||||
var parameterLimit = options.parameterLimit !== undefined
|
||||
? options.parameterLimit
|
||||
: 1000
|
||||
|
||||
var depth = typeof options.depth !== 'number'
|
||||
? Number(options.depth || 32)
|
||||
: options.depth
|
||||
var parse = parser('qs')
|
||||
|
||||
if (isNaN(parameterLimit) || parameterLimit < 1) {
|
||||
throw new TypeError('option parameterLimit must be a positive number')
|
||||
}
|
||||
|
||||
if (isNaN(depth) || depth < 0) {
|
||||
throw new TypeError('option depth must be a zero or a positive number')
|
||||
}
|
||||
|
||||
if (isFinite(parameterLimit)) {
|
||||
parameterLimit = parameterLimit | 0
|
||||
}
|
||||
|
||||
return function queryparse (body) {
|
||||
var paramCount = parameterCount(body, parameterLimit)
|
||||
|
||||
if (paramCount === undefined) {
|
||||
debug('too many parameters')
|
||||
throw createError(413, 'too many parameters', {
|
||||
type: 'parameters.too.many'
|
||||
})
|
||||
}
|
||||
|
||||
var arrayLimit = Math.max(100, paramCount)
|
||||
|
||||
debug('parse extended urlencoding')
|
||||
try {
|
||||
return parse(body, {
|
||||
allowPrototypes: true,
|
||||
arrayLimit: arrayLimit,
|
||||
depth: depth,
|
||||
strictDepth: true,
|
||||
parameterLimit: parameterLimit
|
||||
})
|
||||
} catch (err) {
|
||||
if (err instanceof RangeError) {
|
||||
throw createError(400, 'The input exceeded the depth', {
|
||||
type: 'querystring.parse.rangeError'
|
||||
})
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the charset of a request.
|
||||
*
|
||||
* @param {object} req
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function getCharset (req) {
|
||||
try {
|
||||
return (contentType.parse(req).parameters.charset || '').toLowerCase()
|
||||
} catch (e) {
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Count the number of parameters, stopping once limit reached
|
||||
*
|
||||
* @param {string} body
|
||||
* @param {number} limit
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function parameterCount (body, limit) {
|
||||
var count = 0
|
||||
var index = 0
|
||||
|
||||
while ((index = body.indexOf('&', index)) !== -1) {
|
||||
count++
|
||||
index++
|
||||
|
||||
if (count === limit) {
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
return count
|
||||
}
|
||||
|
||||
/**
|
||||
* Get parser for module name dynamically.
|
||||
*
|
||||
* @param {string} name
|
||||
* @return {function}
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function parser (name) {
|
||||
var mod = parsers[name]
|
||||
|
||||
if (mod !== undefined) {
|
||||
return mod.parse
|
||||
}
|
||||
|
||||
// this uses a switch for static require analysis
|
||||
switch (name) {
|
||||
case 'qs':
|
||||
mod = require('qs')
|
||||
break
|
||||
case 'querystring':
|
||||
mod = require('querystring')
|
||||
break
|
||||
}
|
||||
|
||||
// store to prevent invoking require()
|
||||
parsers[name] = mod
|
||||
|
||||
return mod.parse
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the simple query parser.
|
||||
*
|
||||
* @param {object} options
|
||||
*/
|
||||
|
||||
function simpleparser (options) {
|
||||
var parameterLimit = options.parameterLimit !== undefined
|
||||
? options.parameterLimit
|
||||
: 1000
|
||||
var parse = parser('querystring')
|
||||
|
||||
if (isNaN(parameterLimit) || parameterLimit < 1) {
|
||||
throw new TypeError('option parameterLimit must be a positive number')
|
||||
}
|
||||
|
||||
if (isFinite(parameterLimit)) {
|
||||
parameterLimit = parameterLimit | 0
|
||||
}
|
||||
|
||||
return function queryparse (body) {
|
||||
var paramCount = parameterCount(body, parameterLimit)
|
||||
|
||||
if (paramCount === undefined) {
|
||||
debug('too many parameters')
|
||||
throw createError(413, 'too many parameters', {
|
||||
type: 'parameters.too.many'
|
||||
})
|
||||
}
|
||||
|
||||
debug('parse urlencoding')
|
||||
return parse(body, undefined, undefined, { maxKeys: parameterLimit })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the simple type checker.
|
||||
*
|
||||
* @param {string} type
|
||||
* @return {function}
|
||||
*/
|
||||
|
||||
function typeChecker (type) {
|
||||
return function checkType (req) {
|
||||
return Boolean(typeis(req, type))
|
||||
}
|
||||
}
|
@ -0,0 +1,56 @@
{
  "name": "body-parser",
  "description": "Node.js body parsing middleware",
  "version": "1.20.3",
  "contributors": [
    "Douglas Christopher Wilson <doug@somethingdoug.com>",
    "Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)"
  ],
  "license": "MIT",
  "repository": "expressjs/body-parser",
  "dependencies": {
    "bytes": "3.1.2",
    "content-type": "~1.0.5",
    "debug": "2.6.9",
    "depd": "2.0.0",
    "destroy": "1.2.0",
    "http-errors": "2.0.0",
    "iconv-lite": "0.4.24",
    "on-finished": "2.4.1",
    "qs": "6.13.0",
    "raw-body": "2.5.2",
    "type-is": "~1.6.18",
    "unpipe": "1.0.0"
  },
  "devDependencies": {
    "eslint": "8.34.0",
    "eslint-config-standard": "14.1.1",
    "eslint-plugin-import": "2.27.5",
    "eslint-plugin-markdown": "3.0.0",
    "eslint-plugin-node": "11.1.0",
    "eslint-plugin-promise": "6.1.1",
    "eslint-plugin-standard": "4.1.0",
    "methods": "1.1.2",
    "mocha": "10.2.0",
    "nyc": "15.1.0",
    "safe-buffer": "5.2.1",
    "supertest": "6.3.3"
  },
  "files": [
    "lib/",
    "LICENSE",
    "HISTORY.md",
    "SECURITY.md",
    "index.js"
  ],
  "engines": {
    "node": ">= 0.8",
    "npm": "1.2.8000 || >= 1.4.16"
  },
  "scripts": {
    "lint": "eslint .",
    "test": "mocha --require test/support/env --reporter spec --check-leaks --bail test/",
    "test-ci": "nyc --reporter=lcov --reporter=text npm test",
    "test-cov": "nyc --reporter=html --reporter=text npm test"
  }
}
@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2016, 2018 Linus Unnebäck
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
@ -0,0 +1,72 @@
|
||||
/* eslint-disable node/no-deprecated-api */
|
||||
|
||||
var toString = Object.prototype.toString
|
||||
|
||||
var isModern = (
|
||||
typeof Buffer !== 'undefined' &&
|
||||
typeof Buffer.alloc === 'function' &&
|
||||
typeof Buffer.allocUnsafe === 'function' &&
|
||||
typeof Buffer.from === 'function'
|
||||
)
|
||||
|
||||
function isArrayBuffer (input) {
|
||||
return toString.call(input).slice(8, -1) === 'ArrayBuffer'
|
||||
}
|
||||
|
||||
function fromArrayBuffer (obj, byteOffset, length) {
|
||||
byteOffset >>>= 0
|
||||
|
||||
var maxLength = obj.byteLength - byteOffset
|
||||
|
||||
if (maxLength < 0) {
|
||||
throw new RangeError("'offset' is out of bounds")
|
||||
}
|
||||
|
||||
if (length === undefined) {
|
||||
length = maxLength
|
||||
} else {
|
||||
length >>>= 0
|
||||
|
||||
if (length > maxLength) {
|
||||
throw new RangeError("'length' is out of bounds")
|
||||
}
|
||||
}
|
||||
|
||||
return isModern
|
||||
? Buffer.from(obj.slice(byteOffset, byteOffset + length))
|
||||
: new Buffer(new Uint8Array(obj.slice(byteOffset, byteOffset + length)))
|
||||
}
|
||||
|
||||
function fromString (string, encoding) {
|
||||
if (typeof encoding !== 'string' || encoding === '') {
|
||||
encoding = 'utf8'
|
||||
}
|
||||
|
||||
if (!Buffer.isEncoding(encoding)) {
|
||||
throw new TypeError('"encoding" must be a valid string encoding')
|
||||
}
|
||||
|
||||
return isModern
|
||||
? Buffer.from(string, encoding)
|
||||
: new Buffer(string, encoding)
|
||||
}
|
||||
|
||||
function bufferFrom (value, encodingOrOffset, length) {
|
||||
if (typeof value === 'number') {
|
||||
throw new TypeError('"value" argument must not be a number')
|
||||
}
|
||||
|
||||
if (isArrayBuffer(value)) {
|
||||
return fromArrayBuffer(value, encodingOrOffset, length)
|
||||
}
|
||||
|
||||
if (typeof value === 'string') {
|
||||
return fromString(value, encodingOrOffset)
|
||||
}
|
||||
|
||||
return isModern
|
||||
? Buffer.from(value)
|
||||
: new Buffer(value)
|
||||
}
|
||||
|
||||
module.exports = bufferFrom
|
@ -0,0 +1,19 @@
{
  "name": "buffer-from",
  "version": "1.1.2",
  "license": "MIT",
  "repository": "LinusU/buffer-from",
  "files": [
    "index.js"
  ],
  "scripts": {
    "test": "standard && node test"
  },
  "devDependencies": {
    "standard": "^12.0.1"
  },
  "keywords": [
    "buffer",
    "buffer from"
  ]
}
@ -0,0 +1,69 @@
|
||||
# Buffer From
|
||||
|
||||
A [ponyfill](https://ponyfill.com) for `Buffer.from`, uses native implementation if available.
|
||||
|
||||
## Installation
|
||||
|
||||
```sh
|
||||
npm install --save buffer-from
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const bufferFrom = require('buffer-from')
|
||||
|
||||
console.log(bufferFrom([1, 2, 3, 4]))
|
||||
//=> <Buffer 01 02 03 04>
|
||||
|
||||
const arr = new Uint8Array([1, 2, 3, 4])
|
||||
console.log(bufferFrom(arr.buffer, 1, 2))
|
||||
//=> <Buffer 02 03>
|
||||
|
||||
console.log(bufferFrom('test', 'utf8'))
|
||||
//=> <Buffer 74 65 73 74>
|
||||
|
||||
const buf = bufferFrom('test')
|
||||
console.log(bufferFrom(buf))
|
||||
//=> <Buffer 74 65 73 74>
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### bufferFrom(array)
|
||||
|
||||
- `array` <Array>
|
||||
|
||||
Allocates a new `Buffer` using an `array` of octets.
|
||||
|
||||
### bufferFrom(arrayBuffer[, byteOffset[, length]])
|
||||
|
||||
- `arrayBuffer` <ArrayBuffer> The `.buffer` property of a TypedArray or ArrayBuffer
|
||||
- `byteOffset` <Integer> Where to start copying from `arrayBuffer`. **Default:** `0`
|
||||
- `length` <Integer> How many bytes to copy from `arrayBuffer`. **Default:** `arrayBuffer.length - byteOffset`
|
||||
|
||||
When passed a reference to the `.buffer` property of a TypedArray instance, the
|
||||
newly created `Buffer` will share the same allocated memory as the TypedArray.
|
||||
|
||||
The optional `byteOffset` and `length` arguments specify a memory range within
|
||||
the `arrayBuffer` that will be shared by the `Buffer`.
|
||||
|
||||
### bufferFrom(buffer)
|
||||
|
||||
- `buffer` <Buffer> An existing `Buffer` to copy data from
|
||||
|
||||
Copies the passed `buffer` data onto a new `Buffer` instance.
|
||||
|
||||
### bufferFrom(string[, encoding])
|
||||
|
||||
- `string` <String> A string to encode.
|
||||
- `encoding` <String> The encoding of `string`. **Default:** `'utf8'`
|
||||
|
||||
Creates a new `Buffer` containing the given JavaScript string `string`. If
|
||||
provided, the `encoding` parameter identifies the character encoding of
|
||||
`string`.
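
For example, any encoding accepted by `Buffer.isEncoding` can be passed, such as `'hex'`:

```js
const bufferFrom = require('buffer-from')

console.log(bufferFrom('deadbeef', 'hex'))
//=> <Buffer de ad be ef>
```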
|
||||
|
||||
## See also
|
||||
|
||||
- [buffer-alloc](https://github.com/LinusU/buffer-alloc) A ponyfill for `Buffer.alloc`
|
||||
- [buffer-alloc-unsafe](https://github.com/LinusU/buffer-alloc-unsafe) A ponyfill for `Buffer.allocUnsafe`
|
@ -0,0 +1,5 @@
'use strict';

module.exports = {
  extends: '@mscdex/eslint-config',
};
@ -0,0 +1,24 @@
name: CI

on:
  pull_request:
  push:
    branches: [ master ]

jobs:
  tests-linux:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        node-version: [10.16.0, 10.x, 12.x, 14.x, 16.x]
    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v1
        with:
          node-version: ${{ matrix.node-version }}
      - name: Install module
        run: npm install
      - name: Run tests
        run: npm test
@ -0,0 +1,23 @@
name: lint

on:
  pull_request:
  push:
    branches: [ master ]

env:
  NODE_VERSION: 16.x

jobs:
  lint-js:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js ${{ env.NODE_VERSION }}
        uses: actions/setup-node@v1
        with:
          node-version: ${{ env.NODE_VERSION }}
      - name: Install ESLint + ESLint configs/plugins
        run: npm install --only=dev
      - name: Lint files
        run: npm run lint
@ -0,0 +1,19 @@
|
||||
Copyright Brian White. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
@ -0,0 +1,191 @@
|
||||
# Description
|
||||
|
||||
A node.js module for parsing incoming HTML form data.
|
||||
|
||||
Changes (breaking or otherwise) in v1.0.0 can be found [here](https://github.com/mscdex/busboy/issues/266).
|
||||
|
||||
# Requirements
|
||||
|
||||
* [node.js](http://nodejs.org/) -- v10.16.0 or newer
|
||||
|
||||
|
||||
# Install
|
||||
|
||||
npm install busboy
|
||||
|
||||
|
||||
# Examples
|
||||
|
||||
* Parsing (multipart) with default options:
|
||||
|
||||
```js
|
||||
const http = require('http');
|
||||
|
||||
const busboy = require('busboy');
|
||||
|
||||
http.createServer((req, res) => {
|
||||
if (req.method === 'POST') {
|
||||
console.log('POST request');
|
||||
const bb = busboy({ headers: req.headers });
|
||||
bb.on('file', (name, file, info) => {
|
||||
const { filename, encoding, mimeType } = info;
|
||||
console.log(
|
||||
`File [${name}]: filename: %j, encoding: %j, mimeType: %j`,
|
||||
filename,
|
||||
encoding,
|
||||
mimeType
|
||||
);
|
||||
file.on('data', (data) => {
|
||||
console.log(`File [${name}] got ${data.length} bytes`);
|
||||
}).on('close', () => {
|
||||
console.log(`File [${name}] done`);
|
||||
});
|
||||
});
|
||||
bb.on('field', (name, val, info) => {
|
||||
console.log(`Field [${name}]: value: %j`, val);
|
||||
});
|
||||
bb.on('close', () => {
|
||||
console.log('Done parsing form!');
|
||||
res.writeHead(303, { Connection: 'close', Location: '/' });
|
||||
res.end();
|
||||
});
|
||||
req.pipe(bb);
|
||||
} else if (req.method === 'GET') {
|
||||
res.writeHead(200, { Connection: 'close' });
|
||||
res.end(`
|
||||
<html>
|
||||
<head></head>
|
||||
<body>
|
||||
<form method="POST" enctype="multipart/form-data">
|
||||
<input type="file" name="filefield"><br />
|
||||
<input type="text" name="textfield"><br />
|
||||
<input type="submit">
|
||||
</form>
|
||||
</body>
|
||||
</html>
|
||||
`);
|
||||
}
|
||||
}).listen(8000, () => {
|
||||
console.log('Listening for requests');
|
||||
});
|
||||
|
||||
// Example output:
|
||||
//
|
||||
// Listening for requests
|
||||
// < ... form submitted ... >
|
||||
// POST request
|
||||
// File [filefield]: filename: "logo.jpg", encoding: "binary", mime: "image/jpeg"
|
||||
// File [filefield] got 11912 bytes
|
||||
// Field [textfield]: value: "testing! :-)"
|
||||
// File [filefield] done
|
||||
// Done parsing form!
|
||||
```
|
||||
|
||||
* Save all incoming files to disk:
|
||||
|
||||
```js
|
||||
const { randomFillSync } = require('crypto');
|
||||
const fs = require('fs');
|
||||
const http = require('http');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
|
||||
const busboy = require('busboy');
|
||||
|
||||
const random = (() => {
|
||||
const buf = Buffer.alloc(16);
|
||||
return () => randomFillSync(buf).toString('hex');
|
||||
})();
|
||||
|
||||
http.createServer((req, res) => {
|
||||
if (req.method === 'POST') {
|
||||
const bb = busboy({ headers: req.headers });
|
||||
bb.on('file', (name, file, info) => {
|
||||
const saveTo = path.join(os.tmpdir(), `busboy-upload-${random()}`);
|
||||
file.pipe(fs.createWriteStream(saveTo));
|
||||
});
|
||||
bb.on('close', () => {
|
||||
res.writeHead(200, { 'Connection': 'close' });
|
||||
res.end(`That's all folks!`);
|
||||
});
|
||||
req.pipe(bb);
|
||||
return;
|
||||
}
|
||||
res.writeHead(404);
|
||||
res.end();
|
||||
}).listen(8000, () => {
|
||||
console.log('Listening for requests');
|
||||
});
|
||||
```
|
||||
|
||||
|
||||
# API
|
||||
|
||||
## Exports
|
||||
|
||||
`busboy` exports a single function:
|
||||
|
||||
**( _function_ )**(< _object_ >config) - Creates and returns a new _Writable_ form parser stream.
|
||||
|
||||
* Valid `config` properties:
|
||||
|
||||
* **headers** - _object_ - These are the HTTP headers of the incoming request, which are used by individual parsers.
|
||||
|
||||
* **highWaterMark** - _integer_ - highWaterMark to use for the parser stream. **Default:** node's _stream.Writable_ default.
|
||||
|
||||
* **fileHwm** - _integer_ - highWaterMark to use for individual file streams. **Default:** node's _stream.Readable_ default.
|
||||
|
||||
* **defCharset** - _string_ - Default character set to use when one isn't defined. **Default:** `'utf8'`.
|
||||
|
||||
* **defParamCharset** - _string_ - For multipart forms, the default character set to use for values of part header parameters (e.g. filename) that are not extended parameters (that contain an explicit charset). **Default:** `'latin1'`.
|
||||
|
||||
* **preservePath** - _boolean_ - If paths in filenames from file parts in a `'multipart/form-data'` request shall be preserved. **Default:** `false`.
|
||||
|
||||
* **limits** - _object_ - Various limits on incoming data. Valid properties are:
|
||||
|
||||
* **fieldNameSize** - _integer_ - Max field name size (in bytes). **Default:** `100`.
|
||||
|
||||
* **fieldSize** - _integer_ - Max field value size (in bytes). **Default:** `1048576` (1MB).
|
||||
|
||||
* **fields** - _integer_ - Max number of non-file fields. **Default:** `Infinity`.
|
||||
|
||||
* **fileSize** - _integer_ - For multipart forms, the max file size (in bytes). **Default:** `Infinity`.
|
||||
|
||||
* **files** - _integer_ - For multipart forms, the max number of file fields. **Default:** `Infinity`.
|
||||
|
||||
* **parts** - _integer_ - For multipart forms, the max number of parts (fields + files). **Default:** `Infinity`.
|
||||
|
||||
* **headerPairs** - _integer_ - For multipart forms, the max number of header key-value pairs to parse. **Default:** `2000` (same as node's http module).
|
||||
|
||||
This function can throw exceptions if there is something wrong with the values in `config`. For example, if the Content-Type in `headers` is missing entirely, is not a supported type, or is missing the boundary for `'multipart/form-data'` requests.
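
For instance, a parser with a few of the limits above might be created like this (the boundary and the limit values are only illustrative):

```js
const busboy = require('busboy');

// Illustrative only: a parser with explicit limits and a known boundary.
const bb = busboy({
  headers: {
    'content-type': 'multipart/form-data; boundary=----exampleBoundary',
  },
  limits: {
    fields: 10,            // at most 10 non-file fields
    files: 2,              // at most 2 file parts
    fileSize: 1024 * 1024, // 1 MB per file; larger files are truncated
  },
});
bb.on('filesLimit', () => {
  console.log('limits.files reached; no more file parts will be emitted');
});
```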
|
||||
|
||||
## (Special) Parser stream events
|
||||
|
||||
* **file**(< _string_ >name, < _Readable_ >stream, < _object_ >info) - Emitted for each new file found. `name` contains the form field name. `stream` is a _Readable_ stream containing the file's data. No transformations/conversions (e.g. base64 to raw binary) are done on the file's data. `info` contains the following properties:
|
||||
|
||||
* `filename` - _string_ - If supplied, this contains the file's filename. **WARNING:** You should almost _never_ use this value as-is (especially if you are using `preservePath: true` in your `config`) as it could contain malicious input. You are better off generating your own (safe) filenames, or at the very least using a hash of the filename.
|
||||
|
||||
* `encoding` - _string_ - The file's `'Content-Transfer-Encoding'` value.
|
||||
|
||||
* `mimeType` - _string_ - The file's `'Content-Type'` value.
|
||||
|
||||
**Note:** If you listen for this event, you should always consume the `stream` whether you care about its contents or not (you can simply do `stream.resume();` if you want to discard/skip the contents), otherwise the `'finish'`/`'close'` event will never fire on the busboy parser stream.
|
||||
However, if you aren't accepting files, you can either simply not listen for the `'file'` event at all or set `limits.files` to `0`, and any/all files will be automatically skipped (these skipped files will still count towards any configured `limits.files` and `limits.parts` limits though).
|
||||
|
||||
**Note:** If a configured `limits.fileSize` limit was reached for a file, `stream` will both have a boolean property `truncated` set to `true` (best checked at the end of the stream) and emit a `'limit'` event to notify you when this happens.
|
||||
|
||||
* **field**(< _string_ >name, < _string_ >value, < _object_ >info) - Emitted for each new non-file field found. `name` contains the form field name. `value` contains the string value of the field. `info` contains the following properties:
|
||||
|
||||
* `nameTruncated` - _boolean_ - Whether `name` was truncated or not (due to a configured `limits.fieldNameSize` limit)
|
||||
|
||||
* `valueTruncated` - _boolean_ - Whether `value` was truncated or not (due to a configured `limits.fieldSize` limit)
|
||||
|
||||
* `encoding` - _string_ - The field's `'Content-Transfer-Encoding'` value.
|
||||
|
||||
* `mimeType` - _string_ - The field's `'Content-Type'` value.
|
||||
|
||||
* **partsLimit**() - Emitted when the configured `limits.parts` limit has been reached. No more `'file'` or `'field'` events will be emitted.
|
||||
|
||||
* **filesLimit**() - Emitted when the configured `limits.files` limit has been reached. No more `'file'` events will be emitted.
|
||||
|
||||
* **fieldsLimit**() - Emitted when the configured `limits.fields` limit has been reached. No more `'field'` events will be emitted.
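
Putting those notes together, a sketch of a handler that always drains file streams and reacts to a per-file size limit (the helper name, destination, and limit value are placeholders):

```js
const busboy = require('busboy');

// Sketch: drain every file stream and detect truncation due to limits.fileSize.
function createFormParser(headers) { // hypothetical helper
  const bb = busboy({ headers, limits: { fileSize: 5 * 1024 * 1024 } });
  bb.on('file', (name, stream, info) => {
    stream.on('limit', () => {
      console.warn(`File [${name}] hit the fileSize limit`);
    });
    stream.on('close', () => {
      if (stream.truncated)
        console.warn(`File [${name}] was truncated`);
    });
    stream.resume(); // discard contents; pipe to a real destination instead if needed
  });
  bb.on('field', (name, value) => {
    console.log(`Field [${name}]: %j`, value);
  });
  bb.on('close', () => {
    console.log('Done parsing form');
  });
  return bb;
}
```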
|
@ -0,0 +1,149 @@
|
||||
'use strict';
|
||||
|
||||
function createMultipartBuffers(boundary, sizes) {
|
||||
const bufs = [];
|
||||
for (let i = 0; i < sizes.length; ++i) {
|
||||
const mb = sizes[i] * 1024 * 1024;
|
||||
bufs.push(Buffer.from([
|
||||
`--${boundary}`,
|
||||
`content-disposition: form-data; name="field${i + 1}"`,
|
||||
'',
|
||||
'0'.repeat(mb),
|
||||
'',
|
||||
].join('\r\n')));
|
||||
}
|
||||
bufs.push(Buffer.from([
|
||||
`--${boundary}--`,
|
||||
'',
|
||||
].join('\r\n')));
|
||||
return bufs;
|
||||
}
|
||||
|
||||
const boundary = '-----------------------------168072824752491622650073';
|
||||
const buffers = createMultipartBuffers(boundary, [
|
||||
10,
|
||||
10,
|
||||
10,
|
||||
20,
|
||||
50,
|
||||
]);
|
||||
const calls = {
|
||||
partBegin: 0,
|
||||
headerField: 0,
|
||||
headerValue: 0,
|
||||
headerEnd: 0,
|
||||
headersEnd: 0,
|
||||
partData: 0,
|
||||
partEnd: 0,
|
||||
end: 0,
|
||||
};
|
||||
|
||||
const moduleName = process.argv[2];
|
||||
switch (moduleName) {
|
||||
case 'busboy': {
|
||||
const busboy = require('busboy');
|
||||
|
||||
const parser = busboy({
|
||||
limits: {
|
||||
fieldSizeLimit: Infinity,
|
||||
},
|
||||
headers: {
|
||||
'content-type': `multipart/form-data; boundary=${boundary}`,
|
||||
},
|
||||
});
|
||||
parser.on('field', (name, val, info) => {
|
||||
++calls.partBegin;
|
||||
++calls.partData;
|
||||
++calls.partEnd;
|
||||
}).on('close', () => {
|
||||
++calls.end;
|
||||
console.timeEnd(moduleName);
|
||||
});
|
||||
|
||||
console.time(moduleName);
|
||||
for (const buf of buffers)
|
||||
parser.write(buf);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'formidable': {
|
||||
const { MultipartParser } = require('formidable');
|
||||
|
||||
const parser = new MultipartParser();
|
||||
parser.initWithBoundary(boundary);
|
||||
parser.on('data', ({ name }) => {
|
||||
++calls[name];
|
||||
if (name === 'end')
|
||||
console.timeEnd(moduleName);
|
||||
});
|
||||
|
||||
console.time(moduleName);
|
||||
for (const buf of buffers)
|
||||
parser.write(buf);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 'multiparty': {
|
||||
const { Readable } = require('stream');
|
||||
|
||||
const { Form } = require('multiparty');
|
||||
|
||||
const form = new Form({
|
||||
maxFieldsSize: Infinity,
|
||||
maxFields: Infinity,
|
||||
maxFilesSize: Infinity,
|
||||
autoFields: false,
|
||||
autoFiles: false,
|
||||
});
|
||||
|
||||
const req = new Readable({ read: () => {} });
|
||||
req.headers = {
|
||||
'content-type': `multipart/form-data; boundary=${boundary}`,
|
||||
};
|
||||
|
||||
function hijack(name, fn) {
|
||||
const oldFn = form[name];
|
||||
form[name] = function() {
|
||||
fn();
|
||||
return oldFn.apply(this, arguments);
|
||||
};
|
||||
}
|
||||
|
||||
hijack('onParseHeaderField', () => {
|
||||
++calls.headerField;
|
||||
});
|
||||
hijack('onParseHeaderValue', () => {
|
||||
++calls.headerValue;
|
||||
});
|
||||
hijack('onParsePartBegin', () => {
|
||||
++calls.partBegin;
|
||||
});
|
||||
hijack('onParsePartData', () => {
|
||||
++calls.partData;
|
||||
});
|
||||
hijack('onParsePartEnd', () => {
|
||||
++calls.partEnd;
|
||||
});
|
||||
|
||||
form.on('close', () => {
|
||||
++calls.end;
|
||||
console.timeEnd(moduleName);
|
||||
}).on('part', (p) => p.resume());
|
||||
|
||||
console.time(moduleName);
|
||||
form.parse(req);
|
||||
for (const buf of buffers)
|
||||
req.push(buf);
|
||||
req.push(null);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
if (moduleName === undefined)
|
||||
console.error('Missing parser module name');
|
||||
else
|
||||
console.error(`Invalid parser module name: ${moduleName}`);
|
||||
process.exit(1);
|
||||
}
|
@ -0,0 +1,143 @@
|
||||
'use strict';
|
||||
|
||||
function createMultipartBuffers(boundary, sizes) {
|
||||
const bufs = [];
|
||||
for (let i = 0; i < sizes.length; ++i) {
|
||||
const mb = sizes[i] * 1024 * 1024;
|
||||
bufs.push(Buffer.from([
|
||||
`--${boundary}`,
|
||||
`content-disposition: form-data; name="field${i + 1}"`,
|
||||
'',
|
||||
'0'.repeat(mb),
|
||||
'',
|
||||
].join('\r\n')));
|
||||
}
|
||||
bufs.push(Buffer.from([
|
||||
`--${boundary}--`,
|
||||
'',
|
||||
].join('\r\n')));
|
||||
return bufs;
|
||||
}
|
||||
|
||||
const boundary = '-----------------------------168072824752491622650073';
|
||||
const buffers = createMultipartBuffers(boundary, (new Array(100)).fill(1));
|
||||
const calls = {
|
||||
partBegin: 0,
|
||||
headerField: 0,
|
||||
headerValue: 0,
|
||||
headerEnd: 0,
|
||||
headersEnd: 0,
|
||||
partData: 0,
|
||||
partEnd: 0,
|
||||
end: 0,
|
||||
};
|
||||
|
||||
const moduleName = process.argv[2];
|
||||
switch (moduleName) {
|
||||
case 'busboy': {
|
||||
const busboy = require('busboy');
|
||||
|
||||
const parser = busboy({
|
||||
limits: {
|
||||
fieldSizeLimit: Infinity,
|
||||
},
|
||||
headers: {
|
||||
'content-type': `multipart/form-data; boundary=${boundary}`,
|
||||
},
|
||||
});
|
||||
parser.on('field', (name, val, info) => {
|
||||
++calls.partBegin;
|
||||
++calls.partData;
|
||||
++calls.partEnd;
|
||||
}).on('close', () => {
|
||||
++calls.end;
|
||||
console.timeEnd(moduleName);
|
||||
});
|
||||
|
||||
console.time(moduleName);
|
||||
for (const buf of buffers)
|
||||
parser.write(buf);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'formidable': {
|
||||
const { MultipartParser } = require('formidable');
|
||||
|
||||
const parser = new MultipartParser();
|
||||
parser.initWithBoundary(boundary);
|
||||
parser.on('data', ({ name }) => {
|
||||
++calls[name];
|
||||
if (name === 'end')
|
||||
console.timeEnd(moduleName);
|
||||
});
|
||||
|
||||
console.time(moduleName);
|
||||
for (const buf of buffers)
|
||||
parser.write(buf);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 'multiparty': {
|
||||
const { Readable } = require('stream');
|
||||
|
||||
const { Form } = require('multiparty');
|
||||
|
||||
const form = new Form({
|
||||
maxFieldsSize: Infinity,
|
||||
maxFields: Infinity,
|
||||
maxFilesSize: Infinity,
|
||||
autoFields: false,
|
||||
autoFiles: false,
|
||||
});
|
||||
|
||||
const req = new Readable({ read: () => {} });
|
||||
req.headers = {
|
||||
'content-type': `multipart/form-data; boundary=${boundary}`,
|
||||
};
|
||||
|
||||
function hijack(name, fn) {
|
||||
const oldFn = form[name];
|
||||
form[name] = function() {
|
||||
fn();
|
||||
return oldFn.apply(this, arguments);
|
||||
};
|
||||
}
|
||||
|
||||
hijack('onParseHeaderField', () => {
|
||||
++calls.headerField;
|
||||
});
|
||||
hijack('onParseHeaderValue', () => {
|
||||
++calls.headerValue;
|
||||
});
|
||||
hijack('onParsePartBegin', () => {
|
||||
++calls.partBegin;
|
||||
});
|
||||
hijack('onParsePartData', () => {
|
||||
++calls.partData;
|
||||
});
|
||||
hijack('onParsePartEnd', () => {
|
||||
++calls.partEnd;
|
||||
});
|
||||
|
||||
form.on('close', () => {
|
||||
++calls.end;
|
||||
console.timeEnd(moduleName);
|
||||
}).on('part', (p) => p.resume());
|
||||
|
||||
console.time(moduleName);
|
||||
form.parse(req);
|
||||
for (const buf of buffers)
|
||||
req.push(buf);
|
||||
req.push(null);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
if (moduleName === undefined)
|
||||
console.error('Missing parser module name');
|
||||
else
|
||||
console.error(`Invalid parser module name: ${moduleName}`);
|
||||
process.exit(1);
|
||||
}
|
@ -0,0 +1,154 @@
|
||||
'use strict';
|
||||
|
||||
function createMultipartBuffers(boundary, sizes) {
|
||||
const bufs = [];
|
||||
for (let i = 0; i < sizes.length; ++i) {
|
||||
const mb = sizes[i] * 1024 * 1024;
|
||||
bufs.push(Buffer.from([
|
||||
`--${boundary}`,
|
||||
`content-disposition: form-data; name="file${i + 1}"; `
|
||||
+ `filename="random${i + 1}.bin"`,
|
||||
'content-type: application/octet-stream',
|
||||
'',
|
||||
'0'.repeat(mb),
|
||||
'',
|
||||
].join('\r\n')));
|
||||
}
|
||||
bufs.push(Buffer.from([
|
||||
`--${boundary}--`,
|
||||
'',
|
||||
].join('\r\n')));
|
||||
return bufs;
|
||||
}
|
||||
|
||||
const boundary = '-----------------------------168072824752491622650073';
|
||||
const buffers = createMultipartBuffers(boundary, [
|
||||
10,
|
||||
10,
|
||||
10,
|
||||
20,
|
||||
50,
|
||||
]);
|
||||
const calls = {
|
||||
partBegin: 0,
|
||||
headerField: 0,
|
||||
headerValue: 0,
|
||||
headerEnd: 0,
|
||||
headersEnd: 0,
|
||||
partData: 0,
|
||||
partEnd: 0,
|
||||
end: 0,
|
||||
};
|
||||
|
||||
const moduleName = process.argv[2];
|
||||
switch (moduleName) {
|
||||
case 'busboy': {
|
||||
const busboy = require('busboy');
|
||||
|
||||
const parser = busboy({
|
||||
limits: {
|
||||
fieldSizeLimit: Infinity,
|
||||
},
|
||||
headers: {
|
||||
'content-type': `multipart/form-data; boundary=${boundary}`,
|
||||
},
|
||||
});
|
||||
parser.on('file', (name, stream, info) => {
|
||||
++calls.partBegin;
|
||||
stream.on('data', (chunk) => {
|
||||
++calls.partData;
|
||||
}).on('end', () => {
|
||||
++calls.partEnd;
|
||||
});
|
||||
}).on('close', () => {
|
||||
++calls.end;
|
||||
console.timeEnd(moduleName);
|
||||
});
|
||||
|
||||
console.time(moduleName);
|
||||
for (const buf of buffers)
|
||||
parser.write(buf);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'formidable': {
|
||||
const { MultipartParser } = require('formidable');
|
||||
|
||||
const parser = new MultipartParser();
|
||||
parser.initWithBoundary(boundary);
|
||||
parser.on('data', ({ name }) => {
|
||||
++calls[name];
|
||||
if (name === 'end')
|
||||
console.timeEnd(moduleName);
|
||||
});
|
||||
|
||||
console.time(moduleName);
|
||||
for (const buf of buffers)
|
||||
parser.write(buf);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 'multiparty': {
|
||||
const { Readable } = require('stream');
|
||||
|
||||
const { Form } = require('multiparty');
|
||||
|
||||
const form = new Form({
|
||||
maxFieldsSize: Infinity,
|
||||
maxFields: Infinity,
|
||||
maxFilesSize: Infinity,
|
||||
autoFields: false,
|
||||
autoFiles: false,
|
||||
});
|
||||
|
||||
const req = new Readable({ read: () => {} });
|
||||
req.headers = {
|
||||
'content-type': `multipart/form-data; boundary=${boundary}`,
|
||||
};
|
||||
|
||||
function hijack(name, fn) {
|
||||
const oldFn = form[name];
|
||||
form[name] = function() {
|
||||
fn();
|
||||
return oldFn.apply(this, arguments);
|
||||
};
|
||||
}
|
||||
|
||||
hijack('onParseHeaderField', () => {
|
||||
++calls.headerField;
|
||||
});
|
||||
hijack('onParseHeaderValue', () => {
|
||||
++calls.headerValue;
|
||||
});
|
||||
hijack('onParsePartBegin', () => {
|
||||
++calls.partBegin;
|
||||
});
|
||||
hijack('onParsePartData', () => {
|
||||
++calls.partData;
|
||||
});
|
||||
hijack('onParsePartEnd', () => {
|
||||
++calls.partEnd;
|
||||
});
|
||||
|
||||
form.on('close', () => {
|
||||
++calls.end;
|
||||
console.timeEnd(moduleName);
|
||||
}).on('part', (p) => p.resume());
|
||||
|
||||
console.time(moduleName);
|
||||
form.parse(req);
|
||||
for (const buf of buffers)
|
||||
req.push(buf);
|
||||
req.push(null);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
if (moduleName === undefined)
|
||||
console.error('Missing parser module name');
|
||||
else
|
||||
console.error(`Invalid parser module name: ${moduleName}`);
|
||||
process.exit(1);
|
||||
}
|
@ -0,0 +1,148 @@
|
||||
'use strict';
|
||||
|
||||
function createMultipartBuffers(boundary, sizes) {
|
||||
const bufs = [];
|
||||
for (let i = 0; i < sizes.length; ++i) {
|
||||
const mb = sizes[i] * 1024 * 1024;
|
||||
bufs.push(Buffer.from([
|
||||
`--${boundary}`,
|
||||
`content-disposition: form-data; name="file${i + 1}"; `
|
||||
+ `filename="random${i + 1}.bin"`,
|
||||
'content-type: application/octet-stream',
|
||||
'',
|
||||
'0'.repeat(mb),
|
||||
'',
|
||||
].join('\r\n')));
|
||||
}
|
||||
bufs.push(Buffer.from([
|
||||
`--${boundary}--`,
|
||||
'',
|
||||
].join('\r\n')));
|
||||
return bufs;
|
||||
}
|
||||
|
||||
const boundary = '-----------------------------168072824752491622650073';
|
||||
const buffers = createMultipartBuffers(boundary, (new Array(100)).fill(1));
|
||||
const calls = {
|
||||
partBegin: 0,
|
||||
headerField: 0,
|
||||
headerValue: 0,
|
||||
headerEnd: 0,
|
||||
headersEnd: 0,
|
||||
partData: 0,
|
||||
partEnd: 0,
|
||||
end: 0,
|
||||
};
|
||||
|
||||
const moduleName = process.argv[2];
|
||||
switch (moduleName) {
|
||||
case 'busboy': {
|
||||
const busboy = require('busboy');
|
||||
|
||||
const parser = busboy({
|
||||
limits: {
|
||||
fieldSizeLimit: Infinity,
|
||||
},
|
||||
headers: {
|
||||
'content-type': `multipart/form-data; boundary=${boundary}`,
|
||||
},
|
||||
});
|
||||
parser.on('file', (name, stream, info) => {
|
||||
++calls.partBegin;
|
||||
stream.on('data', (chunk) => {
|
||||
++calls.partData;
|
||||
}).on('end', () => {
|
||||
++calls.partEnd;
|
||||
});
|
||||
}).on('close', () => {
|
||||
++calls.end;
|
||||
console.timeEnd(moduleName);
|
||||
});
|
||||
|
||||
console.time(moduleName);
|
||||
for (const buf of buffers)
|
||||
parser.write(buf);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'formidable': {
|
||||
const { MultipartParser } = require('formidable');
|
||||
|
||||
const parser = new MultipartParser();
|
||||
parser.initWithBoundary(boundary);
|
||||
parser.on('data', ({ name }) => {
|
||||
++calls[name];
|
||||
if (name === 'end')
|
||||
console.timeEnd(moduleName);
|
||||
});
|
||||
|
||||
console.time(moduleName);
|
||||
for (const buf of buffers)
|
||||
parser.write(buf);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 'multiparty': {
|
||||
const { Readable } = require('stream');
|
||||
|
||||
const { Form } = require('multiparty');
|
||||
|
||||
const form = new Form({
|
||||
maxFieldsSize: Infinity,
|
||||
maxFields: Infinity,
|
||||
maxFilesSize: Infinity,
|
||||
autoFields: false,
|
||||
autoFiles: false,
|
||||
});
|
||||
|
||||
const req = new Readable({ read: () => {} });
|
||||
req.headers = {
|
||||
'content-type': `multipart/form-data; boundary=${boundary}`,
|
||||
};
|
||||
|
||||
function hijack(name, fn) {
|
||||
const oldFn = form[name];
|
||||
form[name] = function() {
|
||||
fn();
|
||||
return oldFn.apply(this, arguments);
|
||||
};
|
||||
}
|
||||
|
||||
hijack('onParseHeaderField', () => {
|
||||
++calls.headerField;
|
||||
});
|
||||
hijack('onParseHeaderValue', () => {
|
||||
++calls.headerValue;
|
||||
});
|
||||
hijack('onParsePartBegin', () => {
|
||||
++calls.partBegin;
|
||||
});
|
||||
hijack('onParsePartData', () => {
|
||||
++calls.partData;
|
||||
});
|
||||
hijack('onParsePartEnd', () => {
|
||||
++calls.partEnd;
|
||||
});
|
||||
|
||||
form.on('close', () => {
|
||||
++calls.end;
|
||||
console.timeEnd(moduleName);
|
||||
}).on('part', (p) => p.resume());
|
||||
|
||||
console.time(moduleName);
|
||||
form.parse(req);
|
||||
for (const buf of buffers)
|
||||
req.push(buf);
|
||||
req.push(null);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
if (moduleName === undefined)
|
||||
console.error('Missing parser module name');
|
||||
else
|
||||
console.error(`Invalid parser module name: ${moduleName}`);
|
||||
process.exit(1);
|
||||
}
|
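The benchmark above builds its multipart payload entirely in memory with `createMultipartBuffers()` and writes the buffers straight into each parser, so no HTTP server is involved. The sketch below (not part of the benchmark) shows the same idea end to end on a tiny payload using busboy's documented events; the boundary string and field names are made up for illustration.

'use strict';

// Hand-build a small multipart/form-data body and feed it directly to busboy.
const busboy = require('busboy');

const boundary = 'xyz123'; // illustrative boundary value
const body = Buffer.from([
  `--${boundary}`,
  'content-disposition: form-data; name="file1"; filename="a.bin"',
  'content-type: application/octet-stream',
  '',
  'hello world',
  `--${boundary}--`,
  '',
].join('\r\n'));

const bb = busboy({
  headers: { 'content-type': `multipart/form-data; boundary=${boundary}` },
});

bb.on('file', (name, stream, info) => {
  let bytes = 0;
  stream.on('data', (chunk) => { bytes += chunk.length; })
        .on('end', () => console.log(`${name} (${info.filename}): ${bytes} bytes`));
}).on('close', () => console.log('done'));

bb.write(body);
bb.end();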
@ -0,0 +1,101 @@
|
||||
'use strict';
|
||||
|
||||
const buffers = [
|
||||
Buffer.from(
|
||||
(new Array(100)).fill('').map((_, i) => `key${i}=value${i}`).join('&')
|
||||
),
|
||||
];
|
||||
const calls = {
|
||||
field: 0,
|
||||
end: 0,
|
||||
};
|
||||
|
||||
let n = 3e3;
|
||||
|
||||
const moduleName = process.argv[2];
|
||||
switch (moduleName) {
|
||||
case 'busboy': {
|
||||
const busboy = require('busboy');
|
||||
|
||||
console.time(moduleName);
|
||||
(function next() {
|
||||
const parser = busboy({
|
||||
limits: {
|
||||
fieldSizeLimit: Infinity,
|
||||
},
|
||||
headers: {
|
||||
'content-type': 'application/x-www-form-urlencoded; charset=utf-8',
|
||||
},
|
||||
});
|
||||
parser.on('field', (name, val, info) => {
|
||||
++calls.field;
|
||||
}).on('close', () => {
|
||||
++calls.end;
|
||||
if (--n === 0)
|
||||
console.timeEnd(moduleName);
|
||||
else
|
||||
process.nextTick(next);
|
||||
});
|
||||
|
||||
for (const buf of buffers)
|
||||
parser.write(buf);
|
||||
parser.end();
|
||||
})();
|
||||
break;
|
||||
}
|
||||
|
||||
case 'formidable': {
|
||||
const QuerystringParser =
|
||||
require('formidable/src/parsers/Querystring.js');
|
||||
|
||||
console.time(moduleName);
|
||||
(function next() {
|
||||
const parser = new QuerystringParser();
|
||||
parser.on('data', (obj) => {
|
||||
++calls.field;
|
||||
}).on('end', () => {
|
||||
++calls.end;
|
||||
if (--n === 0)
|
||||
console.timeEnd(moduleName);
|
||||
else
|
||||
process.nextTick(next);
|
||||
});
|
||||
|
||||
for (const buf of buffers)
|
||||
parser.write(buf);
|
||||
parser.end();
|
||||
})();
|
||||
break;
|
||||
}
|
||||
|
||||
case 'formidable-streaming': {
|
||||
const QuerystringParser =
|
||||
require('formidable/src/parsers/StreamingQuerystring.js');
|
||||
|
||||
console.time(moduleName);
|
||||
(function next() {
|
||||
const parser = new QuerystringParser();
|
||||
parser.on('data', (obj) => {
|
||||
++calls.field;
|
||||
}).on('end', () => {
|
||||
++calls.end;
|
||||
if (--n === 0)
|
||||
console.timeEnd(moduleName);
|
||||
else
|
||||
process.nextTick(next);
|
||||
});
|
||||
|
||||
for (const buf of buffers)
|
||||
parser.write(buf);
|
||||
parser.end();
|
||||
})();
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
if (moduleName === undefined)
|
||||
console.error('Missing parser module name');
|
||||
else
|
||||
console.error(`Invalid parser module name: ${moduleName}`);
|
||||
process.exit(1);
|
||||
}
|
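The urlencoded benchmark above creates a fresh parser per iteration and kicks off the next run from the completion handler via `process.nextTick`, so all 3000 iterations run strictly one after another without growing the call stack. A minimal sketch of that scheduling pattern in isolation, with a stand-in for the per-iteration work:

'use strict';

// Run an async unit of work n times in sequence, scheduling the next run
// with process.nextTick from the completion callback so the stack stays flat.
let n = 5; // the benchmark above uses 3e3

function runOnce(done) {
  // Stand-in for "create a parser, write the buffers, wait for 'close'".
  setImmediate(done);
}

console.time('total');
(function next() {
  runOnce(() => {
    if (--n === 0)
      console.timeEnd('total');
    else
      process.nextTick(next);
  });
})();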
jiedui/node_modules/busboy/bench/bench-urlencoded-fields-900pairs-small-alt.js
@ -0,0 +1,84 @@
|
||||
'use strict';
|
||||
|
||||
const buffers = [
|
||||
Buffer.from(
|
||||
(new Array(900)).fill('').map((_, i) => `key${i}=value${i}`).join('&')
|
||||
),
|
||||
];
|
||||
const calls = {
|
||||
field: 0,
|
||||
end: 0,
|
||||
};
|
||||
|
||||
const moduleName = process.argv[2];
|
||||
switch (moduleName) {
|
||||
case 'busboy': {
|
||||
const busboy = require('busboy');
|
||||
|
||||
console.time(moduleName);
|
||||
const parser = busboy({
|
||||
limits: {
|
||||
fieldSizeLimit: Infinity,
|
||||
},
|
||||
headers: {
|
||||
'content-type': 'application/x-www-form-urlencoded; charset=utf-8',
|
||||
},
|
||||
});
|
||||
parser.on('field', (name, val, info) => {
|
||||
++calls.field;
|
||||
}).on('close', () => {
|
||||
++calls.end;
|
||||
console.timeEnd(moduleName);
|
||||
});
|
||||
|
||||
for (const buf of buffers)
|
||||
parser.write(buf);
|
||||
parser.end();
|
||||
break;
|
||||
}
|
||||
|
||||
case 'formidable': {
|
||||
const QuerystringParser =
|
||||
require('formidable/src/parsers/Querystring.js');
|
||||
|
||||
console.time(moduleName);
|
||||
const parser = new QuerystringParser();
|
||||
parser.on('data', (obj) => {
|
||||
++calls.field;
|
||||
}).on('end', () => {
|
||||
++calls.end;
|
||||
console.timeEnd(moduleName);
|
||||
});
|
||||
|
||||
for (const buf of buffers)
|
||||
parser.write(buf);
|
||||
parser.end();
|
||||
break;
|
||||
}
|
||||
|
||||
case 'formidable-streaming': {
|
||||
const QuerystringParser =
|
||||
require('formidable/src/parsers/StreamingQuerystring.js');
|
||||
|
||||
console.time(moduleName);
|
||||
const parser = new QuerystringParser();
|
||||
parser.on('data', (obj) => {
|
||||
++calls.field;
|
||||
}).on('end', () => {
|
||||
++calls.end;
|
||||
console.timeEnd(moduleName);
|
||||
});
|
||||
|
||||
for (const buf of buffers)
|
||||
parser.write(buf);
|
||||
parser.end();
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
if (moduleName === undefined)
|
||||
console.error('Missing parser module name');
|
||||
else
|
||||
console.error(`Invalid parser module name: ${moduleName}`);
|
||||
process.exit(1);
|
||||
}
|
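Both urlencoded benchmarks above just count `'field'` and `'close'` events while feeding a pre-built `key=value&...` buffer into the parser. The sketch below shows the same API on a small payload and collects the parsed fields; the keys and values are illustrative.

'use strict';

// Parse an application/x-www-form-urlencoded body with busboy.
const busboy = require('busboy');

const body = Buffer.from('key0=value0&key1=value1&greeting=hello%20world&flag');

const bb = busboy({
  headers: {
    'content-type': 'application/x-www-form-urlencoded; charset=utf-8',
  },
});

const fields = {};
bb.on('field', (name, value, info) => {
  fields[name] = value; // info also carries nameTruncated / valueTruncated
}).on('close', () => {
  // { key0: 'value0', key1: 'value1', greeting: 'hello world', flag: '' }
  console.log(fields);
});

bb.write(body);
bb.end();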
@ -0,0 +1,57 @@
|
||||
'use strict';
|
||||
|
||||
const { parseContentType } = require('./utils.js');
|
||||
|
||||
function getInstance(cfg) {
|
||||
const headers = cfg.headers;
|
||||
const conType = parseContentType(headers['content-type']);
|
||||
if (!conType)
|
||||
throw new Error('Malformed content type');
|
||||
|
||||
for (const type of TYPES) {
|
||||
const matched = type.detect(conType);
|
||||
if (!matched)
|
||||
continue;
|
||||
|
||||
const instanceCfg = {
|
||||
limits: cfg.limits,
|
||||
headers,
|
||||
conType,
|
||||
highWaterMark: undefined,
|
||||
fileHwm: undefined,
|
||||
defCharset: undefined,
|
||||
defParamCharset: undefined,
|
||||
preservePath: false,
|
||||
};
|
||||
if (cfg.highWaterMark)
|
||||
instanceCfg.highWaterMark = cfg.highWaterMark;
|
||||
if (cfg.fileHwm)
|
||||
instanceCfg.fileHwm = cfg.fileHwm;
|
||||
instanceCfg.defCharset = cfg.defCharset;
|
||||
instanceCfg.defParamCharset = cfg.defParamCharset;
|
||||
instanceCfg.preservePath = cfg.preservePath;
|
||||
return new type(instanceCfg);
|
||||
}
|
||||
|
||||
throw new Error(`Unsupported content type: ${headers['content-type']}`);
|
||||
}
|
||||
|
||||
// Note: types are explicitly listed here for easier bundling
|
||||
// See: https://github.com/mscdex/busboy/issues/121
|
||||
const TYPES = [
|
||||
require('./types/multipart'),
|
||||
require('./types/urlencoded'),
|
||||
].filter(function(typemod) { return typeof typemod.detect === 'function'; });
|
||||
|
||||
module.exports = (cfg) => {
|
||||
if (typeof cfg !== 'object' || cfg === null)
|
||||
cfg = {};
|
||||
|
||||
if (typeof cfg.headers !== 'object'
|
||||
|| cfg.headers === null
|
||||
|| typeof cfg.headers['content-type'] !== 'string') {
|
||||
throw new Error('Missing Content-Type');
|
||||
}
|
||||
|
||||
return getInstance(cfg);
|
||||
};
|
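The factory above parses the supplied Content-Type header, asks each entry in `TYPES` whether it handles it via the type's static `detect()`, and constructs the matching parser with a normalized config; anything else throws. A minimal sketch of how that dispatch looks from the caller's side (the error messages match the throws in the module above):

'use strict';

const busboy = require('busboy');

const mp = busboy({
  headers: { 'content-type': 'multipart/form-data; boundary=abc' },
});
console.log(mp.constructor.name); // 'Multipart'

const urlenc = busboy({
  headers: { 'content-type': 'application/x-www-form-urlencoded' },
});
console.log(urlenc.constructor.name); // 'URLEncoded'

try {
  busboy({ headers: { 'content-type': 'text/plain' } });
} catch (err) {
  console.log(err.message); // 'Unsupported content type: text/plain'
}

try {
  busboy({});
} catch (err) {
  console.log(err.message); // 'Missing Content-Type'
}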
@ -0,0 +1,653 @@
|
||||
'use strict';
|
||||
|
||||
const { Readable, Writable } = require('stream');
|
||||
|
||||
const StreamSearch = require('streamsearch');
|
||||
|
||||
const {
|
||||
basename,
|
||||
convertToUTF8,
|
||||
getDecoder,
|
||||
parseContentType,
|
||||
parseDisposition,
|
||||
} = require('../utils.js');
|
||||
|
||||
const BUF_CRLF = Buffer.from('\r\n');
|
||||
const BUF_CR = Buffer.from('\r');
|
||||
const BUF_DASH = Buffer.from('-');
|
||||
|
||||
function noop() {}
|
||||
|
||||
const MAX_HEADER_PAIRS = 2000; // From node
|
||||
const MAX_HEADER_SIZE = 16 * 1024; // From node (its default value)
|
||||
|
||||
const HPARSER_NAME = 0;
|
||||
const HPARSER_PRE_OWS = 1;
|
||||
const HPARSER_VALUE = 2;
|
||||
class HeaderParser {
|
||||
constructor(cb) {
|
||||
this.header = Object.create(null);
|
||||
this.pairCount = 0;
|
||||
this.byteCount = 0;
|
||||
this.state = HPARSER_NAME;
|
||||
this.name = '';
|
||||
this.value = '';
|
||||
this.crlf = 0;
|
||||
this.cb = cb;
|
||||
}
|
||||
|
||||
reset() {
|
||||
this.header = Object.create(null);
|
||||
this.pairCount = 0;
|
||||
this.byteCount = 0;
|
||||
this.state = HPARSER_NAME;
|
||||
this.name = '';
|
||||
this.value = '';
|
||||
this.crlf = 0;
|
||||
}
|
||||
|
||||
push(chunk, pos, end) {
|
||||
let start = pos;
|
||||
while (pos < end) {
|
||||
switch (this.state) {
|
||||
case HPARSER_NAME: {
|
||||
let done = false;
|
||||
for (; pos < end; ++pos) {
|
||||
if (this.byteCount === MAX_HEADER_SIZE)
|
||||
return -1;
|
||||
++this.byteCount;
|
||||
const code = chunk[pos];
|
||||
if (TOKEN[code] !== 1) {
|
||||
if (code !== 58/* ':' */)
|
||||
return -1;
|
||||
this.name += chunk.latin1Slice(start, pos);
|
||||
if (this.name.length === 0)
|
||||
return -1;
|
||||
++pos;
|
||||
done = true;
|
||||
this.state = HPARSER_PRE_OWS;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!done) {
|
||||
this.name += chunk.latin1Slice(start, pos);
|
||||
break;
|
||||
}
|
||||
// FALLTHROUGH
|
||||
}
|
||||
case HPARSER_PRE_OWS: {
|
||||
// Skip optional whitespace
|
||||
let done = false;
|
||||
for (; pos < end; ++pos) {
|
||||
if (this.byteCount === MAX_HEADER_SIZE)
|
||||
return -1;
|
||||
++this.byteCount;
|
||||
const code = chunk[pos];
|
||||
if (code !== 32/* ' ' */ && code !== 9/* '\t' */) {
|
||||
start = pos;
|
||||
done = true;
|
||||
this.state = HPARSER_VALUE;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!done)
|
||||
break;
|
||||
// FALLTHROUGH
|
||||
}
|
||||
case HPARSER_VALUE:
|
||||
switch (this.crlf) {
|
||||
case 0: // Nothing yet
|
||||
for (; pos < end; ++pos) {
|
||||
if (this.byteCount === MAX_HEADER_SIZE)
|
||||
return -1;
|
||||
++this.byteCount;
|
||||
const code = chunk[pos];
|
||||
if (FIELD_VCHAR[code] !== 1) {
|
||||
if (code !== 13/* '\r' */)
|
||||
return -1;
|
||||
++this.crlf;
|
||||
break;
|
||||
}
|
||||
}
|
||||
this.value += chunk.latin1Slice(start, pos++);
|
||||
break;
|
||||
case 1: // Received CR
|
||||
if (this.byteCount === MAX_HEADER_SIZE)
|
||||
return -1;
|
||||
++this.byteCount;
|
||||
if (chunk[pos++] !== 10/* '\n' */)
|
||||
return -1;
|
||||
++this.crlf;
|
||||
break;
|
||||
case 2: { // Received CR LF
|
||||
if (this.byteCount === MAX_HEADER_SIZE)
|
||||
return -1;
|
||||
++this.byteCount;
|
||||
const code = chunk[pos];
|
||||
if (code === 32/* ' ' */ || code === 9/* '\t' */) {
|
||||
// Folded value
|
||||
start = pos;
|
||||
this.crlf = 0;
|
||||
} else {
|
||||
if (++this.pairCount < MAX_HEADER_PAIRS) {
|
||||
this.name = this.name.toLowerCase();
|
||||
if (this.header[this.name] === undefined)
|
||||
this.header[this.name] = [this.value];
|
||||
else
|
||||
this.header[this.name].push(this.value);
|
||||
}
|
||||
if (code === 13/* '\r' */) {
|
||||
++this.crlf;
|
||||
++pos;
|
||||
} else {
|
||||
// Assume start of next header field name
|
||||
start = pos;
|
||||
this.crlf = 0;
|
||||
this.state = HPARSER_NAME;
|
||||
this.name = '';
|
||||
this.value = '';
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 3: { // Received CR LF CR
|
||||
if (this.byteCount === MAX_HEADER_SIZE)
|
||||
return -1;
|
||||
++this.byteCount;
|
||||
if (chunk[pos++] !== 10/* '\n' */)
|
||||
return -1;
|
||||
// End of header
|
||||
const header = this.header;
|
||||
this.reset();
|
||||
this.cb(header);
|
||||
return pos;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return pos;
|
||||
}
|
||||
}
|
||||
|
||||
class FileStream extends Readable {
|
||||
constructor(opts, owner) {
|
||||
super(opts);
|
||||
this.truncated = false;
|
||||
this._readcb = null;
|
||||
this.once('end', () => {
|
||||
// We need to make sure that we call any outstanding _writecb() that is
|
||||
// associated with this file so that processing of the rest of the form
|
||||
// can continue. This may not happen if the file stream ends right after
|
||||
// backpressure kicks in, so we force it here.
|
||||
this._read();
|
||||
if (--owner._fileEndsLeft === 0 && owner._finalcb) {
|
||||
const cb = owner._finalcb;
|
||||
owner._finalcb = null;
|
||||
// Make sure other 'end' event handlers get a chance to be executed
|
||||
// before busboy's 'finish' event is emitted
|
||||
process.nextTick(cb);
|
||||
}
|
||||
});
|
||||
}
|
||||
_read(n) {
|
||||
const cb = this._readcb;
|
||||
if (cb) {
|
||||
this._readcb = null;
|
||||
cb();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const ignoreData = {
|
||||
push: (chunk, pos) => {},
|
||||
destroy: () => {},
|
||||
};
|
||||
|
||||
function callAndUnsetCb(self, err) {
|
||||
const cb = self._writecb;
|
||||
self._writecb = null;
|
||||
if (err)
|
||||
self.destroy(err);
|
||||
else if (cb)
|
||||
cb();
|
||||
}
|
||||
|
||||
function nullDecoder(val, hint) {
|
||||
return val;
|
||||
}
|
||||
|
||||
class Multipart extends Writable {
|
||||
constructor(cfg) {
|
||||
const streamOpts = {
|
||||
autoDestroy: true,
|
||||
emitClose: true,
|
||||
highWaterMark: (typeof cfg.highWaterMark === 'number'
|
||||
? cfg.highWaterMark
|
||||
: undefined),
|
||||
};
|
||||
super(streamOpts);
|
||||
|
||||
if (!cfg.conType.params || typeof cfg.conType.params.boundary !== 'string')
|
||||
throw new Error('Multipart: Boundary not found');
|
||||
|
||||
const boundary = cfg.conType.params.boundary;
|
||||
const paramDecoder = (typeof cfg.defParamCharset === 'string'
|
||||
&& cfg.defParamCharset
|
||||
? getDecoder(cfg.defParamCharset)
|
||||
: nullDecoder);
|
||||
const defCharset = (cfg.defCharset || 'utf8');
|
||||
const preservePath = cfg.preservePath;
|
||||
const fileOpts = {
|
||||
autoDestroy: true,
|
||||
emitClose: true,
|
||||
highWaterMark: (typeof cfg.fileHwm === 'number'
|
||||
? cfg.fileHwm
|
||||
: undefined),
|
||||
};
|
||||
|
||||
const limits = cfg.limits;
|
||||
const fieldSizeLimit = (limits && typeof limits.fieldSize === 'number'
|
||||
? limits.fieldSize
|
||||
: 1 * 1024 * 1024);
|
||||
const fileSizeLimit = (limits && typeof limits.fileSize === 'number'
|
||||
? limits.fileSize
|
||||
: Infinity);
|
||||
const filesLimit = (limits && typeof limits.files === 'number'
|
||||
? limits.files
|
||||
: Infinity);
|
||||
const fieldsLimit = (limits && typeof limits.fields === 'number'
|
||||
? limits.fields
|
||||
: Infinity);
|
||||
const partsLimit = (limits && typeof limits.parts === 'number'
|
||||
? limits.parts
|
||||
: Infinity);
|
||||
|
||||
let parts = -1; // Account for initial boundary
|
||||
let fields = 0;
|
||||
let files = 0;
|
||||
let skipPart = false;
|
||||
|
||||
this._fileEndsLeft = 0;
|
||||
this._fileStream = undefined;
|
||||
this._complete = false;
|
||||
let fileSize = 0;
|
||||
|
||||
let field;
|
||||
let fieldSize = 0;
|
||||
let partCharset;
|
||||
let partEncoding;
|
||||
let partType;
|
||||
let partName;
|
||||
let partTruncated = false;
|
||||
|
||||
let hitFilesLimit = false;
|
||||
let hitFieldsLimit = false;
|
||||
|
||||
this._hparser = null;
|
||||
const hparser = new HeaderParser((header) => {
|
||||
this._hparser = null;
|
||||
skipPart = false;
|
||||
|
||||
partType = 'text/plain';
|
||||
partCharset = defCharset;
|
||||
partEncoding = '7bit';
|
||||
partName = undefined;
|
||||
partTruncated = false;
|
||||
|
||||
let filename;
|
||||
if (!header['content-disposition']) {
|
||||
skipPart = true;
|
||||
return;
|
||||
}
|
||||
|
||||
const disp = parseDisposition(header['content-disposition'][0],
|
||||
paramDecoder);
|
||||
if (!disp || disp.type !== 'form-data') {
|
||||
skipPart = true;
|
||||
return;
|
||||
}
|
||||
|
||||
if (disp.params) {
|
||||
if (disp.params.name)
|
||||
partName = disp.params.name;
|
||||
|
||||
if (disp.params['filename*'])
|
||||
filename = disp.params['filename*'];
|
||||
else if (disp.params.filename)
|
||||
filename = disp.params.filename;
|
||||
|
||||
if (filename !== undefined && !preservePath)
|
||||
filename = basename(filename);
|
||||
}
|
||||
|
||||
if (header['content-type']) {
|
||||
const conType = parseContentType(header['content-type'][0]);
|
||||
if (conType) {
|
||||
partType = `${conType.type}/${conType.subtype}`;
|
||||
if (conType.params && typeof conType.params.charset === 'string')
|
||||
partCharset = conType.params.charset.toLowerCase();
|
||||
}
|
||||
}
|
||||
|
||||
if (header['content-transfer-encoding'])
|
||||
partEncoding = header['content-transfer-encoding'][0].toLowerCase();
|
||||
|
||||
if (partType === 'application/octet-stream' || filename !== undefined) {
|
||||
// File
|
||||
|
||||
if (files === filesLimit) {
|
||||
if (!hitFilesLimit) {
|
||||
hitFilesLimit = true;
|
||||
this.emit('filesLimit');
|
||||
}
|
||||
skipPart = true;
|
||||
return;
|
||||
}
|
||||
++files;
|
||||
|
||||
if (this.listenerCount('file') === 0) {
|
||||
skipPart = true;
|
||||
return;
|
||||
}
|
||||
|
||||
fileSize = 0;
|
||||
this._fileStream = new FileStream(fileOpts, this);
|
||||
++this._fileEndsLeft;
|
||||
this.emit(
|
||||
'file',
|
||||
partName,
|
||||
this._fileStream,
|
||||
{ filename,
|
||||
encoding: partEncoding,
|
||||
mimeType: partType }
|
||||
);
|
||||
} else {
|
||||
// Non-file
|
||||
|
||||
if (fields === fieldsLimit) {
|
||||
if (!hitFieldsLimit) {
|
||||
hitFieldsLimit = true;
|
||||
this.emit('fieldsLimit');
|
||||
}
|
||||
skipPart = true;
|
||||
return;
|
||||
}
|
||||
++fields;
|
||||
|
||||
if (this.listenerCount('field') === 0) {
|
||||
skipPart = true;
|
||||
return;
|
||||
}
|
||||
|
||||
field = [];
|
||||
fieldSize = 0;
|
||||
}
|
||||
});
|
||||
|
||||
let matchPostBoundary = 0;
|
||||
const ssCb = (isMatch, data, start, end, isDataSafe) => {
|
||||
retrydata:
|
||||
while (data) {
|
||||
if (this._hparser !== null) {
|
||||
const ret = this._hparser.push(data, start, end);
|
||||
if (ret === -1) {
|
||||
this._hparser = null;
|
||||
hparser.reset();
|
||||
this.emit('error', new Error('Malformed part header'));
|
||||
break;
|
||||
}
|
||||
start = ret;
|
||||
}
|
||||
|
||||
if (start === end)
|
||||
break;
|
||||
|
||||
if (matchPostBoundary !== 0) {
|
||||
if (matchPostBoundary === 1) {
|
||||
switch (data[start]) {
|
||||
case 45: // '-'
|
||||
// Try matching '--' after boundary
|
||||
matchPostBoundary = 2;
|
||||
++start;
|
||||
break;
|
||||
case 13: // '\r'
|
||||
// Try matching CR LF before header
|
||||
matchPostBoundary = 3;
|
||||
++start;
|
||||
break;
|
||||
default:
|
||||
matchPostBoundary = 0;
|
||||
}
|
||||
if (start === end)
|
||||
return;
|
||||
}
|
||||
|
||||
if (matchPostBoundary === 2) {
|
||||
matchPostBoundary = 0;
|
||||
if (data[start] === 45/* '-' */) {
|
||||
// End of multipart data
|
||||
this._complete = true;
|
||||
this._bparser = ignoreData;
|
||||
return;
|
||||
}
|
||||
// We saw something other than '-', so put the dash we consumed
|
||||
// "back"
|
||||
const writecb = this._writecb;
|
||||
this._writecb = noop;
|
||||
ssCb(false, BUF_DASH, 0, 1, false);
|
||||
this._writecb = writecb;
|
||||
} else if (matchPostBoundary === 3) {
|
||||
matchPostBoundary = 0;
|
||||
if (data[start] === 10/* '\n' */) {
|
||||
++start;
|
||||
if (parts >= partsLimit)
|
||||
break;
|
||||
// Prepare the header parser
|
||||
this._hparser = hparser;
|
||||
if (start === end)
|
||||
break;
|
||||
// Process the remaining data as a header
|
||||
continue retrydata;
|
||||
} else {
|
||||
// We saw something other than LF, so put the CR we consumed
|
||||
// "back"
|
||||
const writecb = this._writecb;
|
||||
this._writecb = noop;
|
||||
ssCb(false, BUF_CR, 0, 1, false);
|
||||
this._writecb = writecb;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!skipPart) {
|
||||
if (this._fileStream) {
|
||||
let chunk;
|
||||
const actualLen = Math.min(end - start, fileSizeLimit - fileSize);
|
||||
if (!isDataSafe) {
|
||||
chunk = Buffer.allocUnsafe(actualLen);
|
||||
data.copy(chunk, 0, start, start + actualLen);
|
||||
} else {
|
||||
chunk = data.slice(start, start + actualLen);
|
||||
}
|
||||
|
||||
fileSize += chunk.length;
|
||||
if (fileSize === fileSizeLimit) {
|
||||
if (chunk.length > 0)
|
||||
this._fileStream.push(chunk);
|
||||
this._fileStream.emit('limit');
|
||||
this._fileStream.truncated = true;
|
||||
skipPart = true;
|
||||
} else if (!this._fileStream.push(chunk)) {
|
||||
if (this._writecb)
|
||||
this._fileStream._readcb = this._writecb;
|
||||
this._writecb = null;
|
||||
}
|
||||
} else if (field !== undefined) {
|
||||
let chunk;
|
||||
const actualLen = Math.min(
|
||||
end - start,
|
||||
fieldSizeLimit - fieldSize
|
||||
);
|
||||
if (!isDataSafe) {
|
||||
chunk = Buffer.allocUnsafe(actualLen);
|
||||
data.copy(chunk, 0, start, start + actualLen);
|
||||
} else {
|
||||
chunk = data.slice(start, start + actualLen);
|
||||
}
|
||||
|
||||
fieldSize += actualLen;
|
||||
field.push(chunk);
|
||||
if (fieldSize === fieldSizeLimit) {
|
||||
skipPart = true;
|
||||
partTruncated = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
if (isMatch) {
|
||||
matchPostBoundary = 1;
|
||||
|
||||
if (this._fileStream) {
|
||||
// End the active file stream if the previous part was a file
|
||||
this._fileStream.push(null);
|
||||
this._fileStream = null;
|
||||
} else if (field !== undefined) {
|
||||
let data;
|
||||
switch (field.length) {
|
||||
case 0:
|
||||
data = '';
|
||||
break;
|
||||
case 1:
|
||||
data = convertToUTF8(field[0], partCharset, 0);
|
||||
break;
|
||||
default:
|
||||
data = convertToUTF8(
|
||||
Buffer.concat(field, fieldSize),
|
||||
partCharset,
|
||||
0
|
||||
);
|
||||
}
|
||||
field = undefined;
|
||||
fieldSize = 0;
|
||||
this.emit(
|
||||
'field',
|
||||
partName,
|
||||
data,
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: partTruncated,
|
||||
encoding: partEncoding,
|
||||
mimeType: partType }
|
||||
);
|
||||
}
|
||||
|
||||
if (++parts === partsLimit)
|
||||
this.emit('partsLimit');
|
||||
}
|
||||
};
|
||||
this._bparser = new StreamSearch(`\r\n--${boundary}`, ssCb);
|
||||
|
||||
this._writecb = null;
|
||||
this._finalcb = null;
|
||||
|
||||
// Just in case there is no preamble
|
||||
this.write(BUF_CRLF);
|
||||
}
|
||||
|
||||
static detect(conType) {
|
||||
return (conType.type === 'multipart' && conType.subtype === 'form-data');
|
||||
}
|
||||
|
||||
_write(chunk, enc, cb) {
|
||||
this._writecb = cb;
|
||||
this._bparser.push(chunk, 0);
|
||||
if (this._writecb)
|
||||
callAndUnsetCb(this);
|
||||
}
|
||||
|
||||
_destroy(err, cb) {
|
||||
this._hparser = null;
|
||||
this._bparser = ignoreData;
|
||||
if (!err)
|
||||
err = checkEndState(this);
|
||||
const fileStream = this._fileStream;
|
||||
if (fileStream) {
|
||||
this._fileStream = null;
|
||||
fileStream.destroy(err);
|
||||
}
|
||||
cb(err);
|
||||
}
|
||||
|
||||
_final(cb) {
|
||||
this._bparser.destroy();
|
||||
if (!this._complete)
|
||||
return cb(new Error('Unexpected end of form'));
|
||||
if (this._fileEndsLeft)
|
||||
this._finalcb = finalcb.bind(null, this, cb);
|
||||
else
|
||||
finalcb(this, cb);
|
||||
}
|
||||
}
|
||||
|
||||
function finalcb(self, cb, err) {
|
||||
if (err)
|
||||
return cb(err);
|
||||
err = checkEndState(self);
|
||||
cb(err);
|
||||
}
|
||||
|
||||
function checkEndState(self) {
|
||||
if (self._hparser)
|
||||
return new Error('Malformed part header');
|
||||
const fileStream = self._fileStream;
|
||||
if (fileStream) {
|
||||
self._fileStream = null;
|
||||
fileStream.destroy(new Error('Unexpected end of file'));
|
||||
}
|
||||
if (!self._complete)
|
||||
return new Error('Unexpected end of form');
|
||||
}
|
||||
|
||||
const TOKEN = [
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
|
||||
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
];
|
||||
|
||||
const FIELD_VCHAR = [
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
];
|
||||
|
||||
module.exports = Multipart;
|
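In the Multipart parser above, hitting `limits.fileSize` does not abort parsing: the remaining bytes of that part are discarded, the file stream emits `'limit'`, and its `truncated` flag is set. A minimal sketch of that behaviour from the consumer's side; the boundary, field name, and 4-byte limit below are illustrative.

'use strict';

const busboy = require('busboy');

const boundary = 'xyz123'; // illustrative
const body = Buffer.from([
  `--${boundary}`,
  'content-disposition: form-data; name="f"; filename="big.bin"',
  'content-type: application/octet-stream',
  '',
  'ABCDEFGHIJ',              // 10 bytes, but the limit below is 4
  `--${boundary}--`,
  '',
].join('\r\n'));

const bb = busboy({
  headers: { 'content-type': `multipart/form-data; boundary=${boundary}` },
  limits: { fileSize: 4 },
});

bb.on('file', (name, stream, info) => {
  let received = 0;
  stream.on('data', (chunk) => { received += chunk.length; })
        .on('limit', () => console.log('hit fileSize limit'))
        .on('close', () => {
          console.log(`received ${received} bytes, truncated=${stream.truncated}`);
        });
}).on('close', () => console.log('form done'));

bb.end(body);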
@ -0,0 +1,350 @@
|
||||
'use strict';
|
||||
|
||||
const { Writable } = require('stream');
|
||||
|
||||
const { getDecoder } = require('../utils.js');
|
||||
|
||||
class URLEncoded extends Writable {
|
||||
constructor(cfg) {
|
||||
const streamOpts = {
|
||||
autoDestroy: true,
|
||||
emitClose: true,
|
||||
highWaterMark: (typeof cfg.highWaterMark === 'number'
|
||||
? cfg.highWaterMark
|
||||
: undefined),
|
||||
};
|
||||
super(streamOpts);
|
||||
|
||||
let charset = (cfg.defCharset || 'utf8');
|
||||
if (cfg.conType.params && typeof cfg.conType.params.charset === 'string')
|
||||
charset = cfg.conType.params.charset;
|
||||
|
||||
this.charset = charset;
|
||||
|
||||
const limits = cfg.limits;
|
||||
this.fieldSizeLimit = (limits && typeof limits.fieldSize === 'number'
|
||||
? limits.fieldSize
|
||||
: 1 * 1024 * 1024);
|
||||
this.fieldsLimit = (limits && typeof limits.fields === 'number'
|
||||
? limits.fields
|
||||
: Infinity);
|
||||
this.fieldNameSizeLimit = (
|
||||
limits && typeof limits.fieldNameSize === 'number'
|
||||
? limits.fieldNameSize
|
||||
: 100
|
||||
);
|
||||
|
||||
this._inKey = true;
|
||||
this._keyTrunc = false;
|
||||
this._valTrunc = false;
|
||||
this._bytesKey = 0;
|
||||
this._bytesVal = 0;
|
||||
this._fields = 0;
|
||||
this._key = '';
|
||||
this._val = '';
|
||||
this._byte = -2;
|
||||
this._lastPos = 0;
|
||||
this._encode = 0;
|
||||
this._decoder = getDecoder(charset);
|
||||
}
|
||||
|
||||
static detect(conType) {
|
||||
return (conType.type === 'application'
|
||||
&& conType.subtype === 'x-www-form-urlencoded');
|
||||
}
|
||||
|
||||
_write(chunk, enc, cb) {
|
||||
if (this._fields >= this.fieldsLimit)
|
||||
return cb();
|
||||
|
||||
let i = 0;
|
||||
const len = chunk.length;
|
||||
this._lastPos = 0;
|
||||
|
||||
// Check if we last ended mid-percent-encoded byte
|
||||
if (this._byte !== -2) {
|
||||
i = readPctEnc(this, chunk, i, len);
|
||||
if (i === -1)
|
||||
return cb(new Error('Malformed urlencoded form'));
|
||||
if (i >= len)
|
||||
return cb();
|
||||
if (this._inKey)
|
||||
++this._bytesKey;
|
||||
else
|
||||
++this._bytesVal;
|
||||
}
|
||||
|
||||
main:
|
||||
while (i < len) {
|
||||
if (this._inKey) {
|
||||
// Parsing key
|
||||
|
||||
i = skipKeyBytes(this, chunk, i, len);
|
||||
|
||||
while (i < len) {
|
||||
switch (chunk[i]) {
|
||||
case 61: // '='
|
||||
if (this._lastPos < i)
|
||||
this._key += chunk.latin1Slice(this._lastPos, i);
|
||||
this._lastPos = ++i;
|
||||
this._key = this._decoder(this._key, this._encode);
|
||||
this._encode = 0;
|
||||
this._inKey = false;
|
||||
continue main;
|
||||
case 38: // '&'
|
||||
if (this._lastPos < i)
|
||||
this._key += chunk.latin1Slice(this._lastPos, i);
|
||||
this._lastPos = ++i;
|
||||
this._key = this._decoder(this._key, this._encode);
|
||||
this._encode = 0;
|
||||
if (this._bytesKey > 0) {
|
||||
this.emit(
|
||||
'field',
|
||||
this._key,
|
||||
'',
|
||||
{ nameTruncated: this._keyTrunc,
|
||||
valueTruncated: false,
|
||||
encoding: this.charset,
|
||||
mimeType: 'text/plain' }
|
||||
);
|
||||
}
|
||||
this._key = '';
|
||||
this._val = '';
|
||||
this._keyTrunc = false;
|
||||
this._valTrunc = false;
|
||||
this._bytesKey = 0;
|
||||
this._bytesVal = 0;
|
||||
if (++this._fields >= this.fieldsLimit) {
|
||||
this.emit('fieldsLimit');
|
||||
return cb();
|
||||
}
|
||||
continue;
|
||||
case 43: // '+'
|
||||
if (this._lastPos < i)
|
||||
this._key += chunk.latin1Slice(this._lastPos, i);
|
||||
this._key += ' ';
|
||||
this._lastPos = i + 1;
|
||||
break;
|
||||
case 37: // '%'
|
||||
if (this._encode === 0)
|
||||
this._encode = 1;
|
||||
if (this._lastPos < i)
|
||||
this._key += chunk.latin1Slice(this._lastPos, i);
|
||||
this._lastPos = i + 1;
|
||||
this._byte = -1;
|
||||
i = readPctEnc(this, chunk, i + 1, len);
|
||||
if (i === -1)
|
||||
return cb(new Error('Malformed urlencoded form'));
|
||||
if (i >= len)
|
||||
return cb();
|
||||
++this._bytesKey;
|
||||
i = skipKeyBytes(this, chunk, i, len);
|
||||
continue;
|
||||
}
|
||||
++i;
|
||||
++this._bytesKey;
|
||||
i = skipKeyBytes(this, chunk, i, len);
|
||||
}
|
||||
if (this._lastPos < i)
|
||||
this._key += chunk.latin1Slice(this._lastPos, i);
|
||||
} else {
|
||||
// Parsing value
|
||||
|
||||
i = skipValBytes(this, chunk, i, len);
|
||||
|
||||
while (i < len) {
|
||||
switch (chunk[i]) {
|
||||
case 38: // '&'
|
||||
if (this._lastPos < i)
|
||||
this._val += chunk.latin1Slice(this._lastPos, i);
|
||||
this._lastPos = ++i;
|
||||
this._inKey = true;
|
||||
this._val = this._decoder(this._val, this._encode);
|
||||
this._encode = 0;
|
||||
if (this._bytesKey > 0 || this._bytesVal > 0) {
|
||||
this.emit(
|
||||
'field',
|
||||
this._key,
|
||||
this._val,
|
||||
{ nameTruncated: this._keyTrunc,
|
||||
valueTruncated: this._valTrunc,
|
||||
encoding: this.charset,
|
||||
mimeType: 'text/plain' }
|
||||
);
|
||||
}
|
||||
this._key = '';
|
||||
this._val = '';
|
||||
this._keyTrunc = false;
|
||||
this._valTrunc = false;
|
||||
this._bytesKey = 0;
|
||||
this._bytesVal = 0;
|
||||
if (++this._fields >= this.fieldsLimit) {
|
||||
this.emit('fieldsLimit');
|
||||
return cb();
|
||||
}
|
||||
continue main;
|
||||
case 43: // '+'
|
||||
if (this._lastPos < i)
|
||||
this._val += chunk.latin1Slice(this._lastPos, i);
|
||||
this._val += ' ';
|
||||
this._lastPos = i + 1;
|
||||
break;
|
||||
case 37: // '%'
|
||||
if (this._encode === 0)
|
||||
this._encode = 1;
|
||||
if (this._lastPos < i)
|
||||
this._val += chunk.latin1Slice(this._lastPos, i);
|
||||
this._lastPos = i + 1;
|
||||
this._byte = -1;
|
||||
i = readPctEnc(this, chunk, i + 1, len);
|
||||
if (i === -1)
|
||||
return cb(new Error('Malformed urlencoded form'));
|
||||
if (i >= len)
|
||||
return cb();
|
||||
++this._bytesVal;
|
||||
i = skipValBytes(this, chunk, i, len);
|
||||
continue;
|
||||
}
|
||||
++i;
|
||||
++this._bytesVal;
|
||||
i = skipValBytes(this, chunk, i, len);
|
||||
}
|
||||
if (this._lastPos < i)
|
||||
this._val += chunk.latin1Slice(this._lastPos, i);
|
||||
}
|
||||
}
|
||||
|
||||
cb();
|
||||
}
|
||||
|
||||
_final(cb) {
|
||||
if (this._byte !== -2)
|
||||
return cb(new Error('Malformed urlencoded form'));
|
||||
if (!this._inKey || this._bytesKey > 0 || this._bytesVal > 0) {
|
||||
if (this._inKey)
|
||||
this._key = this._decoder(this._key, this._encode);
|
||||
else
|
||||
this._val = this._decoder(this._val, this._encode);
|
||||
this.emit(
|
||||
'field',
|
||||
this._key,
|
||||
this._val,
|
||||
{ nameTruncated: this._keyTrunc,
|
||||
valueTruncated: this._valTrunc,
|
||||
encoding: this.charset,
|
||||
mimeType: 'text/plain' }
|
||||
);
|
||||
}
|
||||
cb();
|
||||
}
|
||||
}
|
||||
|
||||
function readPctEnc(self, chunk, pos, len) {
|
||||
if (pos >= len)
|
||||
return len;
|
||||
|
||||
if (self._byte === -1) {
|
||||
// We saw a '%' but no hex characters yet
|
||||
const hexUpper = HEX_VALUES[chunk[pos++]];
|
||||
if (hexUpper === -1)
|
||||
return -1;
|
||||
|
||||
if (hexUpper >= 8)
|
||||
self._encode = 2; // Indicate high bits detected
|
||||
|
||||
if (pos < len) {
|
||||
// Both hex characters are in this chunk
|
||||
const hexLower = HEX_VALUES[chunk[pos++]];
|
||||
if (hexLower === -1)
|
||||
return -1;
|
||||
|
||||
if (self._inKey)
|
||||
self._key += String.fromCharCode((hexUpper << 4) + hexLower);
|
||||
else
|
||||
self._val += String.fromCharCode((hexUpper << 4) + hexLower);
|
||||
|
||||
self._byte = -2;
|
||||
self._lastPos = pos;
|
||||
} else {
|
||||
// Only one hex character was available in this chunk
|
||||
self._byte = hexUpper;
|
||||
}
|
||||
} else {
|
||||
// We saw only one hex character so far
|
||||
const hexLower = HEX_VALUES[chunk[pos++]];
|
||||
if (hexLower === -1)
|
||||
return -1;
|
||||
|
||||
if (self._inKey)
|
||||
self._key += String.fromCharCode((self._byte << 4) + hexLower);
|
||||
else
|
||||
self._val += String.fromCharCode((self._byte << 4) + hexLower);
|
||||
|
||||
self._byte = -2;
|
||||
self._lastPos = pos;
|
||||
}
|
||||
|
||||
return pos;
|
||||
}
|
||||
|
||||
function skipKeyBytes(self, chunk, pos, len) {
|
||||
// Skip bytes if we've truncated
|
||||
if (self._bytesKey > self.fieldNameSizeLimit) {
|
||||
if (!self._keyTrunc) {
|
||||
if (self._lastPos < pos)
|
||||
self._key += chunk.latin1Slice(self._lastPos, pos - 1);
|
||||
}
|
||||
self._keyTrunc = true;
|
||||
for (; pos < len; ++pos) {
|
||||
const code = chunk[pos];
|
||||
if (code === 61/* '=' */ || code === 38/* '&' */)
|
||||
break;
|
||||
++self._bytesKey;
|
||||
}
|
||||
self._lastPos = pos;
|
||||
}
|
||||
|
||||
return pos;
|
||||
}
|
||||
|
||||
function skipValBytes(self, chunk, pos, len) {
|
||||
// Skip bytes if we've truncated
|
||||
if (self._bytesVal > self.fieldSizeLimit) {
|
||||
if (!self._valTrunc) {
|
||||
if (self._lastPos < pos)
|
||||
self._val += chunk.latin1Slice(self._lastPos, pos - 1);
|
||||
}
|
||||
self._valTrunc = true;
|
||||
for (; pos < len; ++pos) {
|
||||
if (chunk[pos] === 38/* '&' */)
|
||||
break;
|
||||
++self._bytesVal;
|
||||
}
|
||||
self._lastPos = pos;
|
||||
}
|
||||
|
||||
return pos;
|
||||
}
|
||||
|
||||
/* eslint-disable no-multi-spaces */
|
||||
const HEX_VALUES = [
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1,
|
||||
-1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
];
|
||||
/* eslint-enable no-multi-spaces */
|
||||
|
||||
module.exports = URLEncoded;
|
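The URLEncoded parser above likewise never errors on oversized fields: names beyond `limits.fieldNameSize` and values beyond `limits.fieldSize` are cut off and reported through `info.nameTruncated` / `info.valueTruncated`. A small sketch with an artificially low `fieldSize`:

'use strict';

const busboy = require('busboy');

const bb = busboy({
  headers: { 'content-type': 'application/x-www-form-urlencoded' },
  limits: { fieldSize: 5 },
});

bb.on('field', (name, value, info) => {
  console.log(name, JSON.stringify(value), 'valueTruncated =', info.valueTruncated);
}).on('close', () => console.log('done'));

// "short" fits within 5 bytes; the second value does not and gets truncated.
bb.end(Buffer.from('a=short&b=longer-than-five'));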
@ -0,0 +1,596 @@
|
||||
'use strict';
|
||||
|
||||
function parseContentType(str) {
|
||||
if (str.length === 0)
|
||||
return;
|
||||
|
||||
const params = Object.create(null);
|
||||
let i = 0;
|
||||
|
||||
// Parse type
|
||||
for (; i < str.length; ++i) {
|
||||
const code = str.charCodeAt(i);
|
||||
if (TOKEN[code] !== 1) {
|
||||
if (code !== 47/* '/' */ || i === 0)
|
||||
return;
|
||||
break;
|
||||
}
|
||||
}
|
||||
// Check for type without subtype
|
||||
if (i === str.length)
|
||||
return;
|
||||
|
||||
const type = str.slice(0, i).toLowerCase();
|
||||
|
||||
// Parse subtype
|
||||
const subtypeStart = ++i;
|
||||
for (; i < str.length; ++i) {
|
||||
const code = str.charCodeAt(i);
|
||||
if (TOKEN[code] !== 1) {
|
||||
// Make sure we have a subtype
|
||||
if (i === subtypeStart)
|
||||
return;
|
||||
|
||||
if (parseContentTypeParams(str, i, params) === undefined)
|
||||
return;
|
||||
break;
|
||||
}
|
||||
}
|
||||
// Make sure we have a subtype
|
||||
if (i === subtypeStart)
|
||||
return;
|
||||
|
||||
const subtype = str.slice(subtypeStart, i).toLowerCase();
|
||||
|
||||
return { type, subtype, params };
|
||||
}
|
||||
|
||||
function parseContentTypeParams(str, i, params) {
|
||||
while (i < str.length) {
|
||||
// Consume whitespace
|
||||
for (; i < str.length; ++i) {
|
||||
const code = str.charCodeAt(i);
|
||||
if (code !== 32/* ' ' */ && code !== 9/* '\t' */)
|
||||
break;
|
||||
}
|
||||
|
||||
// Ended on whitespace
|
||||
if (i === str.length)
|
||||
break;
|
||||
|
||||
// Check for malformed parameter
|
||||
if (str.charCodeAt(i++) !== 59/* ';' */)
|
||||
return;
|
||||
|
||||
// Consume whitespace
|
||||
for (; i < str.length; ++i) {
|
||||
const code = str.charCodeAt(i);
|
||||
if (code !== 32/* ' ' */ && code !== 9/* '\t' */)
|
||||
break;
|
||||
}
|
||||
|
||||
// Ended on whitespace (malformed)
|
||||
if (i === str.length)
|
||||
return;
|
||||
|
||||
let name;
|
||||
const nameStart = i;
|
||||
// Parse parameter name
|
||||
for (; i < str.length; ++i) {
|
||||
const code = str.charCodeAt(i);
|
||||
if (TOKEN[code] !== 1) {
|
||||
if (code !== 61/* '=' */)
|
||||
return;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// No value (malformed)
|
||||
if (i === str.length)
|
||||
return;
|
||||
|
||||
name = str.slice(nameStart, i);
|
||||
++i; // Skip over '='
|
||||
|
||||
// No value (malformed)
|
||||
if (i === str.length)
|
||||
return;
|
||||
|
||||
let value = '';
|
||||
let valueStart;
|
||||
if (str.charCodeAt(i) === 34/* '"' */) {
|
||||
valueStart = ++i;
|
||||
let escaping = false;
|
||||
// Parse quoted value
|
||||
for (; i < str.length; ++i) {
|
||||
const code = str.charCodeAt(i);
|
||||
if (code === 92/* '\\' */) {
|
||||
if (escaping) {
|
||||
valueStart = i;
|
||||
escaping = false;
|
||||
} else {
|
||||
value += str.slice(valueStart, i);
|
||||
escaping = true;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (code === 34/* '"' */) {
|
||||
if (escaping) {
|
||||
valueStart = i;
|
||||
escaping = false;
|
||||
continue;
|
||||
}
|
||||
value += str.slice(valueStart, i);
|
||||
break;
|
||||
}
|
||||
if (escaping) {
|
||||
valueStart = i - 1;
|
||||
escaping = false;
|
||||
}
|
||||
// Invalid unescaped quoted character (malformed)
|
||||
if (QDTEXT[code] !== 1)
|
||||
return;
|
||||
}
|
||||
|
||||
// No end quote (malformed)
|
||||
if (i === str.length)
|
||||
return;
|
||||
|
||||
++i; // Skip over double quote
|
||||
} else {
|
||||
valueStart = i;
|
||||
// Parse unquoted value
|
||||
for (; i < str.length; ++i) {
|
||||
const code = str.charCodeAt(i);
|
||||
if (TOKEN[code] !== 1) {
|
||||
// No value (malformed)
|
||||
if (i === valueStart)
|
||||
return;
|
||||
break;
|
||||
}
|
||||
}
|
||||
value = str.slice(valueStart, i);
|
||||
}
|
||||
|
||||
name = name.toLowerCase();
|
||||
if (params[name] === undefined)
|
||||
params[name] = value;
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
function parseDisposition(str, defDecoder) {
|
||||
if (str.length === 0)
|
||||
return;
|
||||
|
||||
const params = Object.create(null);
|
||||
let i = 0;
|
||||
|
||||
for (; i < str.length; ++i) {
|
||||
const code = str.charCodeAt(i);
|
||||
if (TOKEN[code] !== 1) {
|
||||
if (parseDispositionParams(str, i, params, defDecoder) === undefined)
|
||||
return;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
const type = str.slice(0, i).toLowerCase();
|
||||
|
||||
return { type, params };
|
||||
}
|
||||
|
||||
function parseDispositionParams(str, i, params, defDecoder) {
|
||||
while (i < str.length) {
|
||||
// Consume whitespace
|
||||
for (; i < str.length; ++i) {
|
||||
const code = str.charCodeAt(i);
|
||||
if (code !== 32/* ' ' */ && code !== 9/* '\t' */)
|
||||
break;
|
||||
}
|
||||
|
||||
// Ended on whitespace
|
||||
if (i === str.length)
|
||||
break;
|
||||
|
||||
// Check for malformed parameter
|
||||
if (str.charCodeAt(i++) !== 59/* ';' */)
|
||||
return;
|
||||
|
||||
// Consume whitespace
|
||||
for (; i < str.length; ++i) {
|
||||
const code = str.charCodeAt(i);
|
||||
if (code !== 32/* ' ' */ && code !== 9/* '\t' */)
|
||||
break;
|
||||
}
|
||||
|
||||
// Ended on whitespace (malformed)
|
||||
if (i === str.length)
|
||||
return;
|
||||
|
||||
let name;
|
||||
const nameStart = i;
|
||||
// Parse parameter name
|
||||
for (; i < str.length; ++i) {
|
||||
const code = str.charCodeAt(i);
|
||||
if (TOKEN[code] !== 1) {
|
||||
if (code === 61/* '=' */)
|
||||
break;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// No value (malformed)
|
||||
if (i === str.length)
|
||||
return;
|
||||
|
||||
let value = '';
|
||||
let valueStart;
|
||||
let charset;
|
||||
//~ let lang;
|
||||
name = str.slice(nameStart, i);
|
||||
if (name.charCodeAt(name.length - 1) === 42/* '*' */) {
|
||||
// Extended value
|
||||
|
||||
const charsetStart = ++i;
|
||||
// Parse charset name
|
||||
for (; i < str.length; ++i) {
|
||||
const code = str.charCodeAt(i);
|
||||
if (CHARSET[code] !== 1) {
|
||||
if (code !== 39/* '\'' */)
|
||||
return;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Incomplete charset (malformed)
|
||||
if (i === str.length)
|
||||
return;
|
||||
|
||||
charset = str.slice(charsetStart, i);
|
||||
++i; // Skip over the '\''
|
||||
|
||||
//~ const langStart = ++i;
|
||||
// Parse language name
|
||||
for (; i < str.length; ++i) {
|
||||
const code = str.charCodeAt(i);
|
||||
if (code === 39/* '\'' */)
|
||||
break;
|
||||
}
|
||||
|
||||
// Incomplete language (malformed)
|
||||
if (i === str.length)
|
||||
return;
|
||||
|
||||
//~ lang = str.slice(langStart, i);
|
||||
++i; // Skip over the '\''
|
||||
|
||||
// No value (malformed)
|
||||
if (i === str.length)
|
||||
return;
|
||||
|
||||
valueStart = i;
|
||||
|
||||
let encode = 0;
|
||||
// Parse value
|
||||
for (; i < str.length; ++i) {
|
||||
const code = str.charCodeAt(i);
|
||||
if (EXTENDED_VALUE[code] !== 1) {
|
||||
if (code === 37/* '%' */) {
|
||||
let hexUpper;
|
||||
let hexLower;
|
||||
if (i + 2 < str.length
|
||||
&& (hexUpper = HEX_VALUES[str.charCodeAt(i + 1)]) !== -1
|
||||
&& (hexLower = HEX_VALUES[str.charCodeAt(i + 2)]) !== -1) {
|
||||
const byteVal = (hexUpper << 4) + hexLower;
|
||||
value += str.slice(valueStart, i);
|
||||
value += String.fromCharCode(byteVal);
|
||||
i += 2;
|
||||
valueStart = i + 1;
|
||||
if (byteVal >= 128)
|
||||
encode = 2;
|
||||
else if (encode === 0)
|
||||
encode = 1;
|
||||
continue;
|
||||
}
|
||||
// '%' disallowed in non-percent encoded contexts (malformed)
|
||||
return;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
value += str.slice(valueStart, i);
|
||||
value = convertToUTF8(value, charset, encode);
|
||||
if (value === undefined)
|
||||
return;
|
||||
} else {
|
||||
// Non-extended value
|
||||
|
||||
++i; // Skip over '='
|
||||
|
||||
// No value (malformed)
|
||||
if (i === str.length)
|
||||
return;
|
||||
|
||||
if (str.charCodeAt(i) === 34/* '"' */) {
|
||||
valueStart = ++i;
|
||||
let escaping = false;
|
||||
// Parse quoted value
|
||||
for (; i < str.length; ++i) {
|
||||
const code = str.charCodeAt(i);
|
||||
if (code === 92/* '\\' */) {
|
||||
if (escaping) {
|
||||
valueStart = i;
|
||||
escaping = false;
|
||||
} else {
|
||||
value += str.slice(valueStart, i);
|
||||
escaping = true;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (code === 34/* '"' */) {
|
||||
if (escaping) {
|
||||
valueStart = i;
|
||||
escaping = false;
|
||||
continue;
|
||||
}
|
||||
value += str.slice(valueStart, i);
|
||||
break;
|
||||
}
|
||||
if (escaping) {
|
||||
valueStart = i - 1;
|
||||
escaping = false;
|
||||
}
|
||||
// Invalid unescaped quoted character (malformed)
|
||||
if (QDTEXT[code] !== 1)
|
||||
return;
|
||||
}
|
||||
|
||||
// No end quote (malformed)
|
||||
if (i === str.length)
|
||||
return;
|
||||
|
||||
++i; // Skip over double quote
|
||||
} else {
|
||||
valueStart = i;
|
||||
// Parse unquoted value
|
||||
for (; i < str.length; ++i) {
|
||||
const code = str.charCodeAt(i);
|
||||
if (TOKEN[code] !== 1) {
|
||||
// No value (malformed)
|
||||
if (i === valueStart)
|
||||
return;
|
||||
break;
|
||||
}
|
||||
}
|
||||
value = str.slice(valueStart, i);
|
||||
}
|
||||
|
||||
value = defDecoder(value, 2);
|
||||
if (value === undefined)
|
||||
return;
|
||||
}
|
||||
|
||||
name = name.toLowerCase();
|
||||
if (params[name] === undefined)
|
||||
params[name] = value;
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
function getDecoder(charset) {
|
||||
let lc;
|
||||
while (true) {
|
||||
switch (charset) {
|
||||
case 'utf-8':
|
||||
case 'utf8':
|
||||
return decoders.utf8;
|
||||
case 'latin1':
|
||||
case 'ascii': // TODO: Make these a separate, strict decoder?
|
||||
case 'us-ascii':
|
||||
case 'iso-8859-1':
|
||||
case 'iso8859-1':
|
||||
case 'iso88591':
|
||||
case 'iso_8859-1':
|
||||
case 'windows-1252':
|
||||
case 'iso_8859-1:1987':
|
||||
case 'cp1252':
|
||||
case 'x-cp1252':
|
||||
return decoders.latin1;
|
||||
case 'utf16le':
|
||||
case 'utf-16le':
|
||||
case 'ucs2':
|
||||
case 'ucs-2':
|
||||
return decoders.utf16le;
|
||||
case 'base64':
|
||||
return decoders.base64;
|
||||
default:
|
||||
if (lc === undefined) {
|
||||
lc = true;
|
||||
charset = charset.toLowerCase();
|
||||
continue;
|
||||
}
|
||||
return decoders.other.bind(charset);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const decoders = {
|
||||
utf8: (data, hint) => {
|
||||
if (data.length === 0)
|
||||
return '';
|
||||
if (typeof data === 'string') {
|
||||
// If `data` never had any percent-encoded bytes or never had any that
|
||||
// were outside of the ASCII range, then we can safely just return the
|
||||
// input since UTF-8 is ASCII compatible
|
||||
if (hint < 2)
|
||||
return data;
|
||||
|
||||
data = Buffer.from(data, 'latin1');
|
||||
}
|
||||
return data.utf8Slice(0, data.length);
|
||||
},
|
||||
|
||||
latin1: (data, hint) => {
|
||||
if (data.length === 0)
|
||||
return '';
|
||||
if (typeof data === 'string')
|
||||
return data;
|
||||
return data.latin1Slice(0, data.length);
|
||||
},
|
||||
|
||||
utf16le: (data, hint) => {
|
||||
if (data.length === 0)
|
||||
return '';
|
||||
if (typeof data === 'string')
|
||||
data = Buffer.from(data, 'latin1');
|
||||
return data.ucs2Slice(0, data.length);
|
||||
},
|
||||
|
||||
base64: (data, hint) => {
|
||||
if (data.length === 0)
|
||||
return '';
|
||||
if (typeof data === 'string')
|
||||
data = Buffer.from(data, 'latin1');
|
||||
return data.base64Slice(0, data.length);
|
||||
},
|
||||
|
||||
other: (data, hint) => {
|
||||
if (data.length === 0)
|
||||
return '';
|
||||
if (typeof data === 'string')
|
||||
data = Buffer.from(data, 'latin1');
|
||||
try {
|
||||
const decoder = new TextDecoder(this);
|
||||
return decoder.decode(data);
|
||||
} catch {}
|
||||
},
|
||||
};
|
||||
|
||||
function convertToUTF8(data, charset, hint) {
|
||||
const decode = getDecoder(charset);
|
||||
if (decode)
|
||||
return decode(data, hint);
|
||||
}
|
||||
|
||||
function basename(path) {
|
||||
if (typeof path !== 'string')
|
||||
return '';
|
||||
for (let i = path.length - 1; i >= 0; --i) {
|
||||
switch (path.charCodeAt(i)) {
|
||||
case 0x2F: // '/'
|
||||
case 0x5C: // '\'
|
||||
path = path.slice(i + 1);
|
||||
return (path === '..' || path === '.' ? '' : path);
|
||||
}
|
||||
}
|
||||
return (path === '..' || path === '.' ? '' : path);
|
||||
}
|
||||
|
||||
const TOKEN = [
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
|
||||
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
];
|
||||
|
||||
const QDTEXT = [
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
];
|
||||
|
||||
const CHARSET = [
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
|
||||
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
];
|
||||
|
||||
const EXTENDED_VALUE = [
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
|
||||
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
];
|
||||
|
||||
/* eslint-disable no-multi-spaces */
|
||||
const HEX_VALUES = [
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1,
|
||||
-1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
];
|
||||
/* eslint-enable no-multi-spaces */
|
||||
|
||||
module.exports = {
|
||||
basename,
|
||||
convertToUTF8,
|
||||
getDecoder,
|
||||
parseContentType,
|
||||
parseDisposition,
|
||||
};
|
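The helpers above live in busboy's internal lib/utils.js rather than its documented public API, so the deep require path in this sketch is an implementation detail that could change; it is shown only to illustrate what `parseContentType`, `parseDisposition`, and `basename` return for typical inputs.

'use strict';

const {
  parseContentType,
  parseDisposition,
  basename,
} = require('busboy/lib/utils.js');

console.log(parseContentType('multipart/form-data; boundary=xyz123'));
// { type: 'multipart', subtype: 'form-data', params: { boundary: 'xyz123' } }

console.log(parseContentType('text/plain; charset="utf-8"'));
// { type: 'text', subtype: 'plain', params: { charset: 'utf-8' } }

// parseDisposition takes a decoder for RFC 2231 "extended" parameter values;
// an identity function mirrors the nullDecoder default used in multipart.js.
const identity = (val) => val;
console.log(parseDisposition('form-data; name="upload"; filename="a.txt"', identity));
// { type: 'form-data', params: { name: 'upload', filename: 'a.txt' } }

// basename() strips directory components written with either separator.
console.log(basename('C:\\tmp\\photo.jpg')); // 'photo.jpg'
console.log(basename('/var/tmp/photo.jpg')); // 'photo.jpg'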
@ -0,0 +1,22 @@
|
||||
{ "name": "busboy",
|
||||
"version": "1.6.0",
|
||||
"author": "Brian White <mscdex@mscdex.net>",
|
||||
"description": "A streaming parser for HTML form data for node.js",
|
||||
"main": "./lib/index.js",
|
||||
"dependencies": {
|
||||
"streamsearch": "^1.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@mscdex/eslint-config": "^1.1.0",
|
||||
"eslint": "^7.32.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "node test/test.js",
|
||||
"lint": "eslint --cache --report-unused-disable-directives --ext=.js .eslintrc.js lib test bench",
|
||||
"lint:fix": "npm run lint -- --fix"
|
||||
},
|
||||
"engines": { "node": ">=10.16.0" },
|
||||
"keywords": [ "uploads", "forms", "multipart", "form-data" ],
|
||||
"licenses": [ { "type": "MIT", "url": "http://github.com/mscdex/busboy/raw/master/LICENSE" } ],
|
||||
"repository": { "type": "git", "url": "http://github.com/mscdex/busboy.git" }
|
||||
}
|
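The package manifest above points `main` at lib/index.js, so `require('busboy')` returns the factory shown earlier. The usual way to drive it is to construct the parser from an incoming request's headers and pipe the request body into it; a minimal sketch (port number and log messages are illustrative):

'use strict';

const http = require('http');
const busboy = require('busboy');

http.createServer((req, res) => {
  if (req.method !== 'POST') {
    res.writeHead(405);
    return res.end();
  }

  const bb = busboy({ headers: req.headers });

  bb.on('file', (name, stream, info) => {
    console.log(`file ${name}: ${info.filename} (${info.mimeType})`);
    stream.resume(); // discard the bytes in this sketch
  });
  bb.on('field', (name, value) => {
    console.log(`field ${name}: ${value}`);
  });
  bb.on('close', () => {
    res.writeHead(200, { Connection: 'close' });
    res.end('upload received');
  });

  req.pipe(bb);
}).listen(3000, () => console.log('listening on :3000'));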
@ -0,0 +1,109 @@
|
||||
'use strict';
|
||||
|
||||
const assert = require('assert');
|
||||
const { inspect } = require('util');
|
||||
|
||||
const mustCallChecks = [];
|
||||
|
||||
function noop() {}
|
||||
|
||||
function runCallChecks(exitCode) {
|
||||
if (exitCode !== 0) return;
|
||||
|
||||
const failed = mustCallChecks.filter((context) => {
|
||||
if ('minimum' in context) {
|
||||
context.messageSegment = `at least ${context.minimum}`;
|
||||
return context.actual < context.minimum;
|
||||
}
|
||||
context.messageSegment = `exactly ${context.exact}`;
|
||||
return context.actual !== context.exact;
|
||||
});
|
||||
|
||||
failed.forEach((context) => {
|
||||
console.error('Mismatched %s function calls. Expected %s, actual %d.',
|
||||
context.name,
|
||||
context.messageSegment,
|
||||
context.actual);
|
||||
console.error(context.stack.split('\n').slice(2).join('\n'));
|
||||
});
|
||||
|
||||
if (failed.length)
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
function mustCall(fn, exact) {
|
||||
return _mustCallInner(fn, exact, 'exact');
|
||||
}
|
||||
|
||||
function mustCallAtLeast(fn, minimum) {
|
||||
return _mustCallInner(fn, minimum, 'minimum');
|
||||
}
|
||||
|
||||
function _mustCallInner(fn, criteria = 1, field) {
|
||||
if (process._exiting)
|
||||
throw new Error('Cannot use common.mustCall*() in process exit handler');
|
||||
|
||||
if (typeof fn === 'number') {
|
||||
criteria = fn;
|
||||
fn = noop;
|
||||
} else if (fn === undefined) {
|
||||
fn = noop;
|
||||
}
|
||||
|
||||
if (typeof criteria !== 'number')
|
||||
throw new TypeError(`Invalid ${field} value: ${criteria}`);
|
||||
|
||||
const context = {
|
||||
[field]: criteria,
|
||||
actual: 0,
|
||||
stack: inspect(new Error()),
|
||||
name: fn.name || '<anonymous>'
|
||||
};
|
||||
|
||||
// Add the exit listener only once to avoid listener leak warnings
|
||||
if (mustCallChecks.length === 0)
|
||||
process.on('exit', runCallChecks);
|
||||
|
||||
mustCallChecks.push(context);
|
||||
|
||||
function wrapped(...args) {
|
||||
++context.actual;
|
||||
return fn.call(this, ...args);
|
||||
}
|
||||
// TODO: remove origFn?
|
||||
wrapped.origFn = fn;
|
||||
|
||||
return wrapped;
|
||||
}
|
||||
|
||||
function getCallSite(top) {
|
||||
const originalStackFormatter = Error.prepareStackTrace;
|
||||
Error.prepareStackTrace = (err, stack) =>
|
||||
`${stack[0].getFileName()}:${stack[0].getLineNumber()}`;
|
||||
const err = new Error();
|
||||
Error.captureStackTrace(err, top);
|
||||
// With the V8 Error API, the stack is not formatted until it is accessed
|
||||
// eslint-disable-next-line no-unused-expressions
|
||||
err.stack;
|
||||
Error.prepareStackTrace = originalStackFormatter;
|
||||
return err.stack;
|
||||
}
|
||||
|
||||
function mustNotCall(msg) {
|
||||
const callSite = getCallSite(mustNotCall);
|
||||
return function mustNotCall(...args) {
|
||||
args = args.map(inspect).join(', ');
|
||||
const argsInfo = (args.length > 0
|
||||
? `\ncalled with arguments: ${args}`
|
||||
: '');
|
||||
assert.fail(
|
||||
`${msg || 'function should not have been called'} at ${callSite}`
|
||||
+ argsInfo);
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
mustCall,
|
||||
mustCallAtLeast,
|
||||
mustNotCall,
|
||||
};
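// A hedged usage sketch, not part of the vendored common.js above: mustCall()
// registers an exit-time check, so a test passes only if every wrapped callback
// ran the expected number of times, while mustNotCall() fails as soon as its
// callback runs at all. The require path is assumed relative to the test dir.
const { mustCall, mustNotCall } = require('./common.js');
const { EventEmitter } = require('events');
const emitter = new EventEmitter();
emitter.on('done', mustCall(() => { /* must fire exactly once */ }));
emitter.on('error', mustNotCall('no error event expected'));
emitter.emit('done');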
|
@ -0,0 +1,94 @@
|
||||
'use strict';
|
||||
|
||||
const assert = require('assert');
|
||||
const { inspect } = require('util');
|
||||
|
||||
const { mustCall } = require(`${__dirname}/common.js`);
|
||||
|
||||
const busboy = require('..');
|
||||
|
||||
const input = Buffer.from([
|
||||
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
|
||||
'Content-Disposition: form-data; '
|
||||
+ 'name="upload_file_0"; filename="テスト.dat"',
|
||||
'Content-Type: application/octet-stream',
|
||||
'',
|
||||
'A'.repeat(1023),
|
||||
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
|
||||
].join('\r\n'));
|
||||
const boundary = '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k';
|
||||
const expected = [
|
||||
{ type: 'file',
|
||||
name: 'upload_file_0',
|
||||
data: Buffer.from('A'.repeat(1023)),
|
||||
info: {
|
||||
filename: 'テスト.dat',
|
||||
encoding: '7bit',
|
||||
mimeType: 'application/octet-stream',
|
||||
},
|
||||
limited: false,
|
||||
},
|
||||
];
|
||||
const bb = busboy({
|
||||
defParamCharset: 'utf8',
|
||||
headers: {
|
||||
'content-type': `multipart/form-data; boundary=${boundary}`,
|
||||
}
|
||||
});
|
||||
const results = [];
|
||||
|
||||
bb.on('field', (name, val, info) => {
|
||||
results.push({ type: 'field', name, val, info });
|
||||
});
|
||||
|
||||
bb.on('file', (name, stream, info) => {
|
||||
const data = [];
|
||||
let nb = 0;
|
||||
const file = {
|
||||
type: 'file',
|
||||
name,
|
||||
data: null,
|
||||
info,
|
||||
limited: false,
|
||||
};
|
||||
results.push(file);
|
||||
stream.on('data', (d) => {
|
||||
data.push(d);
|
||||
nb += d.length;
|
||||
}).on('limit', () => {
|
||||
file.limited = true;
|
||||
}).on('close', () => {
|
||||
file.data = Buffer.concat(data, nb);
|
||||
assert.strictEqual(stream.truncated, file.limited);
|
||||
}).once('error', (err) => {
|
||||
file.err = err.message;
|
||||
});
|
||||
});
|
||||
|
||||
bb.on('error', (err) => {
|
||||
results.push({ error: err.message });
|
||||
});
|
||||
|
||||
bb.on('partsLimit', () => {
|
||||
results.push('partsLimit');
|
||||
});
|
||||
|
||||
bb.on('filesLimit', () => {
|
||||
results.push('filesLimit');
|
||||
});
|
||||
|
||||
bb.on('fieldsLimit', () => {
|
||||
results.push('fieldsLimit');
|
||||
});
|
||||
|
||||
bb.on('close', mustCall(() => {
|
||||
assert.deepStrictEqual(
|
||||
results,
|
||||
expected,
|
||||
'Results mismatch.\n'
|
||||
+ `Parsed: ${inspect(results)}\n`
|
||||
+ `Expected: ${inspect(expected)}`
|
||||
);
|
||||
}));
|
||||
|
||||
bb.end(input);
|
@ -0,0 +1,102 @@
|
||||
'use strict';
|
||||
|
||||
const assert = require('assert');
|
||||
const { randomFillSync } = require('crypto');
|
||||
const { inspect } = require('util');
|
||||
|
||||
const busboy = require('..');
|
||||
|
||||
const { mustCall } = require('./common.js');
|
||||
|
||||
const BOUNDARY = 'u2KxIV5yF1y+xUspOQCCZopaVgeV6Jxihv35XQJmuTx8X3sh';
|
||||
|
||||
function formDataSection(key, value) {
|
||||
return Buffer.from(
|
||||
`\r\n--${BOUNDARY}`
|
||||
+ `\r\nContent-Disposition: form-data; name="${key}"`
|
||||
+ `\r\n\r\n${value}`
|
||||
);
|
||||
}
|
||||
|
||||
function formDataFile(key, filename, contentType) {
|
||||
const buf = Buffer.allocUnsafe(100000);
|
||||
return Buffer.concat([
|
||||
Buffer.from(`\r\n--${BOUNDARY}\r\n`),
|
||||
Buffer.from(`Content-Disposition: form-data; name="${key}"`
|
||||
+ `; filename="${filename}"\r\n`),
|
||||
Buffer.from(`Content-Type: ${contentType}\r\n\r\n`),
|
||||
randomFillSync(buf)
|
||||
]);
|
||||
}
|
||||
|
||||
const reqChunks = [
|
||||
Buffer.concat([
|
||||
formDataFile('file', 'file.bin', 'application/octet-stream'),
|
||||
formDataSection('foo', 'foo value'),
|
||||
]),
|
||||
formDataSection('bar', 'bar value'),
|
||||
Buffer.from(`\r\n--${BOUNDARY}--\r\n`)
|
||||
];
|
||||
const bb = busboy({
|
||||
headers: {
|
||||
'content-type': `multipart/form-data; boundary=${BOUNDARY}`
|
||||
}
|
||||
});
|
||||
const expected = [
|
||||
{ type: 'file',
|
||||
name: 'file',
|
||||
info: {
|
||||
filename: 'file.bin',
|
||||
encoding: '7bit',
|
||||
mimeType: 'application/octet-stream',
|
||||
},
|
||||
},
|
||||
{ type: 'field',
|
||||
name: 'foo',
|
||||
val: 'foo value',
|
||||
info: {
|
||||
nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: '7bit',
|
||||
mimeType: 'text/plain',
|
||||
},
|
||||
},
|
||||
{ type: 'field',
|
||||
name: 'bar',
|
||||
val: 'bar value',
|
||||
info: {
|
||||
nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: '7bit',
|
||||
mimeType: 'text/plain',
|
||||
},
|
||||
},
|
||||
];
|
||||
const results = [];
|
||||
|
||||
bb.on('field', (name, val, info) => {
|
||||
results.push({ type: 'field', name, val, info });
|
||||
});
|
||||
|
||||
bb.on('file', (name, stream, info) => {
|
||||
results.push({ type: 'file', name, info });
|
||||
// Simulate a pipe where the destination is pausing (perhaps due to waiting
|
||||
// for file system write to finish)
|
||||
setTimeout(() => {
|
||||
stream.resume();
|
||||
}, 10);
|
||||
});
|
||||
|
||||
bb.on('close', mustCall(() => {
|
||||
assert.deepStrictEqual(
|
||||
results,
|
||||
expected,
|
||||
'Results mismatch.\n'
|
||||
+ `Parsed: ${inspect(results)}\n`
|
||||
+ `Expected: ${inspect(expected)}`
|
||||
);
|
||||
}));
|
||||
|
||||
for (const chunk of reqChunks)
|
||||
bb.write(chunk);
|
||||
bb.end();
|
File diff suppressed because it is too large
@ -0,0 +1,488 @@
|
||||
'use strict';
|
||||
|
||||
const assert = require('assert');
|
||||
const { transcode } = require('buffer');
|
||||
const { inspect } = require('util');
|
||||
|
||||
const busboy = require('..');
|
||||
|
||||
const active = new Map();
|
||||
|
||||
const tests = [
|
||||
{ source: ['foo'],
|
||||
expected: [
|
||||
['foo',
|
||||
'',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Unassigned value'
|
||||
},
|
||||
{ source: ['foo=bar'],
|
||||
expected: [
|
||||
['foo',
|
||||
'bar',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Assigned value'
|
||||
},
|
||||
{ source: ['foo&bar=baz'],
|
||||
expected: [
|
||||
['foo',
|
||||
'',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
['bar',
|
||||
'baz',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Unassigned and assigned value'
|
||||
},
|
||||
{ source: ['foo=bar&baz'],
|
||||
expected: [
|
||||
['foo',
|
||||
'bar',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
['baz',
|
||||
'',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Assigned and unassigned value'
|
||||
},
|
||||
{ source: ['foo=bar&baz=bla'],
|
||||
expected: [
|
||||
['foo',
|
||||
'bar',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
['baz',
|
||||
'bla',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Two assigned values'
|
||||
},
|
||||
{ source: ['foo&bar'],
|
||||
expected: [
|
||||
['foo',
|
||||
'',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
['bar',
|
||||
'',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Two unassigned values'
|
||||
},
|
||||
{ source: ['foo&bar&'],
|
||||
expected: [
|
||||
['foo',
|
||||
'',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
['bar',
|
||||
'',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Two unassigned values and ampersand'
|
||||
},
|
||||
{ source: ['foo+1=bar+baz%2Bquux'],
|
||||
expected: [
|
||||
['foo 1',
|
||||
'bar baz+quux',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Assigned key and value with (plus) space'
|
||||
},
|
||||
{ source: ['foo=bar%20baz%21'],
|
||||
expected: [
|
||||
['foo',
|
||||
'bar baz!',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Assigned value with encoded bytes'
|
||||
},
|
||||
{ source: ['foo%20bar=baz%20bla%21'],
|
||||
expected: [
|
||||
['foo bar',
|
||||
'baz bla!',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Assigned value with encoded bytes #2'
|
||||
},
|
||||
{ source: ['foo=bar%20baz%21&num=1000'],
|
||||
expected: [
|
||||
['foo',
|
||||
'bar baz!',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
['num',
|
||||
'1000',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Two assigned values, one with encoded bytes'
|
||||
},
|
||||
{ source: [
|
||||
Array.from(transcode(Buffer.from('foo'), 'utf8', 'utf16le')).map(
|
||||
(n) => `%${n.toString(16).padStart(2, '0')}`
|
||||
).join(''),
|
||||
'=',
|
||||
Array.from(transcode(Buffer.from('😀!'), 'utf8', 'utf16le')).map(
|
||||
(n) => `%${n.toString(16).padStart(2, '0')}`
|
||||
).join(''),
|
||||
],
|
||||
expected: [
|
||||
['foo',
|
||||
'😀!',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'UTF-16LE',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
charset: 'UTF-16LE',
|
||||
what: 'Encoded value with multi-byte charset'
|
||||
},
|
||||
{ source: [
|
||||
'foo=<',
|
||||
Array.from(transcode(Buffer.from('©:^þ'), 'utf8', 'latin1')).map(
|
||||
(n) => `%${n.toString(16).padStart(2, '0')}`
|
||||
).join(''),
|
||||
],
|
||||
expected: [
|
||||
['foo',
|
||||
'<©:^þ',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'ISO-8859-1',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
charset: 'ISO-8859-1',
|
||||
what: 'Encoded value with single-byte, ASCII-compatible, non-UTF8 charset'
|
||||
},
|
||||
{ source: ['foo=bar&baz=bla'],
|
||||
expected: [],
|
||||
what: 'Limits: zero fields',
|
||||
limits: { fields: 0 }
|
||||
},
|
||||
{ source: ['foo=bar&baz=bla'],
|
||||
expected: [
|
||||
['foo',
|
||||
'bar',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Limits: one field',
|
||||
limits: { fields: 1 }
|
||||
},
|
||||
{ source: ['foo=bar&baz=bla'],
|
||||
expected: [
|
||||
['foo',
|
||||
'bar',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
['baz',
|
||||
'bla',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Limits: field part lengths match limits',
|
||||
limits: { fieldNameSize: 3, fieldSize: 3 }
|
||||
},
|
||||
{ source: ['foo=bar&baz=bla'],
|
||||
expected: [
|
||||
['fo',
|
||||
'bar',
|
||||
{ nameTruncated: true,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
['ba',
|
||||
'bla',
|
||||
{ nameTruncated: true,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Limits: truncated field name',
|
||||
limits: { fieldNameSize: 2 }
|
||||
},
|
||||
{ source: ['foo=bar&baz=bla'],
|
||||
expected: [
|
||||
['foo',
|
||||
'ba',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: true,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
['baz',
|
||||
'bl',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: true,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Limits: truncated field value',
|
||||
limits: { fieldSize: 2 }
|
||||
},
|
||||
{ source: ['foo=bar&baz=bla'],
|
||||
expected: [
|
||||
['fo',
|
||||
'ba',
|
||||
{ nameTruncated: true,
|
||||
valueTruncated: true,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
['ba',
|
||||
'bl',
|
||||
{ nameTruncated: true,
|
||||
valueTruncated: true,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Limits: truncated field name and value',
|
||||
limits: { fieldNameSize: 2, fieldSize: 2 }
|
||||
},
|
||||
{ source: ['foo=bar&baz=bla'],
|
||||
expected: [
|
||||
['fo',
|
||||
'',
|
||||
{ nameTruncated: true,
|
||||
valueTruncated: true,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
['ba',
|
||||
'',
|
||||
{ nameTruncated: true,
|
||||
valueTruncated: true,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Limits: truncated field name and zero value limit',
|
||||
limits: { fieldNameSize: 2, fieldSize: 0 }
|
||||
},
|
||||
{ source: ['foo=bar&baz=bla'],
|
||||
expected: [
|
||||
['',
|
||||
'',
|
||||
{ nameTruncated: true,
|
||||
valueTruncated: true,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
['',
|
||||
'',
|
||||
{ nameTruncated: true,
|
||||
valueTruncated: true,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Limits: truncated zero field name and zero value limit',
|
||||
limits: { fieldNameSize: 0, fieldSize: 0 }
|
||||
},
|
||||
{ source: ['&'],
|
||||
expected: [],
|
||||
what: 'Ampersand'
|
||||
},
|
||||
{ source: ['&&&&&'],
|
||||
expected: [],
|
||||
what: 'Many ampersands'
|
||||
},
|
||||
{ source: ['='],
|
||||
expected: [
|
||||
['',
|
||||
'',
|
||||
{ nameTruncated: false,
|
||||
valueTruncated: false,
|
||||
encoding: 'utf-8',
|
||||
mimeType: 'text/plain' },
|
||||
],
|
||||
],
|
||||
what: 'Assigned value, empty name and value'
|
||||
},
|
||||
{ source: [''],
|
||||
expected: [],
|
||||
what: 'Nothing'
|
||||
},
|
||||
];
|
||||
|
||||
for (const test of tests) {
|
||||
active.set(test, 1);
|
||||
|
||||
const { what } = test;
|
||||
const charset = test.charset || 'utf-8';
|
||||
const bb = busboy({
|
||||
limits: test.limits,
|
||||
headers: {
|
||||
'content-type': `application/x-www-form-urlencoded; charset=${charset}`,
|
||||
},
|
||||
});
|
||||
const results = [];
|
||||
|
||||
bb.on('field', (key, val, info) => {
|
||||
results.push([key, val, info]);
|
||||
});
|
||||
|
||||
bb.on('file', () => {
|
||||
throw new Error(`[${what}] Unexpected file`);
|
||||
});
|
||||
|
||||
bb.on('close', () => {
|
||||
active.delete(test);
|
||||
|
||||
assert.deepStrictEqual(
|
||||
results,
|
||||
test.expected,
|
||||
`[${what}] Results mismatch.\n`
|
||||
+ `Parsed: ${inspect(results)}\n`
|
||||
+ `Expected: ${inspect(test.expected)}`
|
||||
);
|
||||
});
|
||||
|
||||
for (const src of test.source) {
|
||||
const buf = (typeof src === 'string' ? Buffer.from(src, 'utf8') : src);
|
||||
bb.write(buf);
|
||||
}
|
||||
bb.end();
|
||||
}
|
||||
|
||||
// Byte-by-byte versions
|
||||
for (let test of tests) {
|
||||
test = { ...test };
|
||||
test.what += ' (byte-by-byte)';
|
||||
active.set(test, 1);
|
||||
|
||||
const { what } = test;
|
||||
const charset = test.charset || 'utf-8';
|
||||
const bb = busboy({
|
||||
limits: test.limits,
|
||||
headers: {
|
||||
'content-type': `application/x-www-form-urlencoded; charset="${charset}"`,
|
||||
},
|
||||
});
|
||||
const results = [];
|
||||
|
||||
bb.on('field', (key, val, info) => {
|
||||
results.push([key, val, info]);
|
||||
});
|
||||
|
||||
bb.on('file', () => {
|
||||
throw new Error(`[${what}] Unexpected file`);
|
||||
});
|
||||
|
||||
bb.on('close', () => {
|
||||
active.delete(test);
|
||||
|
||||
assert.deepStrictEqual(
|
||||
results,
|
||||
test.expected,
|
||||
`[${what}] Results mismatch.\n`
|
||||
+ `Parsed: ${inspect(results)}\n`
|
||||
+ `Expected: ${inspect(test.expected)}`
|
||||
);
|
||||
});
|
||||
|
||||
for (const src of test.source) {
|
||||
const buf = (typeof src === 'string' ? Buffer.from(src, 'utf8') : src);
|
||||
for (let i = 0; i < buf.length; ++i)
|
||||
bb.write(buf.slice(i, i + 1));
|
||||
}
|
||||
bb.end();
|
||||
}
|
||||
|
||||
{
|
||||
let exception = false;
|
||||
process.once('uncaughtException', (ex) => {
|
||||
exception = true;
|
||||
throw ex;
|
||||
});
|
||||
process.on('exit', () => {
|
||||
if (exception || active.size === 0)
|
||||
return;
|
||||
process.exitCode = 1;
|
||||
console.error('==========================');
|
||||
console.error(`${active.size} test(s) did not finish:`);
|
||||
console.error('==========================');
|
||||
console.error(Array.from(active.keys()).map((v) => v.what).join('\n'));
|
||||
});
|
||||
}
|
@ -0,0 +1,20 @@
|
||||
'use strict';
|
||||
|
||||
const { spawnSync } = require('child_process');
|
||||
const { readdirSync } = require('fs');
|
||||
const { join } = require('path');
|
||||
|
||||
const files = readdirSync(__dirname).sort();
|
||||
for (const filename of files) {
|
||||
if (filename.startsWith('test-')) {
|
||||
const path = join(__dirname, filename);
|
||||
console.log(`> Running ${filename} ...`);
|
||||
const result = spawnSync(`${process.argv0} ${path}`, {
|
||||
shell: true,
|
||||
stdio: 'inherit',
|
||||
windowsHide: true
|
||||
});
|
||||
if (result.status !== 0)
|
||||
process.exitCode = 1;
|
||||
}
|
||||
}
|
@ -0,0 +1,97 @@
|
||||
3.1.2 / 2022-01-27
|
||||
==================
|
||||
|
||||
* Fix return value for un-parsable strings
|
||||
|
||||
3.1.1 / 2021-11-15
|
||||
==================
|
||||
|
||||
* Fix "thousandsSeparator" incorrectly formatting fractional part
|
||||
|
||||
3.1.0 / 2019-01-22
|
||||
==================
|
||||
|
||||
* Add petabyte (`pb`) support
|
||||
|
||||
3.0.0 / 2017-08-31
|
||||
==================
|
||||
|
||||
* Change "kB" to "KB" in format output
|
||||
* Remove support for Node.js 0.6
|
||||
* Remove support for ComponentJS
|
||||
|
||||
2.5.0 / 2017-03-24
|
||||
==================
|
||||
|
||||
* Add option "unit"
|
||||
|
||||
2.4.0 / 2016-06-01
|
||||
==================
|
||||
|
||||
* Add option "unitSeparator"
|
||||
|
||||
2.3.0 / 2016-02-15
|
||||
==================
|
||||
|
||||
* Drop partial bytes on all parsed units
|
||||
* Fix non-finite numbers to `.format` to return `null`
|
||||
* Fix parsing byte string that looks like hex
|
||||
* perf: hoist regular expressions
|
||||
|
||||
2.2.0 / 2015-11-13
|
||||
==================
|
||||
|
||||
* add option "decimalPlaces"
|
||||
* add option "fixedDecimals"
|
||||
|
||||
2.1.0 / 2015-05-21
|
||||
==================
|
||||
|
||||
* add `.format` export
|
||||
* add `.parse` export
|
||||
|
||||
2.0.2 / 2015-05-20
|
||||
==================
|
||||
|
||||
* remove map recreation
|
||||
* remove unnecessary object construction
|
||||
|
||||
2.0.1 / 2015-05-07
|
||||
==================
|
||||
|
||||
* fix browserify require
|
||||
* remove node.extend dependency
|
||||
|
||||
2.0.0 / 2015-04-12
|
||||
==================
|
||||
|
||||
* add option "case"
|
||||
* add option "thousandsSeparator"
|
||||
* return "null" on invalid parse input
|
||||
* support proper round-trip: bytes(bytes(num)) === num
|
||||
* units no longer case sensitive when parsing
|
||||
|
||||
1.0.0 / 2014-05-05
|
||||
==================
|
||||
|
||||
* add negative support. fixes #6
|
||||
|
||||
0.3.0 / 2014-03-19
|
||||
==================
|
||||
|
||||
* added terabyte support
|
||||
|
||||
0.2.1 / 2013-04-01
|
||||
==================
|
||||
|
||||
* add .component
|
||||
|
||||
0.2.0 / 2012-10-28
|
||||
==================
|
||||
|
||||
* bytes(200).should.eql('200b')
|
||||
|
||||
0.1.0 / 2012-07-04
|
||||
==================
|
||||
|
||||
* add bytes to string conversion [yields]
|
@ -0,0 +1,23 @@
|
||||
(The MIT License)
|
||||
|
||||
Copyright (c) 2012-2014 TJ Holowaychuk <tj@vision-media.ca>
|
||||
Copyright (c) 2015 Jed Watson <jed.watson@me.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
'Software'), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
@ -0,0 +1,170 @@
|
||||
/*!
|
||||
* bytes
|
||||
* Copyright(c) 2012-2014 TJ Holowaychuk
|
||||
* Copyright(c) 2015 Jed Watson
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Module exports.
|
||||
* @public
|
||||
*/
|
||||
|
||||
module.exports = bytes;
|
||||
module.exports.format = format;
|
||||
module.exports.parse = parse;
|
||||
|
||||
/**
|
||||
* Module variables.
|
||||
* @private
|
||||
*/
|
||||
|
||||
var formatThousandsRegExp = /\B(?=(\d{3})+(?!\d))/g;
|
||||
|
||||
var formatDecimalsRegExp = /(?:\.0*|(\.[^0]+)0+)$/;
|
||||
|
||||
var map = {
|
||||
b: 1,
|
||||
kb: 1 << 10,
|
||||
mb: 1 << 20,
|
||||
gb: 1 << 30,
|
||||
tb: Math.pow(1024, 4),
|
||||
pb: Math.pow(1024, 5),
|
||||
};
|
||||
|
||||
var parseRegExp = /^((-|\+)?(\d+(?:\.\d+)?)) *(kb|mb|gb|tb|pb)$/i;
|
||||
|
||||
/**
|
||||
* Convert the given value in bytes into a string, or parse a string into an integer in bytes.
|
||||
*
|
||||
* @param {string|number} value
|
||||
* @param {{
|
||||
* case: [string],
|
||||
* decimalPlaces: [number]
|
||||
* fixedDecimals: [boolean]
|
||||
* thousandsSeparator: [string]
|
||||
* unitSeparator: [string]
|
||||
* }} [options] bytes options.
|
||||
*
|
||||
* @returns {string|number|null}
|
||||
*/
|
||||
|
||||
function bytes(value, options) {
|
||||
if (typeof value === 'string') {
|
||||
return parse(value);
|
||||
}
|
||||
|
||||
if (typeof value === 'number') {
|
||||
return format(value, options);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format the given value in bytes into a string.
|
||||
*
|
||||
* If the value is negative, it is kept as such. If it is a float,
|
||||
* it is rounded.
|
||||
*
|
||||
* @param {number} value
|
||||
* @param {object} [options]
|
||||
* @param {number} [options.decimalPlaces=2]
|
||||
* @param {number} [options.fixedDecimals=false]
|
||||
* @param {string} [options.thousandsSeparator=]
|
||||
* @param {string} [options.unit=]
|
||||
* @param {string} [options.unitSeparator=]
|
||||
*
|
||||
* @returns {string|null}
|
||||
* @public
|
||||
*/
|
||||
|
||||
function format(value, options) {
|
||||
if (!Number.isFinite(value)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
var mag = Math.abs(value);
|
||||
var thousandsSeparator = (options && options.thousandsSeparator) || '';
|
||||
var unitSeparator = (options && options.unitSeparator) || '';
|
||||
var decimalPlaces = (options && options.decimalPlaces !== undefined) ? options.decimalPlaces : 2;
|
||||
var fixedDecimals = Boolean(options && options.fixedDecimals);
|
||||
var unit = (options && options.unit) || '';
|
||||
|
||||
if (!unit || !map[unit.toLowerCase()]) {
|
||||
if (mag >= map.pb) {
|
||||
unit = 'PB';
|
||||
} else if (mag >= map.tb) {
|
||||
unit = 'TB';
|
||||
} else if (mag >= map.gb) {
|
||||
unit = 'GB';
|
||||
} else if (mag >= map.mb) {
|
||||
unit = 'MB';
|
||||
} else if (mag >= map.kb) {
|
||||
unit = 'KB';
|
||||
} else {
|
||||
unit = 'B';
|
||||
}
|
||||
}
|
||||
|
||||
var val = value / map[unit.toLowerCase()];
|
||||
var str = val.toFixed(decimalPlaces);
|
||||
|
||||
if (!fixedDecimals) {
|
||||
str = str.replace(formatDecimalsRegExp, '$1');
|
||||
}
|
||||
|
||||
if (thousandsSeparator) {
|
||||
str = str.split('.').map(function (s, i) {
|
||||
return i === 0
|
||||
? s.replace(formatThousandsRegExp, thousandsSeparator)
|
||||
: s
|
||||
}).join('.');
|
||||
}
|
||||
|
||||
return str + unitSeparator + unit;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the string value into an integer in bytes.
|
||||
*
|
||||
* If no unit is given, it is assumed the value is in bytes.
|
||||
*
|
||||
* @param {number|string} val
|
||||
*
|
||||
* @returns {number|null}
|
||||
* @public
|
||||
*/
|
||||
|
||||
function parse(val) {
|
||||
if (typeof val === 'number' && !isNaN(val)) {
|
||||
return val;
|
||||
}
|
||||
|
||||
if (typeof val !== 'string') {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Test if the string passed is valid
|
||||
var results = parseRegExp.exec(val);
|
||||
var floatValue;
|
||||
var unit = 'b';
|
||||
|
||||
if (!results) {
|
||||
// Nothing could be extracted from the given string
|
||||
floatValue = parseInt(val, 10);
|
||||
unit = 'b'
|
||||
} else {
|
||||
// Retrieve the value and the unit
|
||||
floatValue = parseFloat(results[1]);
|
||||
unit = results[4].toLowerCase();
|
||||
}
|
||||
|
||||
if (isNaN(floatValue)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return Math.floor(map[unit] * floatValue);
|
||||
}
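// A minimal usage sketch of the API defined above (not part of the vendored file);
// the expected outputs follow from the format()/parse() implementations in index.js.
var bytes = require('bytes');
console.log(bytes(1024));                                     // '1KB'
console.log(bytes.format(1000, { thousandsSeparator: ',' })); // '1,000B'
console.log(bytes.parse('1KB'));                              // 1024
console.log(bytes.parse('not a size'));                       // null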
|
@ -0,0 +1,42 @@
|
||||
{
|
||||
"name": "bytes",
|
||||
"description": "Utility to parse a string bytes to bytes and vice-versa",
|
||||
"version": "3.1.2",
|
||||
"author": "TJ Holowaychuk <tj@vision-media.ca> (http://tjholowaychuk.com)",
|
||||
"contributors": [
|
||||
"Jed Watson <jed.watson@me.com>",
|
||||
"Théo FIDRY <theo.fidry@gmail.com>"
|
||||
],
|
||||
"license": "MIT",
|
||||
"keywords": [
|
||||
"byte",
|
||||
"bytes",
|
||||
"utility",
|
||||
"parse",
|
||||
"parser",
|
||||
"convert",
|
||||
"converter"
|
||||
],
|
||||
"repository": "visionmedia/bytes.js",
|
||||
"devDependencies": {
|
||||
"eslint": "7.32.0",
|
||||
"eslint-plugin-markdown": "2.2.1",
|
||||
"mocha": "9.2.0",
|
||||
"nyc": "15.1.0"
|
||||
},
|
||||
"files": [
|
||||
"History.md",
|
||||
"LICENSE",
|
||||
"Readme.md",
|
||||
"index.js"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
},
|
||||
"scripts": {
|
||||
"lint": "eslint .",
|
||||
"test": "mocha --check-leaks --reporter spec",
|
||||
"test-ci": "nyc --reporter=lcov --reporter=text npm test",
|
||||
"test-cov": "nyc --reporter=html --reporter=text npm test"
|
||||
}
|
||||
}
|
@ -0,0 +1 @@
|
||||
coverage/
|
@ -0,0 +1,16 @@
|
||||
{
|
||||
"root": true,
|
||||
|
||||
"extends": "@ljharb",
|
||||
|
||||
"rules": {
|
||||
"func-name-matching": 0,
|
||||
"id-length": 0,
|
||||
"new-cap": [2, {
|
||||
"capIsNewExceptions": [
|
||||
"GetIntrinsic",
|
||||
],
|
||||
}],
|
||||
"no-magic-numbers": 0,
|
||||
},
|
||||
}
|
@ -0,0 +1,12 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
github: [ljharb]
|
||||
patreon: # Replace with a single Patreon username
|
||||
open_collective: # Replace with a single Open Collective username
|
||||
ko_fi: # Replace with a single Ko-fi username
|
||||
tidelift: npm/call-bind
|
||||
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
|
||||
liberapay: # Replace with a single Liberapay username
|
||||
issuehunt: # Replace with a single IssueHunt username
|
||||
otechie: # Replace with a single Otechie username
|
||||
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
|
@ -0,0 +1,9 @@
|
||||
{
|
||||
"all": true,
|
||||
"check-coverage": false,
|
||||
"reporter": ["text-summary", "text", "html", "json"],
|
||||
"exclude": [
|
||||
"coverage",
|
||||
"test"
|
||||
]
|
||||
}
|
@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2020 Jordan Harband
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
@ -0,0 +1,64 @@
|
||||
# call-bind <sup>[![Version Badge][npm-version-svg]][package-url]</sup>
|
||||
|
||||
[![github actions][actions-image]][actions-url]
|
||||
[![coverage][codecov-image]][codecov-url]
|
||||
[![dependency status][deps-svg]][deps-url]
|
||||
[![dev dependency status][dev-deps-svg]][dev-deps-url]
|
||||
[![License][license-image]][license-url]
|
||||
[![Downloads][downloads-image]][downloads-url]
|
||||
|
||||
[![npm badge][npm-badge-png]][package-url]
|
||||
|
||||
Robustly `.call.bind()` a function.
|
||||
|
||||
## Getting started
|
||||
|
||||
```sh
|
||||
npm install --save call-bind
|
||||
```
|
||||
|
||||
## Usage/Examples
|
||||
|
||||
```js
|
||||
const assert = require('assert');
|
||||
const callBind = require('call-bind');
|
||||
const callBound = require('call-bind/callBound');
|
||||
|
||||
function f(a, b) {
|
||||
assert.equal(this, 1);
|
||||
assert.equal(a, 2);
|
||||
assert.equal(b, 3);
|
||||
assert.equal(arguments.length, 2);
|
||||
}
|
||||
|
||||
const fBound = callBind(f);
|
||||
|
||||
const slice = callBound('Array.prototype.slice');
|
||||
|
||||
delete Function.prototype.call;
|
||||
delete Function.prototype.bind;
|
||||
|
||||
fBound(1, 2, 3);
|
||||
|
||||
assert.deepEqual(slice([1, 2, 3, 4], 1, -1), [2, 3]);
|
||||
```
|
||||
|
||||
## Tests
|
||||
|
||||
Clone the repo, `npm install`, and run `npm test`
|
||||
|
||||
[package-url]: https://npmjs.org/package/call-bind
|
||||
[npm-version-svg]: https://versionbadg.es/ljharb/call-bind.svg
|
||||
[deps-svg]: https://david-dm.org/ljharb/call-bind.svg
|
||||
[deps-url]: https://david-dm.org/ljharb/call-bind
|
||||
[dev-deps-svg]: https://david-dm.org/ljharb/call-bind/dev-status.svg
|
||||
[dev-deps-url]: https://david-dm.org/ljharb/call-bind#info=devDependencies
|
||||
[npm-badge-png]: https://nodei.co/npm/call-bind.png?downloads=true&stars=true
|
||||
[license-image]: https://img.shields.io/npm/l/call-bind.svg
|
||||
[license-url]: LICENSE
|
||||
[downloads-image]: https://img.shields.io/npm/dm/call-bind.svg
|
||||
[downloads-url]: https://npm-stat.com/charts.html?package=call-bind
|
||||
[codecov-image]: https://codecov.io/gh/ljharb/call-bind/branch/main/graphs/badge.svg
|
||||
[codecov-url]: https://app.codecov.io/gh/ljharb/call-bind/
|
||||
[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/ljharb/call-bind
|
||||
[actions-url]: https://github.com/ljharb/call-bind/actions
|
@ -0,0 +1,15 @@
|
||||
'use strict';
|
||||
|
||||
var GetIntrinsic = require('get-intrinsic');
|
||||
|
||||
var callBind = require('./');
|
||||
|
||||
var $indexOf = callBind(GetIntrinsic('String.prototype.indexOf'));
|
||||
|
||||
module.exports = function callBoundIntrinsic(name, allowMissing) {
|
||||
var intrinsic = GetIntrinsic(name, !!allowMissing);
|
||||
if (typeof intrinsic === 'function' && $indexOf(name, '.prototype.') > -1) {
|
||||
return callBind(intrinsic);
|
||||
}
|
||||
return intrinsic;
|
||||
};
|
@ -0,0 +1,35 @@
|
||||
'use strict';
|
||||
|
||||
var bind = require('function-bind');
|
||||
var GetIntrinsic = require('get-intrinsic');
|
||||
var setFunctionLength = require('set-function-length');
|
||||
|
||||
var $TypeError = require('es-errors/type');
|
||||
var $apply = GetIntrinsic('%Function.prototype.apply%');
|
||||
var $call = GetIntrinsic('%Function.prototype.call%');
|
||||
var $reflectApply = GetIntrinsic('%Reflect.apply%', true) || bind.call($call, $apply);
|
||||
|
||||
var $defineProperty = require('es-define-property');
|
||||
var $max = GetIntrinsic('%Math.max%');
|
||||
|
||||
module.exports = function callBind(originalFunction) {
|
||||
if (typeof originalFunction !== 'function') {
|
||||
throw new $TypeError('a function is required');
|
||||
}
|
||||
var func = $reflectApply(bind, $call, arguments);
|
||||
return setFunctionLength(
|
||||
func,
|
||||
1 + $max(0, originalFunction.length - (arguments.length - 1)),
|
||||
true
|
||||
);
|
||||
};
|
||||
|
||||
var applyBind = function applyBind() {
|
||||
return $reflectApply(bind, $apply, arguments);
|
||||
};
|
||||
|
||||
if ($defineProperty) {
|
||||
$defineProperty(module.exports, 'apply', { value: applyBind });
|
||||
} else {
|
||||
module.exports.apply = applyBind;
|
||||
}
|
@ -0,0 +1,95 @@
|
||||
{
|
||||
"name": "call-bind",
|
||||
"version": "1.0.7",
|
||||
"description": "Robustly `.call.bind()` a function",
|
||||
"main": "index.js",
|
||||
"exports": {
|
||||
".": "./index.js",
|
||||
"./callBound": "./callBound.js",
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"scripts": {
|
||||
"prepack": "npmignore --auto --commentLines=auto",
|
||||
"prepublish": "not-in-publish || npm run prepublishOnly",
|
||||
"prepublishOnly": "safe-publish-latest",
|
||||
"lint": "eslint --ext=.js,.mjs .",
|
||||
"postlint": "evalmd README.md",
|
||||
"pretest": "npm run lint",
|
||||
"tests-only": "nyc tape 'test/**/*.js'",
|
||||
"test": "npm run tests-only",
|
||||
"posttest": "aud --production",
|
||||
"version": "auto-changelog && git add CHANGELOG.md",
|
||||
"postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\""
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/ljharb/call-bind.git"
|
||||
},
|
||||
"keywords": [
|
||||
"javascript",
|
||||
"ecmascript",
|
||||
"es",
|
||||
"js",
|
||||
"callbind",
|
||||
"callbound",
|
||||
"call",
|
||||
"bind",
|
||||
"bound",
|
||||
"call-bind",
|
||||
"call-bound",
|
||||
"function",
|
||||
"es-abstract"
|
||||
],
|
||||
"author": "Jordan Harband <ljharb@gmail.com>",
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
},
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/ljharb/call-bind/issues"
|
||||
},
|
||||
"homepage": "https://github.com/ljharb/call-bind#readme",
|
||||
"devDependencies": {
|
||||
"@ljharb/eslint-config": "^21.1.0",
|
||||
"aud": "^2.0.4",
|
||||
"auto-changelog": "^2.4.0",
|
||||
"es-value-fixtures": "^1.4.2",
|
||||
"eslint": "=8.8.0",
|
||||
"evalmd": "^0.0.19",
|
||||
"for-each": "^0.3.3",
|
||||
"gopd": "^1.0.1",
|
||||
"has-strict-mode": "^1.0.1",
|
||||
"in-publish": "^2.0.1",
|
||||
"npmignore": "^0.3.1",
|
||||
"nyc": "^10.3.2",
|
||||
"object-inspect": "^1.13.1",
|
||||
"safe-publish-latest": "^2.0.0",
|
||||
"tape": "^5.7.4"
|
||||
},
|
||||
"dependencies": {
|
||||
"es-define-property": "^1.0.0",
|
||||
"es-errors": "^1.3.0",
|
||||
"function-bind": "^1.1.2",
|
||||
"get-intrinsic": "^1.2.4",
|
||||
"set-function-length": "^1.2.1"
|
||||
},
|
||||
"testling": {
|
||||
"files": "test/index.js"
|
||||
},
|
||||
"auto-changelog": {
|
||||
"output": "CHANGELOG.md",
|
||||
"template": "keepachangelog",
|
||||
"unreleased": false,
|
||||
"commitLimit": false,
|
||||
"backfillLimit": false,
|
||||
"hideCredit": true
|
||||
},
|
||||
"publishConfig": {
|
||||
"ignore": [
|
||||
".github/workflows"
|
||||
]
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
}
|
@ -0,0 +1,54 @@
|
||||
'use strict';
|
||||
|
||||
var test = require('tape');
|
||||
|
||||
var callBound = require('../callBound');
|
||||
|
||||
test('callBound', function (t) {
|
||||
// static primitive
|
||||
t.equal(callBound('Array.length'), Array.length, 'Array.length yields itself');
|
||||
t.equal(callBound('%Array.length%'), Array.length, '%Array.length% yields itself');
|
||||
|
||||
// static non-function object
|
||||
t.equal(callBound('Array.prototype'), Array.prototype, 'Array.prototype yields itself');
|
||||
t.equal(callBound('%Array.prototype%'), Array.prototype, '%Array.prototype% yields itself');
|
||||
t.equal(callBound('Array.constructor'), Array.constructor, 'Array.constructor yields itself');
|
||||
t.equal(callBound('%Array.constructor%'), Array.constructor, '%Array.constructor% yields itself');
|
||||
|
||||
// static function
|
||||
t.equal(callBound('Date.parse'), Date.parse, 'Date.parse yields itself');
|
||||
t.equal(callBound('%Date.parse%'), Date.parse, '%Date.parse% yields itself');
|
||||
|
||||
// prototype primitive
|
||||
t.equal(callBound('Error.prototype.message'), Error.prototype.message, 'Error.prototype.message yields itself');
|
||||
t.equal(callBound('%Error.prototype.message%'), Error.prototype.message, '%Error.prototype.message% yields itself');
|
||||
|
||||
// prototype function
|
||||
t.notEqual(callBound('Object.prototype.toString'), Object.prototype.toString, 'Object.prototype.toString does not yield itself');
|
||||
t.notEqual(callBound('%Object.prototype.toString%'), Object.prototype.toString, '%Object.prototype.toString% does not yield itself');
|
||||
t.equal(callBound('Object.prototype.toString')(true), Object.prototype.toString.call(true), 'call-bound Object.prototype.toString calls into the original');
|
||||
t.equal(callBound('%Object.prototype.toString%')(true), Object.prototype.toString.call(true), 'call-bound %Object.prototype.toString% calls into the original');
|
||||
|
||||
t['throws'](
|
||||
function () { callBound('does not exist'); },
|
||||
SyntaxError,
|
||||
'nonexistent intrinsic throws'
|
||||
);
|
||||
t['throws'](
|
||||
function () { callBound('does not exist', true); },
|
||||
SyntaxError,
|
||||
'allowMissing arg still throws for unknown intrinsic'
|
||||
);
|
||||
|
||||
t.test('real but absent intrinsic', { skip: typeof WeakRef !== 'undefined' }, function (st) {
|
||||
st['throws'](
|
||||
function () { callBound('WeakRef'); },
|
||||
TypeError,
|
||||
'real but absent intrinsic throws'
|
||||
);
|
||||
st.equal(callBound('WeakRef', true), undefined, 'allowMissing arg avoids exception');
|
||||
st.end();
|
||||
});
|
||||
|
||||
t.end();
|
||||
});
|
@ -0,0 +1,80 @@
|
||||
'use strict';
|
||||
|
||||
var callBind = require('../');
|
||||
var bind = require('function-bind');
|
||||
var gOPD = require('gopd');
|
||||
var hasStrictMode = require('has-strict-mode')();
|
||||
var forEach = require('for-each');
|
||||
var inspect = require('object-inspect');
|
||||
var v = require('es-value-fixtures');
|
||||
|
||||
var test = require('tape');
|
||||
|
||||
/*
|
||||
* older engines have length nonconfigurable
|
||||
* in io.js v3, it is configurable except on bound functions, hence the .bind()
|
||||
*/
|
||||
var functionsHaveConfigurableLengths = !!(
|
||||
gOPD
|
||||
&& Object.getOwnPropertyDescriptor
|
||||
&& Object.getOwnPropertyDescriptor(bind.call(function () {}), 'length').configurable
|
||||
);
|
||||
|
||||
test('callBind', function (t) {
|
||||
forEach(v.nonFunctions, function (nonFunction) {
|
||||
t['throws'](
|
||||
function () { callBind(nonFunction); },
|
||||
TypeError,
|
||||
inspect(nonFunction) + ' is not a function'
|
||||
);
|
||||
});
|
||||
|
||||
var sentinel = { sentinel: true };
|
||||
var func = function (a, b) {
|
||||
// eslint-disable-next-line no-invalid-this
|
||||
return [!hasStrictMode && this === global ? undefined : this, a, b];
|
||||
};
|
||||
t.equal(func.length, 2, 'original function length is 2');
|
||||
t.deepEqual(func(), [undefined, undefined, undefined], 'unbound func with too few args');
|
||||
t.deepEqual(func(1, 2), [undefined, 1, 2], 'unbound func with right args');
|
||||
t.deepEqual(func(1, 2, 3), [undefined, 1, 2], 'unbound func with too many args');
|
||||
|
||||
var bound = callBind(func);
|
||||
t.equal(bound.length, func.length + 1, 'function length is preserved', { skip: !functionsHaveConfigurableLengths });
|
||||
t.deepEqual(bound(), [undefined, undefined, undefined], 'bound func with too few args');
|
||||
t.deepEqual(bound(1, 2), [hasStrictMode ? 1 : Object(1), 2, undefined], 'bound func with right args');
|
||||
t.deepEqual(bound(1, 2, 3), [hasStrictMode ? 1 : Object(1), 2, 3], 'bound func with too many args');
|
||||
|
||||
var boundR = callBind(func, sentinel);
|
||||
t.equal(boundR.length, func.length, 'function length is preserved', { skip: !functionsHaveConfigurableLengths });
|
||||
t.deepEqual(boundR(), [sentinel, undefined, undefined], 'bound func with receiver, with too few args');
|
||||
t.deepEqual(boundR(1, 2), [sentinel, 1, 2], 'bound func with receiver, with right args');
|
||||
t.deepEqual(boundR(1, 2, 3), [sentinel, 1, 2], 'bound func with receiver, with too many args');
|
||||
|
||||
var boundArg = callBind(func, sentinel, 1);
|
||||
t.equal(boundArg.length, func.length - 1, 'function length is preserved', { skip: !functionsHaveConfigurableLengths });
|
||||
t.deepEqual(boundArg(), [sentinel, 1, undefined], 'bound func with receiver and arg, with too few args');
|
||||
t.deepEqual(boundArg(2), [sentinel, 1, 2], 'bound func with receiver and arg, with right arg');
|
||||
t.deepEqual(boundArg(2, 3), [sentinel, 1, 2], 'bound func with receiver and arg, with too many args');
|
||||
|
||||
t.test('callBind.apply', function (st) {
|
||||
var aBound = callBind.apply(func);
|
||||
st.deepEqual(aBound(sentinel), [sentinel, undefined, undefined], 'apply-bound func with no args');
|
||||
st.deepEqual(aBound(sentinel, [1], 4), [sentinel, 1, undefined], 'apply-bound func with too few args');
|
||||
st.deepEqual(aBound(sentinel, [1, 2], 4), [sentinel, 1, 2], 'apply-bound func with right args');
|
||||
|
||||
var aBoundArg = callBind.apply(func);
|
||||
st.deepEqual(aBoundArg(sentinel, [1, 2, 3], 4), [sentinel, 1, 2], 'apply-bound func with too many args');
|
||||
st.deepEqual(aBoundArg(sentinel, [1, 2], 4), [sentinel, 1, 2], 'apply-bound func with right args');
|
||||
st.deepEqual(aBoundArg(sentinel, [1], 4), [sentinel, 1, undefined], 'apply-bound func with too few args');
|
||||
|
||||
var aBoundR = callBind.apply(func, sentinel);
|
||||
st.deepEqual(aBoundR([1, 2, 3], 4), [sentinel, 1, 2], 'apply-bound func with receiver and too many args');
|
||||
st.deepEqual(aBoundR([1, 2], 4), [sentinel, 1, 2], 'apply-bound func with receiver and right args');
|
||||
st.deepEqual(aBoundR([1], 4), [sentinel, 1, undefined], 'apply-bound func with receiver and too few args');
|
||||
|
||||
st.end();
|
||||
});
|
||||
|
||||
t.end();
|
||||
});
|
@ -0,0 +1,201 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright (C) 2013-present SheetJS LLC
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
@ -0,0 +1,161 @@
|
||||
# Container File Blobs

Pure JS implementation of various container file formats, including ZIP and CFB.

[![Build Status](https://travis-ci.org/SheetJS/js-cfb.svg?branch=master)](https://travis-ci.org/SheetJS/js-cfb)
[![Coverage Status](http://img.shields.io/coveralls/SheetJS/js-cfb/master.svg)](https://coveralls.io/r/SheetJS/js-cfb?branch=master)
[![Dependencies Status](https://david-dm.org/sheetjs/js-cfb/status.svg)](https://david-dm.org/sheetjs/js-cfb)
[![NPM Downloads](https://img.shields.io/npm/dt/cfb.svg)](https://npmjs.org/package/cfb)
[![Analytics](https://ga-beacon.appspot.com/UA-36810333-1/SheetJS/js-cfb?pixel)](https://github.com/SheetJS/js-cfb)

## Installation
|
||||
|
||||
In the browser:
|
||||
|
||||
```html
|
||||
<script src="dist/cfb.min.js" type="text/javascript"></script>
|
||||
```
|
||||
|
||||
With [npm](https://www.npmjs.org/package/cfb):
|
||||
|
||||
```bash
|
||||
$ npm install cfb
|
||||
```
|
||||
|
||||
The `xlscfb.js` file is designed to be embedded in [js-xlsx](http://git.io/xlsx)
|
||||
|
||||
|
||||
## Library Usage
|
||||
|
||||
In node:
|
||||
|
||||
```js
|
||||
var CFB = require('cfb');
|
||||
```
|
||||
|
||||
For example, to get the Workbook content from an Excel 2003 XLS file:
|
||||
|
||||
```js
|
||||
var cfb = CFB.read(filename, {type: 'file'});
|
||||
var workbook = CFB.find(cfb, 'Workbook');
|
||||
var data = workbook.content;
|
||||
```
|
||||
|
||||
|
||||
## Command-Line Utility Usage
|
||||
|
||||
The [`cfb-cli`](https://www.npmjs.com/package/cfb-cli) module ships with a CLI
|
||||
tool for manipulating and inspecting supported files.
|
||||
|
||||
|
||||
## JS API
|
||||
|
||||
TypeScript definitions are maintained in `types/index.d.ts`.
|
||||
|
||||
The CFB object exposes the following methods and properties:
|
||||
|
||||
`CFB.parse(blob)` takes a nodejs Buffer or an array of bytes and returns an
|
||||
parsed representation of the data.
|
||||
|
||||
`CFB.read(blob, opts)` wraps `parse`.
|
||||
|
||||
`CFB.find(cfb, path)` performs a case-insensitive match for the path (or file
|
||||
name, if there are no slashes) and returns an entry object or null if not found.
|
||||
|
||||
`CFB.write(cfb, opts)` generates a file based on the container.
|
||||
|
||||
`CFB.writeFile(cfb, filename, opts)` creates a file with the specified name.
|
||||
|
||||
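
As a rough sketch of the write half of this API (the `input.xls` and `copy.xls`
file names below are placeholders, not part of the library):

```js
var CFB = require('cfb');

/* read an existing container from disk (nodejs only) */
var cfb = CFB.read('input.xls', {type: 'file'});

/* serialize the container to a Base64 string ... */
var b64 = CFB.write(cfb, {type: 'base64'});

/* ... or write it back to disk under a new name */
CFB.writeFile(cfb, 'copy.xls');
```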

### Parse Options

`CFB.read` takes an options argument. `opts.type` controls the behavior:

| `type` | expected input |
|------------|:----------------------------------------------------------------|
| `"base64"` | string: Base64 encoding of the file |
| `"binary"` | string: binary string (byte `n` is `data.charCodeAt(n)`) |
| `"buffer"` | nodejs Buffer |
| `"file"` | string: path of file that will be read (nodejs only) |
| (default) | buffer or array of 8-bit unsigned int (byte `n` is `data[n]`) |
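
For instance, a Base64 payload (the `b64` variable below is an assumed input,
e.g. received over the network) would be parsed with:

```js
/* `b64` is assumed to hold the Base64 encoding of a supported file */
var cfb = CFB.read(b64, {type: 'base64'});
var entry = CFB.find(cfb, 'Workbook');
```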

### Write Options

`CFB.write` and `CFB.writeFile` take an options argument.

`opts.type` controls the behavior:

| `type` | output |
|------------|:----------------------------------------------------------------|
| `"base64"` | string: Base64 encoding of the file |
| `"binary"` | string: binary string (byte `n` is `data.charCodeAt(n)`) |
| `"buffer"` | nodejs Buffer |
| `"file"` | string: path of file that will be created (nodejs only) |
| (default) | buffer if available, array of 8-bit unsigned int otherwise |

`opts.fileType` controls the output file type:

| `fileType` | output |
|:-------------------|:------------------------|
| `'cfb'` (default) | CFB container |
| `'zip'` | ZIP file |
| `'mad'` | MIME aggregate document |

`opts.compression` enables DEFLATE compression for the ZIP file type.
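
For example, assuming `cfb` is an existing container and that `opts.compression`
accepts a boolean flag, a compressed ZIP could be written along these lines (the
output name is illustrative):

```js
/* write the container as a DEFLATE-compressed ZIP file (nodejs only) */
CFB.writeFile(cfb, 'out.zip', {fileType: 'zip', compression: true});
```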

## Utility Functions

The utility functions are available in the `CFB.utils` object. Functions that
accept a `name` argument strictly deal with absolute file names:

- `.cfb_new(?opts)` creates a new container object.
- `.cfb_add(cfb, name, ?content, ?opts)` adds a new file to the `cfb`.
  Set the option `{unsafe:true}` to skip existence checks (for bulk additions).
- `.cfb_del(cfb, name)` deletes the specified file.
- `.cfb_mov(cfb, old_name, new_name)` moves the old file to the new path and name.
- `.use_zlib(require("zlib"))` loads a nodejs `zlib` instance.

By default, the library uses a pure JS inflate/deflate implementation. NodeJS
`zlib.InflateRaw` exposes the number of bytes read in versions after `8.11.0`.
If a supplied `zlib` does not support the required features, a warning will be
displayed in the console and the pure JS fallback will be used.
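
Putting the utility functions above together, a minimal sketch of building a
container from scratch (the stream name and content are invented for
illustration):

```js
var CFB = require('cfb');

/* start from an empty container */
var cfb = CFB.utils.cfb_new();

/* add a stream under an absolute name; content may be a Buffer or byte array */
CFB.utils.cfb_add(cfb, '/Workbook', Buffer.from('example content'));

/* optionally hand the library a nodejs zlib instance for DEFLATE support */
CFB.utils.use_zlib(require('zlib'));

/* write the result to disk */
CFB.writeFile(cfb, 'out.cfb');
```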

## Container Object Description

The objects returned by `parse` and `read` have the following properties:

- `.FullPaths` is an array of the names of all of the streams (files) and
  storages (directories) in the container. The paths are properly prefixed from
  the root entry (so the entries are unique).

- `.FileIndex` is an array, in the same order as `.FullPaths`, whose values are
  objects following the schema:

```typescript
interface CFBEntry {
  name: string; /** Case-sensitive internal name */
  type: number; /** 1 = dir, 2 = file, 5 = root ; see [MS-CFB] 2.6.1 */
  content: Buffer | number[] | Uint8Array; /** Raw Content */
  ct?: Date; /** Creation Time */
  mt?: Date; /** Modification Time */
  ctype?: String; /** Content-Type (for MAD) */
}
```
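
As a quick illustration of these two properties, the following sketch lists
every entry alongside its full path (it assumes `cfb` was produced by
`CFB.read` as above):

```js
/* FullPaths[i] corresponds to FileIndex[i] */
cfb.FullPaths.forEach(function(path, i) {
  var entry = cfb.FileIndex[i];
  /* type 1 = storage (directory), 2 = stream (file), 5 = root */
  console.log(path, entry.type, entry.content ? entry.content.length : 0);
});
```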

## License

Please consult the attached LICENSE file for details. All rights not explicitly
granted by the Apache 2.0 License are reserved by the Original Author.


## References

- `MS-CFB`: Compound File Binary File Format
- ZIP `APPNOTE.TXT`: .ZIP File Format Specification
- RFC1951: https://www.ietf.org/rfc/rfc1951.txt
- RFC2045: https://www.ietf.org/rfc/rfc2045.txt
- RFC2557: https://www.ietf.org/rfc/rfc2557.txt
File diff suppressed because it is too large
@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright (C) 2013-present SheetJS LLC

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff