Project Task Statement

Branch: main
Vks committed 2 months ago
parent fc16f91859
commit 0f73dde6de

@@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../browserslist/cli.js" "$@"
else
exec node "$basedir/../browserslist/cli.js" "$@"
fi

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\browserslist\cli.js" %*

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../browserslist/cli.js" $args
} else {
& "$basedir/node$exe" "$basedir/../browserslist/cli.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../browserslist/cli.js" $args
} else {
& "node$exe" "$basedir/../browserslist/cli.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

@@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../esbuild/bin/esbuild" "$@"
else
exec node "$basedir/../esbuild/bin/esbuild" "$@"
fi

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\esbuild\bin\esbuild" %*

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../esbuild/bin/esbuild" $args
} else {
& "$basedir/node$exe" "$basedir/../esbuild/bin/esbuild" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../esbuild/bin/esbuild" $args
} else {
& "node$exe" "$basedir/../esbuild/bin/esbuild" $args
}
$ret=$LASTEXITCODE
}
exit $ret

@@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../is-docker/cli.js" "$@"
else
exec node "$basedir/../is-docker/cli.js" "$@"
fi

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\is-docker\cli.js" %*

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../is-docker/cli.js" $args
} else {
& "$basedir/node$exe" "$basedir/../is-docker/cli.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../is-docker/cli.js" $args
} else {
& "node$exe" "$basedir/../is-docker/cli.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

@@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../is-inside-container/cli.js" "$@"
else
exec node "$basedir/../is-inside-container/cli.js" "$@"
fi

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\is-inside-container\cli.js" %*

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../is-inside-container/cli.js" $args
} else {
& "$basedir/node$exe" "$basedir/../is-inside-container/cli.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../is-inside-container/cli.js" $args
} else {
& "node$exe" "$basedir/../is-inside-container/cli.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

@@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../jsesc/bin/jsesc" "$@"
else
exec node "$basedir/../jsesc/bin/jsesc" "$@"
fi

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\jsesc\bin\jsesc" %*

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../jsesc/bin/jsesc" $args
} else {
& "$basedir/node$exe" "$basedir/../jsesc/bin/jsesc" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../jsesc/bin/jsesc" $args
} else {
& "node$exe" "$basedir/../jsesc/bin/jsesc" $args
}
$ret=$LASTEXITCODE
}
exit $ret

@@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../json5/lib/cli.js" "$@"
else
exec node "$basedir/../json5/lib/cli.js" "$@"
fi

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\json5\lib\cli.js" %*

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../json5/lib/cli.js" $args
} else {
& "$basedir/node$exe" "$basedir/../json5/lib/cli.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../json5/lib/cli.js" $args
} else {
& "node$exe" "$basedir/../json5/lib/cli.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

@@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../nanoid/bin/nanoid.cjs" "$@"
else
exec node "$basedir/../nanoid/bin/nanoid.cjs" "$@"
fi

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\nanoid\bin\nanoid.cjs" %*

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../nanoid/bin/nanoid.cjs" $args
} else {
& "$basedir/node$exe" "$basedir/../nanoid/bin/nanoid.cjs" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../nanoid/bin/nanoid.cjs" $args
} else {
& "node$exe" "$basedir/../nanoid/bin/nanoid.cjs" $args
}
$ret=$LASTEXITCODE
}
exit $ret

@@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../which/bin/node-which" "$@"
else
exec node "$basedir/../which/bin/node-which" "$@"
fi

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\which\bin\node-which" %*

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../which/bin/node-which" $args
} else {
& "$basedir/node$exe" "$basedir/../which/bin/node-which" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../which/bin/node-which" $args
} else {
& "node$exe" "$basedir/../which/bin/node-which" $args
}
$ret=$LASTEXITCODE
}
exit $ret

@@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../@babel/parser/bin/babel-parser.js" "$@"
else
exec node "$basedir/../@babel/parser/bin/babel-parser.js" "$@"
fi

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\@babel\parser\bin\babel-parser.js" %*

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../@babel/parser/bin/babel-parser.js" $args
} else {
& "$basedir/node$exe" "$basedir/../@babel/parser/bin/babel-parser.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../@babel/parser/bin/babel-parser.js" $args
} else {
& "node$exe" "$basedir/../@babel/parser/bin/babel-parser.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

@@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../rollup/dist/bin/rollup" "$@"
else
exec node "$basedir/../rollup/dist/bin/rollup" "$@"
fi

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\rollup\dist\bin\rollup" %*

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../rollup/dist/bin/rollup" $args
} else {
& "$basedir/node$exe" "$basedir/../rollup/dist/bin/rollup" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../rollup/dist/bin/rollup" $args
} else {
& "node$exe" "$basedir/../rollup/dist/bin/rollup" $args
}
$ret=$LASTEXITCODE
}
exit $ret

@@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../semver/bin/semver.js" "$@"
else
exec node "$basedir/../semver/bin/semver.js" "$@"
fi

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\semver\bin\semver.js" %*

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args
} else {
& "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../semver/bin/semver.js" $args
} else {
& "node$exe" "$basedir/../semver/bin/semver.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

@@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../update-browserslist-db/cli.js" "$@"
else
exec node "$basedir/../update-browserslist-db/cli.js" "$@"
fi

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\update-browserslist-db\cli.js" %*

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../update-browserslist-db/cli.js" $args
} else {
& "$basedir/node$exe" "$basedir/../update-browserslist-db/cli.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../update-browserslist-db/cli.js" $args
} else {
& "node$exe" "$basedir/../update-browserslist-db/cli.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

@@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../vite/bin/vite.js" "$@"
else
exec node "$basedir/../vite/bin/vite.js" "$@"
fi

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\vite\bin\vite.js" %*

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../vite/bin/vite.js" $args
} else {
& "$basedir/node$exe" "$basedir/../vite/bin/vite.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../vite/bin/vite.js" $args
} else {
& "node$exe" "$basedir/../vite/bin/vite.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

File diff suppressed because it is too large

@@ -0,0 +1,25 @@
{
"hash": "fefaa93c",
"configHash": "3158518f",
"lockfileHash": "5870829e",
"browserHash": "d637c5f2",
"optimized": {
"lodash.uniqueId": {
"src": "../../lodash.uniqueid/index.js",
"file": "lodash__uniqueId.js",
"fileHash": "48df8526",
"needsInterop": true
},
"vue": {
"src": "../../vue/dist/vue.runtime.esm-bundler.js",
"file": "vue.js",
"fileHash": "1dd03531",
"needsInterop": false
}
},
"chunks": {
"chunk-BUSYA2B4": {
"file": "chunk-BUSYA2B4.js"
}
}
}

@@ -0,0 +1,8 @@
var __getOwnPropNames = Object.getOwnPropertyNames;
var __commonJS = (cb, mod) => function __require() {
return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
export {
__commonJS
};

@@ -0,0 +1,7 @@
{
"version": 3,
"sources": [],
"sourcesContent": [],
"mappings": "",
"names": []
}

@@ -0,0 +1,46 @@
import {
__commonJS
} from "./chunk-BUSYA2B4.js";
// node_modules/lodash.uniqueid/index.js
var require_lodash = __commonJS({
"node_modules/lodash.uniqueid/index.js"(exports, module) {
var INFINITY = 1 / 0;
var symbolTag = "[object Symbol]";
var freeGlobal = typeof global == "object" && global && global.Object === Object && global;
var freeSelf = typeof self == "object" && self && self.Object === Object && self;
var root = freeGlobal || freeSelf || Function("return this")();
var objectProto = Object.prototype;
var idCounter = 0;
var objectToString = objectProto.toString;
var Symbol = root.Symbol;
var symbolProto = Symbol ? Symbol.prototype : void 0;
var symbolToString = symbolProto ? symbolProto.toString : void 0;
function baseToString(value) {
if (typeof value == "string") {
return value;
}
if (isSymbol(value)) {
return symbolToString ? symbolToString.call(value) : "";
}
var result = value + "";
return result == "0" && 1 / value == -INFINITY ? "-0" : result;
}
function isObjectLike(value) {
return !!value && typeof value == "object";
}
function isSymbol(value) {
return typeof value == "symbol" || isObjectLike(value) && objectToString.call(value) == symbolTag;
}
function toString(value) {
return value == null ? "" : baseToString(value);
}
function uniqueId(prefix) {
var id = ++idCounter;
return toString(prefix) + id;
}
module.exports = uniqueId;
}
});
export default require_lodash();
//# sourceMappingURL=lodash__uniqueId.js.map
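The prebundled chunk above simply wraps `lodash.uniqueid` and re-exports its `uniqueId` function. A minimal usage sketch (not part of the committed cache files; the returned values are illustrative):

```js
// Application code imports the bare package name; during dev, Vite's dependency
// optimizer serves this import from the prebundled lodash__uniqueId.js chunk above.
import uniqueId from 'lodash.uniqueid';

console.log(uniqueId('contact_')); // e.g. 'contact_1'
console.log(uniqueId());           // e.g. '2' (the counter is shared across calls)
```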

@@ -0,0 +1,7 @@
{
"version": 3,
"sources": ["../../lodash.uniqueid/index.js"],
"sourcesContent": ["/**\n * lodash (Custom Build) <https://lodash.com/>\n * Build: `lodash modularize exports=\"npm\" -o ./`\n * Copyright jQuery Foundation and other contributors <https://jquery.org/>\n * Released under MIT license <https://lodash.com/license>\n * Based on Underscore.js 1.8.3 <http://underscorejs.org/LICENSE>\n * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors\n */\n\n/** Used as references for various `Number` constants. */\nvar INFINITY = 1 / 0;\n\n/** `Object#toString` result references. */\nvar symbolTag = '[object Symbol]';\n\n/** Detect free variable `global` from Node.js. */\nvar freeGlobal = typeof global == 'object' && global && global.Object === Object && global;\n\n/** Detect free variable `self`. */\nvar freeSelf = typeof self == 'object' && self && self.Object === Object && self;\n\n/** Used as a reference to the global object. */\nvar root = freeGlobal || freeSelf || Function('return this')();\n\n/** Used for built-in method references. */\nvar objectProto = Object.prototype;\n\n/** Used to generate unique IDs. */\nvar idCounter = 0;\n\n/**\n * Used to resolve the\n * [`toStringTag`](http://ecma-international.org/ecma-262/6.0/#sec-object.prototype.tostring)\n * of values.\n */\nvar objectToString = objectProto.toString;\n\n/** Built-in value references. */\nvar Symbol = root.Symbol;\n\n/** Used to convert symbols to primitives and strings. */\nvar symbolProto = Symbol ? Symbol.prototype : undefined,\n symbolToString = symbolProto ? symbolProto.toString : undefined;\n\n/**\n * The base implementation of `_.toString` which doesn't convert nullish\n * values to empty strings.\n *\n * @private\n * @param {*} value The value to process.\n * @returns {string} Returns the string.\n */\nfunction baseToString(value) {\n // Exit early for strings to avoid a performance hit in some environments.\n if (typeof value == 'string') {\n return value;\n }\n if (isSymbol(value)) {\n return symbolToString ? symbolToString.call(value) : '';\n }\n var result = (value + '');\n return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result;\n}\n\n/**\n * Checks if `value` is object-like. A value is object-like if it's not `null`\n * and has a `typeof` result of \"object\".\n *\n * @static\n * @memberOf _\n * @since 4.0.0\n * @category Lang\n * @param {*} value The value to check.\n * @returns {boolean} Returns `true` if `value` is object-like, else `false`.\n * @example\n *\n * _.isObjectLike({});\n * // => true\n *\n * _.isObjectLike([1, 2, 3]);\n * // => true\n *\n * _.isObjectLike(_.noop);\n * // => false\n *\n * _.isObjectLike(null);\n * // => false\n */\nfunction isObjectLike(value) {\n return !!value && typeof value == 'object';\n}\n\n/**\n * Checks if `value` is classified as a `Symbol` primitive or object.\n *\n * @static\n * @memberOf _\n * @since 4.0.0\n * @category Lang\n * @param {*} value The value to check.\n * @returns {boolean} Returns `true` if `value` is a symbol, else `false`.\n * @example\n *\n * _.isSymbol(Symbol.iterator);\n * // => true\n *\n * _.isSymbol('abc');\n * // => false\n */\nfunction isSymbol(value) {\n return typeof value == 'symbol' ||\n (isObjectLike(value) && objectToString.call(value) == symbolTag);\n}\n\n/**\n * Converts `value` to a string. An empty string is returned for `null`\n * and `undefined` values. 
The sign of `-0` is preserved.\n *\n * @static\n * @memberOf _\n * @since 4.0.0\n * @category Lang\n * @param {*} value The value to process.\n * @returns {string} Returns the string.\n * @example\n *\n * _.toString(null);\n * // => ''\n *\n * _.toString(-0);\n * // => '-0'\n *\n * _.toString([1, 2, 3]);\n * // => '1,2,3'\n */\nfunction toString(value) {\n return value == null ? '' : baseToString(value);\n}\n\n/**\n * Generates a unique ID. If `prefix` is given, the ID is appended to it.\n *\n * @static\n * @since 0.1.0\n * @memberOf _\n * @category Util\n * @param {string} [prefix=''] The value to prefix the ID with.\n * @returns {string} Returns the unique ID.\n * @example\n *\n * _.uniqueId('contact_');\n * // => 'contact_104'\n *\n * _.uniqueId();\n * // => '105'\n */\nfunction uniqueId(prefix) {\n var id = ++idCounter;\n return toString(prefix) + id;\n}\n\nmodule.exports = uniqueId;\n"],
"mappings": ";;;;;AAAA;AAAA;AAUA,QAAI,WAAW,IAAI;AAGnB,QAAI,YAAY;AAGhB,QAAI,aAAa,OAAO,UAAU,YAAY,UAAU,OAAO,WAAW,UAAU;AAGpF,QAAI,WAAW,OAAO,QAAQ,YAAY,QAAQ,KAAK,WAAW,UAAU;AAG5E,QAAI,OAAO,cAAc,YAAY,SAAS,aAAa,EAAE;AAG7D,QAAI,cAAc,OAAO;AAGzB,QAAI,YAAY;AAOhB,QAAI,iBAAiB,YAAY;AAGjC,QAAI,SAAS,KAAK;AAGlB,QAAI,cAAc,SAAS,OAAO,YAAY;AAA9C,QACI,iBAAiB,cAAc,YAAY,WAAW;AAU1D,aAAS,aAAa,OAAO;AAE3B,UAAI,OAAO,SAAS,UAAU;AAC5B,eAAO;AAAA,MACT;AACA,UAAI,SAAS,KAAK,GAAG;AACnB,eAAO,iBAAiB,eAAe,KAAK,KAAK,IAAI;AAAA,MACvD;AACA,UAAI,SAAU,QAAQ;AACtB,aAAQ,UAAU,OAAQ,IAAI,SAAU,CAAC,WAAY,OAAO;AAAA,IAC9D;AA0BA,aAAS,aAAa,OAAO;AAC3B,aAAO,CAAC,CAAC,SAAS,OAAO,SAAS;AAAA,IACpC;AAmBA,aAAS,SAAS,OAAO;AACvB,aAAO,OAAO,SAAS,YACpB,aAAa,KAAK,KAAK,eAAe,KAAK,KAAK,KAAK;AAAA,IAC1D;AAuBA,aAAS,SAAS,OAAO;AACvB,aAAO,SAAS,OAAO,KAAK,aAAa,KAAK;AAAA,IAChD;AAmBA,aAAS,SAAS,QAAQ;AACxB,UAAI,KAAK,EAAE;AACX,aAAO,SAAS,MAAM,IAAI;AAAA,IAC5B;AAEA,WAAO,UAAU;AAAA;AAAA;",
"names": []
}

@@ -0,0 +1,3 @@
{
"type": "module"
}

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

@@ -0,0 +1,218 @@
# @ampproject/remapping
> Remap sequential sourcemaps through transformations to point at the original source code
Remapping allows you to take the sourcemaps generated through transforming your code and "remap"
them to the original source locations. Think "my minified code, transformed with babel and bundled
with webpack", all pointing to the correct location in your original source code.
With remapping, none of your source code transformations need to be aware of the input's sourcemap;
they only need to generate an output sourcemap. This greatly simplifies building custom
transformations (think a find-and-replace).
## Installation
```sh
npm install @ampproject/remapping
```
## Usage
```typescript
function remapping(
map: SourceMap | SourceMap[],
loader: (file: string, ctx: LoaderContext) => (SourceMap | null | undefined),
options?: { excludeContent: boolean, decodedMappings: boolean }
): SourceMap;
// LoaderContext gives the loader the importing sourcemap, tree depth, the ability to override the
// "source" location (where child sources are resolved relative to, or the location of original
// source), and the ability to override the "content" of an original source for inclusion in the
// output sourcemap.
type LoaderContext = {
readonly importer: string;
readonly depth: number;
source: string;
content: string | null | undefined;
}
```
`remapping` takes the final output sourcemap, and a `loader` function. For every source file pointer
in the sourcemap, the `loader` will be called with the resolved path. If the path itself represents
a transformed file (it has a sourcemap associated with it), then the `loader` should return that
sourcemap. If not, the path will be treated as an original, untransformed source file.
```js
// Babel transformed "helloworld.js" into "transformed.js"
const transformedMap = JSON.stringify({
file: 'transformed.js',
// 1st column of 2nd line of output file translates into the 1st source
// file, line 3, column 2
mappings: ';CAEE',
sources: ['helloworld.js'],
version: 3,
});
// Uglify minified "transformed.js" into "transformed.min.js"
const minifiedTransformedMap = JSON.stringify({
file: 'transformed.min.js',
// 0th column of 1st line of output file translates into the 1st source
// file, line 2, column 1.
mappings: 'AACC',
names: [],
sources: ['transformed.js'],
version: 3,
});
const remapped = remapping(
minifiedTransformedMap,
(file, ctx) => {
// The "transformed.js" file is an transformed file.
if (file === 'transformed.js') {
// The root importer is empty.
console.assert(ctx.importer === '');
// The depth in the sourcemap tree we're currently loading.
// The root `minifiedTransformedMap` is depth 0, and its source children are depth 1, etc.
console.assert(ctx.depth === 1);
return transformedMap;
}
// Loader will be called to load transformedMap's source file pointers as well.
console.assert(file === 'helloworld.js');
// `transformed.js`'s sourcemap points into `helloworld.js`.
console.assert(ctx.importer === 'transformed.js');
// This is a source child of `transformed`, which is a source child of `minifiedTransformedMap`.
console.assert(ctx.depth === 2);
return null;
}
);
console.log(remapped);
// {
// file: 'transpiled.min.js',
// mappings: 'AAEE',
// sources: ['helloworld.js'],
// version: 3,
// };
```
In this example, `loader` will be called twice:
1. `"transformed.js"`, the first source file pointer in the `minifiedTransformedMap`. We return the
associated sourcemap for it (it's a transformed file, after all) so that sourcemap locations can
be traced through it into the source files it represents.
2. `"helloworld.js"`, our original, unmodified source code. This file does not have a sourcemap, so
we return `null`.
The `remapped` sourcemap now points from `transformed.min.js` into locations in `helloworld.js`. If
you were to read the `mappings`, it says "0th column of the 1st line of the output file points to the 1st
column of the 2nd line of the file `helloworld.js`".
### Multiple transformations of a file
As a convenience, if you have multiple single-source transformations of a file, you may pass an
array of sourcemap files in the order of most-recent transformation sourcemap first. Note that this
changes the `importer` and `depth` of each call to our loader. So our above example could have been
written as:
```js
const remapped = remapping(
[minifiedTransformedMap, transformedMap],
() => null
);
console.log(remapped);
// {
// file: 'transpiled.min.js',
// mappings: 'AAEE',
// sources: ['helloworld.js'],
// version: 3,
// };
```
### Advanced control of the loading graph
#### `source`
The `source` property can be overridden to any value to change the location of the current load. Eg,
for an original source file, it allows us to change the location to the original source regardless
of what the sourcemap source entry says. And for transformed files, it allows us to change the
relative resolving location for child sources of the loaded sourcemap.
```js
const remapped = remapping(
minifiedTransformedMap,
(file, ctx) => {
if (file === 'transformed.js') {
// We pretend the transformed.js file actually exists in the 'src/' directory. When the nested
// source files are loaded, they will now be relative to `src/`.
ctx.source = 'src/transformed.js';
return transformedMap;
}
console.assert(file === 'src/helloworld.js');
// We could further change the source of this original file, eg, to be inside a nested directory
// itself. This will be reflected in the remapped sourcemap.
ctx.source = 'src/nested/helloworld.js';
return null;
}
);
console.log(remapped);
// {
// …,
// sources: ['src/nested/helloworld.js'],
// };
```
#### `content`
The `content` property can be overridden when we encounter an original source file. Eg, this allows
you to manually provide the source content of the original file regardless of whether the
`sourcesContent` field is present in the parent sourcemap. It can also be set to `null` to remove
the source content.
```js
const remapped = remapping(
minifiedTransformedMap,
(file, ctx) => {
if (file === 'transformed.js') {
// transformedMap does not include a `sourcesContent` field, so usually the remapped sourcemap
// would not include any `sourcesContent` values.
return transformedMap;
}
console.assert(file === 'helloworld.js');
// We can read the file to provide the source content.
ctx.content = fs.readFileSync(file, 'utf8');
return null;
}
);
console.log(remapped);
// {
// …,
// sourcesContent: [
// 'console.log("Hello world!")',
// ],
// };
```
### Options
#### excludeContent
By default, `excludeContent` is `false`. Passing `{ excludeContent: true }` will exclude the
`sourcesContent` field from the returned sourcemap. This is mainly useful when you want to reduce
the size of the sourcemap.
#### decodedMappings
By default, `decodedMappings` is `false`. Passing `{ decodedMappings: true }` will leave the
`mappings` field in a [decoded state](https://github.com/rich-harris/sourcemap-codec) instead of
encoding into a VLQ string.
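Both options can be combined. A minimal sketch, reusing `minifiedTransformedMap`, `transformedMap`, and the loader shape from the examples above:

```js
const remapped = remapping(
  minifiedTransformedMap,
  (file) => (file === 'transformed.js' ? transformedMap : null),
  { excludeContent: true, decodedMappings: true }
);

// `sourcesContent` is omitted from the output, and `mappings` is left as decoded
// segment arrays rather than a VLQ-encoded string.
console.log(remapped.mappings);
```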

@@ -0,0 +1,197 @@
import { decodedMappings, traceSegment, TraceMap } from '@jridgewell/trace-mapping';
import { GenMapping, maybeAddSegment, setSourceContent, setIgnore, toDecodedMap, toEncodedMap } from '@jridgewell/gen-mapping';
const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false);
const EMPTY_SOURCES = [];
function SegmentObject(source, line, column, name, content, ignore) {
return { source, line, column, name, content, ignore };
}
function Source(map, sources, source, content, ignore) {
return {
map,
sources,
source,
content,
ignore,
};
}
/**
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
* (which may themselves be SourceMapTrees).
*/
function MapSource(map, sources) {
return Source(map, sources, '', null, false);
}
/**
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
* segment tracing ends at the `OriginalSource`.
*/
function OriginalSource(source, content, ignore) {
return Source(null, EMPTY_SOURCES, source, content, ignore);
}
/**
* traceMappings is only called on the root level SourceMapTree, and begins the process of
* resolving each mapping in terms of the original source files.
*/
function traceMappings(tree) {
// TODO: Eventually support sourceRoot, which has to be removed because the sources are already
// fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
const gen = new GenMapping({ file: tree.map.file });
const { sources: rootSources, map } = tree;
const rootNames = map.names;
const rootMappings = decodedMappings(map);
for (let i = 0; i < rootMappings.length; i++) {
const segments = rootMappings[i];
for (let j = 0; j < segments.length; j++) {
const segment = segments[j];
const genCol = segment[0];
let traced = SOURCELESS_MAPPING;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length !== 1) {
const source = rootSources[segment[1]];
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
// respective segment into an original source.
if (traced == null)
continue;
}
const { column, line, name, content, source, ignore } = traced;
maybeAddSegment(gen, i, genCol, source, line, column, name);
if (source && content != null)
setSourceContent(gen, source, content);
if (ignore)
setIgnore(gen, source, true);
}
}
return gen;
}
/**
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
* child SourceMapTrees, until we find the original source map.
*/
function originalPositionFor(source, line, column, name) {
if (!source.map) {
return SegmentObject(source.source, line, column, name, source.content, source.ignore);
}
const segment = traceSegment(source.map, line, column);
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
if (segment == null)
return null;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length === 1)
return SOURCELESS_MAPPING;
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
}
function asArray(value) {
if (Array.isArray(value))
return value;
return [value];
}
/**
* Recursively builds a tree structure out of sourcemap files, with each node
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
* `OriginalSource`s and `SourceMapTree`s.
*
* Every sourcemap is composed of a collection of source files and mappings
* into locations of those source files. When we generate a `SourceMapTree` for
* the sourcemap, we attempt to load each source file's own sourcemap. If it
* does not have an associated sourcemap, it is considered an original,
* unmodified source file.
*/
function buildSourceMapTree(input, loader) {
const maps = asArray(input).map((m) => new TraceMap(m, ''));
const map = maps.pop();
for (let i = 0; i < maps.length; i++) {
if (maps[i].sources.length > 1) {
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
'Did you specify these with the most recent transformation maps first?');
}
}
let tree = build(map, loader, '', 0);
for (let i = maps.length - 1; i >= 0; i--) {
tree = MapSource(maps[i], [tree]);
}
return tree;
}
function build(map, loader, importer, importerDepth) {
const { resolvedSources, sourcesContent, ignoreList } = map;
const depth = importerDepth + 1;
const children = resolvedSources.map((sourceFile, i) => {
// The loading context gives the loader more information about why this file is being loaded
// (eg, from which importer). It also allows the loader to override the location of the loaded
// sourcemap/original source, or to override the content in the sourcesContent field if it's
// an unmodified source file.
const ctx = {
importer,
depth,
source: sourceFile || '',
content: undefined,
ignore: undefined,
};
// Use the provided loader callback to retrieve the file's sourcemap.
// TODO: We should eventually support async loading of sourcemap files.
const sourceMap = loader(ctx.source, ctx);
const { source, content, ignore } = ctx;
// If there is a sourcemap, then we need to recurse into it to load its source files.
if (sourceMap)
return build(new TraceMap(sourceMap, source), loader, source, depth);
// Else, it's an unmodified source file.
// The contents of this unmodified source file can be overridden via the loader context,
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
// the importing sourcemap's `sourcesContent` field.
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
const ignored = ignore !== undefined ? ignore : ignoreList ? ignoreList.includes(i) : false;
return OriginalSource(source, sourceContent, ignored);
});
return MapSource(map, children);
}
/**
* A SourceMap v3 compatible sourcemap, which only includes fields that were
* provided to it.
*/
class SourceMap {
constructor(map, options) {
const out = options.decodedMappings ? toDecodedMap(map) : toEncodedMap(map);
this.version = out.version; // SourceMap spec says this should be first.
this.file = out.file;
this.mappings = out.mappings;
this.names = out.names;
this.ignoreList = out.ignoreList;
this.sourceRoot = out.sourceRoot;
this.sources = out.sources;
if (!options.excludeContent) {
this.sourcesContent = out.sourcesContent;
}
}
toString() {
return JSON.stringify(this);
}
}
/**
* Traces through all the mappings in the root sourcemap, through the sources
* (and their sourcemaps), all the way back to the original source location.
*
* `loader` will be called every time we encounter a source file. If it returns
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
* it returns a falsey value, that source file is treated as an original,
* unmodified source file.
*
* Pass `excludeContent` to exclude any self-containing source file content
* from the output sourcemap.
*
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
* VLQ encoded) mappings.
*/
function remapping(input, loader, options) {
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
const tree = buildSourceMapTree(input, loader);
return new SourceMap(traceMappings(tree), opts);
}
export { remapping as default };
//# sourceMappingURL=remapping.mjs.map

File diff suppressed because one or more lines are too long

@@ -0,0 +1,202 @@
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('@jridgewell/trace-mapping'), require('@jridgewell/gen-mapping')) :
typeof define === 'function' && define.amd ? define(['@jridgewell/trace-mapping', '@jridgewell/gen-mapping'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.remapping = factory(global.traceMapping, global.genMapping));
})(this, (function (traceMapping, genMapping) { 'use strict';
const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false);
const EMPTY_SOURCES = [];
function SegmentObject(source, line, column, name, content, ignore) {
return { source, line, column, name, content, ignore };
}
function Source(map, sources, source, content, ignore) {
return {
map,
sources,
source,
content,
ignore,
};
}
/**
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
* (which may themselves be SourceMapTrees).
*/
function MapSource(map, sources) {
return Source(map, sources, '', null, false);
}
/**
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
* segment tracing ends at the `OriginalSource`.
*/
function OriginalSource(source, content, ignore) {
return Source(null, EMPTY_SOURCES, source, content, ignore);
}
/**
* traceMappings is only called on the root level SourceMapTree, and begins the process of
* resolving each mapping in terms of the original source files.
*/
function traceMappings(tree) {
// TODO: Eventually support sourceRoot, which has to be removed because the sources are already
// fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
const gen = new genMapping.GenMapping({ file: tree.map.file });
const { sources: rootSources, map } = tree;
const rootNames = map.names;
const rootMappings = traceMapping.decodedMappings(map);
for (let i = 0; i < rootMappings.length; i++) {
const segments = rootMappings[i];
for (let j = 0; j < segments.length; j++) {
const segment = segments[j];
const genCol = segment[0];
let traced = SOURCELESS_MAPPING;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length !== 1) {
const source = rootSources[segment[1]];
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
// respective segment into an original source.
if (traced == null)
continue;
}
const { column, line, name, content, source, ignore } = traced;
genMapping.maybeAddSegment(gen, i, genCol, source, line, column, name);
if (source && content != null)
genMapping.setSourceContent(gen, source, content);
if (ignore)
genMapping.setIgnore(gen, source, true);
}
}
return gen;
}
/**
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
* child SourceMapTrees, until we find the original source map.
*/
function originalPositionFor(source, line, column, name) {
if (!source.map) {
return SegmentObject(source.source, line, column, name, source.content, source.ignore);
}
const segment = traceMapping.traceSegment(source.map, line, column);
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
if (segment == null)
return null;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length === 1)
return SOURCELESS_MAPPING;
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
}
function asArray(value) {
if (Array.isArray(value))
return value;
return [value];
}
/**
* Recursively builds a tree structure out of sourcemap files, with each node
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
* `OriginalSource`s and `SourceMapTree`s.
*
* Every sourcemap is composed of a collection of source files and mappings
* into locations of those source files. When we generate a `SourceMapTree` for
* the sourcemap, we attempt to load each source file's own sourcemap. If it
* does not have an associated sourcemap, it is considered an original,
* unmodified source file.
*/
function buildSourceMapTree(input, loader) {
const maps = asArray(input).map((m) => new traceMapping.TraceMap(m, ''));
const map = maps.pop();
for (let i = 0; i < maps.length; i++) {
if (maps[i].sources.length > 1) {
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
'Did you specify these with the most recent transformation maps first?');
}
}
let tree = build(map, loader, '', 0);
for (let i = maps.length - 1; i >= 0; i--) {
tree = MapSource(maps[i], [tree]);
}
return tree;
}
function build(map, loader, importer, importerDepth) {
const { resolvedSources, sourcesContent, ignoreList } = map;
const depth = importerDepth + 1;
const children = resolvedSources.map((sourceFile, i) => {
// The loading context gives the loader more information about why this file is being loaded
// (eg, from which importer). It also allows the loader to override the location of the loaded
// sourcemap/original source, or to override the content in the sourcesContent field if it's
// an unmodified source file.
const ctx = {
importer,
depth,
source: sourceFile || '',
content: undefined,
ignore: undefined,
};
// Use the provided loader callback to retrieve the file's sourcemap.
// TODO: We should eventually support async loading of sourcemap files.
const sourceMap = loader(ctx.source, ctx);
const { source, content, ignore } = ctx;
// If there is a sourcemap, then we need to recurse into it to load its source files.
if (sourceMap)
return build(new traceMapping.TraceMap(sourceMap, source), loader, source, depth);
// Else, it's an unmodified source file.
// The contents of this unmodified source file can be overridden via the loader context,
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
// the importing sourcemap's `sourcesContent` field.
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
const ignored = ignore !== undefined ? ignore : ignoreList ? ignoreList.includes(i) : false;
return OriginalSource(source, sourceContent, ignored);
});
return MapSource(map, children);
}
/**
* A SourceMap v3 compatible sourcemap, which only includes fields that were
* provided to it.
*/
class SourceMap {
constructor(map, options) {
const out = options.decodedMappings ? genMapping.toDecodedMap(map) : genMapping.toEncodedMap(map);
this.version = out.version; // SourceMap spec says this should be first.
this.file = out.file;
this.mappings = out.mappings;
this.names = out.names;
this.ignoreList = out.ignoreList;
this.sourceRoot = out.sourceRoot;
this.sources = out.sources;
if (!options.excludeContent) {
this.sourcesContent = out.sourcesContent;
}
}
toString() {
return JSON.stringify(this);
}
}
/**
* Traces through all the mappings in the root sourcemap, through the sources
* (and their sourcemaps), all the way back to the original source location.
*
* `loader` will be called every time we encounter a source file. If it returns
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
* it returns a falsey value, that source file is treated as an original,
* unmodified source file.
*
* Pass `excludeContent` to exclude any self-containing source file content
* from the output sourcemap.
*
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
* VLQ encoded) mappings.
*/
function remapping(input, loader, options) {
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
const tree = buildSourceMapTree(input, loader);
return new SourceMap(traceMappings(tree), opts);
}
return remapping;
}));
//# sourceMappingURL=remapping.umd.js.map
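The UMD bundle above exposes a single function, `remapping(input, loader, options)`. A minimal usage sketch (the map variables and file names here are hypothetical placeholders, not part of the package):

```js
// Compose a minifier's map (bundle.min.js -> bundle.js) with a transpiler's
// map (bundle.js -> src/index.js). Both maps are assumed to already be in scope.
const remapping = require('@ampproject/remapping');

const remapped = remapping(minifiedMap, (file, ctx) => {
  // Return the next sourcemap in the chain for this source file,
  // or null/undefined to treat it as an original, unmodified source.
  return file === 'bundle.js' ? transpiledMap : null;
});

// `remapped` is a SourceMap instance; `JSON.stringify(remapped)` (or
// `remapped.toString()`) yields a standard SourceMap v3 object that points
// straight back at src/index.js.
```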


@ -0,0 +1,14 @@
import type { MapSource as MapSourceType } from './source-map-tree';
import type { SourceMapInput, SourceMapLoader } from './types';
/**
* Recursively builds a tree structure out of sourcemap files, with each node
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
* `OriginalSource`s and `SourceMapTree`s.
*
* Every sourcemap is composed of a collection of source files and mappings
* into locations of those source files. When we generate a `SourceMapTree` for
* the sourcemap, we attempt to load each source file's own sourcemap. If it
* does not have an associated sourcemap, it is considered an original,
* unmodified source file.
*/
export default function buildSourceMapTree(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader): MapSourceType;

@ -0,0 +1,20 @@
import SourceMap from './source-map';
import type { SourceMapInput, SourceMapLoader, Options } from './types';
export type { SourceMapSegment, EncodedSourceMap, EncodedSourceMap as RawSourceMap, DecodedSourceMap, SourceMapInput, SourceMapLoader, LoaderContext, Options, } from './types';
export type { SourceMap };
/**
* Traces through all the mappings in the root sourcemap, through the sources
* (and their sourcemaps), all the way back to the original source location.
*
* `loader` will be called every time we encounter a source file. If it returns
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
* it returns a falsey value, that source file is treated as an original,
* unmodified source file.
*
* Pass `excludeContent` to exclude any self-containing source file content
* from the output sourcemap.
*
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
* VLQ encoded) mappings.
*/
export default function remapping(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader, options?: boolean | Options): SourceMap;
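Per the overload above and the runtime check in the UMD bundle (`typeof options === 'object'`), `options` may be either an `Options` object or a bare boolean, in which case it is treated as `excludeContent`. A small sketch, reusing the hypothetical `map`/`loader` from the previous example:

```js
// Legacy boolean form: drops sourcesContent from the output.
const a = remapping(map, loader, true);

// Options form: same exclusion, plus decoded (non-VLQ) mappings.
const b = remapping(map, loader, { excludeContent: true, decodedMappings: true });
```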

@ -0,0 +1,45 @@
import { GenMapping } from '@jridgewell/gen-mapping';
import type { TraceMap } from '@jridgewell/trace-mapping';
export declare type SourceMapSegmentObject = {
column: number;
line: number;
name: string;
source: string;
content: string | null;
ignore: boolean;
};
export declare type OriginalSource = {
map: null;
sources: Sources[];
source: string;
content: string | null;
ignore: boolean;
};
export declare type MapSource = {
map: TraceMap;
sources: Sources[];
source: string;
content: null;
ignore: false;
};
export declare type Sources = OriginalSource | MapSource;
/**
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
* (which may themselves be SourceMapTrees).
*/
export declare function MapSource(map: TraceMap, sources: Sources[]): MapSource;
/**
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
* segment tracing ends at the `OriginalSource`.
*/
export declare function OriginalSource(source: string, content: string | null, ignore: boolean): OriginalSource;
/**
* traceMappings is only called on the root level SourceMapTree, and begins the process of
* resolving each mapping in terms of the original source files.
*/
export declare function traceMappings(tree: MapSource): GenMapping;
/**
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
* child SourceMapTrees, until we find the original source map.
*/
export declare function originalPositionFor(source: Sources, line: number, column: number, name: string): SourceMapSegmentObject | null;

@ -0,0 +1,18 @@
import type { GenMapping } from '@jridgewell/gen-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Options } from './types';
/**
* A SourceMap v3 compatible sourcemap, which only includes fields that were
* provided to it.
*/
export default class SourceMap {
file?: string | null;
mappings: EncodedSourceMap['mappings'] | DecodedSourceMap['mappings'];
sourceRoot?: string;
names: string[];
sources: (string | null)[];
sourcesContent?: (string | null)[];
version: 3;
ignoreList: number[] | undefined;
constructor(map: GenMapping, options: Options);
toString(): string;
}

@ -0,0 +1,15 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
export type { SourceMapSegment, DecodedSourceMap, EncodedSourceMap, } from '@jridgewell/trace-mapping';
export type { SourceMapInput };
export declare type LoaderContext = {
readonly importer: string;
readonly depth: number;
source: string;
content: string | null | undefined;
ignore: boolean | undefined;
};
export declare type SourceMapLoader = (file: string, ctx: LoaderContext) => SourceMapInput | null | undefined | void;
export declare type Options = {
excludeContent?: boolean;
decodedMappings?: boolean;
};
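The `LoaderContext` is intentionally mutable: a loader may rewrite `ctx.source`, and for leaf files it may set `ctx.content` or `ctx.ignore` to override what lands in the output's `sourcesContent`/`ignoreList` (see `build()` in the UMD bundle above). A hedged sketch of such a loader, with hypothetical file names and a hypothetical `childMaps` lookup:

```js
const loader = (file, ctx) => {
  // Keep the trace shallow in this sketch.
  if (ctx.depth > 1) return null;

  if (file.endsWith('vendor.js')) {
    ctx.ignore = true;   // ignore-list this source in the output map
    ctx.content = null;  // and do not embed its content
    return null;         // treat it as an original, unmodified source
  }

  // Return the child sourcemap for this file, or null if it has none.
  return childMaps.get(file) ?? null;
};
```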

@ -0,0 +1,75 @@
{
"name": "@ampproject/remapping",
"version": "2.3.0",
"description": "Remap sequential sourcemaps through transformations to point at the original source code",
"keywords": [
"source",
"map",
"remap"
],
"main": "dist/remapping.umd.js",
"module": "dist/remapping.mjs",
"types": "dist/types/remapping.d.ts",
"exports": {
".": [
{
"types": "./dist/types/remapping.d.ts",
"browser": "./dist/remapping.umd.js",
"require": "./dist/remapping.umd.js",
"import": "./dist/remapping.mjs"
},
"./dist/remapping.umd.js"
],
"./package.json": "./package.json"
},
"files": [
"dist"
],
"author": "Justin Ridgewell <jridgewell@google.com>",
"repository": {
"type": "git",
"url": "git+https://github.com/ampproject/remapping.git"
},
"license": "Apache-2.0",
"engines": {
"node": ">=6.0.0"
},
"scripts": {
"build": "run-s -n build:*",
"build:rollup": "rollup -c rollup.config.js",
"build:ts": "tsc --project tsconfig.build.json",
"lint": "run-s -n lint:*",
"lint:prettier": "npm run test:lint:prettier -- --write",
"lint:ts": "npm run test:lint:ts -- --fix",
"prebuild": "rm -rf dist",
"prepublishOnly": "npm run preversion",
"preversion": "run-s test build",
"test": "run-s -n test:lint test:only",
"test:debug": "node --inspect-brk node_modules/.bin/jest --runInBand",
"test:lint": "run-s -n test:lint:*",
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
"test:only": "jest --coverage",
"test:watch": "jest --coverage --watch"
},
"devDependencies": {
"@rollup/plugin-typescript": "8.3.2",
"@types/jest": "27.4.1",
"@typescript-eslint/eslint-plugin": "5.20.0",
"@typescript-eslint/parser": "5.20.0",
"eslint": "8.14.0",
"eslint-config-prettier": "8.5.0",
"jest": "27.5.1",
"jest-config": "27.5.1",
"npm-run-all": "4.1.5",
"prettier": "2.6.2",
"rollup": "2.70.2",
"ts-jest": "27.1.4",
"tslib": "2.4.0",
"typescript": "4.6.3"
},
"dependencies": {
"@jridgewell/gen-mapping": "^0.3.5",
"@jridgewell/trace-mapping": "^0.3.24"
}
}
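The `exports` map above routes every consumer to the matching build: the `import` condition resolves to `dist/remapping.mjs`, `require` and `browser` to `dist/remapping.umd.js`, and `types` to the bundled declarations. For example:

```js
// ESM consumers (resolved via the "import" condition):
import remapping from '@ampproject/remapping';

// CommonJS consumers would instead hit the "require" condition:
//   const remapping = require('@ampproject/remapping');
```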

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021 Anthony Fu <https://github.com/antfu>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -0,0 +1,24 @@
# @antfu/utils
[![NPM version](https://img.shields.io/npm/v/@antfu/utils?color=a1b858&label=)](https://www.npmjs.com/package/@antfu/utils)
[![Docs](https://img.shields.io/badge/docs-green)](https://www.jsdocs.io/package/@antfu/utils)
Opinionated collection of common JavaScript / TypeScript utils by [@antfu](https://github.com/antfu).
- Tree-shakable ESM
- Fully typed - with TSDocs
- Type utilities - `Arrayable<T>`, `ElementOf<T>`, etc.
> This package is designed to be used as `devDependencies` and bundled into your dist.
## Sponsors
<p align="center">
<a href="https://cdn.jsdelivr.net/gh/antfu/static/sponsors.svg">
<img src='https://cdn.jsdelivr.net/gh/antfu/static/sponsors.svg'/>
</a>
</p>
## License
[MIT](./LICENSE) License © 2021-PRESENT [Anthony Fu](https://github.com/antfu)
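For reference, a minimal (hypothetical) consumer matching the tree-shakable ESM claim above:

```js
// Only the imported helpers end up in your bundle after tree-shaking.
import { toArray, uniq, sleep } from '@antfu/utils';

// `maybeTagOrTags` is a hypothetical Arrayable<string> value.
const tags = uniq(toArray(maybeTagOrTags));
await sleep(100); // promised setTimeout
```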

@ -0,0 +1,844 @@
'use strict';
function clamp(n, min, max) {
return Math.min(max, Math.max(min, n));
}
function sum(...args) {
return flattenArrayable(args).reduce((a, b) => a + b, 0);
}
function lerp(min, max, t) {
const interpolation = clamp(t, 0, 1);
return min + (max - min) * interpolation;
}
function remap(n, inMin, inMax, outMin, outMax) {
const interpolation = (n - inMin) / (inMax - inMin);
return lerp(outMin, outMax, interpolation);
}
function toArray(array) {
array = array ?? [];
return Array.isArray(array) ? array : [array];
}
function flattenArrayable(array) {
return toArray(array).flat(1);
}
function mergeArrayable(...args) {
return args.flatMap((i) => toArray(i));
}
function partition(array, ...filters) {
const result = Array.from({ length: filters.length + 1 }).fill(null).map(() => []);
array.forEach((e, idx, arr) => {
let i = 0;
for (const filter of filters) {
if (filter(e, idx, arr)) {
result[i].push(e);
return;
}
i += 1;
}
result[i].push(e);
});
return result;
}
function uniq(array) {
return Array.from(new Set(array));
}
function uniqueBy(array, equalFn) {
return array.reduce((acc, cur) => {
const index = acc.findIndex((item) => equalFn(cur, item));
if (index === -1)
acc.push(cur);
return acc;
}, []);
}
function last(array) {
return at(array, -1);
}
function remove(array, value) {
if (!array)
return false;
const index = array.indexOf(value);
if (index >= 0) {
array.splice(index, 1);
return true;
}
return false;
}
function at(array, index) {
const len = array.length;
if (!len)
return void 0;
if (index < 0)
index += len;
return array[index];
}
function range(...args) {
let start, stop, step;
if (args.length === 1) {
start = 0;
step = 1;
[stop] = args;
} else {
[start, stop, step = 1] = args;
}
const arr = [];
let current = start;
while (current < stop) {
arr.push(current);
current += step || 1;
}
return arr;
}
function move(arr, from, to) {
arr.splice(to, 0, arr.splice(from, 1)[0]);
return arr;
}
function clampArrayRange(n, arr) {
return clamp(n, 0, arr.length - 1);
}
function sample(arr, quantity) {
return Array.from({ length: quantity }, (_) => arr[Math.round(Math.random() * (arr.length - 1))]);
}
function shuffle(array) {
for (let i = array.length - 1; i > 0; i--) {
const j = Math.floor(Math.random() * (i + 1));
[array[i], array[j]] = [array[j], array[i]];
}
return array;
}
function assert(condition, message) {
if (!condition)
throw new Error(message);
}
const toString = (v) => Object.prototype.toString.call(v);
function getTypeName(v) {
if (v === null)
return "null";
const type = toString(v).slice(8, -1).toLowerCase();
return typeof v === "object" || typeof v === "function" ? type : typeof v;
}
function noop() {
}
function isDeepEqual(value1, value2) {
const type1 = getTypeName(value1);
const type2 = getTypeName(value2);
if (type1 !== type2)
return false;
if (type1 === "array") {
if (value1.length !== value2.length)
return false;
return value1.every((item, i) => {
return isDeepEqual(item, value2[i]);
});
}
if (type1 === "object") {
const keyArr = Object.keys(value1);
if (keyArr.length !== Object.keys(value2).length)
return false;
return keyArr.every((key) => {
return isDeepEqual(value1[key], value2[key]);
});
}
return Object.is(value1, value2);
}
function notNullish(v) {
return v != null;
}
function noNull(v) {
return v !== null;
}
function notUndefined(v) {
return v !== void 0;
}
function isTruthy(v) {
return Boolean(v);
}
const isDef = (val) => typeof val !== "undefined";
const isBoolean = (val) => typeof val === "boolean";
const isFunction = (val) => typeof val === "function";
const isNumber = (val) => typeof val === "number";
const isString = (val) => typeof val === "string";
const isObject = (val) => toString(val) === "[object Object]";
const isUndefined = (val) => toString(val) === "[object Undefined]";
const isNull = (val) => toString(val) === "[object Null]";
const isRegExp = (val) => toString(val) === "[object RegExp]";
const isDate = (val) => toString(val) === "[object Date]";
const isWindow = (val) => typeof window !== "undefined" && toString(val) === "[object Window]";
const isBrowser = typeof window !== "undefined";
function slash(str) {
return str.replace(/\\/g, "/");
}
function ensurePrefix(prefix, str) {
if (!str.startsWith(prefix))
return prefix + str;
return str;
}
function ensureSuffix(suffix, str) {
if (!str.endsWith(suffix))
return str + suffix;
return str;
}
function template(str, ...args) {
const [firstArg, fallback] = args;
if (isObject(firstArg)) {
const vars = firstArg;
return str.replace(/{([\w\d]+)}/g, (_, key) => vars[key] || ((typeof fallback === "function" ? fallback(key) : fallback) ?? key));
} else {
return str.replace(/{(\d+)}/g, (_, key) => {
const index = Number(key);
if (Number.isNaN(index))
return key;
return args[index];
});
}
}
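// Usage sketch (added for illustration, not part of the original bundle):
// `template` accepts either index-based or name-based placeholders, as the
// branches above show; a string fallback covers missing named keys.
//   template('Hello {0}! My name is {1}.', 'Inès', 'Anthony')
//     // => 'Hello Inès! My name is Anthony.'
//   template('{greet}! My name is {name}.', { greet: 'Hello' }, 'placeholder')
//     // => 'Hello! My name is placeholder.'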
const urlAlphabet = "useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict";
function randomStr(size = 16, dict = urlAlphabet) {
let id = "";
let i = size;
const len = dict.length;
while (i--)
id += dict[Math.random() * len | 0];
return id;
}
function capitalize(str) {
return str[0].toUpperCase() + str.slice(1).toLowerCase();
}
const _reFullWs = /^\s*$/;
function unindent(str) {
const lines = (typeof str === "string" ? str : str[0]).split("\n");
const whitespaceLines = lines.map((line) => _reFullWs.test(line));
const commonIndent = lines.reduce((min, line, idx) => {
var _a;
if (whitespaceLines[idx])
return min;
const indent = (_a = line.match(/^\s*/)) == null ? void 0 : _a[0].length;
return indent === void 0 ? min : Math.min(min, indent);
}, Number.POSITIVE_INFINITY);
let emptyLinesHead = 0;
while (emptyLinesHead < lines.length && whitespaceLines[emptyLinesHead])
emptyLinesHead++;
let emptyLinesTail = 0;
while (emptyLinesTail < lines.length && whitespaceLines[lines.length - emptyLinesTail - 1])
emptyLinesTail++;
return lines.slice(emptyLinesHead, lines.length - emptyLinesTail).map((line) => line.slice(commonIndent)).join("\n");
}
const timestamp = () => +Date.now();
function batchInvoke(functions) {
functions.forEach((fn) => fn && fn());
}
function invoke(fn) {
return fn();
}
function tap(value, callback) {
callback(value);
return value;
}
function objectMap(obj, fn) {
return Object.fromEntries(
Object.entries(obj).map(([k, v]) => fn(k, v)).filter(notNullish)
);
}
function isKeyOf(obj, k) {
return k in obj;
}
function objectKeys(obj) {
return Object.keys(obj);
}
function objectEntries(obj) {
return Object.entries(obj);
}
function deepMerge(target, ...sources) {
if (!sources.length)
return target;
const source = sources.shift();
if (source === void 0)
return target;
if (isMergableObject(target) && isMergableObject(source)) {
objectKeys(source).forEach((key) => {
if (key === "__proto__" || key === "constructor" || key === "prototype")
return;
if (isMergableObject(source[key])) {
if (!target[key])
target[key] = {};
if (isMergableObject(target[key])) {
deepMerge(target[key], source[key]);
} else {
target[key] = source[key];
}
} else {
target[key] = source[key];
}
});
}
return deepMerge(target, ...sources);
}
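// Usage sketch (added for illustration, not part of the original bundle):
// `deepMerge` mutates and returns `target`; nested plain objects are merged,
// while arrays are overridden (use `deepMergeWithArray` below to concatenate).
//   deepMerge({ a: { x: 1 }, list: [1] }, { a: { y: 2 }, list: [2] })
//     // => { a: { x: 1, y: 2 }, list: [2] }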
function deepMergeWithArray(target, ...sources) {
if (!sources.length)
return target;
const source = sources.shift();
if (source === void 0)
return target;
if (Array.isArray(target) && Array.isArray(source))
target.push(...source);
if (isMergableObject(target) && isMergableObject(source)) {
objectKeys(source).forEach((key) => {
if (key === "__proto__" || key === "constructor" || key === "prototype")
return;
if (Array.isArray(source[key])) {
if (!target[key])
target[key] = [];
deepMergeWithArray(target[key], source[key]);
} else if (isMergableObject(source[key])) {
if (!target[key])
target[key] = {};
deepMergeWithArray(target[key], source[key]);
} else {
target[key] = source[key];
}
});
}
return deepMergeWithArray(target, ...sources);
}
function isMergableObject(item) {
return isObject(item) && !Array.isArray(item);
}
function objectPick(obj, keys, omitUndefined = false) {
return keys.reduce((n, k) => {
if (k in obj) {
if (!omitUndefined || obj[k] !== void 0)
n[k] = obj[k];
}
return n;
}, {});
}
function clearUndefined(obj) {
Object.keys(obj).forEach((key) => obj[key] === void 0 ? delete obj[key] : {});
return obj;
}
function hasOwnProperty(obj, v) {
if (obj == null)
return false;
return Object.prototype.hasOwnProperty.call(obj, v);
}
function createSingletonPromise(fn) {
let _promise;
function wrapper() {
if (!_promise)
_promise = fn();
return _promise;
}
wrapper.reset = async () => {
const _prev = _promise;
_promise = void 0;
if (_prev)
await _prev;
};
return wrapper;
}
function sleep(ms, callback) {
return new Promise(
(resolve) => setTimeout(async () => {
await (callback == null ? void 0 : callback());
resolve();
}, ms)
);
}
function createPromiseLock() {
const locks = [];
return {
async run(fn) {
const p = fn();
locks.push(p);
try {
return await p;
} finally {
remove(locks, p);
}
},
async wait() {
await Promise.allSettled(locks);
},
isWaiting() {
return Boolean(locks.length);
},
clear() {
locks.length = 0;
}
};
}
function createControlledPromise() {
let resolve, reject;
const promise = new Promise((_resolve, _reject) => {
resolve = _resolve;
reject = _reject;
});
promise.resolve = resolve;
promise.reject = reject;
return promise;
}
/* eslint-disable no-undefined,no-param-reassign,no-shadow */
/**
* Throttle execution of a function. Especially useful for rate limiting
* execution of handlers on events like resize and scroll.
*
* @param {number} delay - A zero-or-greater delay in milliseconds. For event callbacks, values around 100 or 250 (or even higher)
* are most useful.
* @param {Function} callback - A function to be executed after delay milliseconds. The `this` context and all arguments are passed through,
* as-is, to `callback` when the throttled-function is executed.
* @param {object} [options] - An object to configure options.
* @param {boolean} [options.noTrailing] - Optional, defaults to false. If noTrailing is true, callback will only execute every `delay` milliseconds
* while the throttled-function is being called. If noTrailing is false or unspecified, callback will be executed
* one final time after the last throttled-function call. (After the throttled-function has not been called for
* `delay` milliseconds, the internal counter is reset).
* @param {boolean} [options.noLeading] - Optional, defaults to false. If noLeading is false, the first throttled-function call will execute callback
 * immediately. If noLeading is true, the first callback execution will be skipped. It should be noted that
 * callback will never be executed if both noLeading = true and noTrailing = true.
* @param {boolean} [options.debounceMode] - If `debounceMode` is true (at begin), schedule `clear` to execute after `delay` ms. If `debounceMode` is
* false (at end), schedule `callback` to execute after `delay` ms.
*
* @returns {Function} A new, throttled, function.
*/
function throttle (delay, callback, options) {
var _ref = options || {},
_ref$noTrailing = _ref.noTrailing,
noTrailing = _ref$noTrailing === void 0 ? false : _ref$noTrailing,
_ref$noLeading = _ref.noLeading,
noLeading = _ref$noLeading === void 0 ? false : _ref$noLeading,
_ref$debounceMode = _ref.debounceMode,
debounceMode = _ref$debounceMode === void 0 ? undefined : _ref$debounceMode;
/*
* After wrapper has stopped being called, this timeout ensures that
* `callback` is executed at the proper times in `throttle` and `end`
* debounce modes.
*/
var timeoutID;
var cancelled = false; // Keep track of the last time `callback` was executed.
var lastExec = 0; // Function to clear existing timeout
function clearExistingTimeout() {
if (timeoutID) {
clearTimeout(timeoutID);
}
} // Function to cancel next exec
function cancel(options) {
var _ref2 = options || {},
_ref2$upcomingOnly = _ref2.upcomingOnly,
upcomingOnly = _ref2$upcomingOnly === void 0 ? false : _ref2$upcomingOnly;
clearExistingTimeout();
cancelled = !upcomingOnly;
}
/*
* The `wrapper` function encapsulates all of the throttling / debouncing
* functionality and when executed will limit the rate at which `callback`
* is executed.
*/
function wrapper() {
for (var _len = arguments.length, arguments_ = new Array(_len), _key = 0; _key < _len; _key++) {
arguments_[_key] = arguments[_key];
}
var self = this;
var elapsed = Date.now() - lastExec;
if (cancelled) {
return;
} // Execute `callback` and update the `lastExec` timestamp.
function exec() {
lastExec = Date.now();
callback.apply(self, arguments_);
}
/*
* If `debounceMode` is true (at begin) this is used to clear the flag
* to allow future `callback` executions.
*/
function clear() {
timeoutID = undefined;
}
if (!noLeading && debounceMode && !timeoutID) {
/*
 * Since `wrapper` is being called for the first time,
 * `debounceMode` is true (at begin), and `noLeading` is not true,
 * execute `callback`.
*/
exec();
}
clearExistingTimeout();
if (debounceMode === undefined && elapsed > delay) {
if (noLeading) {
/*
* In throttle mode with noLeading, if `delay` time has
* been exceeded, update `lastExec` and schedule `callback`
* to execute after `delay` ms.
*/
lastExec = Date.now();
if (!noTrailing) {
timeoutID = setTimeout(debounceMode ? clear : exec, delay);
}
} else {
/*
* In throttle mode without noLeading, if `delay` time has been exceeded, execute
* `callback`.
*/
exec();
}
} else if (noTrailing !== true) {
/*
* In trailing throttle mode, since `delay` time has not been
* exceeded, schedule `callback` to execute `delay` ms after most
* recent execution.
*
* If `debounceMode` is true (at begin), schedule `clear` to execute
* after `delay` ms.
*
* If `debounceMode` is false (at end), schedule `callback` to
* execute after `delay` ms.
*/
timeoutID = setTimeout(debounceMode ? clear : exec, debounceMode === undefined ? delay - elapsed : delay);
}
}
wrapper.cancel = cancel; // Return the wrapper function.
return wrapper;
}
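// Usage sketch (added for illustration, not part of the original bundle):
// rate-limit a scroll handler to one call per 250 ms; the returned wrapper
// also exposes `.cancel()`.
//   const onScroll = throttle(250, () => console.log('scrolled'));
//   window.addEventListener('scroll', onScroll);
//   // later: onScroll.cancel();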
/* eslint-disable no-undefined */
/**
* Debounce execution of a function. Debouncing, unlike throttling,
* guarantees that a function is only executed a single time, either at the
* very beginning of a series of calls, or at the very end.
*
* @param {number} delay - A zero-or-greater delay in milliseconds. For event callbacks, values around 100 or 250 (or even higher) are most useful.
* @param {Function} callback - A function to be executed after delay milliseconds. The `this` context and all arguments are passed through, as-is,
* to `callback` when the debounced-function is executed.
* @param {object} [options] - An object to configure options.
* @param {boolean} [options.atBegin] - Optional, defaults to false. If atBegin is false or unspecified, callback will only be executed `delay` milliseconds
* after the last debounced-function call. If atBegin is true, callback will be executed only at the first debounced-function call.
* (After the throttled-function has not been called for `delay` milliseconds, the internal counter is reset).
*
* @returns {Function} A new, debounced function.
*/
function debounce (delay, callback, options) {
var _ref = options || {},
_ref$atBegin = _ref.atBegin,
atBegin = _ref$atBegin === void 0 ? false : _ref$atBegin;
return throttle(delay, callback, {
debounceMode: atBegin !== false
});
}
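// Usage sketch (added for illustration, not part of the original bundle):
// run a (hypothetical) `search` only after typing has paused for 300 ms;
// pass { atBegin: true } to fire on the leading edge instead.
//   const onInput = debounce(300, (value) => search(value));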
/*
How it works:
`this.#head` is an instance of `Node` which keeps track of its current value and nests another instance of `Node` that keeps the value that comes after it. When a value is provided to `.enqueue()`, the code needs to iterate through `this.#head`, going deeper and deeper to find the last value. However, iterating through every single item is slow. This problem is solved by saving a reference to the last value as `this.#tail` so that it can reference it to add a new value.
*/
class Node {
value;
next;
constructor(value) {
this.value = value;
}
}
class Queue {
#head;
#tail;
#size;
constructor() {
this.clear();
}
enqueue(value) {
const node = new Node(value);
if (this.#head) {
this.#tail.next = node;
this.#tail = node;
} else {
this.#head = node;
this.#tail = node;
}
this.#size++;
}
dequeue() {
const current = this.#head;
if (!current) {
return;
}
this.#head = this.#head.next;
this.#size--;
return current.value;
}
clear() {
this.#head = undefined;
this.#tail = undefined;
this.#size = 0;
}
get size() {
return this.#size;
}
* [Symbol.iterator]() {
let current = this.#head;
while (current) {
yield current.value;
current = current.next;
}
}
}
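// Illustration only (not part of the original bundle): Queue is the plain
// FIFO described in the comment above, used by pLimit below.
//   const q = new Queue();
//   q.enqueue('a'); q.enqueue('b');
//   q.dequeue(); // => 'a' (size is now 1)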
const AsyncResource = {
bind(fn, _type, thisArg) {
return fn.bind(thisArg);
},
};
function pLimit(concurrency) {
if (!((Number.isInteger(concurrency) || concurrency === Number.POSITIVE_INFINITY) && concurrency > 0)) {
throw new TypeError('Expected `concurrency` to be a number from 1 and up');
}
const queue = new Queue();
let activeCount = 0;
const next = () => {
activeCount--;
if (queue.size > 0) {
queue.dequeue()();
}
};
const run = async (function_, resolve, arguments_) => {
activeCount++;
const result = (async () => function_(...arguments_))();
resolve(result);
try {
await result;
} catch {}
next();
};
const enqueue = (function_, resolve, arguments_) => {
queue.enqueue(
AsyncResource.bind(run.bind(undefined, function_, resolve, arguments_)),
);
(async () => {
// This function needs to wait until the next microtask before comparing
// `activeCount` to `concurrency`, because `activeCount` is updated asynchronously
// when the run function is dequeued and called. The comparison in the if-statement
// needs to happen asynchronously as well to get an up-to-date value for `activeCount`.
await Promise.resolve();
if (activeCount < concurrency && queue.size > 0) {
queue.dequeue()();
}
})();
};
const generator = (function_, ...arguments_) => new Promise(resolve => {
enqueue(function_, resolve, arguments_);
});
Object.defineProperties(generator, {
activeCount: {
get: () => activeCount,
},
pendingCount: {
get: () => queue.size,
},
clearQueue: {
value() {
queue.clear();
},
},
});
return generator;
}
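// Usage sketch (added for illustration, not part of the original bundle):
// allow at most 2 tasks in flight; extra calls wait in the queue. `urls` and
// `fetchJson` are hypothetical.
//   const limit = pLimit(2);
//   const results = await Promise.all(urls.map((u) => limit(() => fetchJson(u))));
//   limit.activeCount;  // tasks currently running
//   limit.pendingCount; // tasks still queued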
const VOID = Symbol("p-void");
class PInstance extends Promise {
constructor(items = [], options) {
super(() => {
});
this.items = items;
this.options = options;
this.promises = /* @__PURE__ */ new Set();
}
get promise() {
var _a;
let batch;
const items = [...Array.from(this.items), ...Array.from(this.promises)];
if ((_a = this.options) == null ? void 0 : _a.concurrency) {
const limit = pLimit(this.options.concurrency);
batch = Promise.all(items.map((p2) => limit(() => p2)));
} else {
batch = Promise.all(items);
}
return batch.then((l) => l.filter((i) => i !== VOID));
}
add(...args) {
args.forEach((i) => {
this.promises.add(i);
});
}
map(fn) {
return new PInstance(
Array.from(this.items).map(async (i, idx) => {
const v = await i;
if (v === VOID)
return VOID;
return fn(v, idx);
}),
this.options
);
}
filter(fn) {
return new PInstance(
Array.from(this.items).map(async (i, idx) => {
const v = await i;
const r = await fn(v, idx);
if (!r)
return VOID;
return v;
}),
this.options
);
}
forEach(fn) {
return this.map(fn).then();
}
reduce(fn, initialValue) {
return this.promise.then((array) => array.reduce(fn, initialValue));
}
clear() {
this.promises.clear();
}
then(fn) {
const p2 = this.promise;
if (fn)
return p2.then(fn);
else
return p2;
}
catch(fn) {
return this.promise.catch(fn);
}
finally(fn) {
return this.promise.finally(fn);
}
}
function p(items, options) {
return new PInstance(items, options);
}
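// Usage sketch (added for illustration, not part of the original bundle):
// `p()` wraps an iterable of values/promises and chains async map/filter,
// optionally capped by `concurrency`.
//   const even = await p([1, 2, 3, 4], { concurrency: 2 })
//     .map(async (i) => i * 2)
//     .filter(async (i) => i % 4 === 0);
//   // => [4, 8]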
exports.assert = assert;
exports.at = at;
exports.batchInvoke = batchInvoke;
exports.capitalize = capitalize;
exports.clamp = clamp;
exports.clampArrayRange = clampArrayRange;
exports.clearUndefined = clearUndefined;
exports.createControlledPromise = createControlledPromise;
exports.createPromiseLock = createPromiseLock;
exports.createSingletonPromise = createSingletonPromise;
exports.debounce = debounce;
exports.deepMerge = deepMerge;
exports.deepMergeWithArray = deepMergeWithArray;
exports.ensurePrefix = ensurePrefix;
exports.ensureSuffix = ensureSuffix;
exports.flattenArrayable = flattenArrayable;
exports.getTypeName = getTypeName;
exports.hasOwnProperty = hasOwnProperty;
exports.invoke = invoke;
exports.isBoolean = isBoolean;
exports.isBrowser = isBrowser;
exports.isDate = isDate;
exports.isDeepEqual = isDeepEqual;
exports.isDef = isDef;
exports.isFunction = isFunction;
exports.isKeyOf = isKeyOf;
exports.isNull = isNull;
exports.isNumber = isNumber;
exports.isObject = isObject;
exports.isRegExp = isRegExp;
exports.isString = isString;
exports.isTruthy = isTruthy;
exports.isUndefined = isUndefined;
exports.isWindow = isWindow;
exports.last = last;
exports.lerp = lerp;
exports.mergeArrayable = mergeArrayable;
exports.move = move;
exports.noNull = noNull;
exports.noop = noop;
exports.notNullish = notNullish;
exports.notUndefined = notUndefined;
exports.objectEntries = objectEntries;
exports.objectKeys = objectKeys;
exports.objectMap = objectMap;
exports.objectPick = objectPick;
exports.p = p;
exports.partition = partition;
exports.randomStr = randomStr;
exports.range = range;
exports.remap = remap;
exports.remove = remove;
exports.sample = sample;
exports.shuffle = shuffle;
exports.slash = slash;
exports.sleep = sleep;
exports.sum = sum;
exports.tap = tap;
exports.template = template;
exports.throttle = throttle;
exports.timestamp = timestamp;
exports.toArray = toArray;
exports.toString = toString;
exports.unindent = unindent;
exports.uniq = uniq;
exports.uniqueBy = uniqueBy;

@ -0,0 +1,614 @@
/**
* Promise, or maybe not
*/
type Awaitable<T> = T | PromiseLike<T>;
/**
* Null or whatever
*/
type Nullable<T> = T | null | undefined;
/**
* Array, or not yet
*/
type Arrayable<T> = T | Array<T>;
/**
* Function
*/
type Fn<T = void> = () => T;
/**
* Constructor
*/
type Constructor<T = void> = new (...args: any[]) => T;
/**
* Infers the element type of an array
*/
type ElementOf<T> = T extends (infer E)[] ? E : never;
/**
* Defines an intersection type of all union items.
*
* @param U Union of any types that will be intersected.
* @returns U items intersected
* @see https://stackoverflow.com/a/50375286/9259330
*/
type UnionToIntersection<U> = (U extends unknown ? (k: U) => void : never) extends ((k: infer I) => void) ? I : never;
/**
* Infers the arguments type of a function
*/
type ArgumentsType<T> = T extends ((...args: infer A) => any) ? A : never;
type MergeInsertions<T> = T extends object ? {
[K in keyof T]: MergeInsertions<T[K]>;
} : T;
type DeepMerge<F, S> = MergeInsertions<{
[K in keyof F | keyof S]: K extends keyof S & keyof F ? DeepMerge<F[K], S[K]> : K extends keyof S ? S[K] : K extends keyof F ? F[K] : never;
}>;
/**
* Convert `Arrayable<T>` to `Array<T>`
*
* @category Array
*/
declare function toArray<T>(array?: Nullable<Arrayable<T>>): Array<T>;
/**
* Convert `Arrayable<T>` to `Array<T>` and flatten it
*
* @category Array
*/
declare function flattenArrayable<T>(array?: Nullable<Arrayable<T | Array<T>>>): Array<T>;
/**
* Use rest arguments to merge arrays
*
* @category Array
*/
declare function mergeArrayable<T>(...args: Nullable<Arrayable<T>>[]): Array<T>;
type PartitionFilter<T> = (i: T, idx: number, arr: readonly T[]) => any;
/**
* Divide an array into two parts by a filter function
*
* @category Array
* @example const [odd, even] = partition([1, 2, 3, 4], i => i % 2 != 0)
*/
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>): [T[], T[]];
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>, f2: PartitionFilter<T>): [T[], T[], T[]];
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>, f2: PartitionFilter<T>, f3: PartitionFilter<T>): [T[], T[], T[], T[]];
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>, f2: PartitionFilter<T>, f3: PartitionFilter<T>, f4: PartitionFilter<T>): [T[], T[], T[], T[], T[]];
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>, f2: PartitionFilter<T>, f3: PartitionFilter<T>, f4: PartitionFilter<T>, f5: PartitionFilter<T>): [T[], T[], T[], T[], T[], T[]];
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>, f2: PartitionFilter<T>, f3: PartitionFilter<T>, f4: PartitionFilter<T>, f5: PartitionFilter<T>, f6: PartitionFilter<T>): [T[], T[], T[], T[], T[], T[], T[]];
/**
* Unique an Array
*
* @category Array
*/
declare function uniq<T>(array: readonly T[]): T[];
/**
* Unique an Array by a custom equality function
*
* @category Array
*/
declare function uniqueBy<T>(array: readonly T[], equalFn: (a: any, b: any) => boolean): T[];
/**
* Get last item
*
* @category Array
*/
declare function last(array: readonly []): undefined;
declare function last<T>(array: readonly T[]): T;
/**
* Remove an item from Array
*
* @category Array
*/
declare function remove<T>(array: T[], value: T): boolean;
/**
* Get nth item of Array. Negative for backward
*
* @category Array
*/
declare function at(array: readonly [], index: number): undefined;
declare function at<T>(array: readonly T[], index: number): T;
/**
* Generate a range array of numbers. The `stop` is exclusive.
*
* @category Array
*/
declare function range(stop: number): number[];
declare function range(start: number, stop: number, step?: number): number[];
/**
* Move element in an Array
*
* @category Array
* @param arr
* @param from
* @param to
*/
declare function move<T>(arr: T[], from: number, to: number): T[];
/**
* Clamp a number to the index range of an array
*
* @category Array
*/
declare function clampArrayRange(n: number, arr: readonly unknown[]): number;
/**
* Get random item(s) from an array
*
* @param arr
* @param quantity - quantity of random items which will be returned
*/
declare function sample<T>(arr: T[], quantity: number): T[];
/**
* Shuffle an array. This function mutates the array.
*
* @category Array
*/
declare function shuffle<T>(array: T[]): T[];
declare function assert(condition: boolean, message: string): asserts condition;
declare const toString: (v: any) => string;
declare function getTypeName(v: any): string;
declare function noop(): void;
declare function isDeepEqual(value1: any, value2: any): boolean;
/**
* Type guard to filter out null-ish values
*
* @category Guards
* @example array.filter(notNullish)
*/
declare function notNullish<T>(v: T | null | undefined): v is NonNullable<T>;
/**
* Type guard to filter out null values
*
* @category Guards
* @example array.filter(noNull)
*/
declare function noNull<T>(v: T | null): v is Exclude<T, null>;
/**
* Type guard to filter out null-ish values
*
* @category Guards
* @example array.filter(notUndefined)
*/
declare function notUndefined<T>(v: T): v is Exclude<T, undefined>;
/**
* Type guard to filter out falsy values
*
* @category Guards
* @example array.filter(isTruthy)
*/
declare function isTruthy<T>(v: T): v is NonNullable<T>;
declare const isDef: <T = any>(val?: T) => val is T;
declare const isBoolean: (val: any) => val is boolean;
declare const isFunction: <T extends Function>(val: any) => val is T;
declare const isNumber: (val: any) => val is number;
declare const isString: (val: unknown) => val is string;
declare const isObject: (val: any) => val is object;
declare const isUndefined: (val: any) => val is undefined;
declare const isNull: (val: any) => val is null;
declare const isRegExp: (val: any) => val is RegExp;
declare const isDate: (val: any) => val is Date;
declare const isWindow: (val: any) => boolean;
declare const isBrowser: boolean;
declare function clamp(n: number, min: number, max: number): number;
declare function sum(...args: number[] | number[][]): number;
/**
* Linearly interpolates between `min` and `max` based on `t`
*
* @category Math
* @param min The minimum value
* @param max The maximum value
* @param t The interpolation value clamped between 0 and 1
* @example
* ```
* const value = lerp(0, 2, 0.5) // value will be 1
* ```
*/
declare function lerp(min: number, max: number, t: number): number;
/**
* Linearly remaps a clamped value from its source range [`inMin`, `inMax`] to the destination range [`outMin`, `outMax`]
*
* @category Math
* @example
* ```
* const value = remap(0.5, 0, 1, 200, 400) // value will be 300
* ```
*/
declare function remap(n: number, inMin: number, inMax: number, outMin: number, outMax: number): number;
/**
* Replace backslashes with slashes
*
* @category String
*/
declare function slash(str: string): string;
/**
* Ensure prefix of a string
*
* @category String
*/
declare function ensurePrefix(prefix: string, str: string): string;
/**
* Ensure suffix of a string
*
* @category String
*/
declare function ensureSuffix(suffix: string, str: string): string;
/**
* Dead simple template engine, just like Python's `.format()`
* Supports passing variables either with an index-based or an object/name-based approach
* When using the object/name-based approach, you can pass a fallback value as the third argument
*
* @category String
* @example
* ```
* const result = template(
* 'Hello {0}! My name is {1}.',
* 'Inès',
* 'Anthony'
* ) // Hello Inès! My name is Anthony.
* ```
*
* ```
* const result = namedTemplate(
* '{greet}! My name is {name}.',
* { greet: 'Hello', name: 'Anthony' }
* ) // Hello! My name is Anthony.
* ```
*
* ```
* const result = namedTemplate(
* '{greet}! My name is {name}.',
* { greet: 'Hello' }, // name isn't passed hence fallback will be used for name
* 'placeholder'
* ) // Hello! My name is placeholder.
* ```
*/
declare function template(str: string, object: Record<string | number, any>, fallback?: string | ((key: string) => string)): string;
declare function template(str: string, ...args: (string | number | bigint | undefined | null)[]): string;
/**
* Generate a random string
* @category String
*/
declare function randomStr(size?: number, dict?: string): string;
/**
* First letter uppercase, the rest lowercase
* @category string
* @example
* ```
* capitalize('hello') => 'Hello'
* ```
*/
declare function capitalize(str: string): string;
/**
* Remove common leading whitespace from a template string.
* Will also remove empty lines at the beginning and end.
* @category string
* @example
* ```ts
* const str = unindent`
* if (a) {
* b()
* }
* `
* ```
*/
declare function unindent(str: TemplateStringsArray | string): string;
declare const timestamp: () => number;
/**
* Call every function in an array
*/
declare function batchInvoke(functions: Nullable<Fn>[]): void;
/**
* Call the function
*/
declare function invoke(fn: Fn): void;
/**
* Pass the value through the callback, and return the value
*
* @example
* ```
* function createUser(name: string): User {
* return tap(new User, user => {
* user.name = name
* })
* }
* ```
*/
declare function tap<T>(value: T, callback: (value: T) => void): T;
/**
* Map key/value pairs for an object, and construct a new one
*
*
* @category Object
*
* Transform:
* @example
* ```
* objectMap({ a: 1, b: 2 }, (k, v) => [k.toString().toUpperCase(), v.toString()])
* // { A: '1', B: '2' }
* ```
*
* Swap key/value:
* @example
* ```
* objectMap({ a: 1, b: 2 }, (k, v) => [v, k])
* // { 1: 'a', 2: 'b' }
* ```
*
* Filter keys:
* @example
* ```
* objectMap({ a: 1, b: 2 }, (k, v) => k === 'a' ? undefined : [k, v])
* // { b: 2 }
* ```
*/
declare function objectMap<K extends string, V, NK extends string | number | symbol = K, NV = V>(obj: Record<K, V>, fn: (key: K, value: V) => [NK, NV] | undefined): Record<NK, NV>;
/**
* Type guard for any key, `k`.
* Marks `k` as a key of `T` if `k` is in `obj`.
*
* @category Object
* @param obj object to query for key `k`
* @param k key to check existence in `obj`
*/
declare function isKeyOf<T extends object>(obj: T, k: keyof any): k is keyof T;
/**
* Strict typed `Object.keys`
*
* @category Object
*/
declare function objectKeys<T extends object>(obj: T): (`${keyof T & undefined}` | `${keyof T & null}` | `${keyof T & string}` | `${keyof T & number}` | `${keyof T & false}` | `${keyof T & true}`)[];
/**
* Strict typed `Object.entries`
*
* @category Object
*/
declare function objectEntries<T extends object>(obj: T): [keyof T, T[keyof T]][];
/**
* Deep merge
*
* The first argument is the target object, the rest are the sources.
* The target object will be mutated and returned.
*
* @category Object
*/
declare function deepMerge<T extends object = object, S extends object = T>(target: T, ...sources: S[]): DeepMerge<T, S>;
/**
* Deep merge
*
* Differs from `deepMerge` in that it merges arrays instead of overriding them.
*
* The first argument is the target object, the rest are the sources.
* The target object will be mutated and returned.
*
* @category Object
*/
declare function deepMergeWithArray<T extends object = object, S extends object = T>(target: T, ...sources: S[]): DeepMerge<T, S>;
/**
* Create a new subset object by giving keys
*
* @category Object
*/
declare function objectPick<O extends object, T extends keyof O>(obj: O, keys: T[], omitUndefined?: boolean): Pick<O, T>;
/**
* Clear undefined fields from an object. It mutates the object
*
* @category Object
*/
declare function clearUndefined<T extends object>(obj: T): T;
/**
* Determines whether an object has a property with the specified name
*
* @see https://eslint.org/docs/rules/no-prototype-builtins
* @category Object
*/
declare function hasOwnProperty<T>(obj: T, v: PropertyKey): boolean;
interface SingletonPromiseReturn<T> {
(): Promise<T>;
/**
* Reset the current stale promise.
* Await it to have proper shutdown.
*/
reset: () => Promise<void>;
}
/**
* Create singleton promise function
*
* @category Promise
*/
declare function createSingletonPromise<T>(fn: () => Promise<T>): SingletonPromiseReturn<T>;
/**
* Promised `setTimeout`
*
* @category Promise
*/
declare function sleep(ms: number, callback?: Fn<any>): Promise<void>;
/**
* Create a promise lock
*
* @category Promise
* @example
* ```
* const lock = createPromiseLock()
*
* lock.run(async () => {
* await doSomething()
* })
*
* // in another context:
* await lock.wait() // it will wait until all tasks are finished
* ```
*/
declare function createPromiseLock(): {
run<T = void>(fn: () => Promise<T>): Promise<T>;
wait(): Promise<void>;
isWaiting(): boolean;
clear(): void;
};
/**
* Promise with `resolve` and `reject` methods of itself
*/
interface ControlledPromise<T = void> extends Promise<T> {
resolve: (value: T | PromiseLike<T>) => void;
reject: (reason?: any) => void;
}
/**
* Return a Promise with `resolve` and `reject` methods
*
* @category Promise
* @example
* ```
* const promise = createControlledPromise()
*
* await promise
*
* // in another context:
* promise.resolve(data)
* ```
*/
declare function createControlledPromise<T>(): ControlledPromise<T>;
interface CancelOptions {
upcomingOnly?: boolean;
}
interface Cancel {
cancel: (options?: CancelOptions) => void;
}
interface NoReturn<T extends (...args: any[]) => any> {
(...args: Parameters<T>): void;
}
interface ThrottleOptions {
noTrailing?: boolean;
noLeading?: boolean;
debounceMode?: boolean;
}
interface DebounceOptions {
atBegin?: boolean;
}
type throttle<T extends (...args: any[]) => any> = NoReturn<T> & Cancel;
/**
* Throttle execution of a function. Especially useful for rate limiting
* execution of handlers on events like resize and scroll.
*
* @param delay
* A zero-or-greater delay in milliseconds. For event callbacks, values around
* 100 or 250 (or even higher) are most useful.
*
* @param callback
* A function to be executed after delay milliseconds. The `this` context and
* all arguments are passed through, as-is, to `callback` when the
* throttled-function is executed.
*
* @param options
* An object to configure options.
*
* @param options.noTrailing
* Optional, defaults to false. If noTrailing is true, callback will only execute
* every `delay` milliseconds while the throttled-function is being called. If
* noTrailing is false or unspecified, callback will be executed one final time
* after the last throttled-function call. (After the throttled-function has not
* been called for `delay` milliseconds, the internal counter is reset)
*
* @param options.noLeading
* Optional, defaults to false. If noLeading is false, the first throttled-function
 * call will execute callback immediately. If noLeading is true, the first
 * callback execution will be skipped. It should be noted that callback will never
 * be executed if both noLeading = true and noTrailing = true.
*
* @param options.debounceMode If `debounceMode` is true (at begin), schedule
* `callback` to execute after `delay` ms. If `debounceMode` is false (at end),
* schedule `callback` to execute after `delay` ms.
*
* @return
* A new, throttled, function.
*/
declare function throttle<T extends (...args: any[]) => any>(
delay: number,
callback: T,
options?: ThrottleOptions,
): throttle<T>;
type debounce<T extends (...args: any[]) => any> = NoReturn<T> & Cancel;
/**
* Debounce execution of a function. Debouncing, unlike throttling,
* guarantees that a function is only executed a single time, either at the
* very beginning of a series of calls, or at the very end.
*
* @param delay
* A zero-or-greater delay in milliseconds. For event callbacks, values around
* 100 or 250 (or even higher) are most useful.
*
* @param callback
* A function to be executed after delay milliseconds. The `this` context and
* all arguments are passed through, as-is, to `callback` when the
* debounced-function is executed.
*
* @param options
* An object to configure options.
*
* @param options.atBegin
* If atBegin is false or unspecified, callback will only be executed `delay`
* milliseconds after the last debounced-function call. If atBegin is true,
* callback will be executed only at the first debounced-function call. (After
* the throttled-function has not been called for `delay` milliseconds, the
* internal counter is reset).
*
* @return
* A new, debounced function.
*/
declare function debounce<T extends (...args: any[]) => any>(
delay: number,
callback: T,
options?: DebounceOptions,
): debounce<T>;
interface POptions {
/**
* How many promises are resolved at the same time.
*/
concurrency?: number | undefined;
}
declare class PInstance<T = any> extends Promise<Awaited<T>[]> {
items: Iterable<T>;
options?: POptions | undefined;
private promises;
get promise(): Promise<Awaited<T>[]>;
constructor(items?: Iterable<T>, options?: POptions | undefined);
add(...args: (T | Promise<T>)[]): void;
map<U>(fn: (value: Awaited<T>, index: number) => U): PInstance<Promise<U>>;
filter(fn: (value: Awaited<T>, index: number) => boolean | Promise<boolean>): PInstance<Promise<T>>;
forEach(fn: (value: Awaited<T>, index: number) => void): Promise<void>;
reduce<U>(fn: (previousValue: U, currentValue: Awaited<T>, currentIndex: number, array: Awaited<T>[]) => U, initialValue: U): Promise<U>;
clear(): void;
then(fn?: () => PromiseLike<any>): Promise<any>;
catch(fn?: (err: unknown) => PromiseLike<any>): Promise<any>;
finally(fn?: () => void): Promise<Awaited<T>[]>;
}
/**
* Utility for managing multiple promises.
*
* @see https://github.com/antfu/utils/tree/main/docs/p.md
* @category Promise
* @example
* ```
* import { p } from '@antfu/utils'
*
* const items = [1, 2, 3, 4, 5]
*
* await p(items)
* .map(async i => await multiply(i, 3))
* .filter(async i => await isEven(i))
* // [6, 12]
* ```
*/
declare function p<T = any>(items?: Iterable<T>, options?: POptions): PInstance<T>;
export { type ArgumentsType, type Arrayable, type Awaitable, type Constructor, type ControlledPromise, type DeepMerge, type ElementOf, type Fn, type MergeInsertions, type Nullable, type PartitionFilter, type SingletonPromiseReturn, type UnionToIntersection, assert, at, batchInvoke, capitalize, clamp, clampArrayRange, clearUndefined, createControlledPromise, createPromiseLock, createSingletonPromise, debounce, deepMerge, deepMergeWithArray, ensurePrefix, ensureSuffix, flattenArrayable, getTypeName, hasOwnProperty, invoke, isBoolean, isBrowser, isDate, isDeepEqual, isDef, isFunction, isKeyOf, isNull, isNumber, isObject, isRegExp, isString, isTruthy, isUndefined, isWindow, last, lerp, mergeArrayable, move, noNull, noop, notNullish, notUndefined, objectEntries, objectKeys, objectMap, objectPick, p, partition, randomStr, range, remap, remove, sample, shuffle, slash, sleep, sum, tap, template, throttle, timestamp, toArray, toString, unindent, uniq, uniqueBy };

@ -0,0 +1,614 @@
/**
* Promise, or maybe not
*/
type Awaitable<T> = T | PromiseLike<T>;
/**
* Null or whatever
*/
type Nullable<T> = T | null | undefined;
/**
* Array, or not yet
*/
type Arrayable<T> = T | Array<T>;
/**
* Function
*/
type Fn<T = void> = () => T;
/**
* Constructor
*/
type Constructor<T = void> = new (...args: any[]) => T;
/**
* Infers the element type of an array
*/
type ElementOf<T> = T extends (infer E)[] ? E : never;
/**
* Defines an intersection type of all union items.
*
* @param U Union of any types that will be intersected.
* @returns U items intersected
* @see https://stackoverflow.com/a/50375286/9259330
*/
type UnionToIntersection<U> = (U extends unknown ? (k: U) => void : never) extends ((k: infer I) => void) ? I : never;
/**
* Infers the arguments type of a function
*/
type ArgumentsType<T> = T extends ((...args: infer A) => any) ? A : never;
type MergeInsertions<T> = T extends object ? {
[K in keyof T]: MergeInsertions<T[K]>;
} : T;
type DeepMerge<F, S> = MergeInsertions<{
[K in keyof F | keyof S]: K extends keyof S & keyof F ? DeepMerge<F[K], S[K]> : K extends keyof S ? S[K] : K extends keyof F ? F[K] : never;
}>;
/**
* Convert `Arrayable<T>` to `Array<T>`
*
* @category Array
*/
declare function toArray<T>(array?: Nullable<Arrayable<T>>): Array<T>;
/**
* Convert `Arrayable<T>` to `Array<T>` and flatten it
*
* @category Array
*/
declare function flattenArrayable<T>(array?: Nullable<Arrayable<T | Array<T>>>): Array<T>;
/**
* Use rest arguments to merge arrays
*
* @category Array
*/
declare function mergeArrayable<T>(...args: Nullable<Arrayable<T>>[]): Array<T>;
type PartitionFilter<T> = (i: T, idx: number, arr: readonly T[]) => any;
/**
* Divide an array into two parts by a filter function
*
* @category Array
* @example const [odd, even] = partition([1, 2, 3, 4], i => i % 2 != 0)
*/
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>): [T[], T[]];
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>, f2: PartitionFilter<T>): [T[], T[], T[]];
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>, f2: PartitionFilter<T>, f3: PartitionFilter<T>): [T[], T[], T[], T[]];
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>, f2: PartitionFilter<T>, f3: PartitionFilter<T>, f4: PartitionFilter<T>): [T[], T[], T[], T[], T[]];
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>, f2: PartitionFilter<T>, f3: PartitionFilter<T>, f4: PartitionFilter<T>, f5: PartitionFilter<T>): [T[], T[], T[], T[], T[], T[]];
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>, f2: PartitionFilter<T>, f3: PartitionFilter<T>, f4: PartitionFilter<T>, f5: PartitionFilter<T>, f6: PartitionFilter<T>): [T[], T[], T[], T[], T[], T[], T[]];
/**
* Unique an Array
*
* @category Array
*/
declare function uniq<T>(array: readonly T[]): T[];
/**
* Unique an Array by a custom equality function
*
* @category Array
*/
declare function uniqueBy<T>(array: readonly T[], equalFn: (a: any, b: any) => boolean): T[];
/**
* Get last item
*
* @category Array
*/
declare function last(array: readonly []): undefined;
declare function last<T>(array: readonly T[]): T;
/**
* Remove an item from Array
*
* @category Array
*/
declare function remove<T>(array: T[], value: T): boolean;
/**
* Get nth item of Array. Negative for backward
*
* @category Array
*/
declare function at(array: readonly [], index: number): undefined;
declare function at<T>(array: readonly T[], index: number): T;
/**
* Generate a range array of numbers. The `stop` is exclusive.
*
* @category Array
*/
declare function range(stop: number): number[];
declare function range(start: number, stop: number, step?: number): number[];
/**
* Move element in an Array
*
* @category Array
* @param arr
* @param from
* @param to
*/
declare function move<T>(arr: T[], from: number, to: number): T[];
/**
* Clamp a number to the index range of an array
*
* @category Array
*/
declare function clampArrayRange(n: number, arr: readonly unknown[]): number;
/**
* Get random item(s) from an array
*
* @param arr
* @param quantity - quantity of random items which will be returned
*/
declare function sample<T>(arr: T[], quantity: number): T[];
/**
* Shuffle an array. This function mutates the array.
*
* @category Array
*/
declare function shuffle<T>(array: T[]): T[];
declare function assert(condition: boolean, message: string): asserts condition;
declare const toString: (v: any) => string;
declare function getTypeName(v: any): string;
declare function noop(): void;
declare function isDeepEqual(value1: any, value2: any): boolean;
/**
* Type guard to filter out null-ish values
*
* @category Guards
* @example array.filter(notNullish)
*/
declare function notNullish<T>(v: T | null | undefined): v is NonNullable<T>;
/**
* Type guard to filter out null values
*
* @category Guards
* @example array.filter(noNull)
*/
declare function noNull<T>(v: T | null): v is Exclude<T, null>;
/**
* Type guard to filter out null-ish values
*
* @category Guards
* @example array.filter(notUndefined)
*/
declare function notUndefined<T>(v: T): v is Exclude<T, undefined>;
/**
* Type guard to filter out falsy values
*
* @category Guards
* @example array.filter(isTruthy)
*/
declare function isTruthy<T>(v: T): v is NonNullable<T>;
declare const isDef: <T = any>(val?: T) => val is T;
declare const isBoolean: (val: any) => val is boolean;
declare const isFunction: <T extends Function>(val: any) => val is T;
declare const isNumber: (val: any) => val is number;
declare const isString: (val: unknown) => val is string;
declare const isObject: (val: any) => val is object;
declare const isUndefined: (val: any) => val is undefined;
declare const isNull: (val: any) => val is null;
declare const isRegExp: (val: any) => val is RegExp;
declare const isDate: (val: any) => val is Date;
declare const isWindow: (val: any) => boolean;
declare const isBrowser: boolean;
declare function clamp(n: number, min: number, max: number): number;
declare function sum(...args: number[] | number[][]): number;
/**
* Linearly interpolates between `min` and `max` based on `t`
*
* @category Math
* @param min The minimum value
* @param max The maximum value
* @param t The interpolation value clamped between 0 and 1
* @example
* ```
* const value = lerp(0, 2, 0.5) // value will be 1
* ```
*/
declare function lerp(min: number, max: number, t: number): number;
/**
* Linearly remaps a clamped value from its source range [`inMin`, `inMax`] to the destination range [`outMin`, `outMax`]
*
* @category Math
* @example
* ```
* const value = remap(0.5, 0, 1, 200, 400) // value will be 300
* ```
*/
declare function remap(n: number, inMin: number, inMax: number, outMin: number, outMax: number): number;
/**
 * Replace backslashes with slashes
*
* @category String
*/
declare function slash(str: string): string;
/**
* Ensure prefix of a string
*
* @category String
*/
declare function ensurePrefix(prefix: string, str: string): string;
/**
* Ensure suffix of a string
*
* @category String
*/
declare function ensureSuffix(suffix: string, str: string): string;
/**
 * Dead simple template engine, just like Python's `.format()`.
 * Supports passing variables in either an index-based or an object/name-based approach.
 * When using the object/name-based approach, you can pass a fallback value as the third argument.
*
* @category String
* @example
* ```
* const result = template(
* 'Hello {0}! My name is {1}.',
* 'Inès',
* 'Anthony'
* ) // Hello Inès! My name is Anthony.
* ```
*
* ```
 * const result = template(
 *   '{greet}! My name is {name}.',
 *   { greet: 'Hello', name: 'Anthony' }
 * ) // Hello! My name is Anthony.
 * ```
 *
 * ```
 * const result = template(
 *   '{greet}! My name is {name}.',
 *   { greet: 'Hello' }, // name isn't passed, hence the fallback is used for name
 *   'placeholder'
 * ) // Hello! My name is placeholder.
 * ```
*/
declare function template(str: string, object: Record<string | number, any>, fallback?: string | ((key: string) => string)): string;
declare function template(str: string, ...args: (string | number | bigint | undefined | null)[]): string;
/**
* Generate a random string
* @category String
*/
declare function randomStr(size?: number, dict?: string): string;
/**
 * First letter uppercase, the rest lowercase
* @category string
* @example
* ```
* capitalize('hello') => 'Hello'
* ```
*/
declare function capitalize(str: string): string;
/**
* Remove common leading whitespace from a template string.
* Will also remove empty lines at the beginning and end.
* @category string
* @example
* ```ts
* const str = unindent`
* if (a) {
* b()
* }
* `
 * ```
 */
declare function unindent(str: TemplateStringsArray | string): string;
declare const timestamp: () => number;
/**
* Call every function in an array
*/
declare function batchInvoke(functions: Nullable<Fn>[]): void;
/**
* Call the function
*/
declare function invoke(fn: Fn): void;
/**
* Pass the value through the callback, and return the value
*
* @example
* ```
* function createUser(name: string): User {
* return tap(new User, user => {
* user.name = name
* })
* }
* ```
*/
declare function tap<T>(value: T, callback: (value: T) => void): T;
/**
* Map key/value pairs for an object, and construct a new one
*
*
* @category Object
*
* Transform:
* @example
* ```
* objectMap({ a: 1, b: 2 }, (k, v) => [k.toString().toUpperCase(), v.toString()])
* // { A: '1', B: '2' }
* ```
*
* Swap key/value:
* @example
* ```
* objectMap({ a: 1, b: 2 }, (k, v) => [v, k])
* // { 1: 'a', 2: 'b' }
* ```
*
* Filter keys:
* @example
* ```
* objectMap({ a: 1, b: 2 }, (k, v) => k === 'a' ? undefined : [k, v])
* // { b: 2 }
* ```
*/
declare function objectMap<K extends string, V, NK extends string | number | symbol = K, NV = V>(obj: Record<K, V>, fn: (key: K, value: V) => [NK, NV] | undefined): Record<NK, NV>;
/**
* Type guard for any key, `k`.
* Marks `k` as a key of `T` if `k` is in `obj`.
*
* @category Object
* @param obj object to query for key `k`
* @param k key to check existence in `obj`
*/
declare function isKeyOf<T extends object>(obj: T, k: keyof any): k is keyof T;
/**
* Strict typed `Object.keys`
*
* @category Object
*/
declare function objectKeys<T extends object>(obj: T): (`${keyof T & undefined}` | `${keyof T & null}` | `${keyof T & string}` | `${keyof T & number}` | `${keyof T & false}` | `${keyof T & true}`)[];
/**
* Strict typed `Object.entries`
*
* @category Object
*/
declare function objectEntries<T extends object>(obj: T): [keyof T, T[keyof T]][];
/**
* Deep merge
*
* The first argument is the target object, the rest are the sources.
* The target object will be mutated and returned.
*
* @category Object
*/
declare function deepMerge<T extends object = object, S extends object = T>(target: T, ...sources: S[]): DeepMerge<T, S>;
/**
* Deep merge
*
* Differs from `deepMerge` in that it merges arrays instead of overriding them.
*
* The first argument is the target object, the rest are the sources.
* The target object will be mutated and returned.
*
* @category Object
*/
declare function deepMergeWithArray<T extends object = object, S extends object = T>(target: T, ...sources: S[]): DeepMerge<T, S>;
/**
* Create a new subset object by giving keys
*
* @category Object
*/
declare function objectPick<O extends object, T extends keyof O>(obj: O, keys: T[], omitUndefined?: boolean): Pick<O, T>;
/**
* Clear undefined fields from an object. It mutates the object
*
* @category Object
*/
declare function clearUndefined<T extends object>(obj: T): T;
/**
* Determines whether an object has a property with the specified name
*
* @see https://eslint.org/docs/rules/no-prototype-builtins
* @category Object
*/
declare function hasOwnProperty<T>(obj: T, v: PropertyKey): boolean;
interface SingletonPromiseReturn<T> {
(): Promise<T>;
/**
     * Reset the current stale promise.
     * Await it to ensure a proper shutdown.
*/
reset: () => Promise<void>;
}
/**
* Create singleton promise function
*
* @category Promise
*/
declare function createSingletonPromise<T>(fn: () => Promise<T>): SingletonPromiseReturn<T>;
/**
* Promised `setTimeout`
*
* @category Promise
*/
declare function sleep(ms: number, callback?: Fn<any>): Promise<void>;
/**
* Create a promise lock
*
* @category Promise
* @example
* ```
* const lock = createPromiseLock()
*
* lock.run(async () => {
* await doSomething()
* })
*
 * // in another context:
 * await lock.wait() // it will wait until all tasks are finished
* ```
*/
declare function createPromiseLock(): {
run<T = void>(fn: () => Promise<T>): Promise<T>;
wait(): Promise<void>;
isWaiting(): boolean;
clear(): void;
};
/**
* Promise with `resolve` and `reject` methods of itself
*/
interface ControlledPromise<T = void> extends Promise<T> {
resolve: (value: T | PromiseLike<T>) => void;
reject: (reason?: any) => void;
}
/**
* Return a Promise with `resolve` and `reject` methods
*
* @category Promise
* @example
* ```
* const promise = createControlledPromise()
*
* await promise
*
 * // in another context:
* promise.resolve(data)
* ```
*/
declare function createControlledPromise<T>(): ControlledPromise<T>;
interface CancelOptions {
upcomingOnly?: boolean;
}
interface Cancel {
cancel: (options?: CancelOptions) => void;
}
interface NoReturn<T extends (...args: any[]) => any> {
(...args: Parameters<T>): void;
}
interface ThrottleOptions {
noTrailing?: boolean;
noLeading?: boolean;
debounceMode?: boolean;
}
interface DebounceOptions {
atBegin?: boolean;
}
type throttle<T extends (...args: any[]) => any> = NoReturn<T> & Cancel;
/**
* Throttle execution of a function. Especially useful for rate limiting
* execution of handlers on events like resize and scroll.
*
* @param delay
* A zero-or-greater delay in milliseconds. For event callbacks, values around
* 100 or 250 (or even higher) are most useful.
*
* @param callback
* A function to be executed after delay milliseconds. The `this` context and
* all arguments are passed through, as-is, to `callback` when the
* throttled-function is executed.
*
* @param options
* An object to configure options.
*
* @param options.noTrailing
* Optional, defaults to false. If noTrailing is true, callback will only execute
* every `delay` milliseconds while the throttled-function is being called. If
* noTrailing is false or unspecified, callback will be executed one final time
* after the last throttled-function call. (After the throttled-function has not
* been called for `delay` milliseconds, the internal counter is reset)
*
* @param options.noLeading
* Optional, defaults to false. If noLeading is false, the first throttled-function
 * call will execute callback immediately. If noLeading is true, the first
 * callback execution will be skipped. It should be noted that callback will never
 * be executed if both noLeading = true and noTrailing = true.
*
 * @param options.debounceMode If `debounceMode` is true (at begin), schedule
 * `clear` to execute after `delay` ms. If `debounceMode` is false (at end),
 * schedule `callback` to execute after `delay` ms.
*
* @return
* A new, throttled, function.
*/
declare function throttle<T extends (...args: any[]) => any>(
delay: number,
callback: T,
options?: ThrottleOptions,
): throttle<T>;
type debounce<T extends (...args: any[]) => any> = NoReturn<T> & Cancel;
/**
* Debounce execution of a function. Debouncing, unlike throttling,
* guarantees that a function is only executed a single time, either at the
* very beginning of a series of calls, or at the very end.
*
* @param delay
* A zero-or-greater delay in milliseconds. For event callbacks, values around
* 100 or 250 (or even higher) are most useful.
*
* @param callback
* A function to be executed after delay milliseconds. The `this` context and
* all arguments are passed through, as-is, to `callback` when the
* debounced-function is executed.
*
* @param options
* An object to configure options.
*
* @param options.atBegin
* If atBegin is false or unspecified, callback will only be executed `delay`
* milliseconds after the last debounced-function call. If atBegin is true,
* callback will be executed only at the first debounced-function call. (After
 * the debounced-function has not been called for `delay` milliseconds, the
* internal counter is reset).
*
* @return
* A new, debounced function.
*/
declare function debounce<T extends (...args: any[]) => any>(
delay: number,
callback: T,
options?: DebounceOptions,
): debounce<T>;
interface POptions {
/**
* How many promises are resolved at the same time.
*/
concurrency?: number | undefined;
}
declare class PInstance<T = any> extends Promise<Awaited<T>[]> {
items: Iterable<T>;
options?: POptions | undefined;
private promises;
get promise(): Promise<Awaited<T>[]>;
constructor(items?: Iterable<T>, options?: POptions | undefined);
add(...args: (T | Promise<T>)[]): void;
map<U>(fn: (value: Awaited<T>, index: number) => U): PInstance<Promise<U>>;
filter(fn: (value: Awaited<T>, index: number) => boolean | Promise<boolean>): PInstance<Promise<T>>;
forEach(fn: (value: Awaited<T>, index: number) => void): Promise<void>;
reduce<U>(fn: (previousValue: U, currentValue: Awaited<T>, currentIndex: number, array: Awaited<T>[]) => U, initialValue: U): Promise<U>;
clear(): void;
then(fn?: () => PromiseLike<any>): Promise<any>;
catch(fn?: (err: unknown) => PromiseLike<any>): Promise<any>;
finally(fn?: () => void): Promise<Awaited<T>[]>;
}
/**
* Utility for managing multiple promises.
*
* @see https://github.com/antfu/utils/tree/main/docs/p.md
* @category Promise
* @example
* ```
* import { p } from '@antfu/utils'
*
* const items = [1, 2, 3, 4, 5]
*
* await p(items)
* .map(async i => await multiply(i, 3))
* .filter(async i => await isEven(i))
* // [6, 12]
* ```
*/
declare function p<T = any>(items?: Iterable<T>, options?: POptions): PInstance<T>;
export { type ArgumentsType, type Arrayable, type Awaitable, type Constructor, type ControlledPromise, type DeepMerge, type ElementOf, type Fn, type MergeInsertions, type Nullable, type PartitionFilter, type SingletonPromiseReturn, type UnionToIntersection, assert, at, batchInvoke, capitalize, clamp, clampArrayRange, clearUndefined, createControlledPromise, createPromiseLock, createSingletonPromise, debounce, deepMerge, deepMergeWithArray, ensurePrefix, ensureSuffix, flattenArrayable, getTypeName, hasOwnProperty, invoke, isBoolean, isBrowser, isDate, isDeepEqual, isDef, isFunction, isKeyOf, isNull, isNumber, isObject, isRegExp, isString, isTruthy, isUndefined, isWindow, last, lerp, mergeArrayable, move, noNull, noop, notNullish, notUndefined, objectEntries, objectKeys, objectMap, objectPick, p, partition, randomStr, range, remap, remove, sample, shuffle, slash, sleep, sum, tap, template, throttle, timestamp, toArray, toString, unindent, uniq, uniqueBy };

@ -0,0 +1,614 @@
/**
* Promise, or maybe not
*/
type Awaitable<T> = T | PromiseLike<T>;
/**
* Null or whatever
*/
type Nullable<T> = T | null | undefined;
/**
* Array, or not yet
*/
type Arrayable<T> = T | Array<T>;
/**
* Function
*/
type Fn<T = void> = () => T;
/**
* Constructor
*/
type Constructor<T = void> = new (...args: any[]) => T;
/**
* Infers the element type of an array
*/
type ElementOf<T> = T extends (infer E)[] ? E : never;
/**
* Defines an intersection type of all union items.
*
* @param U Union of any types that will be intersected.
* @returns U items intersected
* @see https://stackoverflow.com/a/50375286/9259330
*/
type UnionToIntersection<U> = (U extends unknown ? (k: U) => void : never) extends ((k: infer I) => void) ? I : never;
/**
* Infers the arguments type of a function
*/
type ArgumentsType<T> = T extends ((...args: infer A) => any) ? A : never;
type MergeInsertions<T> = T extends object ? {
[K in keyof T]: MergeInsertions<T[K]>;
} : T;
type DeepMerge<F, S> = MergeInsertions<{
[K in keyof F | keyof S]: K extends keyof S & keyof F ? DeepMerge<F[K], S[K]> : K extends keyof S ? S[K] : K extends keyof F ? F[K] : never;
}>;
/**
* Convert `Arrayable<T>` to `Array<T>`
*
* @category Array
*/
declare function toArray<T>(array?: Nullable<Arrayable<T>>): Array<T>;
/**
* Convert `Arrayable<T>` to `Array<T>` and flatten it
*
* @category Array
*/
declare function flattenArrayable<T>(array?: Nullable<Arrayable<T | Array<T>>>): Array<T>;
/**
* Use rest arguments to merge arrays
*
* @category Array
*/
declare function mergeArrayable<T>(...args: Nullable<Arrayable<T>>[]): Array<T>;
type PartitionFilter<T> = (i: T, idx: number, arr: readonly T[]) => any;
/**
* Divide an array into two parts by a filter function
*
* @category Array
* @example const [odd, even] = partition([1, 2, 3, 4], i => i % 2 != 0)
*/
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>): [T[], T[]];
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>, f2: PartitionFilter<T>): [T[], T[], T[]];
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>, f2: PartitionFilter<T>, f3: PartitionFilter<T>): [T[], T[], T[], T[]];
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>, f2: PartitionFilter<T>, f3: PartitionFilter<T>, f4: PartitionFilter<T>): [T[], T[], T[], T[], T[]];
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>, f2: PartitionFilter<T>, f3: PartitionFilter<T>, f4: PartitionFilter<T>, f5: PartitionFilter<T>): [T[], T[], T[], T[], T[], T[]];
declare function partition<T>(array: readonly T[], f1: PartitionFilter<T>, f2: PartitionFilter<T>, f3: PartitionFilter<T>, f4: PartitionFilter<T>, f5: PartitionFilter<T>, f6: PartitionFilter<T>): [T[], T[], T[], T[], T[], T[], T[]];
/**
* Unique an Array
*
* @category Array
*/
declare function uniq<T>(array: readonly T[]): T[];
/**
* Unique an Array by a custom equality function
*
* @category Array
*/
declare function uniqueBy<T>(array: readonly T[], equalFn: (a: any, b: any) => boolean): T[];
/**
* Get last item
*
* @category Array
*/
declare function last(array: readonly []): undefined;
declare function last<T>(array: readonly T[]): T;
/**
* Remove an item from Array
*
* @category Array
*/
declare function remove<T>(array: T[], value: T): boolean;
/**
* Get nth item of Array. Negative for backward
*
* @category Array
*/
declare function at(array: readonly [], index: number): undefined;
declare function at<T>(array: readonly T[], index: number): T;
/**
 * Generate a range array of numbers. The `stop` is exclusive.
*
* @category Array
*/
declare function range(stop: number): number[];
declare function range(start: number, stop: number, step?: number): number[];
/**
* Move element in an Array
*
* @category Array
* @param arr
* @param from
* @param to
*/
declare function move<T>(arr: T[], from: number, to: number): T[];
/**
* Clamp a number to the index range of an array
*
* @category Array
*/
declare function clampArrayRange(n: number, arr: readonly unknown[]): number;
/**
* Get random item(s) from an array
*
* @param arr
* @param quantity - quantity of random items which will be returned
*/
declare function sample<T>(arr: T[], quantity: number): T[];
/**
* Shuffle an array. This function mutates the array.
*
* @category Array
*/
declare function shuffle<T>(array: T[]): T[];
declare function assert(condition: boolean, message: string): asserts condition;
declare const toString: (v: any) => string;
declare function getTypeName(v: any): string;
declare function noop(): void;
declare function isDeepEqual(value1: any, value2: any): boolean;
/**
* Type guard to filter out null-ish values
*
* @category Guards
* @example array.filter(notNullish)
*/
declare function notNullish<T>(v: T | null | undefined): v is NonNullable<T>;
/**
* Type guard to filter out null values
*
* @category Guards
* @example array.filter(noNull)
*/
declare function noNull<T>(v: T | null): v is Exclude<T, null>;
/**
* Type guard to filter out null-ish values
*
* @category Guards
* @example array.filter(notUndefined)
*/
declare function notUndefined<T>(v: T): v is Exclude<T, undefined>;
/**
* Type guard to filter out falsy values
*
* @category Guards
* @example array.filter(isTruthy)
*/
declare function isTruthy<T>(v: T): v is NonNullable<T>;
declare const isDef: <T = any>(val?: T) => val is T;
declare const isBoolean: (val: any) => val is boolean;
declare const isFunction: <T extends Function>(val: any) => val is T;
declare const isNumber: (val: any) => val is number;
declare const isString: (val: unknown) => val is string;
declare const isObject: (val: any) => val is object;
declare const isUndefined: (val: any) => val is undefined;
declare const isNull: (val: any) => val is null;
declare const isRegExp: (val: any) => val is RegExp;
declare const isDate: (val: any) => val is Date;
declare const isWindow: (val: any) => boolean;
declare const isBrowser: boolean;
declare function clamp(n: number, min: number, max: number): number;
declare function sum(...args: number[] | number[][]): number;
/**
* Linearly interpolates between `min` and `max` based on `t`
*
* @category Math
* @param min The minimum value
* @param max The maximum value
* @param t The interpolation value clamped between 0 and 1
* @example
* ```
* const value = lerp(0, 2, 0.5) // value will be 1
* ```
*/
declare function lerp(min: number, max: number, t: number): number;
/**
* Linearly remaps a clamped value from its source range [`inMin`, `inMax`] to the destination range [`outMin`, `outMax`]
*
* @category Math
* @example
* ```
* const value = remap(0.5, 0, 1, 200, 400) // value will be 300
* ```
*/
declare function remap(n: number, inMin: number, inMax: number, outMin: number, outMax: number): number;
/**
 * Replace backslashes with slashes
*
* @category String
*/
declare function slash(str: string): string;
/**
* Ensure prefix of a string
*
* @category String
*/
declare function ensurePrefix(prefix: string, str: string): string;
/**
* Ensure suffix of a string
*
* @category String
*/
declare function ensureSuffix(suffix: string, str: string): string;
/**
 * Dead simple template engine, just like Python's `.format()`.
 * Supports passing variables in either an index-based or an object/name-based approach.
 * When using the object/name-based approach, you can pass a fallback value as the third argument.
*
* @category String
* @example
* ```
* const result = template(
* 'Hello {0}! My name is {1}.',
* 'Inès',
* 'Anthony'
* ) // Hello Inès! My name is Anthony.
* ```
*
* ```
 * const result = template(
 *   '{greet}! My name is {name}.',
 *   { greet: 'Hello', name: 'Anthony' }
 * ) // Hello! My name is Anthony.
 * ```
 *
 * ```
 * const result = template(
 *   '{greet}! My name is {name}.',
 *   { greet: 'Hello' }, // name isn't passed, hence the fallback is used for name
 *   'placeholder'
 * ) // Hello! My name is placeholder.
 * ```
*/
declare function template(str: string, object: Record<string | number, any>, fallback?: string | ((key: string) => string)): string;
declare function template(str: string, ...args: (string | number | bigint | undefined | null)[]): string;
/**
* Generate a random string
* @category String
*/
declare function randomStr(size?: number, dict?: string): string;
/**
 * First letter uppercase, the rest lowercase
* @category string
* @example
* ```
* capitalize('hello') => 'Hello'
* ```
*/
declare function capitalize(str: string): string;
/**
* Remove common leading whitespace from a template string.
* Will also remove empty lines at the beginning and end.
* @category string
* @example
* ```ts
* const str = unindent`
* if (a) {
* b()
* }
* `
 * ```
 */
declare function unindent(str: TemplateStringsArray | string): string;
declare const timestamp: () => number;
/**
* Call every function in an array
*/
declare function batchInvoke(functions: Nullable<Fn>[]): void;
/**
* Call the function
*/
declare function invoke(fn: Fn): void;
/**
* Pass the value through the callback, and return the value
*
* @example
* ```
* function createUser(name: string): User {
* return tap(new User, user => {
* user.name = name
* })
* }
* ```
*/
declare function tap<T>(value: T, callback: (value: T) => void): T;
/**
* Map key/value pairs for an object, and construct a new one
*
*
* @category Object
*
* Transform:
* @example
* ```
* objectMap({ a: 1, b: 2 }, (k, v) => [k.toString().toUpperCase(), v.toString()])
* // { A: '1', B: '2' }
* ```
*
* Swap key/value:
* @example
* ```
* objectMap({ a: 1, b: 2 }, (k, v) => [v, k])
* // { 1: 'a', 2: 'b' }
* ```
*
* Filter keys:
* @example
* ```
* objectMap({ a: 1, b: 2 }, (k, v) => k === 'a' ? undefined : [k, v])
* // { b: 2 }
* ```
*/
declare function objectMap<K extends string, V, NK extends string | number | symbol = K, NV = V>(obj: Record<K, V>, fn: (key: K, value: V) => [NK, NV] | undefined): Record<NK, NV>;
/**
* Type guard for any key, `k`.
* Marks `k` as a key of `T` if `k` is in `obj`.
*
* @category Object
* @param obj object to query for key `k`
* @param k key to check existence in `obj`
*/
declare function isKeyOf<T extends object>(obj: T, k: keyof any): k is keyof T;
/**
* Strict typed `Object.keys`
*
* @category Object
*/
declare function objectKeys<T extends object>(obj: T): (`${keyof T & undefined}` | `${keyof T & null}` | `${keyof T & string}` | `${keyof T & number}` | `${keyof T & false}` | `${keyof T & true}`)[];
/**
* Strict typed `Object.entries`
*
* @category Object
*/
declare function objectEntries<T extends object>(obj: T): [keyof T, T[keyof T]][];
/**
* Deep merge
*
* The first argument is the target object, the rest are the sources.
* The target object will be mutated and returned.
*
* @category Object
*/
declare function deepMerge<T extends object = object, S extends object = T>(target: T, ...sources: S[]): DeepMerge<T, S>;
/**
* Deep merge
*
* Differs from `deepMerge` in that it merges arrays instead of overriding them.
*
* The first argument is the target object, the rest are the sources.
* The target object will be mutated and returned.
*
* @category Object
*/
declare function deepMergeWithArray<T extends object = object, S extends object = T>(target: T, ...sources: S[]): DeepMerge<T, S>;
/**
* Create a new subset object by giving keys
*
* @category Object
*/
declare function objectPick<O extends object, T extends keyof O>(obj: O, keys: T[], omitUndefined?: boolean): Pick<O, T>;
/**
* Clear undefined fields from an object. It mutates the object
*
* @category Object
*/
declare function clearUndefined<T extends object>(obj: T): T;
/**
* Determines whether an object has a property with the specified name
*
* @see https://eslint.org/docs/rules/no-prototype-builtins
* @category Object
*/
declare function hasOwnProperty<T>(obj: T, v: PropertyKey): boolean;
interface SingletonPromiseReturn<T> {
(): Promise<T>;
/**
     * Reset the current stale promise.
     * Await it to ensure a proper shutdown.
*/
reset: () => Promise<void>;
}
/**
* Create singleton promise function
*
* @category Promise
*/
declare function createSingletonPromise<T>(fn: () => Promise<T>): SingletonPromiseReturn<T>;
/**
* Promised `setTimeout`
*
* @category Promise
*/
declare function sleep(ms: number, callback?: Fn<any>): Promise<void>;
/**
* Create a promise lock
*
* @category Promise
* @example
* ```
* const lock = createPromiseLock()
*
* lock.run(async () => {
* await doSomething()
* })
*
 * // in another context:
 * await lock.wait() // it will wait until all tasks are finished
* ```
*/
declare function createPromiseLock(): {
run<T = void>(fn: () => Promise<T>): Promise<T>;
wait(): Promise<void>;
isWaiting(): boolean;
clear(): void;
};
/**
* Promise with `resolve` and `reject` methods of itself
*/
interface ControlledPromise<T = void> extends Promise<T> {
resolve: (value: T | PromiseLike<T>) => void;
reject: (reason?: any) => void;
}
/**
* Return a Promise with `resolve` and `reject` methods
*
* @category Promise
* @example
* ```
* const promise = createControlledPromise()
*
* await promise
*
 * // in another context:
* promise.resolve(data)
* ```
*/
declare function createControlledPromise<T>(): ControlledPromise<T>;
interface CancelOptions {
upcomingOnly?: boolean;
}
interface Cancel {
cancel: (options?: CancelOptions) => void;
}
interface NoReturn<T extends (...args: any[]) => any> {
(...args: Parameters<T>): void;
}
interface ThrottleOptions {
noTrailing?: boolean;
noLeading?: boolean;
debounceMode?: boolean;
}
interface DebounceOptions {
atBegin?: boolean;
}
type throttle<T extends (...args: any[]) => any> = NoReturn<T> & Cancel;
/**
* Throttle execution of a function. Especially useful for rate limiting
* execution of handlers on events like resize and scroll.
*
* @param delay
* A zero-or-greater delay in milliseconds. For event callbacks, values around
* 100 or 250 (or even higher) are most useful.
*
* @param callback
* A function to be executed after delay milliseconds. The `this` context and
* all arguments are passed through, as-is, to `callback` when the
* throttled-function is executed.
*
* @param options
* An object to configure options.
*
* @param options.noTrailing
* Optional, defaults to false. If noTrailing is true, callback will only execute
* every `delay` milliseconds while the throttled-function is being called. If
* noTrailing is false or unspecified, callback will be executed one final time
* after the last throttled-function call. (After the throttled-function has not
* been called for `delay` milliseconds, the internal counter is reset)
*
* @param options.noLeading
* Optional, defaults to false. If noLeading is false, the first throttled-function
 * call will execute callback immediately. If noLeading is true, the first
 * callback execution will be skipped. It should be noted that callback will never
 * be executed if both noLeading = true and noTrailing = true.
*
 * @param options.debounceMode If `debounceMode` is true (at begin), schedule
 * `clear` to execute after `delay` ms. If `debounceMode` is false (at end),
 * schedule `callback` to execute after `delay` ms.
*
* @return
* A new, throttled, function.
*/
declare function throttle<T extends (...args: any[]) => any>(
delay: number,
callback: T,
options?: ThrottleOptions,
): throttle<T>;
type debounce<T extends (...args: any[]) => any> = NoReturn<T> & Cancel;
/**
* Debounce execution of a function. Debouncing, unlike throttling,
* guarantees that a function is only executed a single time, either at the
* very beginning of a series of calls, or at the very end.
*
* @param delay
* A zero-or-greater delay in milliseconds. For event callbacks, values around
* 100 or 250 (or even higher) are most useful.
*
* @param callback
* A function to be executed after delay milliseconds. The `this` context and
* all arguments are passed through, as-is, to `callback` when the
* debounced-function is executed.
*
* @param options
* An object to configure options.
*
* @param options.atBegin
* If atBegin is false or unspecified, callback will only be executed `delay`
* milliseconds after the last debounced-function call. If atBegin is true,
* callback will be executed only at the first debounced-function call. (After
 * the debounced-function has not been called for `delay` milliseconds, the
* internal counter is reset).
*
* @return
* A new, debounced function.
*/
declare function debounce<T extends (...args: any[]) => any>(
delay: number,
callback: T,
options?: DebounceOptions,
): debounce<T>;
interface POptions {
/**
* How many promises are resolved at the same time.
*/
concurrency?: number | undefined;
}
declare class PInstance<T = any> extends Promise<Awaited<T>[]> {
items: Iterable<T>;
options?: POptions | undefined;
private promises;
get promise(): Promise<Awaited<T>[]>;
constructor(items?: Iterable<T>, options?: POptions | undefined);
add(...args: (T | Promise<T>)[]): void;
map<U>(fn: (value: Awaited<T>, index: number) => U): PInstance<Promise<U>>;
filter(fn: (value: Awaited<T>, index: number) => boolean | Promise<boolean>): PInstance<Promise<T>>;
forEach(fn: (value: Awaited<T>, index: number) => void): Promise<void>;
reduce<U>(fn: (previousValue: U, currentValue: Awaited<T>, currentIndex: number, array: Awaited<T>[]) => U, initialValue: U): Promise<U>;
clear(): void;
then(fn?: () => PromiseLike<any>): Promise<any>;
catch(fn?: (err: unknown) => PromiseLike<any>): Promise<any>;
finally(fn?: () => void): Promise<Awaited<T>[]>;
}
/**
* Utility for managing multiple promises.
*
* @see https://github.com/antfu/utils/tree/main/docs/p.md
* @category Promise
* @example
* ```
* import { p } from '@antfu/utils'
*
* const items = [1, 2, 3, 4, 5]
*
* await p(items)
* .map(async i => await multiply(i, 3))
* .filter(async i => await isEven(i))
* // [6, 12]
* ```
*/
declare function p<T = any>(items?: Iterable<T>, options?: POptions): PInstance<T>;
export { type ArgumentsType, type Arrayable, type Awaitable, type Constructor, type ControlledPromise, type DeepMerge, type ElementOf, type Fn, type MergeInsertions, type Nullable, type PartitionFilter, type SingletonPromiseReturn, type UnionToIntersection, assert, at, batchInvoke, capitalize, clamp, clampArrayRange, clearUndefined, createControlledPromise, createPromiseLock, createSingletonPromise, debounce, deepMerge, deepMergeWithArray, ensurePrefix, ensureSuffix, flattenArrayable, getTypeName, hasOwnProperty, invoke, isBoolean, isBrowser, isDate, isDeepEqual, isDef, isFunction, isKeyOf, isNull, isNumber, isObject, isRegExp, isString, isTruthy, isUndefined, isWindow, last, lerp, mergeArrayable, move, noNull, noop, notNullish, notUndefined, objectEntries, objectKeys, objectMap, objectPick, p, partition, randomStr, range, remap, remove, sample, shuffle, slash, sleep, sum, tap, template, throttle, timestamp, toArray, toString, unindent, uniq, uniqueBy };

@ -0,0 +1,777 @@
function clamp(n, min, max) {
return Math.min(max, Math.max(min, n));
}
function sum(...args) {
return flattenArrayable(args).reduce((a, b) => a + b, 0);
}
function lerp(min, max, t) {
const interpolation = clamp(t, 0, 1);
return min + (max - min) * interpolation;
}
function remap(n, inMin, inMax, outMin, outMax) {
const interpolation = (n - inMin) / (inMax - inMin);
return lerp(outMin, outMax, interpolation);
}
function toArray(array) {
array = array ?? [];
return Array.isArray(array) ? array : [array];
}
function flattenArrayable(array) {
return toArray(array).flat(1);
}
function mergeArrayable(...args) {
return args.flatMap((i) => toArray(i));
}
function partition(array, ...filters) {
const result = Array.from({ length: filters.length + 1 }).fill(null).map(() => []);
array.forEach((e, idx, arr) => {
let i = 0;
for (const filter of filters) {
if (filter(e, idx, arr)) {
result[i].push(e);
return;
}
i += 1;
}
result[i].push(e);
});
return result;
}
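/*
 * Usage sketch (added for illustration, not part of the upstream source):
 * `partition` walks the array once and drops each element into the bucket of
 * the first filter it matches, plus one trailing bucket for the rest.
 *
 *   const [small, medium, large] = partition([1, 5, 10, 20], i => i < 5, i => i < 15)
 *   // small -> [1], medium -> [5, 10], large -> [20]
 */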
function uniq(array) {
return Array.from(new Set(array));
}
function uniqueBy(array, equalFn) {
return array.reduce((acc, cur) => {
const index = acc.findIndex((item) => equalFn(cur, item));
if (index === -1)
acc.push(cur);
return acc;
}, []);
}
function last(array) {
return at(array, -1);
}
function remove(array, value) {
if (!array)
return false;
const index = array.indexOf(value);
if (index >= 0) {
array.splice(index, 1);
return true;
}
return false;
}
function at(array, index) {
const len = array.length;
if (!len)
return void 0;
if (index < 0)
index += len;
return array[index];
}
function range(...args) {
let start, stop, step;
if (args.length === 1) {
start = 0;
step = 1;
[stop] = args;
} else {
[start, stop, step = 1] = args;
}
const arr = [];
let current = start;
while (current < stop) {
arr.push(current);
current += step || 1;
}
return arr;
}
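/*
 * Usage sketch (added for illustration, not part of the upstream source):
 * `stop` is exclusive, and `step` falls back to 1 when omitted or falsy.
 *
 *   range(5)        // [0, 1, 2, 3, 4]
 *   range(2, 10, 2) // [2, 4, 6, 8]
 */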
function move(arr, from, to) {
arr.splice(to, 0, arr.splice(from, 1)[0]);
return arr;
}
function clampArrayRange(n, arr) {
return clamp(n, 0, arr.length - 1);
}
function sample(arr, quantity) {
return Array.from({ length: quantity }, (_) => arr[Math.round(Math.random() * (arr.length - 1))]);
}
function shuffle(array) {
for (let i = array.length - 1; i > 0; i--) {
const j = Math.floor(Math.random() * (i + 1));
[array[i], array[j]] = [array[j], array[i]];
}
return array;
}
function assert(condition, message) {
if (!condition)
throw new Error(message);
}
const toString = (v) => Object.prototype.toString.call(v);
function getTypeName(v) {
if (v === null)
return "null";
const type = toString(v).slice(8, -1).toLowerCase();
return typeof v === "object" || typeof v === "function" ? type : typeof v;
}
function noop() {
}
function isDeepEqual(value1, value2) {
const type1 = getTypeName(value1);
const type2 = getTypeName(value2);
if (type1 !== type2)
return false;
if (type1 === "array") {
if (value1.length !== value2.length)
return false;
return value1.every((item, i) => {
return isDeepEqual(item, value2[i]);
});
}
if (type1 === "object") {
const keyArr = Object.keys(value1);
if (keyArr.length !== Object.keys(value2).length)
return false;
return keyArr.every((key) => {
return isDeepEqual(value1[key], value2[key]);
});
}
return Object.is(value1, value2);
}
function notNullish(v) {
return v != null;
}
function noNull(v) {
return v !== null;
}
function notUndefined(v) {
return v !== void 0;
}
function isTruthy(v) {
return Boolean(v);
}
const isDef = (val) => typeof val !== "undefined";
const isBoolean = (val) => typeof val === "boolean";
const isFunction = (val) => typeof val === "function";
const isNumber = (val) => typeof val === "number";
const isString = (val) => typeof val === "string";
const isObject = (val) => toString(val) === "[object Object]";
const isUndefined = (val) => toString(val) === "[object Undefined]";
const isNull = (val) => toString(val) === "[object Null]";
const isRegExp = (val) => toString(val) === "[object RegExp]";
const isDate = (val) => toString(val) === "[object Date]";
const isWindow = (val) => typeof window !== "undefined" && toString(val) === "[object Window]";
const isBrowser = typeof window !== "undefined";
function slash(str) {
return str.replace(/\\/g, "/");
}
function ensurePrefix(prefix, str) {
if (!str.startsWith(prefix))
return prefix + str;
return str;
}
function ensureSuffix(suffix, str) {
if (!str.endsWith(suffix))
return str + suffix;
return str;
}
function template(str, ...args) {
const [firstArg, fallback] = args;
if (isObject(firstArg)) {
const vars = firstArg;
return str.replace(/{([\w\d]+)}/g, (_, key) => vars[key] || ((typeof fallback === "function" ? fallback(key) : fallback) ?? key));
} else {
return str.replace(/{(\d+)}/g, (_, key) => {
const index = Number(key);
if (Number.isNaN(index))
return key;
return args[index];
});
}
}
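/*
 * Usage sketch (added for illustration, not part of the upstream source):
 * besides a plain string, the fallback for missing keys may also be a
 * function of the key itself.
 *
 *   template('{greet} {name}', { greet: 'Hi' }, key => `<${key}>`)
 *   // 'Hi <name>'
 */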
const urlAlphabet = "useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict";
function randomStr(size = 16, dict = urlAlphabet) {
let id = "";
let i = size;
const len = dict.length;
while (i--)
id += dict[Math.random() * len | 0];
return id;
}
function capitalize(str) {
return str[0].toUpperCase() + str.slice(1).toLowerCase();
}
const _reFullWs = /^\s*$/;
function unindent(str) {
const lines = (typeof str === "string" ? str : str[0]).split("\n");
const whitespaceLines = lines.map((line) => _reFullWs.test(line));
const commonIndent = lines.reduce((min, line, idx) => {
var _a;
if (whitespaceLines[idx])
return min;
const indent = (_a = line.match(/^\s*/)) == null ? void 0 : _a[0].length;
return indent === void 0 ? min : Math.min(min, indent);
}, Number.POSITIVE_INFINITY);
let emptyLinesHead = 0;
while (emptyLinesHead < lines.length && whitespaceLines[emptyLinesHead])
emptyLinesHead++;
let emptyLinesTail = 0;
while (emptyLinesTail < lines.length && whitespaceLines[lines.length - emptyLinesTail - 1])
emptyLinesTail++;
return lines.slice(emptyLinesHead, lines.length - emptyLinesTail).map((line) => line.slice(commonIndent)).join("\n");
}
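/*
 * Usage sketch (added for illustration, not part of the upstream source):
 * the common leading indentation and the blank first/last lines are stripped.
 *
 *   unindent`
 *     if (a) {
 *       b()
 *     }
 *   `
 *   // -> 'if (a) {\n  b()\n}'
 */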
const timestamp = () => +Date.now();
function batchInvoke(functions) {
functions.forEach((fn) => fn && fn());
}
function invoke(fn) {
return fn();
}
function tap(value, callback) {
callback(value);
return value;
}
function objectMap(obj, fn) {
return Object.fromEntries(
Object.entries(obj).map(([k, v]) => fn(k, v)).filter(notNullish)
);
}
function isKeyOf(obj, k) {
return k in obj;
}
function objectKeys(obj) {
return Object.keys(obj);
}
function objectEntries(obj) {
return Object.entries(obj);
}
function deepMerge(target, ...sources) {
if (!sources.length)
return target;
const source = sources.shift();
if (source === void 0)
return target;
if (isMergableObject(target) && isMergableObject(source)) {
objectKeys(source).forEach((key) => {
if (key === "__proto__" || key === "constructor" || key === "prototype")
return;
if (isMergableObject(source[key])) {
if (!target[key])
target[key] = {};
if (isMergableObject(target[key])) {
deepMerge(target[key], source[key]);
} else {
target[key] = source[key];
}
} else {
target[key] = source[key];
}
});
}
return deepMerge(target, ...sources);
}
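/*
 * Usage sketch (added for illustration, not part of the upstream source):
 * plain objects are merged recursively and the target is mutated; arrays are
 * overridden (use `deepMergeWithArray` below to concatenate them instead).
 *
 *   deepMerge({ a: { x: 1 }, list: [1] }, { a: { y: 2 }, list: [2, 3] })
 *   // -> { a: { x: 1, y: 2 }, list: [2, 3] }
 */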
function deepMergeWithArray(target, ...sources) {
if (!sources.length)
return target;
const source = sources.shift();
if (source === void 0)
return target;
if (Array.isArray(target) && Array.isArray(source))
target.push(...source);
if (isMergableObject(target) && isMergableObject(source)) {
objectKeys(source).forEach((key) => {
if (key === "__proto__" || key === "constructor" || key === "prototype")
return;
if (Array.isArray(source[key])) {
if (!target[key])
target[key] = [];
deepMergeWithArray(target[key], source[key]);
} else if (isMergableObject(source[key])) {
if (!target[key])
target[key] = {};
deepMergeWithArray(target[key], source[key]);
} else {
target[key] = source[key];
}
});
}
return deepMergeWithArray(target, ...sources);
}
function isMergableObject(item) {
return isObject(item) && !Array.isArray(item);
}
function objectPick(obj, keys, omitUndefined = false) {
return keys.reduce((n, k) => {
if (k in obj) {
if (!omitUndefined || obj[k] !== void 0)
n[k] = obj[k];
}
return n;
}, {});
}
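/*
 * Usage sketch (added for illustration, not part of the upstream source):
 * the optional third argument drops keys whose value is undefined.
 *
 *   objectPick({ a: 1, b: 2, c: undefined }, ['a', 'c'])       // { a: 1, c: undefined }
 *   objectPick({ a: 1, b: 2, c: undefined }, ['a', 'c'], true) // { a: 1 }
 */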
function clearUndefined(obj) {
Object.keys(obj).forEach((key) => obj[key] === void 0 ? delete obj[key] : {});
return obj;
}
function hasOwnProperty(obj, v) {
if (obj == null)
return false;
return Object.prototype.hasOwnProperty.call(obj, v);
}
function createSingletonPromise(fn) {
let _promise;
function wrapper() {
if (!_promise)
_promise = fn();
return _promise;
}
wrapper.reset = async () => {
const _prev = _promise;
_promise = void 0;
if (_prev)
await _prev;
};
return wrapper;
}
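/*
 * Usage sketch (added for illustration, not part of the upstream source):
 * the wrapped function runs at most once and later calls reuse the same
 * promise until `reset()` is awaited. `openDatabase` is a hypothetical helper.
 *
 *   const init = createSingletonPromise(async () => openDatabase())
 *   await Promise.all([init(), init()]) // `openDatabase` runs only once
 *   await init.reset()                  // the next `init()` call runs it again
 */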
function sleep(ms, callback) {
return new Promise(
(resolve) => setTimeout(async () => {
await (callback == null ? void 0 : callback());
resolve();
}, ms)
);
}
function createPromiseLock() {
const locks = [];
return {
async run(fn) {
const p = fn();
locks.push(p);
try {
return await p;
} finally {
remove(locks, p);
}
},
async wait() {
await Promise.allSettled(locks);
},
isWaiting() {
return Boolean(locks.length);
},
clear() {
locks.length = 0;
}
};
}
function createControlledPromise() {
let resolve, reject;
const promise = new Promise((_resolve, _reject) => {
resolve = _resolve;
reject = _reject;
});
promise.resolve = resolve;
promise.reject = reject;
return promise;
}
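/*
 * Usage sketch (added for illustration, not part of the upstream source):
 * the returned promise exposes its own `resolve`/`reject`, so another piece
 * of code can settle it later.
 *
 *   const ready = createControlledPromise()
 *   setTimeout(() => ready.resolve('done'), 100)
 *   await ready // 'done'
 */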
/* eslint-disable no-undefined,no-param-reassign,no-shadow */
/**
* Throttle execution of a function. Especially useful for rate limiting
* execution of handlers on events like resize and scroll.
*
* @param {number} delay - A zero-or-greater delay in milliseconds. For event callbacks, values around 100 or 250 (or even higher)
* are most useful.
* @param {Function} callback - A function to be executed after delay milliseconds. The `this` context and all arguments are passed through,
* as-is, to `callback` when the throttled-function is executed.
* @param {object} [options] - An object to configure options.
* @param {boolean} [options.noTrailing] - Optional, defaults to false. If noTrailing is true, callback will only execute every `delay` milliseconds
* while the throttled-function is being called. If noTrailing is false or unspecified, callback will be executed
* one final time after the last throttled-function call. (After the throttled-function has not been called for
* `delay` milliseconds, the internal counter is reset).
* @param {boolean} [options.noLeading] - Optional, defaults to false. If noLeading is false, the first throttled-function call will execute callback
 *                                        immediately. If noLeading is true, the first callback execution will be skipped. It should be noted that
 *                                        callback will never be executed if both noLeading = true and noTrailing = true.
* @param {boolean} [options.debounceMode] - If `debounceMode` is true (at begin), schedule `clear` to execute after `delay` ms. If `debounceMode` is
* false (at end), schedule `callback` to execute after `delay` ms.
*
* @returns {Function} A new, throttled, function.
*/
function throttle (delay, callback, options) {
var _ref = options || {},
_ref$noTrailing = _ref.noTrailing,
noTrailing = _ref$noTrailing === void 0 ? false : _ref$noTrailing,
_ref$noLeading = _ref.noLeading,
noLeading = _ref$noLeading === void 0 ? false : _ref$noLeading,
_ref$debounceMode = _ref.debounceMode,
debounceMode = _ref$debounceMode === void 0 ? undefined : _ref$debounceMode;
/*
* After wrapper has stopped being called, this timeout ensures that
* `callback` is executed at the proper times in `throttle` and `end`
* debounce modes.
*/
var timeoutID;
  var cancelled = false;
  // Keep track of the last time `callback` was executed.
  var lastExec = 0;
  // Function to clear existing timeout
function clearExistingTimeout() {
if (timeoutID) {
clearTimeout(timeoutID);
}
} // Function to cancel next exec
function cancel(options) {
var _ref2 = options || {},
_ref2$upcomingOnly = _ref2.upcomingOnly,
upcomingOnly = _ref2$upcomingOnly === void 0 ? false : _ref2$upcomingOnly;
clearExistingTimeout();
cancelled = !upcomingOnly;
}
/*
* The `wrapper` function encapsulates all of the throttling / debouncing
* functionality and when executed will limit the rate at which `callback`
* is executed.
*/
function wrapper() {
for (var _len = arguments.length, arguments_ = new Array(_len), _key = 0; _key < _len; _key++) {
arguments_[_key] = arguments[_key];
}
var self = this;
var elapsed = Date.now() - lastExec;
if (cancelled) {
return;
} // Execute `callback` and update the `lastExec` timestamp.
function exec() {
lastExec = Date.now();
callback.apply(self, arguments_);
}
/*
* If `debounceMode` is true (at begin) this is used to clear the flag
* to allow future `callback` executions.
*/
function clear() {
timeoutID = undefined;
}
if (!noLeading && debounceMode && !timeoutID) {
/*
       * Since `wrapper` is being called for the first time,
       * `debounceMode` is true (at begin) and `noLeading` is not set,
       * execute `callback` immediately.
*/
exec();
}
clearExistingTimeout();
if (debounceMode === undefined && elapsed > delay) {
if (noLeading) {
/*
* In throttle mode with noLeading, if `delay` time has
* been exceeded, update `lastExec` and schedule `callback`
* to execute after `delay` ms.
*/
lastExec = Date.now();
if (!noTrailing) {
timeoutID = setTimeout(debounceMode ? clear : exec, delay);
}
} else {
/*
* In throttle mode without noLeading, if `delay` time has been exceeded, execute
* `callback`.
*/
exec();
}
} else if (noTrailing !== true) {
/*
* In trailing throttle mode, since `delay` time has not been
* exceeded, schedule `callback` to execute `delay` ms after most
* recent execution.
*
* If `debounceMode` is true (at begin), schedule `clear` to execute
* after `delay` ms.
*
* If `debounceMode` is false (at end), schedule `callback` to
* execute after `delay` ms.
*/
timeoutID = setTimeout(debounceMode ? clear : exec, debounceMode === undefined ? delay - elapsed : delay);
}
}
wrapper.cancel = cancel; // Return the wrapper function.
return wrapper;
}
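/*
 * Usage sketch (added for illustration, not part of the upstream source; a
 * browser context is assumed): at most one handler call per 250 ms while
 * scrolling, plus a trailing call after the last event; `cancel()` disables
 * further executions.
 *
 *   const onScroll = throttle(250, () => console.log('scrolled'))
 *   window.addEventListener('scroll', onScroll)
 *   // later: onScroll.cancel()
 */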
/* eslint-disable no-undefined */
/**
* Debounce execution of a function. Debouncing, unlike throttling,
* guarantees that a function is only executed a single time, either at the
* very beginning of a series of calls, or at the very end.
*
* @param {number} delay - A zero-or-greater delay in milliseconds. For event callbacks, values around 100 or 250 (or even higher) are most useful.
* @param {Function} callback - A function to be executed after delay milliseconds. The `this` context and all arguments are passed through, as-is,
* to `callback` when the debounced-function is executed.
* @param {object} [options] - An object to configure options.
* @param {boolean} [options.atBegin] - Optional, defaults to false. If atBegin is false or unspecified, callback will only be executed `delay` milliseconds
* after the last debounced-function call. If atBegin is true, callback will be executed only at the first debounced-function call.
 *                                    (After the debounced-function has not been called for `delay` milliseconds, the internal counter is reset).
*
* @returns {Function} A new, debounced function.
*/
function debounce (delay, callback, options) {
var _ref = options || {},
_ref$atBegin = _ref.atBegin,
atBegin = _ref$atBegin === void 0 ? false : _ref$atBegin;
return throttle(delay, callback, {
debounceMode: atBegin !== false
});
}
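/*
 * Usage sketch (added for illustration, not part of the upstream source):
 * the callback runs once, 300 ms after the last call of a burst; pass
 * `{ atBegin: true }` to run it on the first call instead. `search` is a
 * hypothetical helper.
 *
 *   const onInput = debounce(300, query => search(query))
 *   onInput('a'); onInput('ab'); onInput('abc') // only 'abc' reaches `search`
 */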
/*
How it works:
`this.#head` is an instance of `Node` which keeps track of its current value and
nests another instance of `Node` that keeps the value that comes after it. When
a value is provided to `.enqueue()`, the code needs to iterate through
`this.#head`, going deeper and deeper to find the last value. However, iterating
through every single item is slow. This problem is solved by saving a reference
to the last value as `this.#tail` so that it can reference it to add a new value.
*/
class Node {
value;
next;
constructor(value) {
this.value = value;
}
}
class Queue {
#head;
#tail;
#size;
constructor() {
this.clear();
}
enqueue(value) {
const node = new Node(value);
if (this.#head) {
this.#tail.next = node;
this.#tail = node;
} else {
this.#head = node;
this.#tail = node;
}
this.#size++;
}
dequeue() {
const current = this.#head;
if (!current) {
return;
}
this.#head = this.#head.next;
this.#size--;
return current.value;
}
clear() {
this.#head = undefined;
this.#tail = undefined;
this.#size = 0;
}
get size() {
return this.#size;
}
* [Symbol.iterator]() {
let current = this.#head;
while (current) {
yield current.value;
current = current.next;
}
}
}
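/*
 * Usage sketch (added for illustration, not part of the upstream source):
 * a minimal FIFO used by `pLimit` below; the tail reference keeps enqueue and
 * dequeue O(1).
 *
 *   const q = new Queue()
 *   q.enqueue('a')
 *   q.enqueue('b')
 *   q.dequeue()    // 'a'
 *   q.size         // 1
 *   Array.from(q)  // ['b']
 */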
const AsyncResource = {
bind(fn, _type, thisArg) {
return fn.bind(thisArg);
},
};
function pLimit(concurrency) {
if (!((Number.isInteger(concurrency) || concurrency === Number.POSITIVE_INFINITY) && concurrency > 0)) {
throw new TypeError('Expected `concurrency` to be a number from 1 and up');
}
const queue = new Queue();
let activeCount = 0;
const next = () => {
activeCount--;
if (queue.size > 0) {
queue.dequeue()();
}
};
const run = async (function_, resolve, arguments_) => {
activeCount++;
const result = (async () => function_(...arguments_))();
resolve(result);
try {
await result;
} catch {}
next();
};
const enqueue = (function_, resolve, arguments_) => {
queue.enqueue(
AsyncResource.bind(run.bind(undefined, function_, resolve, arguments_)),
);
(async () => {
// This function needs to wait until the next microtask before comparing
// `activeCount` to `concurrency`, because `activeCount` is updated asynchronously
// when the run function is dequeued and called. The comparison in the if-statement
// needs to happen asynchronously as well to get an up-to-date value for `activeCount`.
await Promise.resolve();
if (activeCount < concurrency && queue.size > 0) {
queue.dequeue()();
}
})();
};
const generator = (function_, ...arguments_) => new Promise(resolve => {
enqueue(function_, resolve, arguments_);
});
Object.defineProperties(generator, {
activeCount: {
get: () => activeCount,
},
pendingCount: {
get: () => queue.size,
},
clearQueue: {
value() {
queue.clear();
},
},
});
return generator;
}
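/*
 * Usage sketch (added for illustration, not part of the upstream source):
 * this appears to be a bundled copy of `p-limit`; at most `concurrency` of
 * the queued async functions run at the same time. `urls` is a hypothetical
 * array of request targets.
 *
 *   const limit = pLimit(2)
 *   const pages = await Promise.all(urls.map(url => limit(() => fetch(url))))
 */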
const VOID = Symbol("p-void");
class PInstance extends Promise {
constructor(items = [], options) {
super(() => {
});
this.items = items;
this.options = options;
this.promises = /* @__PURE__ */ new Set();
}
get promise() {
var _a;
let batch;
const items = [...Array.from(this.items), ...Array.from(this.promises)];
if ((_a = this.options) == null ? void 0 : _a.concurrency) {
const limit = pLimit(this.options.concurrency);
batch = Promise.all(items.map((p2) => limit(() => p2)));
} else {
batch = Promise.all(items);
}
return batch.then((l) => l.filter((i) => i !== VOID));
}
add(...args) {
args.forEach((i) => {
this.promises.add(i);
});
}
map(fn) {
return new PInstance(
Array.from(this.items).map(async (i, idx) => {
const v = await i;
if (v === VOID)
return VOID;
return fn(v, idx);
}),
this.options
);
}
filter(fn) {
return new PInstance(
Array.from(this.items).map(async (i, idx) => {
const v = await i;
const r = await fn(v, idx);
if (!r)
return VOID;
return v;
}),
this.options
);
}
forEach(fn) {
return this.map(fn).then();
}
reduce(fn, initialValue) {
return this.promise.then((array) => array.reduce(fn, initialValue));
}
clear() {
this.promises.clear();
}
then(fn) {
const p2 = this.promise;
if (fn)
return p2.then(fn);
else
return p2;
}
catch(fn) {
return this.promise.catch(fn);
}
finally(fn) {
return this.promise.finally(fn);
}
}
function p(items, options) {
return new PInstance(items, options);
}
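/*
 * Usage sketch (added for illustration, not part of the upstream source):
 * `p()` collects items and promises, and `concurrency` caps how many mapped
 * promises run at once via `pLimit` above.
 *
 *   const doubled = await p([1, 2, 3], { concurrency: 2 }).map(async i => i * 2)
 *   // [2, 4, 6]
 */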
export { assert, at, batchInvoke, capitalize, clamp, clampArrayRange, clearUndefined, createControlledPromise, createPromiseLock, createSingletonPromise, debounce, deepMerge, deepMergeWithArray, ensurePrefix, ensureSuffix, flattenArrayable, getTypeName, hasOwnProperty, invoke, isBoolean, isBrowser, isDate, isDeepEqual, isDef, isFunction, isKeyOf, isNull, isNumber, isObject, isRegExp, isString, isTruthy, isUndefined, isWindow, last, lerp, mergeArrayable, move, noNull, noop, notNullish, notUndefined, objectEntries, objectKeys, objectMap, objectPick, p, partition, randomStr, range, remap, remove, sample, shuffle, slash, sleep, sum, tap, template, throttle, timestamp, toArray, toString, unindent, uniq, uniqueBy };

@ -0,0 +1,67 @@
{
"name": "@antfu/utils",
"type": "module",
"version": "0.7.10",
"packageManager": "pnpm@9.1.0",
"description": "Opinionated collection of common JavaScript / TypeScript utils by @antfu",
"author": "Anthony Fu <anthonyfu117@hotmail.com>",
"license": "MIT",
"funding": "https://github.com/sponsors/antfu",
"homepage": "https://github.com/antfu/utils#readme",
"repository": {
"type": "git",
"url": "git+https://github.com/antfu/utils.git"
},
"bugs": {
"url": "https://github.com/antfu/utils/issues"
},
"keywords": [
"utils"
],
"sideEffects": false,
"exports": {
".": {
"import": "./dist/index.mjs",
"require": "./dist/index.cjs"
}
},
"main": "dist/index.cjs",
"module": "dist/index.mjs",
"types": "dist/index.d.ts",
"files": [
"dist"
],
"scripts": {
"build": "rollup -c",
"dev": "nr build --watch",
"lint": "eslint .",
"lint-fix": "nr lint --fix",
"prepublishOnly": "npm run build",
"release": "bumpp --commit --push --tag && npm publish",
"start": "esno src/index.ts",
"typecheck": "tsc --noEmit",
"test": "vitest"
},
"devDependencies": {
"@antfu/eslint-config": "^2.16.3",
"@antfu/ni": "^0.21.12",
"@rollup/plugin-alias": "^5.1.0",
"@rollup/plugin-commonjs": "^25.0.7",
"@rollup/plugin-json": "^6.1.0",
"@rollup/plugin-node-resolve": "^15.2.3",
"@types/node": "^20.12.10",
"@types/throttle-debounce": "^5.0.2",
"bumpp": "^9.4.1",
"eslint": "npm:eslint-ts-patch@8.55.0-1",
"eslint-ts-patch": "8.55.0-1",
"esno": "^4.7.0",
"p-limit": "^5.0.0",
"rollup": "^4.17.2",
"rollup-plugin-dts": "^6.1.0",
"rollup-plugin-esbuild": "^6.1.1",
"throttle-debounce": "5.0.0",
"typescript": "^5.4.5",
"vite": "^5.2.11",
"vitest": "^1.6.0"
}
}

@ -0,0 +1,22 @@
MIT License
Copyright (c) 2014-present Sebastian McKenzie and other contributors
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -0,0 +1,19 @@
# @babel/code-frame
> Generate errors that contain a code frame that points to source locations.
See our website [@babel/code-frame](https://babeljs.io/docs/babel-code-frame) for more information.
## Install
Using npm:
```sh
npm install --save-dev @babel/code-frame
```
or using yarn:
```sh
yarn add @babel/code-frame --dev
```

@ -0,0 +1,216 @@
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
var picocolors = require('picocolors');
var jsTokens = require('js-tokens');
var helperValidatorIdentifier = require('@babel/helper-validator-identifier');
function isColorSupported() {
return (typeof process === "object" && (process.env.FORCE_COLOR === "0" || process.env.FORCE_COLOR === "false") ? false : picocolors.isColorSupported
);
}
const compose = (f, g) => v => f(g(v));
function buildDefs(colors) {
return {
keyword: colors.cyan,
capitalized: colors.yellow,
jsxIdentifier: colors.yellow,
punctuator: colors.yellow,
number: colors.magenta,
string: colors.green,
regex: colors.magenta,
comment: colors.gray,
invalid: compose(compose(colors.white, colors.bgRed), colors.bold),
gutter: colors.gray,
marker: compose(colors.red, colors.bold),
message: compose(colors.red, colors.bold),
reset: colors.reset
};
}
const defsOn = buildDefs(picocolors.createColors(true));
const defsOff = buildDefs(picocolors.createColors(false));
function getDefs(enabled) {
return enabled ? defsOn : defsOff;
}
const sometimesKeywords = new Set(["as", "async", "from", "get", "of", "set"]);
const NEWLINE$1 = /\r\n|[\n\r\u2028\u2029]/;
const BRACKET = /^[()[\]{}]$/;
let tokenize;
{
const JSX_TAG = /^[a-z][\w-]*$/i;
const getTokenType = function (token, offset, text) {
if (token.type === "name") {
if (helperValidatorIdentifier.isKeyword(token.value) || helperValidatorIdentifier.isStrictReservedWord(token.value, true) || sometimesKeywords.has(token.value)) {
return "keyword";
}
if (JSX_TAG.test(token.value) && (text[offset - 1] === "<" || text.slice(offset - 2, offset) === "</")) {
return "jsxIdentifier";
}
if (token.value[0] !== token.value[0].toLowerCase()) {
return "capitalized";
}
}
if (token.type === "punctuator" && BRACKET.test(token.value)) {
return "bracket";
}
if (token.type === "invalid" && (token.value === "@" || token.value === "#")) {
return "punctuator";
}
return token.type;
};
tokenize = function* (text) {
let match;
while (match = jsTokens.default.exec(text)) {
const token = jsTokens.matchToToken(match);
yield {
type: getTokenType(token, match.index, text),
value: token.value
};
}
};
}
function highlight(text) {
if (text === "") return "";
const defs = getDefs(true);
let highlighted = "";
for (const {
type,
value
} of tokenize(text)) {
if (type in defs) {
highlighted += value.split(NEWLINE$1).map(str => defs[type](str)).join("\n");
} else {
highlighted += value;
}
}
return highlighted;
}
let deprecationWarningShown = false;
const NEWLINE = /\r\n|[\n\r\u2028\u2029]/;
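// Computes the window of lines to print (`start`..`end`) plus, for each marked
// line, either `true` (mark the whole line) or a [column, markerLength] pair
// used later to draw the "^" row.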
function getMarkerLines(loc, source, opts) {
const startLoc = Object.assign({
column: 0,
line: -1
}, loc.start);
const endLoc = Object.assign({}, startLoc, loc.end);
const {
linesAbove = 2,
linesBelow = 3
} = opts || {};
const startLine = startLoc.line;
const startColumn = startLoc.column;
const endLine = endLoc.line;
const endColumn = endLoc.column;
let start = Math.max(startLine - (linesAbove + 1), 0);
let end = Math.min(source.length, endLine + linesBelow);
if (startLine === -1) {
start = 0;
}
if (endLine === -1) {
end = source.length;
}
const lineDiff = endLine - startLine;
const markerLines = {};
if (lineDiff) {
for (let i = 0; i <= lineDiff; i++) {
const lineNumber = i + startLine;
if (!startColumn) {
markerLines[lineNumber] = true;
} else if (i === 0) {
const sourceLength = source[lineNumber - 1].length;
markerLines[lineNumber] = [startColumn, sourceLength - startColumn + 1];
} else if (i === lineDiff) {
markerLines[lineNumber] = [0, endColumn];
} else {
const sourceLength = source[lineNumber - i].length;
markerLines[lineNumber] = [0, sourceLength];
}
}
} else {
if (startColumn === endColumn) {
if (startColumn) {
markerLines[startLine] = [startColumn, 0];
} else {
markerLines[startLine] = true;
}
} else {
markerLines[startLine] = [startColumn, endColumn - startColumn];
}
}
return {
start,
end,
markerLines
};
}
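// Renders the selected lines with a padded line-number gutter, a ">" prefix on
// marked lines and a "^" marker row (with the optional message), wrapping the
// whole frame in a color reset when highlighting is active.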
function codeFrameColumns(rawLines, loc, opts = {}) {
const shouldHighlight = opts.forceColor || isColorSupported() && opts.highlightCode;
const defs = getDefs(shouldHighlight);
const lines = rawLines.split(NEWLINE);
const {
start,
end,
markerLines
} = getMarkerLines(loc, lines, opts);
const hasColumns = loc.start && typeof loc.start.column === "number";
const numberMaxWidth = String(end).length;
const highlightedLines = shouldHighlight ? highlight(rawLines) : rawLines;
let frame = highlightedLines.split(NEWLINE, end).slice(start, end).map((line, index) => {
const number = start + 1 + index;
const paddedNumber = ` ${number}`.slice(-numberMaxWidth);
const gutter = ` ${paddedNumber} |`;
const hasMarker = markerLines[number];
const lastMarkerLine = !markerLines[number + 1];
if (hasMarker) {
let markerLine = "";
if (Array.isArray(hasMarker)) {
const markerSpacing = line.slice(0, Math.max(hasMarker[0] - 1, 0)).replace(/[^\t]/g, " ");
const numberOfMarkers = hasMarker[1] || 1;
markerLine = ["\n ", defs.gutter(gutter.replace(/\d/g, " ")), " ", markerSpacing, defs.marker("^").repeat(numberOfMarkers)].join("");
if (lastMarkerLine && opts.message) {
markerLine += " " + defs.message(opts.message);
}
}
return [defs.marker(">"), defs.gutter(gutter), line.length > 0 ? ` ${line}` : "", markerLine].join("");
} else {
return ` ${defs.gutter(gutter)}${line.length > 0 ? ` ${line}` : ""}`;
}
}).join("\n");
if (opts.message && !hasColumns) {
frame = `${" ".repeat(numberMaxWidth + 1)}${opts.message}\n${frame}`;
}
if (shouldHighlight) {
return defs.reset(frame);
} else {
return frame;
}
}
function index (rawLines, lineNumber, colNumber, opts = {}) {
if (!deprecationWarningShown) {
deprecationWarningShown = true;
const message = "Passing lineNumber and colNumber is deprecated to @babel/code-frame. Please use `codeFrameColumns`.";
if (process.emitWarning) {
process.emitWarning(message, "DeprecationWarning");
} else {
const deprecationError = new Error(message);
deprecationError.name = "DeprecationWarning";
console.warn(deprecationError);
}
}
colNumber = Math.max(colNumber, 0);
const location = {
start: {
column: colNumber,
line: lineNumber
}
};
return codeFrameColumns(rawLines, location, opts);
}
exports.codeFrameColumns = codeFrameColumns;
exports.default = index;
exports.highlight = highlight;
//# sourceMappingURL=index.js.map

File diff suppressed because one or more lines are too long

@ -0,0 +1,31 @@
{
"name": "@babel/code-frame",
"version": "7.26.2",
"description": "Generate errors that contain a code frame that point to source locations.",
"author": "The Babel Team (https://babel.dev/team)",
"homepage": "https://babel.dev/docs/en/next/babel-code-frame",
"bugs": "https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen",
"license": "MIT",
"publishConfig": {
"access": "public"
},
"repository": {
"type": "git",
"url": "https://github.com/babel/babel.git",
"directory": "packages/babel-code-frame"
},
"main": "./lib/index.js",
"dependencies": {
"@babel/helper-validator-identifier": "^7.25.9",
"js-tokens": "^4.0.0",
"picocolors": "^1.0.0"
},
"devDependencies": {
"import-meta-resolve": "^4.1.0",
"strip-ansi": "^4.0.0"
},
"engines": {
"node": ">=6.9.0"
},
"type": "commonjs"
}

@ -0,0 +1,22 @@
MIT License
Copyright (c) 2014-present Sebastian McKenzie and other contributors
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -0,0 +1,19 @@
# @babel/compat-data
> The compat-data to determine required Babel plugins
See our website [@babel/compat-data](https://babeljs.io/docs/babel-compat-data) for more information.
## Install
Using npm:
```sh
npm install --save @babel/compat-data
```
or using yarn:
```sh
yarn add @babel/compat-data
```
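A minimal sketch of reading the exposed data (the subpath exports are declared in this package's `package.json` further down in this diff; the keys queried below come from `data/plugins.json` and `data/native-modules.json`):
```js
// Every subpath export is a plain JSON map: plugin name -> minimum engine versions.
const plugins = require("@babel/compat-data/plugins");
const nativeModules = require("@babel/compat-data/native-modules");

// First engine versions that no longer need the optional-chaining transform.
console.log(plugins["transform-optional-chaining"].chrome); // "91"

// First engines with native ES module support.
console.log(nativeModules["es6.module"].node); // "13.2.0"
```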

@ -0,0 +1,2 @@
// Todo (Babel 8): remove this file as Babel 8 drops support for core-js 2
module.exports = require("./data/corejs2-built-ins.json");

@ -0,0 +1,2 @@
// Todo (Babel 8): remove this file now that it is included in babel-plugin-polyfill-corejs3
module.exports = require("./data/corejs3-shipped-proposals.json");

@ -0,0 +1,5 @@
[
"esnext.promise.all-settled",
"esnext.string.match-all",
"esnext.global-this"
]

@ -0,0 +1,18 @@
{
"es6.module": {
"chrome": "61",
"and_chr": "61",
"edge": "16",
"firefox": "60",
"and_ff": "60",
"node": "13.2.0",
"opera": "48",
"op_mob": "45",
"safari": "10.1",
"ios": "10.3",
"samsung": "8.2",
"android": "61",
"electron": "2.0",
"ios_saf": "10.3"
}
}

@ -0,0 +1,35 @@
{
"transform-async-to-generator": [
"bugfix/transform-async-arrows-in-class"
],
"transform-parameters": [
"bugfix/transform-edge-default-parameters",
"bugfix/transform-safari-id-destructuring-collision-in-function-expression"
],
"transform-function-name": [
"bugfix/transform-edge-function-name"
],
"transform-block-scoping": [
"bugfix/transform-safari-block-shadowing",
"bugfix/transform-safari-for-shadowing"
],
"transform-template-literals": [
"bugfix/transform-tagged-template-caching"
],
"transform-optional-chaining": [
"bugfix/transform-v8-spread-parameters-in-optional-chaining"
],
"proposal-optional-chaining": [
"bugfix/transform-v8-spread-parameters-in-optional-chaining"
],
"transform-class-properties": [
"bugfix/transform-v8-static-class-fields-redefine-readonly",
"bugfix/transform-firefox-class-in-computed-class-key",
"bugfix/transform-safari-class-field-initializer-scope"
],
"proposal-class-properties": [
"bugfix/transform-v8-static-class-fields-redefine-readonly",
"bugfix/transform-firefox-class-in-computed-class-key",
"bugfix/transform-safari-class-field-initializer-scope"
]
}

@ -0,0 +1,213 @@
{
"bugfix/transform-async-arrows-in-class": {
"chrome": "55",
"opera": "42",
"edge": "15",
"firefox": "52",
"safari": "11",
"node": "7.6",
"deno": "1",
"ios": "11",
"samsung": "6",
"opera_mobile": "42",
"electron": "1.6"
},
"bugfix/transform-edge-default-parameters": {
"chrome": "49",
"opera": "36",
"edge": "18",
"firefox": "52",
"safari": "10",
"node": "6",
"deno": "1",
"ios": "10",
"samsung": "5",
"opera_mobile": "36",
"electron": "0.37"
},
"bugfix/transform-edge-function-name": {
"chrome": "51",
"opera": "38",
"edge": "79",
"firefox": "53",
"safari": "10",
"node": "6.5",
"deno": "1",
"ios": "10",
"samsung": "5",
"opera_mobile": "41",
"electron": "1.2"
},
"bugfix/transform-safari-block-shadowing": {
"chrome": "49",
"opera": "36",
"edge": "12",
"firefox": "44",
"safari": "11",
"node": "6",
"deno": "1",
"ie": "11",
"ios": "11",
"samsung": "5",
"opera_mobile": "36",
"electron": "0.37"
},
"bugfix/transform-safari-for-shadowing": {
"chrome": "49",
"opera": "36",
"edge": "12",
"firefox": "4",
"safari": "11",
"node": "6",
"deno": "1",
"ie": "11",
"ios": "11",
"samsung": "5",
"rhino": "1.7.13",
"opera_mobile": "36",
"electron": "0.37"
},
"bugfix/transform-safari-id-destructuring-collision-in-function-expression": {
"chrome": "49",
"opera": "36",
"edge": "14",
"firefox": "2",
"safari": "16.3",
"node": "6",
"deno": "1",
"ios": "16.3",
"samsung": "5",
"opera_mobile": "36",
"electron": "0.37"
},
"bugfix/transform-tagged-template-caching": {
"chrome": "41",
"opera": "28",
"edge": "12",
"firefox": "34",
"safari": "13",
"node": "4",
"deno": "1",
"ios": "13",
"samsung": "3.4",
"rhino": "1.7.14",
"opera_mobile": "28",
"electron": "0.21"
},
"bugfix/transform-v8-spread-parameters-in-optional-chaining": {
"chrome": "91",
"opera": "77",
"edge": "91",
"firefox": "74",
"safari": "13.1",
"node": "16.9",
"deno": "1.9",
"ios": "13.4",
"samsung": "16",
"opera_mobile": "64",
"electron": "13.0"
},
"bugfix/transform-firefox-class-in-computed-class-key": {
"chrome": "74",
"opera": "62",
"edge": "79",
"safari": "16",
"node": "12",
"deno": "1",
"ios": "16",
"samsung": "11",
"opera_mobile": "53",
"electron": "6.0"
},
"transform-optional-chaining": {
"chrome": "80",
"opera": "67",
"edge": "80",
"firefox": "74",
"safari": "13.1",
"node": "14",
"deno": "1",
"ios": "13.4",
"samsung": "13",
"opera_mobile": "57",
"electron": "8.0"
},
"proposal-optional-chaining": {
"chrome": "80",
"opera": "67",
"edge": "80",
"firefox": "74",
"safari": "13.1",
"node": "14",
"deno": "1",
"ios": "13.4",
"samsung": "13",
"opera_mobile": "57",
"electron": "8.0"
},
"transform-parameters": {
"chrome": "49",
"opera": "36",
"edge": "15",
"firefox": "53",
"safari": "10",
"node": "6",
"deno": "1",
"ios": "10",
"samsung": "5",
"opera_mobile": "36",
"electron": "0.37"
},
"transform-async-to-generator": {
"chrome": "55",
"opera": "42",
"edge": "15",
"firefox": "52",
"safari": "10.1",
"node": "7.6",
"deno": "1",
"ios": "10.3",
"samsung": "6",
"opera_mobile": "42",
"electron": "1.6"
},
"transform-template-literals": {
"chrome": "41",
"opera": "28",
"edge": "13",
"firefox": "34",
"safari": "9",
"node": "4",
"deno": "1",
"ios": "9",
"samsung": "3.4",
"opera_mobile": "28",
"electron": "0.21"
},
"transform-function-name": {
"chrome": "51",
"opera": "38",
"edge": "14",
"firefox": "53",
"safari": "10",
"node": "6.5",
"deno": "1",
"ios": "10",
"samsung": "5",
"opera_mobile": "41",
"electron": "1.2"
},
"transform-block-scoping": {
"chrome": "50",
"opera": "37",
"edge": "14",
"firefox": "53",
"safari": "10",
"node": "6",
"deno": "1",
"ios": "10",
"samsung": "5",
"opera_mobile": "37",
"electron": "1.1"
}
}

@ -0,0 +1,824 @@
{
"transform-duplicate-named-capturing-groups-regex": {
"chrome": "126",
"opera": "112",
"edge": "126",
"firefox": "129",
"safari": "17.4",
"node": "23",
"ios": "17.4",
"electron": "31.0"
},
"transform-regexp-modifiers": {
"chrome": "125",
"opera": "111",
"edge": "125",
"firefox": "132",
"node": "23",
"electron": "31.0"
},
"transform-unicode-sets-regex": {
"chrome": "112",
"opera": "98",
"edge": "112",
"firefox": "116",
"safari": "17",
"node": "20",
"deno": "1.32",
"ios": "17",
"opera_mobile": "75",
"electron": "24.0"
},
"bugfix/transform-v8-static-class-fields-redefine-readonly": {
"chrome": "98",
"opera": "84",
"edge": "98",
"firefox": "75",
"safari": "15",
"node": "12",
"deno": "1.18",
"ios": "15",
"samsung": "11",
"opera_mobile": "52",
"electron": "17.0"
},
"bugfix/transform-firefox-class-in-computed-class-key": {
"chrome": "74",
"opera": "62",
"edge": "79",
"safari": "16",
"node": "12",
"deno": "1",
"ios": "16",
"samsung": "11",
"opera_mobile": "53",
"electron": "6.0"
},
"bugfix/transform-safari-class-field-initializer-scope": {
"chrome": "74",
"opera": "62",
"edge": "79",
"firefox": "69",
"safari": "16",
"node": "12",
"deno": "1",
"ios": "16",
"samsung": "11",
"opera_mobile": "53",
"electron": "6.0"
},
"transform-class-static-block": {
"chrome": "94",
"opera": "80",
"edge": "94",
"firefox": "93",
"safari": "16.4",
"node": "16.11",
"deno": "1.14",
"ios": "16.4",
"samsung": "17",
"opera_mobile": "66",
"electron": "15.0"
},
"proposal-class-static-block": {
"chrome": "94",
"opera": "80",
"edge": "94",
"firefox": "93",
"safari": "16.4",
"node": "16.11",
"deno": "1.14",
"ios": "16.4",
"samsung": "17",
"opera_mobile": "66",
"electron": "15.0"
},
"transform-private-property-in-object": {
"chrome": "91",
"opera": "77",
"edge": "91",
"firefox": "90",
"safari": "15",
"node": "16.9",
"deno": "1.9",
"ios": "15",
"samsung": "16",
"opera_mobile": "64",
"electron": "13.0"
},
"proposal-private-property-in-object": {
"chrome": "91",
"opera": "77",
"edge": "91",
"firefox": "90",
"safari": "15",
"node": "16.9",
"deno": "1.9",
"ios": "15",
"samsung": "16",
"opera_mobile": "64",
"electron": "13.0"
},
"transform-class-properties": {
"chrome": "74",
"opera": "62",
"edge": "79",
"firefox": "90",
"safari": "14.1",
"node": "12",
"deno": "1",
"ios": "14.5",
"samsung": "11",
"opera_mobile": "53",
"electron": "6.0"
},
"proposal-class-properties": {
"chrome": "74",
"opera": "62",
"edge": "79",
"firefox": "90",
"safari": "14.1",
"node": "12",
"deno": "1",
"ios": "14.5",
"samsung": "11",
"opera_mobile": "53",
"electron": "6.0"
},
"transform-private-methods": {
"chrome": "84",
"opera": "70",
"edge": "84",
"firefox": "90",
"safari": "15",
"node": "14.6",
"deno": "1",
"ios": "15",
"samsung": "14",
"opera_mobile": "60",
"electron": "10.0"
},
"proposal-private-methods": {
"chrome": "84",
"opera": "70",
"edge": "84",
"firefox": "90",
"safari": "15",
"node": "14.6",
"deno": "1",
"ios": "15",
"samsung": "14",
"opera_mobile": "60",
"electron": "10.0"
},
"transform-numeric-separator": {
"chrome": "75",
"opera": "62",
"edge": "79",
"firefox": "70",
"safari": "13",
"node": "12.5",
"deno": "1",
"ios": "13",
"samsung": "11",
"rhino": "1.7.14",
"opera_mobile": "54",
"electron": "6.0"
},
"proposal-numeric-separator": {
"chrome": "75",
"opera": "62",
"edge": "79",
"firefox": "70",
"safari": "13",
"node": "12.5",
"deno": "1",
"ios": "13",
"samsung": "11",
"rhino": "1.7.14",
"opera_mobile": "54",
"electron": "6.0"
},
"transform-logical-assignment-operators": {
"chrome": "85",
"opera": "71",
"edge": "85",
"firefox": "79",
"safari": "14",
"node": "15",
"deno": "1.2",
"ios": "14",
"samsung": "14",
"opera_mobile": "60",
"electron": "10.0"
},
"proposal-logical-assignment-operators": {
"chrome": "85",
"opera": "71",
"edge": "85",
"firefox": "79",
"safari": "14",
"node": "15",
"deno": "1.2",
"ios": "14",
"samsung": "14",
"opera_mobile": "60",
"electron": "10.0"
},
"transform-nullish-coalescing-operator": {
"chrome": "80",
"opera": "67",
"edge": "80",
"firefox": "72",
"safari": "13.1",
"node": "14",
"deno": "1",
"ios": "13.4",
"samsung": "13",
"opera_mobile": "57",
"electron": "8.0"
},
"proposal-nullish-coalescing-operator": {
"chrome": "80",
"opera": "67",
"edge": "80",
"firefox": "72",
"safari": "13.1",
"node": "14",
"deno": "1",
"ios": "13.4",
"samsung": "13",
"opera_mobile": "57",
"electron": "8.0"
},
"transform-optional-chaining": {
"chrome": "91",
"opera": "77",
"edge": "91",
"firefox": "74",
"safari": "13.1",
"node": "16.9",
"deno": "1.9",
"ios": "13.4",
"samsung": "16",
"opera_mobile": "64",
"electron": "13.0"
},
"proposal-optional-chaining": {
"chrome": "91",
"opera": "77",
"edge": "91",
"firefox": "74",
"safari": "13.1",
"node": "16.9",
"deno": "1.9",
"ios": "13.4",
"samsung": "16",
"opera_mobile": "64",
"electron": "13.0"
},
"transform-json-strings": {
"chrome": "66",
"opera": "53",
"edge": "79",
"firefox": "62",
"safari": "12",
"node": "10",
"deno": "1",
"ios": "12",
"samsung": "9",
"rhino": "1.7.14",
"opera_mobile": "47",
"electron": "3.0"
},
"proposal-json-strings": {
"chrome": "66",
"opera": "53",
"edge": "79",
"firefox": "62",
"safari": "12",
"node": "10",
"deno": "1",
"ios": "12",
"samsung": "9",
"rhino": "1.7.14",
"opera_mobile": "47",
"electron": "3.0"
},
"transform-optional-catch-binding": {
"chrome": "66",
"opera": "53",
"edge": "79",
"firefox": "58",
"safari": "11.1",
"node": "10",
"deno": "1",
"ios": "11.3",
"samsung": "9",
"opera_mobile": "47",
"electron": "3.0"
},
"proposal-optional-catch-binding": {
"chrome": "66",
"opera": "53",
"edge": "79",
"firefox": "58",
"safari": "11.1",
"node": "10",
"deno": "1",
"ios": "11.3",
"samsung": "9",
"opera_mobile": "47",
"electron": "3.0"
},
"transform-parameters": {
"chrome": "49",
"opera": "36",
"edge": "18",
"firefox": "53",
"safari": "16.3",
"node": "6",
"deno": "1",
"ios": "16.3",
"samsung": "5",
"opera_mobile": "36",
"electron": "0.37"
},
"transform-async-generator-functions": {
"chrome": "63",
"opera": "50",
"edge": "79",
"firefox": "57",
"safari": "12",
"node": "10",
"deno": "1",
"ios": "12",
"samsung": "8",
"opera_mobile": "46",
"electron": "3.0"
},
"proposal-async-generator-functions": {
"chrome": "63",
"opera": "50",
"edge": "79",
"firefox": "57",
"safari": "12",
"node": "10",
"deno": "1",
"ios": "12",
"samsung": "8",
"opera_mobile": "46",
"electron": "3.0"
},
"transform-object-rest-spread": {
"chrome": "60",
"opera": "47",
"edge": "79",
"firefox": "55",
"safari": "11.1",
"node": "8.3",
"deno": "1",
"ios": "11.3",
"samsung": "8",
"opera_mobile": "44",
"electron": "2.0"
},
"proposal-object-rest-spread": {
"chrome": "60",
"opera": "47",
"edge": "79",
"firefox": "55",
"safari": "11.1",
"node": "8.3",
"deno": "1",
"ios": "11.3",
"samsung": "8",
"opera_mobile": "44",
"electron": "2.0"
},
"transform-dotall-regex": {
"chrome": "62",
"opera": "49",
"edge": "79",
"firefox": "78",
"safari": "11.1",
"node": "8.10",
"deno": "1",
"ios": "11.3",
"samsung": "8",
"rhino": "1.7.15",
"opera_mobile": "46",
"electron": "3.0"
},
"transform-unicode-property-regex": {
"chrome": "64",
"opera": "51",
"edge": "79",
"firefox": "78",
"safari": "11.1",
"node": "10",
"deno": "1",
"ios": "11.3",
"samsung": "9",
"opera_mobile": "47",
"electron": "3.0"
},
"proposal-unicode-property-regex": {
"chrome": "64",
"opera": "51",
"edge": "79",
"firefox": "78",
"safari": "11.1",
"node": "10",
"deno": "1",
"ios": "11.3",
"samsung": "9",
"opera_mobile": "47",
"electron": "3.0"
},
"transform-named-capturing-groups-regex": {
"chrome": "64",
"opera": "51",
"edge": "79",
"firefox": "78",
"safari": "11.1",
"node": "10",
"deno": "1",
"ios": "11.3",
"samsung": "9",
"opera_mobile": "47",
"electron": "3.0"
},
"transform-async-to-generator": {
"chrome": "55",
"opera": "42",
"edge": "15",
"firefox": "52",
"safari": "11",
"node": "7.6",
"deno": "1",
"ios": "11",
"samsung": "6",
"opera_mobile": "42",
"electron": "1.6"
},
"transform-exponentiation-operator": {
"chrome": "52",
"opera": "39",
"edge": "14",
"firefox": "52",
"safari": "10.1",
"node": "7",
"deno": "1",
"ios": "10.3",
"samsung": "6",
"rhino": "1.7.14",
"opera_mobile": "41",
"electron": "1.3"
},
"transform-template-literals": {
"chrome": "41",
"opera": "28",
"edge": "13",
"firefox": "34",
"safari": "13",
"node": "4",
"deno": "1",
"ios": "13",
"samsung": "3.4",
"opera_mobile": "28",
"electron": "0.21"
},
"transform-literals": {
"chrome": "44",
"opera": "31",
"edge": "12",
"firefox": "53",
"safari": "9",
"node": "4",
"deno": "1",
"ios": "9",
"samsung": "4",
"rhino": "1.7.15",
"opera_mobile": "32",
"electron": "0.30"
},
"transform-function-name": {
"chrome": "51",
"opera": "38",
"edge": "79",
"firefox": "53",
"safari": "10",
"node": "6.5",
"deno": "1",
"ios": "10",
"samsung": "5",
"opera_mobile": "41",
"electron": "1.2"
},
"transform-arrow-functions": {
"chrome": "47",
"opera": "34",
"edge": "13",
"firefox": "43",
"safari": "10",
"node": "6",
"deno": "1",
"ios": "10",
"samsung": "5",
"rhino": "1.7.13",
"opera_mobile": "34",
"electron": "0.36"
},
"transform-block-scoped-functions": {
"chrome": "41",
"opera": "28",
"edge": "12",
"firefox": "46",
"safari": "10",
"node": "4",
"deno": "1",
"ie": "11",
"ios": "10",
"samsung": "3.4",
"opera_mobile": "28",
"electron": "0.21"
},
"transform-classes": {
"chrome": "46",
"opera": "33",
"edge": "13",
"firefox": "45",
"safari": "10",
"node": "5",
"deno": "1",
"ios": "10",
"samsung": "5",
"opera_mobile": "33",
"electron": "0.36"
},
"transform-object-super": {
"chrome": "46",
"opera": "33",
"edge": "13",
"firefox": "45",
"safari": "10",
"node": "5",
"deno": "1",
"ios": "10",
"samsung": "5",
"opera_mobile": "33",
"electron": "0.36"
},
"transform-shorthand-properties": {
"chrome": "43",
"opera": "30",
"edge": "12",
"firefox": "33",
"safari": "9",
"node": "4",
"deno": "1",
"ios": "9",
"samsung": "4",
"rhino": "1.7.14",
"opera_mobile": "30",
"electron": "0.27"
},
"transform-duplicate-keys": {
"chrome": "42",
"opera": "29",
"edge": "12",
"firefox": "34",
"safari": "9",
"node": "4",
"deno": "1",
"ios": "9",
"samsung": "3.4",
"opera_mobile": "29",
"electron": "0.25"
},
"transform-computed-properties": {
"chrome": "44",
"opera": "31",
"edge": "12",
"firefox": "34",
"safari": "7.1",
"node": "4",
"deno": "1",
"ios": "8",
"samsung": "4",
"opera_mobile": "32",
"electron": "0.30"
},
"transform-for-of": {
"chrome": "51",
"opera": "38",
"edge": "15",
"firefox": "53",
"safari": "10",
"node": "6.5",
"deno": "1",
"ios": "10",
"samsung": "5",
"opera_mobile": "41",
"electron": "1.2"
},
"transform-sticky-regex": {
"chrome": "49",
"opera": "36",
"edge": "13",
"firefox": "3",
"safari": "10",
"node": "6",
"deno": "1",
"ios": "10",
"samsung": "5",
"rhino": "1.7.15",
"opera_mobile": "36",
"electron": "0.37"
},
"transform-unicode-escapes": {
"chrome": "44",
"opera": "31",
"edge": "12",
"firefox": "53",
"safari": "9",
"node": "4",
"deno": "1",
"ios": "9",
"samsung": "4",
"rhino": "1.7.15",
"opera_mobile": "32",
"electron": "0.30"
},
"transform-unicode-regex": {
"chrome": "50",
"opera": "37",
"edge": "13",
"firefox": "46",
"safari": "12",
"node": "6",
"deno": "1",
"ios": "12",
"samsung": "5",
"opera_mobile": "37",
"electron": "1.1"
},
"transform-spread": {
"chrome": "46",
"opera": "33",
"edge": "13",
"firefox": "45",
"safari": "10",
"node": "5",
"deno": "1",
"ios": "10",
"samsung": "5",
"opera_mobile": "33",
"electron": "0.36"
},
"transform-destructuring": {
"chrome": "51",
"opera": "38",
"edge": "15",
"firefox": "53",
"safari": "10",
"node": "6.5",
"deno": "1",
"ios": "10",
"samsung": "5",
"opera_mobile": "41",
"electron": "1.2"
},
"transform-block-scoping": {
"chrome": "50",
"opera": "37",
"edge": "14",
"firefox": "53",
"safari": "11",
"node": "6",
"deno": "1",
"ios": "11",
"samsung": "5",
"opera_mobile": "37",
"electron": "1.1"
},
"transform-typeof-symbol": {
"chrome": "48",
"opera": "35",
"edge": "12",
"firefox": "36",
"safari": "9",
"node": "6",
"deno": "1",
"ios": "9",
"samsung": "5",
"opera_mobile": "35",
"electron": "0.37"
},
"transform-new-target": {
"chrome": "46",
"opera": "33",
"edge": "14",
"firefox": "41",
"safari": "10",
"node": "5",
"deno": "1",
"ios": "10",
"samsung": "5",
"opera_mobile": "33",
"electron": "0.36"
},
"transform-regenerator": {
"chrome": "50",
"opera": "37",
"edge": "13",
"firefox": "53",
"safari": "10",
"node": "6",
"deno": "1",
"ios": "10",
"samsung": "5",
"opera_mobile": "37",
"electron": "1.1"
},
"transform-member-expression-literals": {
"chrome": "7",
"opera": "12",
"edge": "12",
"firefox": "2",
"safari": "5.1",
"node": "0.4",
"deno": "1",
"ie": "9",
"android": "4",
"ios": "6",
"phantom": "1.9",
"samsung": "1",
"rhino": "1.7.13",
"opera_mobile": "12",
"electron": "0.20"
},
"transform-property-literals": {
"chrome": "7",
"opera": "12",
"edge": "12",
"firefox": "2",
"safari": "5.1",
"node": "0.4",
"deno": "1",
"ie": "9",
"android": "4",
"ios": "6",
"phantom": "1.9",
"samsung": "1",
"rhino": "1.7.13",
"opera_mobile": "12",
"electron": "0.20"
},
"transform-reserved-words": {
"chrome": "13",
"opera": "10.50",
"edge": "12",
"firefox": "2",
"safari": "3.1",
"node": "0.6",
"deno": "1",
"ie": "9",
"android": "4.4",
"ios": "6",
"phantom": "1.9",
"samsung": "1",
"rhino": "1.7.13",
"opera_mobile": "10.1",
"electron": "0.20"
},
"transform-export-namespace-from": {
"chrome": "72",
"deno": "1.0",
"edge": "79",
"firefox": "80",
"node": "13.2",
"opera": "60",
"opera_mobile": "51",
"safari": "14.1",
"ios": "14.5",
"samsung": "11.0",
"android": "72",
"electron": "5.0"
},
"proposal-export-namespace-from": {
"chrome": "72",
"deno": "1.0",
"edge": "79",
"firefox": "80",
"node": "13.2",
"opera": "60",
"opera_mobile": "51",
"safari": "14.1",
"ios": "14.5",
"samsung": "11.0",
"android": "72",
"electron": "5.0"
}
}

@ -0,0 +1 @@
module.exports = require("./data/native-modules.json");

@ -0,0 +1 @@
module.exports = require("./data/overlapping-plugins.json");

@ -0,0 +1,40 @@
{
"name": "@babel/compat-data",
"version": "7.26.8",
"author": "The Babel Team (https://babel.dev/team)",
"license": "MIT",
"description": "The compat-data to determine required Babel plugins",
"repository": {
"type": "git",
"url": "https://github.com/babel/babel.git",
"directory": "packages/babel-compat-data"
},
"publishConfig": {
"access": "public"
},
"exports": {
"./plugins": "./plugins.js",
"./native-modules": "./native-modules.js",
"./corejs2-built-ins": "./corejs2-built-ins.js",
"./corejs3-shipped-proposals": "./corejs3-shipped-proposals.js",
"./overlapping-plugins": "./overlapping-plugins.js",
"./plugin-bugfixes": "./plugin-bugfixes.js"
},
"scripts": {
"build-data": "./scripts/download-compat-table.sh && node ./scripts/build-data.js && node ./scripts/build-modules-support.js && node ./scripts/build-bugfixes-targets.js"
},
"keywords": [
"babel",
"compat-table",
"compat-data"
],
"devDependencies": {
"@mdn/browser-compat-data": "^5.5.36",
"core-js-compat": "^3.40.0",
"electron-to-chromium": "^1.4.816"
},
"engines": {
"node": ">=6.9.0"
},
"type": "commonjs"
}

@ -0,0 +1 @@
module.exports = require("./data/plugin-bugfixes.json");

@ -0,0 +1 @@
module.exports = require("./data/plugins.json");

@ -0,0 +1,22 @@
MIT License
Copyright (c) 2014-present Sebastian McKenzie and other contributors
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -0,0 +1,19 @@
# @babel/core
> Babel compiler core.
See our website [@babel/core](https://babeljs.io/docs/babel-core) for more information or the [issues](https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A%20core%22+is%3Aopen) associated with this package.
## Install
Using npm:
```sh
npm install --save-dev @babel/core
```
or using yarn:
```sh
yarn add @babel/core --dev
```
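A minimal transform sketch (the input string is made up and the preset named below is illustrative and must be installed separately; `transformAsync` and the `sourceMaps` option are part of the public API proxied by the `cjs-proxy.cjs` file that follows):
```js
const babel = require("@babel/core");

// Promise-based variant; callback-style `transform` and blocking
// `transformSync` are also exported.
babel
  .transformAsync("const square = (n) => n * n;", {
    presets: ["@babel/preset-env"], // illustrative, not part of this package
    sourceMaps: true,
  })
  .then(({ code, map }) => {
    console.log(code);
  });
```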

@ -0,0 +1,68 @@
"use strict";
const babelP = import("./lib/index.js");
let babel = null;
Object.defineProperty(exports, "__ initialize @babel/core cjs proxy __", {
set(val) {
babel = val;
},
});
exports.version = require("./package.json").version;
const functionNames = [
"createConfigItem",
"loadPartialConfig",
"loadOptions",
"transform",
"transformFile",
"transformFromAst",
"parse",
];
const propertyNames = [
"buildExternalHelpers",
"types",
"tokTypes",
"traverse",
"template",
];
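// Each function name below gets three exports: `name` (callback-style, run once
// the lazy `import()` above resolves), `${name}Async` (promise-based) and
// `${name}Sync` (throws until the ESM build has loaded and registered itself
// through the "__ initialize @babel/core cjs proxy __" setter).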
for (const name of functionNames) {
exports[name] = function (...args) {
if (
process.env.BABEL_8_BREAKING &&
typeof args[args.length - 1] !== "function"
) {
throw new Error(
`Starting from Babel 8.0.0, the '${name}' function expects a callback. If you need to call it synchronously, please use '${name}Sync'.`
);
}
babelP.then(babel => {
babel[name](...args);
});
};
exports[`${name}Async`] = function (...args) {
return babelP.then(babel => babel[`${name}Async`](...args));
};
exports[`${name}Sync`] = function (...args) {
if (!babel) throw notLoadedError(`${name}Sync`, "callable");
return babel[`${name}Sync`](...args);
};
}
for (const name of propertyNames) {
Object.defineProperty(exports, name, {
get() {
if (!babel) throw notLoadedError(name, "accessible");
return babel[name];
},
});
}
function notLoadedError(name, keyword) {
return new Error(
`The \`${name}\` export of @babel/core is only ${keyword}` +
` from the CommonJS version after the ESM version has been loaded.`
);
}

@ -0,0 +1,3 @@
0 && 0;
//# sourceMappingURL=cache-contexts.js.map

@ -0,0 +1 @@
{"version":3,"names":[],"sources":["../../src/config/cache-contexts.ts"],"sourcesContent":["import type { Targets } from \"@babel/helper-compilation-targets\";\n\nimport type { ConfigContext } from \"./config-chain.ts\";\nimport type { CallerMetadata } from \"./validation/options.ts\";\n\nexport type { ConfigContext as FullConfig };\n\nexport type FullPreset = {\n targets: Targets;\n} & ConfigContext;\nexport type FullPlugin = {\n assumptions: { [name: string]: boolean };\n} & FullPreset;\n\n// Context not including filename since it is used in places that cannot\n// process 'ignore'/'only' and other filename-based logic.\nexport type SimpleConfig = {\n envName: string;\n caller: CallerMetadata | undefined;\n};\nexport type SimplePreset = {\n targets: Targets;\n} & SimpleConfig;\nexport type SimplePlugin = {\n assumptions: {\n [name: string]: boolean;\n };\n} & SimplePreset;\n"],"mappings":"","ignoreList":[]}

@ -0,0 +1,261 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.assertSimpleType = assertSimpleType;
exports.makeStrongCache = makeStrongCache;
exports.makeStrongCacheSync = makeStrongCacheSync;
exports.makeWeakCache = makeWeakCache;
exports.makeWeakCacheSync = makeWeakCacheSync;
function _gensync() {
const data = require("gensync");
_gensync = function () {
return data;
};
return data;
}
var _async = require("../gensync-utils/async.js");
var _util = require("./util.js");
const synchronize = gen => {
return _gensync()(gen).sync;
};
function* genTrue() {
return true;
}
function makeWeakCache(handler) {
return makeCachedFunction(WeakMap, handler);
}
function makeWeakCacheSync(handler) {
return synchronize(makeWeakCache(handler));
}
function makeStrongCache(handler) {
return makeCachedFunction(Map, handler);
}
function makeStrongCacheSync(handler) {
return synchronize(makeStrongCache(handler));
}
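// Builds a gensync-compatible memoizer around `handler`: results are cached per
// first argument, sync and async callers use separate caches, and `futureCache`
// plus `Lock` let concurrent async callers await an in-flight computation
// instead of invoking the handler again.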
function makeCachedFunction(CallCache, handler) {
const callCacheSync = new CallCache();
const callCacheAsync = new CallCache();
const futureCache = new CallCache();
return function* cachedFunction(arg, data) {
const asyncContext = yield* (0, _async.isAsync)();
const callCache = asyncContext ? callCacheAsync : callCacheSync;
const cached = yield* getCachedValueOrWait(asyncContext, callCache, futureCache, arg, data);
if (cached.valid) return cached.value;
const cache = new CacheConfigurator(data);
const handlerResult = handler(arg, cache);
let finishLock;
let value;
if ((0, _util.isIterableIterator)(handlerResult)) {
value = yield* (0, _async.onFirstPause)(handlerResult, () => {
finishLock = setupAsyncLocks(cache, futureCache, arg);
});
} else {
value = handlerResult;
}
updateFunctionCache(callCache, cache, arg, value);
if (finishLock) {
futureCache.delete(arg);
finishLock.release(value);
}
return value;
};
}
function* getCachedValue(cache, arg, data) {
const cachedValue = cache.get(arg);
if (cachedValue) {
for (const {
value,
valid
} of cachedValue) {
if (yield* valid(data)) return {
valid: true,
value
};
}
}
return {
valid: false,
value: null
};
}
function* getCachedValueOrWait(asyncContext, callCache, futureCache, arg, data) {
const cached = yield* getCachedValue(callCache, arg, data);
if (cached.valid) {
return cached;
}
if (asyncContext) {
const cached = yield* getCachedValue(futureCache, arg, data);
if (cached.valid) {
const value = yield* (0, _async.waitFor)(cached.value.promise);
return {
valid: true,
value
};
}
}
return {
valid: false,
value: null
};
}
function setupAsyncLocks(config, futureCache, arg) {
const finishLock = new Lock();
updateFunctionCache(futureCache, config, arg, finishLock);
return finishLock;
}
function updateFunctionCache(cache, config, arg, value) {
if (!config.configured()) config.forever();
let cachedValue = cache.get(arg);
config.deactivate();
switch (config.mode()) {
case "forever":
cachedValue = [{
value,
valid: genTrue
}];
cache.set(arg, cachedValue);
break;
case "invalidate":
cachedValue = [{
value,
valid: config.validator()
}];
cache.set(arg, cachedValue);
break;
case "valid":
if (cachedValue) {
cachedValue.push({
value,
valid: config.validator()
});
} else {
cachedValue = [{
value,
valid: config.validator()
}];
cache.set(arg, cachedValue);
}
}
}
class CacheConfigurator {
constructor(data) {
this._active = true;
this._never = false;
this._forever = false;
this._invalidate = false;
this._configured = false;
this._pairs = [];
this._data = void 0;
this._data = data;
}
simple() {
return makeSimpleConfigurator(this);
}
mode() {
if (this._never) return "never";
if (this._forever) return "forever";
if (this._invalidate) return "invalidate";
return "valid";
}
forever() {
if (!this._active) {
throw new Error("Cannot change caching after evaluation has completed.");
}
if (this._never) {
throw new Error("Caching has already been configured with .never()");
}
this._forever = true;
this._configured = true;
}
never() {
if (!this._active) {
throw new Error("Cannot change caching after evaluation has completed.");
}
if (this._forever) {
throw new Error("Caching has already been configured with .forever()");
}
this._never = true;
this._configured = true;
}
using(handler) {
if (!this._active) {
throw new Error("Cannot change caching after evaluation has completed.");
}
if (this._never || this._forever) {
throw new Error("Caching has already been configured with .never or .forever()");
}
this._configured = true;
const key = handler(this._data);
const fn = (0, _async.maybeAsync)(handler, `You appear to be using an async cache handler, but Babel has been called synchronously`);
if ((0, _async.isThenable)(key)) {
return key.then(key => {
this._pairs.push([key, fn]);
return key;
});
}
this._pairs.push([key, fn]);
return key;
}
invalidate(handler) {
this._invalidate = true;
return this.using(handler);
}
validator() {
const pairs = this._pairs;
return function* (data) {
for (const [key, fn] of pairs) {
if (key !== (yield* fn(data))) return false;
}
return true;
};
}
deactivate() {
this._active = false;
}
configured() {
return this._configured;
}
}
function makeSimpleConfigurator(cache) {
function cacheFn(val) {
if (typeof val === "boolean") {
if (val) cache.forever();else cache.never();
return;
}
return cache.using(() => assertSimpleType(val()));
}
cacheFn.forever = () => cache.forever();
cacheFn.never = () => cache.never();
cacheFn.using = cb => cache.using(() => assertSimpleType(cb()));
cacheFn.invalidate = cb => cache.invalidate(() => assertSimpleType(cb()));
return cacheFn;
}
function assertSimpleType(value) {
if ((0, _async.isThenable)(value)) {
throw new Error(`You appear to be using an async cache handler, ` + `which your current version of Babel does not support. ` + `We may add support for this in the future, ` + `but if you're on the most recent version of @babel/core and still ` + `seeing this error, then you'll need to synchronously handle your caching logic.`);
}
if (value != null && typeof value !== "string" && typeof value !== "boolean" && typeof value !== "number") {
throw new Error("Cache keys must be either string, boolean, number, null, or undefined.");
}
return value;
}
class Lock {
constructor() {
this.released = false;
this.promise = void 0;
this._resolve = void 0;
this.promise = new Promise(resolve => {
this._resolve = resolve;
});
}
release(value) {
this.released = true;
this._resolve(value);
}
}
0 && 0;
//# sourceMappingURL=caching.js.map

File diff suppressed because one or more lines are too long

@ -0,0 +1,469 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.buildPresetChain = buildPresetChain;
exports.buildPresetChainWalker = void 0;
exports.buildRootChain = buildRootChain;
function _path() {
const data = require("path");
_path = function () {
return data;
};
return data;
}
function _debug() {
const data = require("debug");
_debug = function () {
return data;
};
return data;
}
var _options = require("./validation/options.js");
var _patternToRegex = require("./pattern-to-regex.js");
var _printer = require("./printer.js");
var _rewriteStackTrace = require("../errors/rewrite-stack-trace.js");
var _configError = require("../errors/config-error.js");
var _index = require("./files/index.js");
var _caching = require("./caching.js");
var _configDescriptors = require("./config-descriptors.js");
const debug = _debug()("babel:config:config-chain");
function* buildPresetChain(arg, context) {
const chain = yield* buildPresetChainWalker(arg, context);
if (!chain) return null;
return {
plugins: dedupDescriptors(chain.plugins),
presets: dedupDescriptors(chain.presets),
options: chain.options.map(o => normalizeOptions(o)),
files: new Set()
};
}
const buildPresetChainWalker = exports.buildPresetChainWalker = makeChainWalker({
root: preset => loadPresetDescriptors(preset),
env: (preset, envName) => loadPresetEnvDescriptors(preset)(envName),
overrides: (preset, index) => loadPresetOverridesDescriptors(preset)(index),
overridesEnv: (preset, index, envName) => loadPresetOverridesEnvDescriptors(preset)(index)(envName),
createLogger: () => () => {}
});
const loadPresetDescriptors = (0, _caching.makeWeakCacheSync)(preset => buildRootDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors));
const loadPresetEnvDescriptors = (0, _caching.makeWeakCacheSync)(preset => (0, _caching.makeStrongCacheSync)(envName => buildEnvDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, envName)));
const loadPresetOverridesDescriptors = (0, _caching.makeWeakCacheSync)(preset => (0, _caching.makeStrongCacheSync)(index => buildOverrideDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, index)));
const loadPresetOverridesEnvDescriptors = (0, _caching.makeWeakCacheSync)(preset => (0, _caching.makeStrongCacheSync)(index => (0, _caching.makeStrongCacheSync)(envName => buildOverrideEnvDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, index, envName))));
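// Resolves the complete root configuration for one compilation: programmatic
// options, the project-wide config file and any relative .babelrc are each
// validated, walked and merged in ascending priority, with per-source reports
// printed when `showConfig` is enabled.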
function* buildRootChain(opts, context) {
let configReport, babelRcReport;
const programmaticLogger = new _printer.ConfigPrinter();
const programmaticChain = yield* loadProgrammaticChain({
options: opts,
dirname: context.cwd
}, context, undefined, programmaticLogger);
if (!programmaticChain) return null;
const programmaticReport = yield* programmaticLogger.output();
let configFile;
if (typeof opts.configFile === "string") {
configFile = yield* (0, _index.loadConfig)(opts.configFile, context.cwd, context.envName, context.caller);
} else if (opts.configFile !== false) {
configFile = yield* (0, _index.findRootConfig)(context.root, context.envName, context.caller);
}
let {
babelrc,
babelrcRoots
} = opts;
let babelrcRootsDirectory = context.cwd;
const configFileChain = emptyChain();
const configFileLogger = new _printer.ConfigPrinter();
if (configFile) {
const validatedFile = validateConfigFile(configFile);
const result = yield* loadFileChain(validatedFile, context, undefined, configFileLogger);
if (!result) return null;
configReport = yield* configFileLogger.output();
if (babelrc === undefined) {
babelrc = validatedFile.options.babelrc;
}
if (babelrcRoots === undefined) {
babelrcRootsDirectory = validatedFile.dirname;
babelrcRoots = validatedFile.options.babelrcRoots;
}
mergeChain(configFileChain, result);
}
let ignoreFile, babelrcFile;
let isIgnored = false;
const fileChain = emptyChain();
if ((babelrc === true || babelrc === undefined) && typeof context.filename === "string") {
const pkgData = yield* (0, _index.findPackageData)(context.filename);
if (pkgData && babelrcLoadEnabled(context, pkgData, babelrcRoots, babelrcRootsDirectory)) {
({
ignore: ignoreFile,
config: babelrcFile
} = yield* (0, _index.findRelativeConfig)(pkgData, context.envName, context.caller));
if (ignoreFile) {
fileChain.files.add(ignoreFile.filepath);
}
if (ignoreFile && shouldIgnore(context, ignoreFile.ignore, null, ignoreFile.dirname)) {
isIgnored = true;
}
if (babelrcFile && !isIgnored) {
const validatedFile = validateBabelrcFile(babelrcFile);
const babelrcLogger = new _printer.ConfigPrinter();
const result = yield* loadFileChain(validatedFile, context, undefined, babelrcLogger);
if (!result) {
isIgnored = true;
} else {
babelRcReport = yield* babelrcLogger.output();
mergeChain(fileChain, result);
}
}
if (babelrcFile && isIgnored) {
fileChain.files.add(babelrcFile.filepath);
}
}
}
if (context.showConfig) {
console.log(`Babel configs on "${context.filename}" (ascending priority):\n` + [configReport, babelRcReport, programmaticReport].filter(x => !!x).join("\n\n") + "\n-----End Babel configs-----");
}
const chain = mergeChain(mergeChain(mergeChain(emptyChain(), configFileChain), fileChain), programmaticChain);
return {
plugins: isIgnored ? [] : dedupDescriptors(chain.plugins),
presets: isIgnored ? [] : dedupDescriptors(chain.presets),
options: isIgnored ? [] : chain.options.map(o => normalizeOptions(o)),
fileHandling: isIgnored ? "ignored" : "transpile",
ignore: ignoreFile || undefined,
babelrc: babelrcFile || undefined,
config: configFile || undefined,
files: chain.files
};
}
function babelrcLoadEnabled(context, pkgData, babelrcRoots, babelrcRootsDirectory) {
if (typeof babelrcRoots === "boolean") return babelrcRoots;
const absoluteRoot = context.root;
if (babelrcRoots === undefined) {
return pkgData.directories.includes(absoluteRoot);
}
let babelrcPatterns = babelrcRoots;
if (!Array.isArray(babelrcPatterns)) {
babelrcPatterns = [babelrcPatterns];
}
babelrcPatterns = babelrcPatterns.map(pat => {
return typeof pat === "string" ? _path().resolve(babelrcRootsDirectory, pat) : pat;
});
if (babelrcPatterns.length === 1 && babelrcPatterns[0] === absoluteRoot) {
return pkgData.directories.includes(absoluteRoot);
}
return babelrcPatterns.some(pat => {
if (typeof pat === "string") {
pat = (0, _patternToRegex.default)(pat, babelrcRootsDirectory);
}
return pkgData.directories.some(directory => {
return matchPattern(pat, babelrcRootsDirectory, directory, context);
});
});
}
const validateConfigFile = (0, _caching.makeWeakCacheSync)(file => ({
filepath: file.filepath,
dirname: file.dirname,
options: (0, _options.validate)("configfile", file.options, file.filepath)
}));
const validateBabelrcFile = (0, _caching.makeWeakCacheSync)(file => ({
filepath: file.filepath,
dirname: file.dirname,
options: (0, _options.validate)("babelrcfile", file.options, file.filepath)
}));
const validateExtendFile = (0, _caching.makeWeakCacheSync)(file => ({
filepath: file.filepath,
dirname: file.dirname,
options: (0, _options.validate)("extendsfile", file.options, file.filepath)
}));
const loadProgrammaticChain = makeChainWalker({
root: input => buildRootDescriptors(input, "base", _configDescriptors.createCachedDescriptors),
env: (input, envName) => buildEnvDescriptors(input, "base", _configDescriptors.createCachedDescriptors, envName),
overrides: (input, index) => buildOverrideDescriptors(input, "base", _configDescriptors.createCachedDescriptors, index),
overridesEnv: (input, index, envName) => buildOverrideEnvDescriptors(input, "base", _configDescriptors.createCachedDescriptors, index, envName),
createLogger: (input, context, baseLogger) => buildProgrammaticLogger(input, context, baseLogger)
});
const loadFileChainWalker = makeChainWalker({
root: file => loadFileDescriptors(file),
env: (file, envName) => loadFileEnvDescriptors(file)(envName),
overrides: (file, index) => loadFileOverridesDescriptors(file)(index),
overridesEnv: (file, index, envName) => loadFileOverridesEnvDescriptors(file)(index)(envName),
createLogger: (file, context, baseLogger) => buildFileLogger(file.filepath, context, baseLogger)
});
function* loadFileChain(input, context, files, baseLogger) {
const chain = yield* loadFileChainWalker(input, context, files, baseLogger);
chain == null || chain.files.add(input.filepath);
return chain;
}
const loadFileDescriptors = (0, _caching.makeWeakCacheSync)(file => buildRootDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors));
const loadFileEnvDescriptors = (0, _caching.makeWeakCacheSync)(file => (0, _caching.makeStrongCacheSync)(envName => buildEnvDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, envName)));
const loadFileOverridesDescriptors = (0, _caching.makeWeakCacheSync)(file => (0, _caching.makeStrongCacheSync)(index => buildOverrideDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, index)));
const loadFileOverridesEnvDescriptors = (0, _caching.makeWeakCacheSync)(file => (0, _caching.makeStrongCacheSync)(index => (0, _caching.makeStrongCacheSync)(envName => buildOverrideEnvDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, index, envName))));
function buildFileLogger(filepath, context, baseLogger) {
if (!baseLogger) {
return () => {};
}
return baseLogger.configure(context.showConfig, _printer.ChainFormatter.Config, {
filepath
});
}
function buildRootDescriptors({
dirname,
options
}, alias, descriptors) {
return descriptors(dirname, options, alias);
}
function buildProgrammaticLogger(_, context, baseLogger) {
var _context$caller;
if (!baseLogger) {
return () => {};
}
return baseLogger.configure(context.showConfig, _printer.ChainFormatter.Programmatic, {
callerName: (_context$caller = context.caller) == null ? void 0 : _context$caller.name
});
}
function buildEnvDescriptors({
dirname,
options
}, alias, descriptors, envName) {
var _options$env;
const opts = (_options$env = options.env) == null ? void 0 : _options$env[envName];
return opts ? descriptors(dirname, opts, `${alias}.env["${envName}"]`) : null;
}
function buildOverrideDescriptors({
dirname,
options
}, alias, descriptors, index) {
var _options$overrides;
const opts = (_options$overrides = options.overrides) == null ? void 0 : _options$overrides[index];
if (!opts) throw new Error("Assertion failure - missing override");
return descriptors(dirname, opts, `${alias}.overrides[${index}]`);
}
function buildOverrideEnvDescriptors({
dirname,
options
}, alias, descriptors, index, envName) {
var _options$overrides2, _override$env;
const override = (_options$overrides2 = options.overrides) == null ? void 0 : _options$overrides2[index];
if (!override) throw new Error("Assertion failure - missing override");
const opts = (_override$env = override.env) == null ? void 0 : _override$env[envName];
return opts ? descriptors(dirname, opts, `${alias}.overrides[${index}].env["${envName}"]`) : null;
}
function makeChainWalker({
root,
env,
overrides,
overridesEnv,
createLogger
}) {
return function* chainWalker(input, context, files = new Set(), baseLogger) {
const {
dirname
} = input;
const flattenedConfigs = [];
const rootOpts = root(input);
if (configIsApplicable(rootOpts, dirname, context, input.filepath)) {
flattenedConfigs.push({
config: rootOpts,
envName: undefined,
index: undefined
});
const envOpts = env(input, context.envName);
if (envOpts && configIsApplicable(envOpts, dirname, context, input.filepath)) {
flattenedConfigs.push({
config: envOpts,
envName: context.envName,
index: undefined
});
}
(rootOpts.options.overrides || []).forEach((_, index) => {
const overrideOps = overrides(input, index);
if (configIsApplicable(overrideOps, dirname, context, input.filepath)) {
flattenedConfigs.push({
config: overrideOps,
index,
envName: undefined
});
const overrideEnvOpts = overridesEnv(input, index, context.envName);
if (overrideEnvOpts && configIsApplicable(overrideEnvOpts, dirname, context, input.filepath)) {
flattenedConfigs.push({
config: overrideEnvOpts,
index,
envName: context.envName
});
}
}
});
}
if (flattenedConfigs.some(({
config: {
options: {
ignore,
only
}
}
}) => shouldIgnore(context, ignore, only, dirname))) {
return null;
}
const chain = emptyChain();
const logger = createLogger(input, context, baseLogger);
for (const {
config,
index,
envName
} of flattenedConfigs) {
if (!(yield* mergeExtendsChain(chain, config.options, dirname, context, files, baseLogger))) {
return null;
}
logger(config, index, envName);
yield* mergeChainOpts(chain, config);
}
return chain;
};
}
function* mergeExtendsChain(chain, opts, dirname, context, files, baseLogger) {
if (opts.extends === undefined) return true;
const file = yield* (0, _index.loadConfig)(opts.extends, dirname, context.envName, context.caller);
if (files.has(file)) {
throw new Error(`Configuration cycle detected loading ${file.filepath}.\n` + `File already loaded following the config chain:\n` + Array.from(files, file => ` - ${file.filepath}`).join("\n"));
}
files.add(file);
const fileChain = yield* loadFileChain(validateExtendFile(file), context, files, baseLogger);
files.delete(file);
if (!fileChain) return false;
mergeChain(chain, fileChain);
return true;
}
function mergeChain(target, source) {
target.options.push(...source.options);
target.plugins.push(...source.plugins);
target.presets.push(...source.presets);
for (const file of source.files) {
target.files.add(file);
}
return target;
}
function* mergeChainOpts(target, {
options,
plugins,
presets
}) {
target.options.push(options);
target.plugins.push(...(yield* plugins()));
target.presets.push(...(yield* presets()));
return target;
}
function emptyChain() {
return {
options: [],
presets: [],
plugins: [],
files: new Set()
};
}
function normalizeOptions(opts) {
const options = Object.assign({}, opts);
delete options.extends;
delete options.env;
delete options.overrides;
delete options.plugins;
delete options.presets;
delete options.passPerPreset;
delete options.ignore;
delete options.only;
delete options.test;
delete options.include;
delete options.exclude;
if (hasOwnProperty.call(options, "sourceMap")) {
options.sourceMaps = options.sourceMap;
delete options.sourceMap;
}
return options;
}
function dedupDescriptors(items) {
const map = new Map();
const descriptors = [];
for (const item of items) {
if (typeof item.value === "function") {
const fnKey = item.value;
let nameMap = map.get(fnKey);
if (!nameMap) {
nameMap = new Map();
map.set(fnKey, nameMap);
}
let desc = nameMap.get(item.name);
if (!desc) {
desc = {
value: item
};
descriptors.push(desc);
if (!item.ownPass) nameMap.set(item.name, desc);
} else {
desc.value = item;
}
} else {
descriptors.push({
value: item
});
}
}
return descriptors.reduce((acc, desc) => {
acc.push(desc.value);
return acc;
}, []);
}
function configIsApplicable({
options
}, dirname, context, configName) {
return (options.test === undefined || configFieldIsApplicable(context, options.test, dirname, configName)) && (options.include === undefined || configFieldIsApplicable(context, options.include, dirname, configName)) && (options.exclude === undefined || !configFieldIsApplicable(context, options.exclude, dirname, configName));
}
function configFieldIsApplicable(context, test, dirname, configName) {
const patterns = Array.isArray(test) ? test : [test];
return matchesPatterns(context, patterns, dirname, configName);
}
function ignoreListReplacer(_key, value) {
if (value instanceof RegExp) {
return String(value);
}
return value;
}
function shouldIgnore(context, ignore, only, dirname) {
if (ignore && matchesPatterns(context, ignore, dirname)) {
var _context$filename;
const message = `No config is applied to "${(_context$filename = context.filename) != null ? _context$filename : "(unknown)"}" because it matches one of \`ignore: ${JSON.stringify(ignore, ignoreListReplacer)}\` from "${dirname}"`;
debug(message);
if (context.showConfig) {
console.log(message);
}
return true;
}
if (only && !matchesPatterns(context, only, dirname)) {
var _context$filename2;
const message = `No config is applied to "${(_context$filename2 = context.filename) != null ? _context$filename2 : "(unknown)"}" because it fails to match one of \`only: ${JSON.stringify(only, ignoreListReplacer)}\` from "${dirname}"`;
debug(message);
if (context.showConfig) {
console.log(message);
}
return true;
}
return false;
}
function matchesPatterns(context, patterns, dirname, configName) {
return patterns.some(pattern => matchPattern(pattern, dirname, context.filename, context, configName));
}
function matchPattern(pattern, dirname, pathToTest, context, configName) {
if (typeof pattern === "function") {
return !!(0, _rewriteStackTrace.endHiddenCallStack)(pattern)(pathToTest, {
dirname,
envName: context.envName,
caller: context.caller
});
}
if (typeof pathToTest !== "string") {
throw new _configError.default(`Configuration contains string/RegExp pattern, but no filename was passed to Babel`, configName);
}
if (typeof pattern === "string") {
pattern = (0, _patternToRegex.default)(pattern, dirname);
}
return pattern.test(pathToTest);
}
0 && 0;
//# sourceMappingURL=config-chain.js.map

File diff suppressed because one or more lines are too long

@ -0,0 +1,190 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.createCachedDescriptors = createCachedDescriptors;
exports.createDescriptor = createDescriptor;
exports.createUncachedDescriptors = createUncachedDescriptors;
function _gensync() {
const data = require("gensync");
_gensync = function () {
return data;
};
return data;
}
var _functional = require("../gensync-utils/functional.js");
var _index = require("./files/index.js");
var _item = require("./item.js");
var _caching = require("./caching.js");
var _resolveTargets = require("./resolve-targets.js");
function isEqualDescriptor(a, b) {
var _a$file, _b$file, _a$file2, _b$file2;
return a.name === b.name && a.value === b.value && a.options === b.options && a.dirname === b.dirname && a.alias === b.alias && a.ownPass === b.ownPass && ((_a$file = a.file) == null ? void 0 : _a$file.request) === ((_b$file = b.file) == null ? void 0 : _b$file.request) && ((_a$file2 = a.file) == null ? void 0 : _a$file2.resolved) === ((_b$file2 = b.file) == null ? void 0 : _b$file2.resolved);
}
function* handlerOf(value) {
return value;
}
function optionsWithResolvedBrowserslistConfigFile(options, dirname) {
if (typeof options.browserslistConfigFile === "string") {
options.browserslistConfigFile = (0, _resolveTargets.resolveBrowserslistConfigFile)(options.browserslistConfigFile, dirname);
}
return options;
}
function createCachedDescriptors(dirname, options, alias) {
const {
plugins,
presets,
passPerPreset
} = options;
return {
options: optionsWithResolvedBrowserslistConfigFile(options, dirname),
plugins: plugins ? () => createCachedPluginDescriptors(plugins, dirname)(alias) : () => handlerOf([]),
presets: presets ? () => createCachedPresetDescriptors(presets, dirname)(alias)(!!passPerPreset) : () => handlerOf([])
};
}
function createUncachedDescriptors(dirname, options, alias) {
return {
options: optionsWithResolvedBrowserslistConfigFile(options, dirname),
plugins: (0, _functional.once)(() => createPluginDescriptors(options.plugins || [], dirname, alias)),
presets: (0, _functional.once)(() => createPresetDescriptors(options.presets || [], dirname, alias, !!options.passPerPreset))
};
}
const PRESET_DESCRIPTOR_CACHE = new WeakMap();
const createCachedPresetDescriptors = (0, _caching.makeWeakCacheSync)((items, cache) => {
const dirname = cache.using(dir => dir);
return (0, _caching.makeStrongCacheSync)(alias => (0, _caching.makeStrongCache)(function* (passPerPreset) {
const descriptors = yield* createPresetDescriptors(items, dirname, alias, passPerPreset);
return descriptors.map(desc => loadCachedDescriptor(PRESET_DESCRIPTOR_CACHE, desc));
}));
});
const PLUGIN_DESCRIPTOR_CACHE = new WeakMap();
const createCachedPluginDescriptors = (0, _caching.makeWeakCacheSync)((items, cache) => {
const dirname = cache.using(dir => dir);
return (0, _caching.makeStrongCache)(function* (alias) {
const descriptors = yield* createPluginDescriptors(items, dirname, alias);
return descriptors.map(desc => loadCachedDescriptor(PLUGIN_DESCRIPTOR_CACHE, desc));
});
});
const DEFAULT_OPTIONS = {};
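// Return a previously cached descriptor equal to `desc` (same value, options, name, file, ...),
// if one exists in the nested cache keyed by plugin/preset value and then by options object.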
function loadCachedDescriptor(cache, desc) {
const {
value,
options = DEFAULT_OPTIONS
} = desc;
if (options === false) return desc;
let cacheByOptions = cache.get(value);
if (!cacheByOptions) {
cacheByOptions = new WeakMap();
cache.set(value, cacheByOptions);
}
let possibilities = cacheByOptions.get(options);
if (!possibilities) {
possibilities = [];
cacheByOptions.set(options, possibilities);
}
if (!possibilities.includes(desc)) {
const matches = possibilities.filter(possibility => isEqualDescriptor(possibility, desc));
if (matches.length > 0) {
return matches[0];
}
possibilities.push(desc);
}
return desc;
}
function* createPresetDescriptors(items, dirname, alias, passPerPreset) {
return yield* createDescriptors("preset", items, dirname, alias, passPerPreset);
}
function* createPluginDescriptors(items, dirname, alias) {
return yield* createDescriptors("plugin", items, dirname, alias);
}
function* createDescriptors(type, items, dirname, alias, ownPass) {
const descriptors = yield* _gensync().all(items.map((item, index) => createDescriptor(item, dirname, {
type,
alias: `${alias}$${index}`,
ownPass: !!ownPass
})));
assertNoDuplicates(descriptors);
return descriptors;
}
function* createDescriptor(pair, dirname, {
type,
alias,
ownPass
}) {
const desc = (0, _item.getItemDescriptor)(pair);
if (desc) {
return desc;
}
let name;
let options;
let value = pair;
if (Array.isArray(value)) {
if (value.length === 3) {
[value, options, name] = value;
} else {
[value, options] = value;
}
}
let file = undefined;
let filepath = null;
if (typeof value === "string") {
if (typeof type !== "string") {
throw new Error("To resolve a string-based item, the type of item must be given");
}
const resolver = type === "plugin" ? _index.loadPlugin : _index.loadPreset;
const request = value;
({
filepath,
value
} = yield* resolver(value, dirname));
file = {
request,
resolved: filepath
};
}
if (!value) {
throw new Error(`Unexpected falsy value: ${String(value)}`);
}
if (typeof value === "object" && value.__esModule) {
if (value.default) {
value = value.default;
} else {
throw new Error("Must export a default export when using ES6 modules.");
}
}
if (typeof value !== "object" && typeof value !== "function") {
throw new Error(`Unsupported format: ${typeof value}. Expected an object or a function.`);
}
if (filepath !== null && typeof value === "object" && value) {
throw new Error(`Plugin/Preset files are not allowed to export objects, only functions. In ${filepath}`);
}
return {
name,
alias: filepath || alias,
value,
options,
dirname,
ownPass,
file
};
}
function assertNoDuplicates(items) {
const map = new Map();
for (const item of items) {
if (typeof item.value !== "function") continue;
let nameMap = map.get(item.value);
if (!nameMap) {
nameMap = new Set();
map.set(item.value, nameMap);
}
if (nameMap.has(item.name)) {
const conflicts = items.filter(i => i.value === item.value);
throw new Error([`Duplicate plugin/preset detected.`, `If you'd like to use two separate instances of a plugin,`, `they need separate names, e.g.`, ``, ` plugins: [`, ` ['some-plugin', {}],`, ` ['some-plugin', {}, 'some unique name'],`, ` ]`, ``, `Duplicates detected are:`, `${JSON.stringify(conflicts, null, 2)}`].join("\n"));
}
nameMap.add(item.name);
}
}
0 && 0;
//# sourceMappingURL=config-descriptors.js.map

File diff suppressed because one or more lines are too long

@ -0,0 +1,290 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ROOT_CONFIG_FILENAMES = void 0;
exports.findConfigUpwards = findConfigUpwards;
exports.findRelativeConfig = findRelativeConfig;
exports.findRootConfig = findRootConfig;
exports.loadConfig = loadConfig;
exports.resolveShowConfigPath = resolveShowConfigPath;
function _debug() {
const data = require("debug");
_debug = function () {
return data;
};
return data;
}
function _fs() {
const data = require("fs");
_fs = function () {
return data;
};
return data;
}
function _path() {
const data = require("path");
_path = function () {
return data;
};
return data;
}
function _json() {
const data = require("json5");
_json = function () {
return data;
};
return data;
}
function _gensync() {
const data = require("gensync");
_gensync = function () {
return data;
};
return data;
}
var _caching = require("../caching.js");
var _configApi = require("../helpers/config-api.js");
var _utils = require("./utils.js");
var _moduleTypes = require("./module-types.js");
var _patternToRegex = require("../pattern-to-regex.js");
var _configError = require("../../errors/config-error.js");
var fs = require("../../gensync-utils/fs.js");
require("module");
var _rewriteStackTrace = require("../../errors/rewrite-stack-trace.js");
var _async = require("../../gensync-utils/async.js");
const debug = _debug()("babel:config:loading:files:configuration");
const ROOT_CONFIG_FILENAMES = exports.ROOT_CONFIG_FILENAMES = ["babel.config.js", "babel.config.cjs", "babel.config.mjs", "babel.config.json", "babel.config.cts"];
const RELATIVE_CONFIG_FILENAMES = [".babelrc", ".babelrc.js", ".babelrc.cjs", ".babelrc.mjs", ".babelrc.json", ".babelrc.cts"];
const BABELIGNORE_FILENAME = ".babelignore";
const runConfig = (0, _caching.makeWeakCache)(function* runConfig(options, cache) {
yield* [];
return {
options: (0, _rewriteStackTrace.endHiddenCallStack)(options)((0, _configApi.makeConfigAPI)(cache)),
cacheNeedsConfiguration: !cache.configured()
};
});
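// Evaluate a JS/TS config file. If it exports a function, call it with the config API;
// the result must be a plain object, and the cache must have been explicitly configured.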
function* readConfigCode(filepath, data) {
if (!_fs().existsSync(filepath)) return null;
let options = yield* (0, _moduleTypes.default)(filepath, (yield* (0, _async.isAsync)()) ? "auto" : "require", "You appear to be using a native ECMAScript module configuration " + "file, which is only supported when running Babel asynchronously " + "or when using the Node.js `--experimental-require-module` flag.", "You appear to be using a configuration file that contains top-level " + "await, which is only supported when running Babel asynchronously.");
let cacheNeedsConfiguration = false;
if (typeof options === "function") {
({
options,
cacheNeedsConfiguration
} = yield* runConfig(options, data));
}
if (!options || typeof options !== "object" || Array.isArray(options)) {
throw new _configError.default(`Configuration should be an exported JavaScript object.`, filepath);
}
if (typeof options.then === "function") {
options.catch == null || options.catch(() => {});
throw new _configError.default(`You appear to be using an async configuration, ` + `which your current version of Babel does not support. ` + `We may add support for this in the future, ` + `but if you're on the most recent version of @babel/core and still ` + `seeing this error, then you'll need to synchronously return your config.`, filepath);
}
if (cacheNeedsConfiguration) throwConfigError(filepath);
return buildConfigFileObject(options, filepath);
}
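// Cache of built config-file objects: keyed by the exported options object, then by file path,
// so the same file/options pair always yields the same object identity.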
const cfboaf = new WeakMap();
function buildConfigFileObject(options, filepath) {
let configFilesByFilepath = cfboaf.get(options);
if (!configFilesByFilepath) {
cfboaf.set(options, configFilesByFilepath = new Map());
}
let configFile = configFilesByFilepath.get(filepath);
if (!configFile) {
configFile = {
filepath,
dirname: _path().dirname(filepath),
options
};
configFilesByFilepath.set(filepath, configFile);
}
return configFile;
}
const packageToBabelConfig = (0, _caching.makeWeakCacheSync)(file => {
const babel = file.options["babel"];
if (babel === undefined) return null;
if (typeof babel !== "object" || Array.isArray(babel) || babel === null) {
throw new _configError.default(`.babel property must be an object`, file.filepath);
}
return {
filepath: file.filepath,
dirname: file.dirname,
options: babel
};
});
const readConfigJSON5 = (0, _utils.makeStaticFileCache)((filepath, content) => {
let options;
try {
options = _json().parse(content);
} catch (err) {
throw new _configError.default(`Error while parsing config - ${err.message}`, filepath);
}
if (!options) throw new _configError.default(`No config detected`, filepath);
if (typeof options !== "object") {
throw new _configError.default(`Config returned typeof ${typeof options}`, filepath);
}
if (Array.isArray(options)) {
throw new _configError.default(`Expected config object but found array`, filepath);
}
delete options["$schema"];
return {
filepath,
dirname: _path().dirname(filepath),
options
};
});
const readIgnoreConfig = (0, _utils.makeStaticFileCache)((filepath, content) => {
const ignoreDir = _path().dirname(filepath);
const ignorePatterns = content.split("\n").map(line => line.replace(/#.*$/, "").trim()).filter(Boolean);
for (const pattern of ignorePatterns) {
if (pattern[0] === "!") {
throw new _configError.default(`Negation of file paths is not supported.`, filepath);
}
}
return {
filepath,
dirname: _path().dirname(filepath),
ignore: ignorePatterns.map(pattern => (0, _patternToRegex.default)(pattern, ignoreDir))
};
});
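// Walk upwards from rootDir and return the first directory containing one of the root config
// filenames, or null if the filesystem root is reached without finding one.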
function findConfigUpwards(rootDir) {
let dirname = rootDir;
for (;;) {
for (const filename of ROOT_CONFIG_FILENAMES) {
if (_fs().existsSync(_path().join(dirname, filename))) {
return dirname;
}
}
const nextDir = _path().dirname(dirname);
if (dirname === nextDir) break;
dirname = nextDir;
}
return null;
}
function* findRelativeConfig(packageData, envName, caller) {
let config = null;
let ignore = null;
const dirname = _path().dirname(packageData.filepath);
for (const loc of packageData.directories) {
if (!config) {
var _packageData$pkg;
config = yield* loadOneConfig(RELATIVE_CONFIG_FILENAMES, loc, envName, caller, ((_packageData$pkg = packageData.pkg) == null ? void 0 : _packageData$pkg.dirname) === loc ? packageToBabelConfig(packageData.pkg) : null);
}
if (!ignore) {
const ignoreLoc = _path().join(loc, BABELIGNORE_FILENAME);
ignore = yield* readIgnoreConfig(ignoreLoc);
if (ignore) {
debug("Found ignore %o from %o.", ignore.filepath, dirname);
}
}
}
return {
config,
ignore
};
}
function findRootConfig(dirname, envName, caller) {
return loadOneConfig(ROOT_CONFIG_FILENAMES, dirname, envName, caller);
}
function* loadOneConfig(names, dirname, envName, caller, previousConfig = null) {
const configs = yield* _gensync().all(names.map(filename => readConfig(_path().join(dirname, filename), envName, caller)));
const config = configs.reduce((previousConfig, config) => {
if (config && previousConfig) {
throw new _configError.default(`Multiple configuration files found. Please remove one:\n` + ` - ${_path().basename(previousConfig.filepath)}\n` + ` - ${config.filepath}\n` + `from ${dirname}`);
}
return config || previousConfig;
}, previousConfig);
if (config) {
debug("Found configuration %o from %o.", config.filepath, dirname);
}
return config;
}
function* loadConfig(name, dirname, envName, caller) {
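// Resolve the plugin/preset name relative to `dirname`: on Node.js >= 8.9 use
// require.resolve with the `paths` option, otherwise fall back to Module._findPath.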
const filepath = (((v, w) => (v = v.split("."), w = w.split("."), +v[0] > +w[0] || v[0] == w[0] && +v[1] >= +w[1]))(process.versions.node, "8.9") ? require.resolve : (r, {
paths: [b]
}, M = require("module")) => {
let f = M._findPath(r, M._nodeModulePaths(b).concat(b));
if (f) return f;
f = new Error(`Cannot resolve module '${r}'`);
f.code = "MODULE_NOT_FOUND";
throw f;
})(name, {
paths: [dirname]
});
const conf = yield* readConfig(filepath, envName, caller);
if (!conf) {
throw new _configError.default(`Config file contains no configuration data`, filepath);
}
debug("Loaded config %o from %o.", name, dirname);
return conf;
}
function readConfig(filepath, envName, caller) {
const ext = _path().extname(filepath);
switch (ext) {
case ".js":
case ".cjs":
case ".mjs":
case ".ts":
case ".cts":
case ".mts":
return readConfigCode(filepath, {
envName,
caller
});
default:
return readConfigJSON5(filepath);
}
}
function* resolveShowConfigPath(dirname) {
const targetPath = process.env.BABEL_SHOW_CONFIG_FOR;
if (targetPath != null) {
const absolutePath = _path().resolve(dirname, targetPath);
const stats = yield* fs.stat(absolutePath);
if (!stats.isFile()) {
throw new Error(`${absolutePath}: BABEL_SHOW_CONFIG_FOR must refer to a regular file, directories are not supported.`);
}
return absolutePath;
}
return null;
}
function throwConfigError(filepath) {
throw new _configError.default(`\
Caching was left unconfigured. Babel's plugins, presets, and .babelrc.js files can be configured
for various types of caching, using the first param of their handler functions:
module.exports = function(api) {
// The API exposes the following:
// Cache the returned value forever and don't call this function again.
api.cache(true);
// Don't cache at all. Not recommended because it will be very slow.
api.cache(false);
// Cache based on the value of some function. If this function returns a value different from
// a previously-encountered value, the plugins will re-evaluate.
var env = api.cache(() => process.env.NODE_ENV);
// If testing for a specific env, we recommend specifics to avoid instantiating a plugin for
// any possible NODE_ENV value that might come up during plugin execution.
var isProd = api.cache(() => process.env.NODE_ENV === "production");
// .cache(fn) will perform a linear search through instances to find the matching plugin
// based on previously instantiated plugins. If you want to recreate the plugin and discard the
// previous instance whenever something changes, you may use:
var isProd = api.cache.invalidate(() => process.env.NODE_ENV === "production");
// Note, we also expose the following more-verbose versions of the above examples:
api.cache.forever(); // api.cache(true)
api.cache.never(); // api.cache(false)
api.cache.using(fn); // api.cache(fn)
// Return the value that will be cached.
return { };
};`, filepath);
}
0 && 0;
//# sourceMappingURL=configuration.js.map

Some files were not shown because too many files have changed in this diff.
